def write_local_mirror_index(self, setup, mirror_index_path):
    """Write the in-memory Windshare index/xml data to the local mirror-index git repo.

    Serializes ``self.indexes`` (REST-API-sorted JSON) and ``self.xmls``
    (line lists) into *mirror_index_path*, removes files that are no longer
    present in either dictionary, and commits the result on
    ``setup.base_branch``.

    :param setup:             the Setup object (supplies tools, env, base_branch,
                              setup_args).
    :param mirror_index_path: path to the local mirror-index checkout.
    """
    import subprocess
    import utils_setup
    # We need access to the sortRestApi function...
    from layer_index import Layer_Index
    li = Layer_Index()

    # We want to move to a generic named branch, now that we've done the fixups.
    # '--orphan' fails if the branch already exists; in that case fall back to
    # a plain checkout of the existing branch.
    try:
        cmd = [setup.tools['git'], 'checkout', '--orphan', setup.base_branch]
        utils_setup.run_cmd(cmd, environment=setup.env, cwd=mirror_index_path,
                            stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    except Exception:
        cmd = [setup.tools['git'], 'checkout', setup.base_branch]
        utils_setup.run_cmd(cmd, log=2, environment=setup.env,
                            cwd=mirror_index_path)

    # Make the working tree match the branch we just switched to.
    cmd = [setup.tools['git'], 'reset', '--hard']
    utils_setup.run_cmd(cmd, log=2, environment=setup.env, cwd=mirror_index_path)

    # Remove obsolete entries only
    for (dirpath, _, filenames) in os.walk(mirror_index_path):
        # Never touch git metadata.
        if dirpath.endswith('/.git') or '/.git/' in dirpath:
            continue
        for filename in filenames:
            if filename not in self.indexes and filename not in self.xmls:
                logger.debug('ws mirror-index remove obsolete %s'
                             % os.path.join(dirpath, filename))
                os.remove(os.path.join(dirpath, filename))

    for entry in self.indexes:
        logger.debug('Writing windshare index %s...' % entry)
        fpath = os.path.join(mirror_index_path, entry)
        # Use a context manager so the file is flushed and closed before the
        # subsequent 'git add' runs (previously the handle was leaked).
        with open(fpath, 'wt') as fout:
            json.dump(li.sortRestApi(self.indexes[entry]), fout, indent=4)

    # Only create the xml subdirectory when there is xml data to write.
    if self.xmls:
        os.makedirs(os.path.join(mirror_index_path, 'xml'), exist_ok=True)
    for entry in self.xmls:
        logger.debug('Writing windshare xml %s...' % entry)
        fpath = os.path.join(mirror_index_path, 'xml', entry)
        with open(fpath, 'wt') as fout:
            for _line in self.xmls[entry]:
                fout.write(_line + '\n')

    cmd = [setup.tools['git'], 'add', '-A', '.']
    utils_setup.run_cmd(cmd, log=2, environment=setup.env, cwd=mirror_index_path)

    # 'diff-index --quiet' exits non-zero when there are changes to commit,
    # which run_cmd surfaces as an exception.
    try:
        cmd = [setup.tools['git'], 'diff-index', '--quiet', 'HEAD', '--']
        utils_setup.run_cmd(cmd, environment=setup.env, cwd=mirror_index_path,
                            stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    except Exception:
        # We expect to fail to this code
        logger.debug('Updating windshare mirror-index')
        cmd = [setup.tools['git'], 'commit', '-m',
               'Updated index - %s' % (setup.setup_args)]
        utils_setup.run_cmd(cmd, log=2, environment=setup.env,
                            cwd=mirror_index_path)
def load_layer_index(self):
    """Load the Layer_Index, first detecting and applying any Windshare setup.

    For non-master branches, probes ``self.base_url`` for a Windshare
    configuration; when found, downloads each entitled folder's mirror
    index, rewrites it locally, and redirects ``self.base_url`` (and the
    buildtools remote) to the Windshare base.  Falls back to a plain
    mirror-index lookup otherwise.  Sets ``self.index`` on completion.
    """
    # Load Layer_Index
    mirror_index_path = None

    # Windshare processing is skipped entirely on the master branches.
    if not (self.base_branch == "master" or self.base_branch == "master-wr"):
        from windshare import Windshare
        ws = Windshare(debug=self.debug_lvl)

        # Determine if this is a windshare install
        (ws_base_url, ws_base_folder, ws_entitlement_url) = \
            ws.get_windshare_urls(self.base_url)
        if ws_base_url and ws_base_url != "" and ws.load_folders(ws_entitlement_url):
            logger.plain('Detected Windshare configuration.  Processing entitlements and indexes.')
            # NOTE(review): mirror_index_path ends up pointing at the index of
            # the LAST folder processed — presumably all folders share one
            # local mirror-index path; confirm against Windshare.load_mirror_index.
            for folder in ws.folders:
                mirror_index_path = ws.load_mirror_index(self, ws_base_url, folder)
                ws.write_local_mirror_index(self, mirror_index_path)

            # We need to adjust the base_url so everything works properly...
            self.base_url = ws_base_url

            # Adjust the location of the buildtools (was based on the original base_url)
            if self.buildtools_remote:
                self.buildtools_remote = ws_base_folder + '/' + self.buildtools_remote
        else:
            logger.debug('No Windshare configuration detected.')
    else:
        logger.debug('Windshare configuration disabled, building %s.' % self.base_branch)

    # Check if we have a mirror-index, and load it if we do...
    if not mirror_index_path:
        mirror_index_path = self.load_mirror_index(self.base_url + '/mirror-index')

    # Mirror also has a copy of the associated XML bits
    if mirror_index_path:
        self.xml_dir = os.path.join(mirror_index_path, 'xml')

    # Setup replace strings as late as possible.  The various self.* values
    # may be modified prior to this place.
    replace = []
    replace = replace + settings.REPLACE
    replace = replace + [
        ('#INSTALL_DIR#', self.install_dir),
        ('#BASE_URL#', self.base_url),
        ('#BASE_BRANCH#', self.base_branch),
    ]

    self.index = Layer_Index(indexcfg=settings.INDEXES,
                             base_branch=self.base_branch,
                             replace=replace,
                             mirror=mirror_index_path)
def load_layer_index(self):
    """Load the Layer_Index, preferring a local mirror-index when one exists.

    Probes ``<base_url>/mirror-index`` via ``self.load_mirror_index``; when a
    mirror is found its ``xml`` subdirectory replaces ``self.xml_dir``.  The
    replacement strings are assembled here, as late as possible, so that any
    earlier mutation of ``self.install_dir`` / ``self.base_url`` /
    ``self.base_branch`` is picked up.  The result is stored in ``self.index``.
    """
    # Is there a mirror of the index next to the base URL?
    mirror_index_path = self.load_mirror_index(self.base_url + '/mirror-index')

    # A mirror also carries the associated XML files.
    if mirror_index_path:
        self.xml_dir = os.path.join(mirror_index_path, 'xml')

    # Global replacements first, then the instance-specific ones.
    substitutions = list(settings.REPLACE)
    substitutions.append(('#INSTALL_DIR#', self.install_dir))
    substitutions.append(('#BASE_URL#', self.base_url))
    substitutions.append(('#BASE_BRANCH#', self.base_branch))

    self.index = Layer_Index(indexcfg=settings.INDEXES,
                             base_branch=self.base_branch,
                             replace=substitutions,
                             mirror=mirror_index_path)
'CACHE': None, 'BRANCH': 'WRLINUX_9_BASE', }, ] OUTPUT = '/tmp/transform' OUTPUT_FMT = 'django' #OUTPUT_FMT = 'restapi' SPLIT = False import os import sys from layer_index import Layer_Index index = Layer_Index(INDEXES, base_branch=None, replace=REPLACE) for lindex in index.index: print('Dump %s as %s (split=%s)...' % (lindex['CFG']['DESCRIPTION'], OUTPUT_FMT, SPLIT)) os.makedirs(OUTPUT, exist_ok=True) if OUTPUT_FMT == 'django': index.serialize_django_export(lindex, OUTPUT + '/' + lindex['CFG']['DESCRIPTION'], split=SPLIT) elif OUTPUT_FMT == 'restapi': index.serialize_index(lindex, OUTPUT + '/' + lindex['CFG']['DESCRIPTION'], split=SPLIT) else:
def main():
    """Create a (optionally subsetted) copy of this mirror at *dest*.

    Reads the module-level configuration (``subset_file``, ``dest``,
    ``setup_dir`` — defined elsewhere in this file), duplicates the setup and
    git-repo repositories, transforms and re-exports the mirror-index, copies
    every layer repository named in the index, then processes any remaining
    entries from ``default.xml``.  Returns 0 on success, 1 on a fatal error.
    """
    global subset_folders
    global branch

    # Optional subset description: two whitespace-separated columns per line,
    # '<layer-name> <destination-folder>' ('#' starts a comment line).
    if subset_file:
        subset_folders = {}
        with open(subset_file, 'rt') as f:
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if not lsplit:
                    continue
                if len(lsplit) != 2:
                    logger.critical("Subset Folders, invalid line: %s" % (line))
                    return 1
                subset_folders[lsplit[0]] = lsplit[1]

    if os.path.exists(dest):
        logger.critical('Destination directory %s already exists.  Please choose a different destination.' % (dest))
        return 1

    # We have to run this against a mirror, check for a mirror-index
    mirror_path = 'mirror-index'
    if not os.path.exists(mirror_path):
        logger.critical('No %s found.  Is this a mirror?' % mirror_path)
        return 1

    dst_base_mirror = os.path.join(dest, mirror_path)

    # Find the base branch of the mirror-index to set a default
    cmd = ['git', 'rev-parse', '--abbrev-ref', 'HEAD']
    _ret = subprocess.Popen(cmd, cwd=mirror_path, close_fds=True,
                            stdout=subprocess.PIPE)
    branch = ""
    output = ""
    # Drain stdout until the process has exited and the pipe is empty.
    while True:
        output = _ret.stdout.readline()
        if not output and _ret.poll() is not None:
            break
        branch += output.decode('utf-8')
    _ret.wait()
    branch = branch.strip()
    if not branch or branch == "":
        logger.critical('Unable to determine base branch.')
        return 1

    # Create the destination
    os.makedirs(dest, exist_ok=False)

    #### First process items NOT included in the index or default.xml

    # Duplicate the setup dir to a bare repo
    src = os.path.join(setup_dir, '.git')
    dst = os.path.join(dest, os.path.basename(setup_dir))
    push_or_copy(os.path.basename(setup_dir), src, dst, branch)

    # Duplicate the git-repo.git
    src = 'git-repo'
    dst = os.path.join(dest, os.path.basename(src))
    push_or_copy('git-repo', src, dst)

    #### Now load the index and create a list of things we need to parse
    # this is the list of things that MAY be in the default.xml file, we need
    # to have a list to later process that file and exclude things we've already
    # done.
    processed_list = []

    ### Create the target mirror-index...
    # Transform and export the mirror index
    logger.plain('Transforming index...')
    index = Layer_Index(indexcfg=settings.INDEXES, base_branch=branch,
                        replace=settings.REPLACE, mirror=mirror_path)

    branchid = -1
    base_branch = branch
    bitbake_branch = branch
    for lindex in index.index:
        if 'CFG' in lindex:
            base_branch = lindex['CFG']['BRANCH']
        # Reset per index; overridden below when the branch entry carries its
        # own bitbake_branch.
        bitbake_branch = branch
        for b in lindex['branches']:
            if 'name' in b and b['name'] == base_branch:
                branchid = b['id']
                if 'bitbake_branch' in b and b['bitbake_branch'] != "":
                    bitbake_branch = b['bitbake_branch']
                break

        logger.info('Discovered base_branch: %s (%s)' % (base_branch, branchid))
        logger.info('Discovered bitbake_branch: %s' % bitbake_branch)

        for layer in lindex['layerItems']:
            logger.info('Processing layer %s...' % layer['name'])

            # Identify, manipulate and copy the layer...
            if 'vcs_url' in layer:
                full_url = layer['vcs_url'].replace('#BASE_URL#/', '')
                # NOTE(review): despite the name, this is the last URL path
                # component (the repo basename); it is also what gets passed
                # to serialize_index(base_url=...) below — confirm intent.
                base_url = layer['vcs_url'].split('/')[-1]
                layer['vcs_url'] = '#BASE_URL#' + '/' + base_url
                # Scrub web/mailing-list URLs: they don't apply to a mirror.
                layer['vcs_web_url'] = ''
                layer['vcs_web_tree_base_url'] = ''
                layer['vcs_web_file_base_url'] = ''
                layer['mailing_list_url'] = ''

                # Find actual_branch if one is there
                revision = base_branch
                for lb in lindex['layerBranches']:
                    if lb['branch'] == branchid and lb['layer'] == layer['id']:
                        if lb['actual_branch'] != "":
                            revision = lb['actual_branch']

                src = full_url
                dst = os.path.join(dest, os.path.basename(src))
                if src not in processed_list:
                    push_or_copy(layer['name'], src, dst, revision)
                    processed_list.append(src)

                xml_dir = get_xml_dir(layer['name'], dst_base_mirror)

                def xml_dest_dir(_xml_dir, _name):
                    # Helper: destination path for an xml/inc file, or None
                    # when the layer has no xml dir (subset skipped it).
                    if not _xml_dir:
                        return None
                    return os.path.join(_xml_dir, _name)

                # Copy the layer's .inc fragment and any repos it references.
                src = os.path.join(mirror_path, 'xml', '%s.inc' % layer['name'])
                if os.path.exists(src):
                    xml_dst = xml_dest_dir(xml_dir, '%s.inc' % layer['name'])
                    for name in transform_xml(src, xml_dst):
                        dst = os.path.join(dest, os.path.basename(name))
                        if name not in processed_list:
                            push_or_copy(layer['name'], name, dst)
                            processed_list.append(name)

                # Same for the layer's standalone .xml manifest.
                src = os.path.join(mirror_path, 'xml', '%s.xml' % layer['name'])
                if os.path.exists(src):
                    xml_dst = xml_dest_dir(xml_dir, '%s.xml' % layer['name'])
                    for name in transform_xml(src, xml_dst):
                        dst = os.path.join(dest, os.path.basename(name))
                        if name not in processed_list:
                            push_or_copy(layer['name'], name, dst)
                            processed_list.append(name)

                # Bitbake is always processed with openembedded-core
                if layer['name'] == 'openembedded-core':
                    src = os.path.join(os.path.dirname(full_url), 'bitbake')
                    dst = os.path.join(dest, os.path.basename(src))
                    if src not in processed_list:
                        push_or_copy(layer['name'], src, dst, bitbake_branch)
                        processed_list.append(src)

                    src = os.path.join(mirror_path, 'xml', 'bitbake.inc')
                    if os.path.exists(src):
                        xml_dst = xml_dest_dir(xml_dir, 'bitbake.inc')
                        for name in transform_xml(src, xml_dst):
                            dst = os.path.join(dest, os.path.basename(name))
                            if name not in processed_list:
                                push_or_copy(layer['name'], name, dst)
                                processed_list.append(name)

                    src = os.path.join(mirror_path, 'xml', 'bitbake.xml')
                    if os.path.exists(src):
                        xml_dst = xml_dest_dir(xml_dir, 'bitbake.xml')
                        for name in transform_xml(src, xml_dst):
                            dst = os.path.join(dest, os.path.basename(name))
                            if name not in processed_list:
                                push_or_copy(layer['name'], name, dst)
                                processed_list.append(name)

        # dst_base_mirror may not exist if we're subsetting...
        os.makedirs(dst_base_mirror, exist_ok=True)
        index.serialize_index(lindex,
                              os.path.join(dst_base_mirror, lindex['CFG']['DESCRIPTION']),
                              split=True, IncludeCFG=True, mirror=True,
                              base_url=base_url)

        # Since serialize can't subset, we do it manually...
        # if the rules change in layer_index.py, adjust them here..
        if subset_folders:
            base_branch = branch
            if 'CFG' in lindex:
                base_branch = lindex['CFG']['BRANCH']
            for layer in lindex['layerItems']:
                # NOTE(review): local 'json' shadows the imported json module
                # for the rest of this scope.
                json = "%s__%s__%s.json" % (lindex['CFG']['DESCRIPTION'],
                                            base_branch, layer['name'])
                json = json.translate(str.maketrans('/ ', '__'))
                src = os.path.join(dst_base_mirror, json)
                mirror_dir = get_mirror_dir(layer['name'], dst_base_mirror)
                if not mirror_dir:
                    # Skipped item, remove it and continue
                    logger.plain('rm %s' % src)
                    os.remove(src)
                    continue
                dst = os.path.join(mirror_dir, json)
                os.makedirs(os.path.dirname(dst), exist_ok=True)
                logger.plain('mv %s -> %s' % (src, dst))
                os.rename(src, dst)

            # Directory is expected to be empty, remove it.
            os.rmdir(dst_base_mirror)

    #### Now process anythign else we've not yet processed
    logger.info('Processing left-overs...')

    # Now process the default.xml, and process anything not previous processed...
    tree = ET.parse('default.xml')
    root = tree.getroot()

    default_revision = None
    base_url = None
    for child in root:
        if child.tag == 'remote':
            if 'fetch' in child.attrib:
                base_url = child.attrib['fetch']
        if child.tag == 'default':
            if 'revision' in child.attrib:
                default_revision = child.attrib['revision']
        if child.tag != 'project':
            continue
        src = child.attrib['name']
        if src in processed_list or src + '.git' in processed_list:
            continue
        dst = os.path.join(dest, os.path.basename(src))
        # Bare repositories are copied without pinning a revision.
        revision = None
        if not ('bare' in child.attrib and child.attrib['bare'] == 'True'):
            revision = default_revision
            if 'revision' in child.attrib:
                revision = child.attrib['revision']
        push_or_copy(os.path.basename(src), src, dst, revision)

    #### Update the mirror-index repositories (git add/git commit)
    logger.plain('Updating mirror-index repositories...')

    # git add file.
    if subset_folders:
        # Commit each distinct subset mirror directory exactly once.
        index_list = []
        for layer in subset_folders:
            if subset_folders[layer] == "[SKIP]":
                continue
            dst_mirror = get_mirror_dir(layer, dst_base_mirror)
            if dst_mirror not in index_list:
                index_list.append(dst_mirror)
        for dst_mirror in index_list:
            update_mirror(dst_mirror)
    else:
        update_mirror(dst_base_mirror)

    logger.plain('Done')

    return 0
'CACHE' : None, #'BRANCH' : 'WRLINUX_9_BASE', }, ] def usage(): print("usage: %s <branch>" % sys.argv[0]) sys.exit(1) if len(sys.argv) < 2: usage() base_branch=sys.argv[1] index = Layer_Index(INDEXES, base_branch=base_branch, replace=REPLACE) for lindex in index.index: dep_out = [] branchid = index.getBranchId(lindex, index.getIndexBranch(default=base_branch, lindex=lindex)) if branchid: for lb in lindex['layerBranches']: if lb['branch'] == branchid: for layer in index.find_layer(lindex, layerBranch=lb): name = layer['name'] (required, recommended) = index.getDependencies(lindex, lb) reqnames = [] recnames = [] for req in required: for layer in index.find_layer(lindex, layerBranch=req): reqnames.append(layer['name'])
# Flat script: print, for each layer item in each configured index, the
# layer name, its vcs_url, the branch to fetch, and the last known revision.
# (Relies on 'os' being imported earlier in the file.)
import sys
import settings
from layer_index import Layer_Index

# Must be run from a mirror: accept either a 'mirror-index' working tree or
# a bare 'mirror-index.git'.
mirror_path = 'mirror-index'
if not os.path.exists(mirror_path):
    if not os.path.exists(mirror_path + '.git'):
        print('No %s found. Is this a mirror?' % mirror_path)
        sys.exit(1)
    else:
        mirror_path = mirror_path + '.git'

index = Layer_Index(indexcfg=settings.INDEXES, base_branch=None,
                    replace=settings.REPLACE, mirror=mirror_path)

for lindex in index.index:
    for branch in lindex['branches']:
        basebranch = branch['name']
        for litem in lindex['layerItems']:
            # First matching layerBranch wins (note the break below).
            for lbranch in lindex['layerBranches']:
                if lbranch['layer'] == litem['id']:
                    # NOTE(review): this rebinds the outer loop variable
                    # 'branch' to a string; harmless since the outer loop
                    # re-binds it each iteration, but fragile.
                    branch = basebranch
                    if lbranch['actual_branch'] != "":
                        branch = lbranch['actual_branch']
                    print('%s %s %s %s' % (litem['name'], litem['vcs_url'],
                                           branch, lbranch['vcs_last_rev']))
                    break
class Setup(): tool_list = ['repo', 'git'] default_xml = 'default.xml' default_repo_quiet = '--quiet' class_config_dir = 'config' class_log_dir = 'log' check_repo_install_dir = '.repo/repo/.git' check_repo_sync_file = '.repo/projects/' replacement = {} def __init__(self): # Set various default values # Default -j for repo init self.jobs = str(settings.REPO_JOBS) # Pull in the defaults from the environment (set by setup.sh) self.base_url = os.getenv('OE_BASEURL') self.base_branch = os.getenv('OE_BASEBRANCH') # Real project or a mirror? self.mirror = False # Default configuration self.distros = [settings.DEFAULT_DISTRO] self.machines = [settings.DEFAULT_MACHINE] self.layers = [] self.recipes = [] self.all_layers = False self.no_recommend = False self.no_network = False self.allowed_network = None self.remotes = {} self.requiredlayers = [] self.recommendedlayers = [] # Default quiet: self.quiet = self.default_repo_quiet self.repo_verbose = False # Default depth self.depth = None # Default to NOT force-sync self.force_sync = None self.debug_lvl = 0 # Set the install_dir # Use the path from this file. Note bin has to be dropped. self.install_dir = os.path.abspath( os.path.dirname(os.path.abspath(__file__)) + '/../') # Default location for the related XML files self.xml_dir = os.path.join(self.install_dir, 'data/xml') # Set the directory where we're running. 
self.project_dir = os.getcwd() self.conf_dir = os.path.join(self.project_dir, self.class_config_dir) # Environment setup self.env = os.environ.copy() self.setup_env() # Check for all the tools and create a dictionary of the path self.tools = {i: self.get_path(i) for i in self.tool_list} # Config flags self.list_distros = False self.list_machines = False self.list_layers = False self.list_recipes = False def exit(self, ret=0): logger.debug("setup.py finished (ret=%s)" % (ret)) sys.exit(ret) def start_file_logging(self): log_dir = os.path.join(self.conf_dir, self.class_log_dir) if not os.path.exists(log_dir): os.makedirs(log_dir) log_file = '%s/%s.log' % ( log_dir, time.strftime('%Y-%m-%d-%H:%M:%S+0000', time.gmtime())) logger_setup.setup_logging_file(log_file) def main(self, orig_args): parser = Argparse_Setup(self) # We want to default to help mode lacking any args. if not orig_args or not orig_args[1:]: orig_args.append('--help') parser.evaluate_args(orig_args[1:]) self.setup_args = " ".join(orig_args[1:]) self.start_file_logging() if not self.base_url: logger.error( 'Unable to determine base url, you may need to specify --base-url=' ) if not self.base_branch: logger.error( 'Unable to determine base branch, you may need to specify --base-branch=' ) if not self.base_url or not self.base_branch: self.exit(1) self.load_layer_index() if self.list_distros: self.index.list_distros(self.base_branch) if self.list_machines: self.index.list_machines(self.base_branch) if self.list_layers: self.index.list_layers(self.base_branch) if self.list_recipes: self.index.list_recipes(self.base_branch) if self.list_distros or self.list_machines or self.list_layers or self.list_recipes: sys.exit(0) logger.debug('setup.py started') logger.debug('Calling setup main with arguments %s', str(orig_args)) # Log debug which may have been missed due to log level. 
logger.debug("PATH=%s" % self.env["PATH"]) logger.debug("Tools are:") for key in self.tools: logger.debug("%s -> %s", key, self.tools[key]) logger.plain('Setting distro to "%s"' % (",".join(self.distros))) logger.plain('Setting machine to "%s"' % (",".join(self.machines))) if self.layers != []: logger.plain('Setting layers to "%s"' % (",".join(self.layers))) if self.recipes != []: logger.plain('Setting recipes to "%s"' % (",".join(self.recipes))) self.process_layers() self.project_setup() self.__prep_replacements() if self.mirror != True: # We only want to do this if we're not mirroring... self.update_project() else: # Setup an index for others to use if we're mirroring... self.update_mirror() self.update_mirror_index() self.update_manifest() self.update_gitignore() self.commit_files() self.repo_sync() self.exit(0) def load_layer_index(self): # Load Layer_Index replace = [] replace = replace + settings.REPLACE replace = replace + [ ('#INSTALL_DIR#', self.install_dir), ('#BASE_URL#', self.base_url), ('#BASE_BRANCH#', self.base_branch), ] # See if there is a mirror index available from the BASE_URL mirror_index_path = None mirror_index = os.path.join(self.conf_dir, 'mirror-index') cmd = [ self.tools['git'], 'ls-remote', self.base_url + '/mirror-index', self.base_branch ] ret = subprocess.Popen(cmd, env=self.env, cwd=self.project_dir, close_fds=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) ret.wait() if (ret.returncode == 0): logger.plain('Loading the mirror index from %s (%s)...' % (self.base_url + '/mirror-index', self.base_branch)) # This MIGHT be a valid mirror.. 
if not os.path.exists(mirror_index): os.makedirs(mirror_index) cmd = [self.tools['git'], 'init'] utils_setup.run_cmd(cmd, environment=self.env, cwd=mirror_index) cmd = [ self.tools['git'], 'fetch', '-f', '-n', '-u', self.base_url + '/mirror-index', self.base_branch + ':' + self.base_branch ] ret = subprocess.Popen(cmd, env=self.env, cwd=mirror_index, close_fds=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) ret.wait() if (ret.returncode == 0): logger.debug('Found mirrored index.') cmd = [self.tools['git'], 'checkout', self.base_branch] utils_setup.run_cmd(cmd, environment=self.env, cwd=mirror_index) cmd = [self.tools['git'], 'reset', '--hard'] utils_setup.run_cmd(cmd, environment=self.env, cwd=mirror_index) mirror_index_path = mirror_index # Mirror also has a copy of the associated XML bits self.xml_dir = os.path.join(mirror_index, 'xml') self.index = Layer_Index(indexcfg=settings.INDEXES, base_branch=self.base_branch, replace=replace, mirror=mirror_index_path) def process_layers(self): from collections import deque # We allow duplicates in the queue, they're filtered later # Queue of required objects requiredQueue = deque([]) # Queue of recommended objects recommendedQueue = deque([]) logger.debug('Starting') # if this switches to false, we have to exit at the end of this function allfound = True # It all starts with BASE_LAYERS, so always include this. (only from index 0) lindex = self.index.index[0] branchid = self.index.getBranchId(lindex, self.get_branch(lindex=lindex)) if branchid: for lname in settings.BASE_LAYERS.split(): base_layerBranch = self.index.getLayerBranch(lindex, branchid, name=lname) if not base_layerBranch or not branchid: raise Exception( 'Unable to find base layer: %s in the Layer_Index' % (lname)) for lb in base_layerBranch: requiredQueue.append((lindex, lb)) # process the configuration arguments (find the layers we need for the project) # if an item is 'layer:item', then the 'foo' part must match a layer name. 
def procConfig(layer=None, distro=None, machine=None, recipe=None): item = ["", layer][layer != None] item = item + ["", distro][distro != None] item = item + ["", machine][machine != None] item = item + ["", recipe][recipe != None] type = ["", 'layer'][layer != None] type = type + ["", 'distro'][distro != None] type = type + ["", 'machine'][machine != None] type = type + ["", 'recipe'][recipe != None] if (':' in item): # User told us which layer, so ignore the other bits -- they can be used later... layer = item.split(':')[0] distro = None machine = None recipe = None # TODO: We do not actually verify the item we asked for (if a layer was specified) is available found = False for lindex in self.index.index: branchid = self.index.getBranchId( lindex, self.get_branch(lindex=lindex)) if not branchid: continue for layerBranch in self.index.getLayerBranch( lindex, branchid, name=layer, distro=distro, machine=machine, recipe=recipe) or []: requiredQueue.append((lindex, layerBranch)) found = True if found: break if not found: logger.critical('%s "%s" not found' % (type, item)) return False return True for l in self.layers: if not procConfig(layer=l): allfound = False for l in self.distros: if not procConfig(distro=l): allfound = False for l in self.machines: if not procConfig(machine=l): allfound = False for l in self.recipes: if not procConfig(recipe=l): allfound = False # Add all layers -- if necessary if self.all_layers == True: for lindex in self.index.index: branchid = self.index.getBranchId( lindex, self.get_branch(lindex=lindex)) if not branchid: continue for l in lindex['layerItems']: for layerBranch in self.index.getLayerBranch( lindex, branchid, layerItem=l) or []: requiredQueue.append((lindex, layerBranch)) if not allfound: logger.critical('Please correct the missing items, exiting.') self.exit(1) # Compute requires and recommended layers... # List of 'collection' and layer 'name'. This list is used to avoid # including duplicates. 
Collection is matched first, then name -- as not # all layer indexes may contain 'collection' depCacheCol = [] depCacheName = [] def checkCache(lindex, layerBranch, addCache=False): (collection, name, vcs_url) = self.index.getLayerInfo(lindex, layerBranch=layerBranch) if collection in depCacheCol or name in depCacheName: return True if addCache: if collection: depCacheCol.append(collection) if name: depCacheName.append(name) return False def resolveIndexOrder(lindex, layerBranch, Queue): # We want to recompute the dependency in INDEXES order... (collection, name, vcs_url) = self.index.getLayerInfo(lindex, layerBranch) found = False for pindex in self.index.index: # We already know it'll be in this index, so we just use it as-is... if pindex == lindex: break # Look for the collection (or name if no collection) in the indexes in # priority order... pbranchid = self.index.getBranchId( pindex, self.get_branch(lindex=pindex)) if collection: new_layerBranches = self.index.getLayerBranch( pindex, pbranchid, collection=collection) if new_layerBranches and new_layerBranches != []: for lb in new_layerBranches: logger.info( 'Resolving dependency %s from %s to %s from %s' % (name, lindex['CFG']['DESCRIPTION'], name, pindex['CFG']['DESCRIPTION'])) Queue.append((pindex, lb)) lindex = None layerBranch = None break if name: new_layerBranches = self.index.getLayerBranch(pindex, pbranchid, name=name) if new_layerBranches and new_layerBranches != []: for lb in new_layerBranches: logger.info( 'Resolving dependency %s from %s to %s from %s' % (name, lindex['CFG']['DESCRIPTION'], name, pindex['CFG']['DESCRIPTION'])) Queue.append((pindex, lb)) lindex = None layerBranch = None break return (lindex, layerBranch) while requiredQueue: (lindex, layerBranch) = requiredQueue.popleft() (lindex, layerBranch) = resolveIndexOrder(lindex, layerBranch, requiredQueue) if not lindex or not layerBranch: continue if not checkCache(lindex, layerBranch, True): self.requiredlayers.append((lindex, 
layerBranch)) (required, recommended) = self.index.getDependencies( lindex, layerBranch) for dep in required: requiredQueue.append((lindex, dep)) for dep in recommended: recommendedQueue.append((lindex, dep)) while recommendedQueue: (lindex, layerBranch) = recommendedQueue.popleft() (lindex, layerBranch) = resolveIndexOrder(lindex, layerBranch, recommendedQueue) if not lindex or not layerBranch: continue if not checkCache(lindex, layerBranch, True): self.recommendedlayers.append((lindex, layerBranch)) (required, recommended) = self.index.getDependencies( lindex, layerBranch) for dep in required + recommended: recommendedQueue.append((lindex, dep)) # Also compute the various remotes try: from urllib.request import urlopen, URLError from urllib.parse import urlparse except ImportError: from urllib2 import urlopen, URLError from urlparse import urlparse self.remotes['base'] = self.base_url def process_remote(lindex, layerBranch): for layer in self.index.find_layer(lindex, id=layerBranch['layer']): vcs_url = layer['vcs_url'] found = False for remote in self.remotes: if vcs_url.startswith(self.remotes[remote]): found = True break if not found: url = urlparse(vcs_url) if not url.scheme: self.remotes['local'] = '/' found = True if not found: for (remoteurl, remotename) in settings.REMOTES: if vcs_url.startswith(remoteurl): self.remotes[remotename] = remoteurl found = True break if not found: self.remotes[url.scheme + '_' + url.netloc.translate( str.maketrans( '/:', '__'))] = url.scheme + '://' + url.netloc for (lindex, layerBranch) in self.requiredlayers + self.recommendedlayers: process_remote(lindex, layerBranch) def display_layer(lindex, layerBranch): branchid = self.index.getBranchId(lindex, self.get_branch(lindex=lindex)) for layer in self.index.find_layer(lindex, id=layerBranch['layer']): vcs_url = layer['vcs_url'] path = 'layers/' + "".join(vcs_url.split('/')[-1:]) if (layer['name'] == 'openembedded-core'): bitbakeBranch = self.index.getBranch( lindex, 
layerBranch['branch'])['bitbake_branch'] logger.debug( 'bitbake: %s %s %s' % (settings.BITBAKE, path + '/bitbake', bitbakeBranch)) actual_branch = layerBranch[ 'actual_branch'] or self.index.getBranch(lindex, branchid)['name'] logger.debug('%s: %s %s %s' % (layer['name'], vcs_url, path, actual_branch)) logger.debug('Computed required layers:') for (lindex, layerBranch) in self.requiredlayers: display_layer(lindex, layerBranch) logger.debug('Computed recommended layers:%s' % (["", ' (skipping)'][self.no_recommend == True])) for (lindex, layerBranch) in self.recommendedlayers: display_layer(lindex, layerBranch) # Recommends are disabled, filter it... if self.no_recommend == True: self.recommendedlayers = [] logger.debug('Done') def project_setup(self): logger.debug('Starting') # Put local project setup rules here... logger.debug('Done') def update_project(self): logger.debug('Starting') if not os.path.exists(self.project_dir + '/.templateconf'): tmplconf = open(self.project_dir + '/.templateconf', 'w') tmplconf.write('# Project template settings\n') tmplconf.write('TEMPLATECONF=${TEMPLATECONF:-$OEROOT/config}\n') tmplconf.close() self.copySample(self.install_dir + '/data/samples/README.sample', self.project_dir + '/README') self.copySample( self.install_dir + '/data/samples/bblayers.conf.sample', self.project_dir + '/config/bblayers.conf.sample') self.copySample(self.install_dir + '/data/samples/conf-notes.sample', self.project_dir + '/config/conf-notes.txt') self.copySample(self.install_dir + '/data/samples/local.conf.sample', self.project_dir + '/config/local.conf.sample') if os.path.exists(self.install_dir + '/data/samples/site.conf.sample'): self.copySample( self.install_dir + '/data/samples/site.conf.sample', self.project_dir + '/config/site.conf.sample') def update_mirror(self): self.copySample( self.install_dir + '/data/samples/README-MIRROR.sample', self.project_dir + '/README') def __prep_replacements(self): self.replacement['layers'] = [] 
# NOTE(review): this chunk arrived with all statements collapsed onto a few
# physical lines; the indentation below is a reconstruction. Token order is
# unchanged. The first statements are the TAIL of a method whose 'def' line
# precedes this chunk -- confirm its header before relying on the nesting.
    self.replacement['machines'] = {}
    self.replacement['distros'] = {}

    def addLayer(lindex, layerBranch):
        # Map a layerBranch to its checkout path(s) under layers/.
        # NOTE(review): 'branchid' is computed but never used here.
        branchid = self.index.getBranchId(lindex, self.get_branch(lindex=lindex))
        paths = []
        for layer in self.index.find_layer(lindex, id=layerBranch['layer']):
            vcs_url = layer['vcs_url']
            # Path is layers/<last component of the vcs url>.
            path = 'layers/' + "".join(vcs_url.split('/')[-1:])
            if layerBranch['vcs_subdir']:
                path += '/' + layerBranch['vcs_subdir']
            paths.append(path)
        return paths

    # Add layers to 'LAYERS'
    for (lindex, layerBranch) in self.requiredlayers + self.recommendedlayers:
        self.replacement['layers'] = self.replacement['layers'] + addLayer(lindex, layerBranch)

    # Add machines to 'MACHINES'
    for (lindex, layerBranch) in self.requiredlayers + self.recommendedlayers:
        for machine in lindex['machines']:
            if machine['layerbranch'] == layerBranch['id']:
                # Fall back to the machine name when no description exists.
                desc = machine['description'] or machine['name']
                self.replacement['machines'][machine['name']] = desc

    # Add distro to 'DISTROS'
    for (lindex, layerBranch) in self.requiredlayers + self.recommendedlayers:
        for distro in lindex['distros']:
            if distro['layerbranch'] == layerBranch['id']:
                desc = distro['description'] or distro['name']
                self.replacement['distros'][distro['name']] = desc

def copySample(self, src, dst):
    """Copy a sample config file from src to dst, expanding the
    ####...#### placeholder markers from self.replacement / self.machines /
    self.distros as each line is written."""
    src = open(src, 'r')
    dst = open(dst, 'w')
    for line in src:
        if '####LAYERS####' in line:
            # One output line per selected layer, rooted at ##OEROOT##.
            for l in self.replacement['layers']:
                dst.write(line.replace('####LAYERS####', '##OEROOT##/%s' % (l)))
            continue
        if '####SETUP_ARGS####' in line:
            dst.write(line.replace('####SETUP_ARGS####', self.setup_args))
            continue
        if '####MACHINES####' in line:
            # Emit a '# <description>' comment line above each machine entry.
            for (name, desc) in sorted(self.replacement['machines'].items(), key=lambda t: t[0]):
                dst.write('# %s\n' % desc.strip())
                dst.write(line.replace('####MACHINES####', name))
            continue
        if '####DEFAULTMACHINE####' in line:
            name = self.machines[0]
            # Strip a 'prefix:' qualifier if present.
            if ':' in name:
                name = ':'.join(name.split(':')[1:])
            dst.write(line.replace('####DEFAULTMACHINE####', name))
            continue
        if '####DISTROS####' in line:
            for (name, desc) in sorted(self.replacement['distros'].items(), key=lambda t: t[0]):
                dst.write('# %s\n' % desc.strip())
                dst.write(line.replace('####DISTROS####', name))
            continue
        if '####DEFAULTDISTRO####' in line:
            name = self.distros[0]
            if ':' in name:
                name = ':'.join(name.split(':')[1:])
            dst.write(line.replace('####DEFAULTDISTRO####', name))
            continue
        # No marker: copy the line through unchanged.
        dst.write(line)
    src.close()
    dst.close()

def update_mirror_index(self):
    """Export the layer-index state into a git-tracked 'mirror-index'
    directory under the project dir, committing only when content changed."""
    logger.debug('Starting')
    path = os.path.join(self.project_dir, 'mirror-index')
    logger.plain('Exporting mirror-index %s...' % (path))
    if not os.path.exists(path):
        cmd = [self.tools['git'], 'init', path]
        if self.quiet == self.default_repo_quiet:
            cmd.append(self.quiet)
        utils_setup.run_cmd(cmd, environment=self.env, cwd=self.project_dir)
    # Try to create the branch; fall back to switching if it already exists.
    cmd = [self.tools['git'], 'checkout', '-b', self.base_branch]
    ret = subprocess.Popen(cmd, env=self.env, cwd=path, close_fds=True,
                           stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    ret.wait()
    if (ret.returncode != 0):
        # if we failed, then simply try to switch branches
        cmd = [self.tools['git'], 'checkout', self.base_branch]
        ret = subprocess.Popen(cmd, env=self.env, cwd=path, close_fds=True,
                               stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        ret.wait()
    # Make sure the directory is empty, use -f to ignore failures
    for (dirpath, dirnames, filenames) in os.walk(path):
        if dirpath.endswith('/.git') or path + '/.git' in dirpath:
            continue
        for filename in filenames:
            os.remove(os.path.join(dirpath, filename))
    # Construct a list of all layers we've downloaded, by url, including sublayers not activated
    url_cache = {}
    for (lindex, layerBranch) in self.requiredlayers + self.recommendedlayers:
        for layer in self.index.find_layer(lindex, id=layerBranch['layer']):
            vcs_url = layer['vcs_url']
            if not vcs_url in url_cache:
                url_cache[vcs_url] = []
            url_cache[vcs_url].append((lindex, layerBranch['branch']))
    # Serialize the information for each of the layers (and their sublayers)
    for vcs_url in url_cache:
        for (lindex, branchid) in url_cache[vcs_url]:
            for layer in lindex['layerItems']:
                if layer['vcs_url'] in url_cache:
                    for lb in self.index.getLayerBranch(lindex, branchid=branchid, layerItem=layer):
                        self.index.serialize_index(
                            lindex,
                            os.path.join(path, lindex['CFG']['DESCRIPTION']),
                            split=True, layerBranches=[lb], IncludeCFG=True,
                            mirror=True, base_url=self.base_url)
                    # Copy this layer's associated xml fragments, if present.
                    name = layer['name']
                    destdir = os.path.join(path, 'xml')
                    srcfile = os.path.join(self.xml_dir, '%s.inc' % (name))
                    if os.path.exists(srcfile):
                        os.makedirs(destdir, exist_ok=True)
                        shutil.copy(srcfile, destdir)
                    srcfile = os.path.join(self.xml_dir, '%s.xml' % (name))
                    if os.path.exists(srcfile):
                        os.makedirs(destdir, exist_ok=True)
                        shutil.copy(srcfile, destdir)
                    # Special processing for the openembedded-core layer
                    if name == 'openembedded-core':
                        srcfile = os.path.join(self.xml_dir, 'bitbake.inc')
                        if os.path.exists(srcfile):
                            os.makedirs(destdir, exist_ok=True)
                            shutil.copy(srcfile, destdir)
                        srcfile = os.path.join(self.xml_dir, 'bitbake.xml')
                        if os.path.exists(srcfile):
                            os.makedirs(destdir, exist_ok=True)
                            shutil.copy(srcfile, destdir)
    # git add file.
    cmd = [self.tools['git'], 'add', '-A', '.']
    utils_setup.run_cmd(cmd, environment=self.env, cwd=path)
    # diff-index exits nonzero when the tree differs from HEAD -> commit.
    cmd = [self.tools['git'], 'diff-index', '--quiet', 'HEAD', '--']
    ret = subprocess.Popen(cmd, env=self.env, cwd=path, close_fds=True,
                           stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    ret.wait()
    if (ret.returncode != 0):
        logger.debug('Updating mirror-index')
        cmd = [self.tools['git'], 'commit', '-m', 'Updated index - %s' % (self.setup_args)]
        utils_setup.run_cmd(cmd, environment=self.env, cwd=path)
    logger.debug('Done')

def update_manifest(self):
    """Write the repo manifest (self.default_xml): remotes, defaults, then
    one <project> entry per unique layer url, splicing in per-layer .inc
    fragments inline and .xml fragments after each project element.

    NOTE(review): interior whitespace of the XML string literals may have
    been collapsed by the source mangling -- verify indentation of the
    emitted XML against a known-good manifest."""
    logger.debug('Starting')
    fxml = open(os.path.join(self.project_dir, self.default_xml), 'w')
    fxml.write('<manifest>\n')
    remote = 'base'
    fxml.write(' <remote name="%s" fetch="%s"/>\n' % (remote, self.remotes[remote]))
    fxml.write(' <default revision="%s" remote="%s" sync-j="%s"/>\n' % (self.base_branch, remote, self.jobs))
    for remote in sorted(self.remotes):
        if remote == 'base':
            continue
        fxml.write(' <remote name="%s" fetch="%s"/>\n' % (remote, self.remotes[remote]))

    def open_xml_tag(name, url, remote, path, revision):
        # Open a <project> element (note: 'name' attr carries the url).
        fxml.write(' <project name="%s" remote="%s" path="%s" revision="%s">\n' % (url, remote, path, revision))

    def inc_xml(name, url, remote, path, revision):
        # incfile is included inline and has to work as elements of the 'project'
        incfile = os.path.join(self.xml_dir, '%s.inc' % (name))
        logger.debug('Looking for %s' % (incfile))
        if os.path.exists(incfile):
            fbase = open(incfile, 'r')
            for line in fbase:
                fxml.write(line)
            fbase.close()

    def close_xml_tag(name, url, remote, path, revision):
        fxml.write(' </project>\n')

    def add_xml(name, url, remote, path, revision):
        # xmlfile is included after the entry and is completely standalone
        xmlfile = os.path.join(self.xml_dir, '%s.xml' % (name))
        logger.debug('Looking for %s' % (xmlfile))
        if os.path.exists(xmlfile):
            fbase = open(xmlfile, 'r')
            for line in fbase:
                fxml.write(line)
            fbase.close()

    def write_xml(name, url, remote, path, revision):
        # Convenience: full open/inc/close/add sequence for one entry.
        open_xml_tag(name, url, remote, path, revision)
        inc_xml(name, url, remote, path, revision)
        close_xml_tag(name, url, remote, path, revision)
        add_xml(name, url, remote, path, revision)

    def process_xml_layers(allLayers):
        def process_xml_layer(lindex, layerBranch):
            branchid = self.index.getBranchId(lindex, self.get_branch(lindex=lindex))
            for layer in self.index.find_layer(lindex, id=layerBranch['layer']):
                # Prefer the layer's actual_branch override if present.
                revision = layerBranch['actual_branch'] or self.index.getBranch(lindex, branchid)['name']
                vcs_url = layer['vcs_url']
                # Find the remote whose fetch url prefixes this layer's url.
                for remote in self.remotes:
                    if vcs_url.startswith(self.remotes[remote]):
                        break
                url = vcs_url[len(self.remotes[remote]):]
                url = url.strip('/')
                path = 'layers/' + "".join(url.split('/')[-1:])
                entry = {
                    'name': layer['name'],
                    'remote': remote,
                    'path': path,
                    'revision': revision,
                }
                if url not in cache:
                    cache[url] = []
                if entry['name'] == 'openembedded-core':
                    # bitbake rides along with oe-core at <path>/bitbake.
                    bitbakeBranch = self.index.getBranch(lindex, layerBranch['branch'])['bitbake_branch']
                    bitbake_entry = {
                        'name': 'bitbake',
                        'remote': remote,
                        'path': path + '/bitbake',
                        'revision': bitbakeBranch,
                    }
                    if settings.BITBAKE not in cache:
                        cache[settings.BITBAKE] = []
                    cache[settings.BITBAKE].append(bitbake_entry)
                cache[url].append(entry)

        # We need to construct a list of layers with same urls...
        cache = {}
        for (lindex, layerBranch) in allLayers:
            process_xml_layer(lindex, layerBranch)

        from collections import OrderedDict
        # Emit entries in deterministic (sorted-by-url) order.
        for url in OrderedDict(sorted(cache.items(), key=lambda t: t[0])):
            name = cache[url][0]['name']
            remote = cache[url][0]['remote']
            path = cache[url][0]['path']
            revision = cache[url][0]['revision']
            open_xml_tag(name, url, remote, path, revision)
            for entry in cache[url]:
                inc_xml(entry['name'], url, remote, path, revision)
            close_xml_tag(name, url, remote, path, revision)
            for entry in cache[url]:
                add_xml(entry['name'], url, remote, path, revision)

    process_xml_layers(self.requiredlayers + self.recommendedlayers)
    fxml.write('</manifest>\n')
    fxml.close()
    logger.debug('Done')

def update_gitignore(self):
    """Append standard ignore patterns plus every <linkfile dest=...> from
    default.xml to the project's .gitignore, skipping entries already there."""
    logger.debug('Starting')
    import xml.etree.ElementTree as ET
    ign_list = [
        '.repo*',
        '*.pyc',
        '*.pyo',
        '*.swp',
        '*.orig',
        '*.rej',
        '*~',
        '/environment-setup-*',
        '/layers/*',
    ]
    tree = ET.parse(os.path.join(self.project_dir, 'default.xml'))
    root = tree.getroot()
    for linkfile in root.iter('linkfile'):
        ign_list.append(linkfile.attrib['dest'])
    # 'a+' + seek(0): read existing entries, then append only missing ones.
    with open(os.path.join(self.project_dir, '.gitignore'), 'a+') as f:
        f.seek(0)
        existed = f.readlines()
        for l in ign_list:
            item = '%s\n' % l
            if item not in existed:
                f.write(item)
    logger.debug('Done')

def commit_files(self):
    """git-add the generated project configuration files and commit them
    when diff-index reports changes against HEAD."""
    logger.debug('Starting')
    # List of all files that may change due to config
    filelist = [
        'README',
        'default.xml',
        '.gitignore',
        '.gitconfig',
    ]
    # If we are mirroring, skip all of these...
    if self.mirror != True:
        filelist.append('.templateconf')
        filelist.append('config/bblayers.conf.sample')
        filelist.append('config/conf-notes.txt')
        filelist.append('config/local.conf.sample')
        if os.path.exists('config/site.conf.sample'):
            filelist.append('config/site.conf.sample')
        # Add log dir if it contains files
        if os.listdir('config/log'):
            filelist.append('config/log')
    # git init
    if not os.path.exists(self.project_dir + '/.git'):
        cmd = [self.tools['git'], 'init', self.project_dir]
        if self.quiet == self.default_repo_quiet:
            cmd.append(self.quiet)
        utils_setup.run_cmd(cmd, environment=self.env, cwd=self.conf_dir)
    # Add self.install_dir as a submodule if it is in self.project_dir
    if self.install_dir.startswith(self.project_dir + '/'):
        logger.debug('Add %s as a submodule' % self.install_dir)
        cmd = [self.tools['git'], 'submodule', 'add',
               './' + os.path.relpath(self.install_dir, self.project_dir)]
        utils_setup.run_cmd(cmd, environment=self.env, cwd=self.project_dir)
        filelist.append(self.install_dir)
        filelist.append('.gitmodules')
    # git add manifest. (Since these files are new, always try to add them)
    cmd = [self.tools['git'], 'add', '--'] + filelist
    utils_setup.run_cmd(cmd, environment=self.env, cwd=self.project_dir)
    cmd = [self.tools['git'], 'diff-index', '--quiet', 'HEAD', '--'] + filelist
    ret = subprocess.Popen(cmd, env=self.env, cwd=self.project_dir, close_fds=True,
                           stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    ret.wait()
    if (ret.returncode != 0):
        logger.plain('Updated project configuration')
        # Command failed -- so self.default_xml changed...
        cmd = [self.tools['git'], 'commit', '-m',
               'Configuration change - %s' % (self.setup_args), '--'] + filelist
        utils_setup.run_cmd(cmd, environment=self.env, cwd=self.project_dir)
    logger.debug('Done')

def repo_sync(self):
    """Run 'repo sync' -- or 'repo init' + initial sync on first use
    (detected by the absence of check_repo_install_dir)."""
    logger.debug('Starting')
    if os.path.exists(os.path.join(self.project_dir, self.check_repo_install_dir)):
        cmd = ['-j', self.jobs]
        self.call_repo_sync(cmd)
    else:
        # repo init
        cmd = ['-m', self.default_xml, '-u', self.project_dir]
        # NOTE(review): nesting of the '--no-repo-verify' append under the
        # mirror check is reconstructed from collapsed source -- confirm.
        if self.mirror == True:
            cmd.append('--mirror')
            cmd.append('--no-repo-verify')
        self.call_repo_init(cmd)
        # repo sync
        cmd = ['-j', self.jobs]
        self.call_initial_repo_sync(cmd)
    logger.debug('Done')

def setup_env(self):
    """Prepare the environment for git/repo: HOME redirect, then PATH."""
    self.set_repo_git_env()
    self.add_bin_path()

def add_bin_path(self):
    # Prepend the install dir's bin directory to PATH.
    self.env["PATH"] = self.install_dir + "/bin:" + self.env["PATH"]

def set_repo_git_env(self):
    # Set HOME to install_dir to use install_dir/.gitconfig settings. Otherwise the user will
    # be prompted for information.
    self.env["HOME"] = self.project_dir

def call_repo_init(self, args):
    """Run 'repo init' with *args*, honoring depth/quiet settings; no-op if
    the repo install dir already exists."""
    logger.debug('Starting')
    repo = self.tools['repo']
    directory = os.path.join(self.project_dir, self.check_repo_install_dir)
    if os.path.exists(directory):
        logger.info('Done: detected repo init already run since %s exists' % directory)
        return
    cmd = args
    cmd.insert(0, repo)
    cmd.insert(1, 'init')
    if self.depth:
        cmd.append(self.depth)
    log_it = 1
    if self.repo_verbose is not True and self.quiet == self.default_repo_quiet:
        cmd.append(self.quiet)
        log_it = 0
    try:
        utils_setup.run_cmd(cmd, environment=self.env, log=log_it)
    except Exception as e:
        raise
    logger.debug('Done')

# This only exists to check if we have fully sync'ed the project
# Updating should use call_repo_sync
def call_initial_repo_sync(self, args):
    """First-time sync wrapper: uses --local-only when a prior sync marker
    exists, falling back to a full sync if the local-only attempt fails."""
    logger.debug('Starting')
    sync_file = os.path.join(self.project_dir, self.check_repo_sync_file)
    local_only = 0
    orig_args = list(args)
    if os.path.exists(sync_file):
        logger.info('Detected repo sync already run since %s exists' % sync_file)
        logger.info('Only running local update.')
        args.append('--local-only')
        local_only = 1
    try:
        self.call_repo_sync(args)
    except Exception as e:
        if not local_only:
            raise
        else:
            logger.info('Using --local-only failed. Trying full sync.')
            try:
                self.call_repo_sync(orig_args)
            except Exception as e2:
                raise
    logger.debug('Done')

def call_repo_sync(self, args):
    """Run 'repo sync' with *args*, honoring force-sync/quiet settings."""
    logger.debug('Starting')
    repo = self.tools['repo']
    cmd = args
    cmd.insert(0, repo)
    cmd.insert(1, 'sync')
    if self.force_sync:
        cmd.append(self.force_sync)
    log_it = 1
    if self.repo_verbose is not True and self.quiet == self.default_repo_quiet:
        cmd.append(self.quiet)
        log_it = 0
    utils_setup.run_cmd(cmd, environment=self.env, log=log_it)
    logger.debug('Done')

def get_branch(self, lindex=None):
    """Return the branch for *lindex* (via the index), or the base branch."""
    if lindex:
        return self.index.getIndexBranch(default=self.base_branch, lindex=lindex)
    return self.base_branch

def get_path(self, tool):
    """Return the full path of *tool*, exiting the program if not on PATH."""
    cmd = self.which(tool)
    if (not cmd):
        logger.critical('Cannot find %s in path!', tool)
        logger.critical('Path was: %s', os.environ['PATH'])
        self.exit(1)
    return cmd

# Helpers: Set_*, which..
def set_repo_verbose(self, verbose):
    """Record whether 'repo' should run in verbose mode."""
    self.repo_verbose = verbose

def set_jobs(self, jobs):
    """Set the parallel job count passed to 'repo sync -j'."""
    logger.debug('Setting jobs to %s' % jobs)
    self.jobs = jobs

def set_depth(self, depth):
    """Set the '--depth' argument for repo; values <= 1 are coerced to 2."""
    if int(depth) <= 1:
        # A depth below 2 is rejected here as invalid for repo.
        logger.info('repo depth %s is invalid, setting to 2' % depth)
        depth = '2'
    logger.debug('Setting depth to %s' % depth)
    self.depth = '--depth=%s' % depth

def set_force_sync(self, sync):
    """Enable '--force-sync' for repo sync when *sync* is True."""
    logger.debug('Setting force-sync to %s' % sync)
    if sync is True:
        self.force_sync = '--force-sync'

def set_debug(self):
    """Switch logging to DEBUG and stop passing the quiet flag around."""
    self.set_debug_env()
    self.quiet = None
    logger.setLevel(logging.DEBUG)
    logger.debug('logging level set to DEBUG')

def set_base_url(self, url):
    """Set the base URL used to locate indexes and mirrors."""
    logger.debug('Setting base-url to %s' % url)
    self.base_url = url

def set_base_branch(self, branch):
    """Set the default branch used for indexes and checkouts."""
    logger.debug('Setting base-branch to %s' % branch)
    self.base_branch = branch

def set_debug_env(self):
    """Ask 'repo' to emit verbose curl traffic for debugging."""
    self.env["REPO_CURL_VERBOSE"] = '1'

def touch(self, fn):
    """Create *fn* (empty) if it does not already exist."""
    logger.debug("Creating %s", fn)
    open(fn, 'a').close()

# TODO: Python >= 3.3 provides shutil.which(); consider switching to it.
def which(self, program):
    """Return the full path to *program* found on self.env["PATH"], or
    None when no executable candidate exists.

    Fixes vs. original: the PATH string and the per-directory loop
    variable were both named 'path' (shadowing); the note above was a
    stray module-level string literal instead of a comment.
    """
    for directory in self.env["PATH"].split(os.path.pathsep):
        candidate = os.path.join(directory, program)
        if os.path.exists(candidate) and os.access(candidate, os.X_OK):
            return candidate
    return None
def load_layer_index(self):
    """Load the Layer_Index, preferring a mirror-index fetched from
    BASE_URL/mirror-index when one is reachable; otherwise fall back to
    the configured settings.INDEXES sources.

    NOTE(review): statements below were reconstructed from a collapsed
    one-line source; token order is unchanged, indentation is inferred.
    """
    # Load Layer_Index
    replace = []
    replace = replace + settings.REPLACE
    replace = replace + [
        ('#INSTALL_DIR#', self.install_dir),
        ('#BASE_URL#', self.base_url),
        ('#BASE_BRANCH#', self.base_branch),
    ]

    # See if there is a mirror index available from the BASE_URL
    mirror_index_path = None
    mirror_index = os.path.join(self.conf_dir, 'mirror-index')
    # ls-remote succeeding means the remote branch exists and is reachable.
    cmd = [self.tools['git'], 'ls-remote', self.base_url + '/mirror-index', self.base_branch]
    ret = subprocess.Popen(cmd, env=self.env, cwd=self.project_dir, close_fds=True,
                           stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    ret.wait()
    if (ret.returncode == 0):
        logger.plain('Loading the mirror index from %s (%s)...' % (self.base_url + '/mirror-index', self.base_branch))
        # This MIGHT be a valid mirror..
        if not os.path.exists(mirror_index):
            os.makedirs(mirror_index)
            cmd = [self.tools['git'], 'init']
            utils_setup.run_cmd(cmd, environment=self.env, cwd=mirror_index)
        # Force-fetch the branch into the local clone (-f -n -u).
        cmd = [self.tools['git'], 'fetch', '-f', '-n', '-u',
               self.base_url + '/mirror-index',
               self.base_branch + ':' + self.base_branch]
        ret = subprocess.Popen(cmd, env=self.env, cwd=mirror_index, close_fds=True,
                               stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        ret.wait()
        if (ret.returncode == 0):
            logger.debug('Found mirrored index.')
            cmd = [self.tools['git'], 'checkout', self.base_branch]
            utils_setup.run_cmd(cmd, environment=self.env, cwd=mirror_index)
            cmd = [self.tools['git'], 'reset', '--hard']
            utils_setup.run_cmd(cmd, environment=self.env, cwd=mirror_index)
            mirror_index_path = mirror_index
            # Mirror also has a copy of the associated XML bits
            self.xml_dir = os.path.join(mirror_index, 'xml')

    self.index = Layer_Index(indexcfg=settings.INDEXES,
                             base_branch=self.base_branch,
                             replace=replace,
                             mirror=mirror_index_path)
work_list.append('%s %s' % (_dst, _branch)) # We assume this program is located in the bin directory dst = os.path.dirname(os.path.dirname(sys.argv[0])) # Branch the setup program.... git_branch(_dst=dst, _orig_branch=branch, _branch=dest_branch) completed.append(dst) # Transform and export the mirror index git_branch(mirror_path, branch, dest_branch) completed.append(mirror_path) index = Layer_Index(indexcfg=settings.INDEXES, base_branch=branch, replace=settings.REPLACE, mirror=mirror_path) cmd = ['git', 'checkout', dest_branch] utils_setup.run_cmd(cmd, cwd=mirror_path) logger.info('Loading default.xml') tree = ET.parse('default.xml') root = tree.getroot() logger.info('Branching based on default.xml') default_revision = None base_url = None for child in root: if child.tag == 'remote': if 'fetch' in child.attrib: