def main():
    if LooseVersion(git.__version__) < '0.3.1':
        logger.error("Version of GitPython is too old, please install GitPython (python-git) 0.3.1 or later in order to use this script")
        sys.exit(1)

    parser = optparse.OptionParser(usage="""
    %prog [options]""")

    parser.add_option("-b", "--branch",
            help="Specify branch to update",
            action="store", dest="branch", default='master')
    parser.add_option("-l", "--layer",
            help="Layer to update",
            action="store", dest="layer")
    parser.add_option("-r", "--reload",
            help="Reload recipe data instead of updating since last update",
            action="store_true", dest="reload")
    parser.add_option("", "--fullreload",
            help="Discard existing recipe data and fetch it from scratch",
            action="store_true", dest="fullreload")
    parser.add_option("-n", "--dry-run",
            help="Don't write any data back to the database",
            action="store_true", dest="dryrun")
    parser.add_option("", "--nocheckout",
            help="Don't check out branches",
            action="store_true", dest="nocheckout")
    parser.add_option("", "--stop-on-error",
            help="Stop on first parsing error",
            action="store_true", default=False, dest="stop_on_error")
    parser.add_option("-i", "--initial",
            help="Print initial values parsed from layer.conf only",
            action="store_true")
    parser.add_option("-d", "--debug",
            help="Enable debug output",
            action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO)
    parser.add_option("-q", "--quiet",
            help="Hide all output except error messages",
            action="store_const", const=logging.ERROR, dest="loglevel")
    parser.add_option("", "--keep-temp",
            help="Preserve temporary directory at the end instead of deleting it",
            action="store_true")

    options, args = parser.parse_args(sys.argv)
    if len(args) > 1:
        logger.error('unexpected argument "%s"' % args[1])
        parser.print_help()
        sys.exit(1)

    if options.fullreload:
        options.reload = True

    utils.setup_django()
    import settings
    from layerindex.models import LayerItem, LayerBranch, Recipe, RecipeFileDependency, Machine, Distro, BBAppend, BBClass, IncFile
    from django.db import transaction

    logger.setLevel(options.loglevel)

    branch = utils.get_branch(options.branch)
    if not branch:
        logger.error("Specified branch %s is not valid" % options.branch)
        sys.exit(1)

    fetchdir = settings.LAYER_FETCH_DIR
    if not fetchdir:
        logger.error("Please set LAYER_FETCH_DIR in settings.py")
        sys.exit(1)

    bitbakepath = os.path.join(fetchdir, 'bitbake')

    layer = utils.get_layer(options.layer)
    urldir = layer.get_fetch_dir()
    repodir = os.path.join(fetchdir, urldir)

    layerbranch = layer.get_layerbranch(options.branch)

    branchname = options.branch
    branchdesc = options.branch
    if layerbranch:
        if layerbranch.actual_branch:
            branchname = layerbranch.actual_branch
            branchdesc = "%s (%s)" % (options.branch, branchname)

    # Collect repo info
    repo = git.Repo(repodir)
    if repo.bare:
        logger.error('Repository %s is bare, not supported' % repodir)
        sys.exit(1)
    topcommit = repo.commit('origin/%s' % branchname)
    if options.nocheckout:
        topcommit = repo.commit('HEAD')

    tinfoil = None
    tempdir = None
    try:
        with transaction.atomic():
            newbranch = False
            if not layerbranch:
                # LayerBranch doesn't exist for this branch, create it
                newbranch = True
                layerbranch = LayerBranch()
                layerbranch.layer = layer
                layerbranch.branch = branch
                layerbranch_source = layer.get_layerbranch(branch)
                if not layerbranch_source:
                    layerbranch_source = layer.get_layerbranch(None)
                if layerbranch_source:
                    layerbranch.vcs_subdir = layerbranch_source.vcs_subdir
                layerbranch.save()
                if layerbranch_source:
                    for maintainer in layerbranch_source.layermaintainer_set.all():
                        maintainer.pk = None
                        maintainer.id = None
                        maintainer.layerbranch = layerbranch
                        maintainer.save()

            if layerbranch.vcs_subdir and not options.nocheckout:
                # Find latest commit in subdirectory
                # A bit odd to do it this way but apparently there's no other way in the GitPython API
                topcommit = next(repo.iter_commits('origin/%s' % branchname, paths=layerbranch.vcs_subdir), None)

            layerdir = os.path.join(repodir, layerbranch.vcs_subdir)
            layerdir_start = os.path.normpath(layerdir) + os.sep

            layerrecipes = Recipe.objects.filter(layerbranch=layerbranch)
            layermachines = Machine.objects.filter(layerbranch=layerbranch)
            layerdistros = Distro.objects.filter(layerbranch=layerbranch)
            layerappends = BBAppend.objects.filter(layerbranch=layerbranch)
            layerclasses = BBClass.objects.filter(layerbranch=layerbranch)
            layerincfiles = IncFile.objects.filter(layerbranch=layerbranch)

            if layerbranch.vcs_last_rev != topcommit.hexsha or options.reload or options.initial:
                # Check out appropriate branch
                if not options.nocheckout:
                    utils.checkout_layer_branch(layerbranch, repodir, logger=logger)

                logger.info("Collecting data for layer %s on branch %s" % (layer.name, branchdesc))
                try:
                    (tinfoil, tempdir) = recipeparse.init_parser(settings, branch, bitbakepath, nocheckout=options.nocheckout, logger=logger)
                except recipeparse.RecipeParseError as e:
                    logger.error(str(e))
                    sys.exit(1)
                logger.debug('Using temp directory %s' % tempdir)
                # Clear the default value of SUMMARY so that we can use DESCRIPTION instead if it hasn't been set
                tinfoil.config_data.setVar('SUMMARY', '')
                # Clear the default value of DESCRIPTION so that we can see where it's not set
                tinfoil.config_data.setVar('DESCRIPTION', '')
                # Clear the default value of HOMEPAGE ('unknown')
                tinfoil.config_data.setVar('HOMEPAGE', '')
                # Set a blank value for LICENSE so that it doesn't cause the parser to die (e.g. with meta-ti -
                # why won't they just fix that?!)
                tinfoil.config_data.setVar('LICENSE', '')

                layerconfparser = layerconfparse.LayerConfParse(logger=logger, tinfoil=tinfoil)
                layer_config_data = layerconfparser.parse_layer(layerdir)
                if not layer_config_data:
                    logger.info("Skipping update of layer %s for branch %s - conf/layer.conf may have parse issues" % (layer.name, branchdesc))
                    layerconfparser.shutdown()
                    sys.exit(1)
                utils.set_layerbranch_collection_version(layerbranch, layer_config_data, logger=logger)
                if options.initial:
                    # Use print() rather than logger.info() since "-q" makes it print nothing.
                    for i in ["BBFILE_COLLECTIONS", "LAYERVERSION", "LAYERDEPENDS", "LAYERRECOMMENDS"]:
                        print('%s = "%s"' % (i, utils.get_layer_var(layer_config_data, i, logger)))
                    sys.exit(0)

                # Set up for recording patch info
                utils.setup_core_layer_sys_path(settings, branch.name)
                skip_patches = False
                try:
                    import oe.recipeutils
                except ImportError:
                    logger.warn('Failed to find lib/oe/recipeutils.py in layers - patch information will not be collected')
                    skip_patches = True

                utils.add_dependencies(layerbranch, layer_config_data, logger=logger)
                utils.add_recommends(layerbranch, layer_config_data, logger=logger)
                layerbranch.save()

                try:
                    config_data_copy = recipeparse.setup_layer(tinfoil.config_data, fetchdir, layerdir, layer, layerbranch, logger)
                except recipeparse.RecipeParseError as e:
                    logger.error(str(e))
                    sys.exit(1)

                if layerbranch.vcs_last_rev and not options.reload:
                    try:
                        diff = repo.commit(layerbranch.vcs_last_rev).diff(topcommit)
                    except Exception as e:
                        logger.warn("Unable to get diff from last commit hash for layer %s - falling back to slow update: %s" % (layer.name, str(e)))
                        diff = None
                else:
                    diff = None

                # We handle recipes specially to try to preserve the same id
                # when recipe upgrades happen (so that if a user bookmarks a
                # recipe page it remains valid)
                layerrecipes_delete = []
                layerrecipes_add = []

                # Check if any paths should be ignored because there are layers within this layer
                removedirs = []
                for root, dirs, files in os.walk(layerdir):
                    for diritem in dirs:
                        if os.path.exists(os.path.join(root, diritem, 'conf', 'layer.conf')):
                            removedirs.append(os.path.join(root, diritem) + os.sep)

                if diff:
                    # Apply git changes to existing recipe list

                    if layerbranch.vcs_subdir:
                        subdir_start = os.path.normpath(layerbranch.vcs_subdir) + os.sep
                    else:
                        subdir_start = ""

                    updatedrecipes = set()
                    dirtyrecipes = set()
                    other_deletes = []
                    other_adds = []
                    for diffitem in diff.iter_change_type('R'):
                        oldpath = diffitem.a_blob.path
                        newpath = diffitem.b_blob.path
                        skip = False
                        for removedir in removedirs:
                            # FIXME what about files moved into removedirs?
                            if oldpath.startswith(removedir):
                                skip = True
                                break
                        if skip:
                            continue
                        if oldpath.startswith(subdir_start):
                            if not newpath.startswith(subdir_start):
                                logger.debug("Treating rename of %s to %s as a delete since new path is outside layer" % (oldpath, newpath))
                                other_deletes.append(diffitem)
                                continue
                            (oldtypename, oldfilepath, oldfilename) = recipeparse.detect_file_type(oldpath, subdir_start)
                            (newtypename, newfilepath, newfilename) = recipeparse.detect_file_type(newpath, subdir_start)
                            if oldtypename != newtypename:
                                # This is most likely to be a .inc file renamed to a .bb - and since
                                # there may be another recipe deleted at the same time we probably want
                                # to consider that, so just treat it as a delete and an add
                                logger.debug("Treating rename of %s to %s as a delete and add (since type changed)" % (oldpath, newpath))
                                other_deletes.append(diffitem)
                                other_adds.append(diffitem)
                            elif oldtypename == 'recipe':
                                results = layerrecipes.filter(filepath=oldfilepath).filter(filename=oldfilename)
                                if len(results):
                                    recipe = results[0]
                                    logger.debug("Rename recipe %s to %s" % (recipe, newpath))
                                    recipe.filepath = newfilepath
                                    recipe.filename = newfilename
                                    recipe.save()
                                    update_recipe_file(tinfoil, config_data_copy, os.path.join(layerdir, newfilepath), recipe, layerdir_start, repodir, options.stop_on_error, skip_patches)
                                    updatedrecipes.add(os.path.join(oldfilepath, oldfilename))
                                    updatedrecipes.add(os.path.join(newfilepath, newfilename))
                                else:
                                    logger.warn("Renamed recipe %s could not be found" % oldpath)
                                    other_adds.append(diffitem)
                            elif oldtypename == 'bbappend':
                                results = layerappends.filter(filepath=oldfilepath).filter(filename=oldfilename)
                                if len(results):
                                    logger.debug("Rename bbappend %s to %s" % (results[0], os.path.join(newfilepath, newfilename)))
                                    results[0].filepath = newfilepath
                                    results[0].filename = newfilename
                                    results[0].save()
                                else:
                                    logger.warn("Renamed bbappend %s could not be found" % oldpath)
                                    other_adds.append(diffitem)
                            elif oldtypename == 'machine':
                                results = layermachines.filter(name=oldfilename)
                                if len(results):
                                    logger.debug("Rename machine %s to %s" % (results[0], newfilename))
                                    results[0].name = newfilename
                                    results[0].save()
                                else:
                                    logger.warn("Renamed machine %s could not be found" % oldpath)
                                    other_adds.append(diffitem)
                            elif oldtypename == 'distro':
                                results = layerdistros.filter(name=oldfilename)
                                if len(results):
                                    logger.debug("Rename distro %s to %s" % (results[0], newfilename))
                                    results[0].name = newfilename
                                    results[0].save()
                                else:
                                    logger.warn("Renamed distro %s could not be found" % oldpath)
                                    other_adds.append(diffitem)
                            elif oldtypename == 'bbclass':
                                results = layerclasses.filter(name=oldfilename)
                                if len(results):
                                    logger.debug("Rename class %s to %s" % (results[0], newfilename))
                                    results[0].name = newfilename
                                    results[0].save()
                                else:
                                    logger.warn("Renamed class %s could not be found" % oldpath)
                                    other_adds.append(diffitem)
                            elif oldtypename == 'incfile':
                                results = layerincfiles.filter(path=os.path.join(oldfilepath, oldfilename))
                                if len(results):
                                    logger.debug("Rename inc file %s to %s" % (results[0], newfilename))
                                    results[0].name = newfilename
                                    results[0].save()
                                else:
                                    logger.warn("Renamed inc file %s could not be found" % oldpath)
                                    other_adds.append(diffitem)

                            deps = RecipeFileDependency.objects.filter(layerbranch=layerbranch).filter(path=oldpath)
                            for dep in deps:
                                dirtyrecipes.add(dep.recipe)

                    for diffitem in itertools.chain(diff.iter_change_type('D'), other_deletes):
                        path = diffitem.a_blob.path
                        if path.startswith(subdir_start):
                            skip = False
                            for removedir in removedirs:
                                if path.startswith(removedir):
                                    skip = True
                                    break
                            if skip:
                                continue
                            (typename, filepath, filename) = recipeparse.detect_file_type(path, subdir_start)
                            if typename == 'recipe':
                                values = layerrecipes.filter(filepath=filepath).filter(filename=filename).values('id', 'filepath', 'filename', 'pn')
                                if len(values):
                                    layerrecipes_delete.append(values[0])
                                    logger.debug("Mark %s for deletion" % values[0])
                                    updatedrecipes.add(os.path.join(values[0]['filepath'], values[0]['filename']))
                                else:
                                    logger.warn("Deleted recipe %s could not be found" % path)
                            elif typename == 'bbappend':
                                layerappends.filter(filepath=filepath).filter(filename=filename).delete()
                            elif typename == 'machine':
                                layermachines.filter(name=filename).delete()
                            elif typename == 'distro':
                                layerdistros.filter(name=filename).delete()
                            elif typename == 'bbclass':
                                layerclasses.filter(name=filename).delete()
                            elif typename == 'incfile':
                                layerincfiles.filter(path=os.path.join(filepath, filename)).delete()

                    for diffitem in itertools.chain(diff.iter_change_type('A'), other_adds):
                        path = diffitem.b_blob.path
                        if path.startswith(subdir_start):
                            skip = False
                            for removedir in removedirs:
                                if path.startswith(removedir):
                                    skip = True
                                    break
                            if skip:
                                continue
                            (typename, filepath, filename) = recipeparse.detect_file_type(path, subdir_start)
                            if typename == 'recipe':
                                layerrecipes_add.append(os.path.join(repodir, path))
                                logger.debug("Mark %s for addition" % path)
                                updatedrecipes.add(os.path.join(filepath, filename))
                            elif typename == 'bbappend':
                                append = BBAppend()
                                append.layerbranch = layerbranch
                                append.filename = filename
                                append.filepath = filepath
                                append.save()
                            elif typename == 'machine':
                                machine = Machine()
                                machine.layerbranch = layerbranch
                                machine.name = filename
                                update_machine_conf_file(os.path.join(repodir, path), machine)
                                machine.save()
                            elif typename == 'distro':
                                distro = Distro()
                                distro.layerbranch = layerbranch
                                distro.name = filename
                                update_distro_conf_file(os.path.join(repodir, path), distro, config_data_copy)
                                distro.save()
                            elif typename == 'bbclass':
                                bbclass = BBClass()
                                bbclass.layerbranch = layerbranch
                                bbclass.name = filename
                                bbclass.save()
                            elif typename == 'incfile':
                                incfile = IncFile()
                                incfile.layerbranch = layerbranch
                                incfile.path = os.path.join(filepath, filename)
                                incfile.save()

                    for diffitem in diff.iter_change_type('M'):
                        path = diffitem.b_blob.path
                        if path.startswith(subdir_start):
                            skip = False
                            for removedir in removedirs:
                                if path.startswith(removedir):
                                    skip = True
                                    break
                            if skip:
                                continue
                            (typename, filepath, filename) = recipeparse.detect_file_type(path, subdir_start)
                            if typename == 'recipe':
                                logger.debug("Mark %s for update" % path)
                                results = layerrecipes.filter(filepath=filepath).filter(filename=filename)[:1]
                                if results:
                                    recipe = results[0]
                                    update_recipe_file(tinfoil, config_data_copy, os.path.join(layerdir, filepath), recipe, layerdir_start, repodir, options.stop_on_error, skip_patches)
                                    recipe.save()
                                    updatedrecipes.add(recipe.full_path())
                            elif typename == 'machine':
                                results = layermachines.filter(name=filename)
                                if results:
                                    machine = results[0]
                                    update_machine_conf_file(os.path.join(repodir, path), machine)
                                    machine.save()
                            elif typename == 'distro':
                                results = layerdistros.filter(name=filename)
                                if results:
                                    distro = results[0]
                                    update_distro_conf_file(os.path.join(repodir, path), distro, config_data_copy)
                                    distro.save()

                            deps = RecipeFileDependency.objects.filter(layerbranch=layerbranch).filter(path=path)
                            for dep in deps:
                                dirtyrecipes.add(dep.recipe)

                    for recipe in dirtyrecipes:
                        if not recipe.full_path() in updatedrecipes:
                            update_recipe_file(tinfoil, config_data_copy, os.path.join(layerdir, recipe.filepath), recipe, layerdir_start, repodir, options.stop_on_error, skip_patches)

                else:
                    # Collect recipe data from scratch

                    layerrecipe_fns = []
                    if options.fullreload:
                        layerrecipes.delete()
                    else:
                        # First, check which recipes still exist
                        layerrecipe_values = layerrecipes.values('id', 'filepath', 'filename', 'pn')
                        for v in layerrecipe_values:
                            if v['filepath'].startswith('../'):
                                # FIXME: These recipes were present due to a bug (not handling renames
                                # to paths outside the layer) - this can be removed at some point in the future
                                preserve = False
                            else:
                                root = os.path.join(layerdir, v['filepath'])
                                fullpath = os.path.join(root, v['filename'])
                                if os.path.exists(fullpath):
                                    preserve = True
                                    for removedir in removedirs:
                                        if fullpath.startswith(removedir):
                                            preserve = False
                                            break
                                else:
                                    preserve = False

                            if preserve:
                                # Recipe still exists, update it
                                results = layerrecipes.filter(id=v['id'])[:1]
                                recipe = results[0]
                                update_recipe_file(tinfoil, config_data_copy, root, recipe, layerdir_start, repodir, options.stop_on_error, skip_patches)
                            else:
                                # Recipe no longer exists, mark it for later on
                                layerrecipes_delete.append(v)

                            layerrecipe_fns.append(fullpath)

                    layermachines.delete()
                    layerdistros.delete()
                    layerappends.delete()
                    layerclasses.delete()
                    for root, dirs, files in os.walk(layerdir):
                        if '.git' in dirs:
                            dirs.remove('.git')
                        for diritem in dirs[:]:
                            fullpath = os.path.join(root, diritem) + os.sep
                            if fullpath in removedirs:
                                dirs.remove(diritem)
                        for f in files:
                            fullpath = os.path.join(root, f)
                            (typename, _, filename) = recipeparse.detect_file_type(fullpath, layerdir_start)
                            if typename == 'recipe':
                                if fullpath not in layerrecipe_fns:
                                    layerrecipes_add.append(fullpath)
                            elif typename == 'bbappend':
                                append = BBAppend()
                                append.layerbranch = layerbranch
                                append.filename = f
                                append.filepath = os.path.relpath(root, layerdir)
                                append.save()
                            elif typename == 'machine':
                                machine = Machine()
                                machine.layerbranch = layerbranch
                                machine.name = filename
                                update_machine_conf_file(fullpath, machine)
                                machine.save()
                            elif typename == 'distro':
                                distro = Distro()
                                distro.layerbranch = layerbranch
                                distro.name = filename
                                update_distro_conf_file(fullpath, distro, config_data_copy)
                                distro.save()
                            elif typename == 'bbclass':
                                bbclass = BBClass()
                                bbclass.layerbranch = layerbranch
                                bbclass.name = filename
                                bbclass.save()
                            elif typename == 'incfile':
                                incfile = IncFile()
                                incfile.layerbranch = layerbranch
                                incfile.path = os.path.relpath(fullpath, layerdir)
                                incfile.save()

                for added in layerrecipes_add:
                    # This is good enough without actually parsing the file
                    (pn, pv) = split_recipe_fn(added)
                    oldid = -1
                    for deleted in layerrecipes_delete:
                        if deleted['pn'] == pn:
                            oldid = deleted['id']
                            layerrecipes_delete.remove(deleted)
                            break
                    if oldid > -1:
                        # Reclaim a record we would have deleted
                        results = Recipe.objects.filter(id=oldid)[:1]
                        recipe = results[0]
                        logger.debug("Reclaim %s for %s %s" % (recipe, pn, pv))
                    else:
                        # Create new record
                        logger.debug("Add new recipe %s" % added)
                        recipe = Recipe()
                        recipe.layerbranch = layerbranch
                        recipe.filename = os.path.basename(added)
                    root = os.path.dirname(added)
                    recipe.filepath = os.path.relpath(root, layerdir)
                    update_recipe_file(tinfoil, config_data_copy, root, recipe, layerdir_start, repodir, options.stop_on_error, skip_patches)
                    recipe.save()

                for deleted in layerrecipes_delete:
                    logger.debug("Delete %s" % deleted)
                    results = Recipe.objects.filter(id=deleted['id'])[:1]
                    recipe = results[0]
                    recipe.delete()

                # Save repo info
                layerbranch.vcs_last_rev = topcommit.hexsha
                layerbranch.vcs_last_commit = datetime.fromtimestamp(topcommit.committed_date)
            else:
                logger.info("Layer %s is already up-to-date for branch %s" % (layer.name, branchdesc))

            layerbranch.vcs_last_fetch = datetime.now()
            layerbranch.save()

            if options.dryrun:
                raise DryRunRollbackException()

    except KeyboardInterrupt:
        logger.warn("Update interrupted, changes to %s rolled back" % layer.name)
        sys.exit(254)
    except SystemExit:
        raise
    except DryRunRollbackException:
        pass
    except:
        import traceback
        logger.error(traceback.format_exc().rstrip())
        sys.exit(1)
    finally:
        if tinfoil and (LooseVersion(bb.__version__) > LooseVersion("1.27")):
            tinfoil.shutdown()
        if tempdir:
            if options.keep_temp:
                logger.debug('Preserving temp directory %s' % tempdir)
            else:
                logger.debug('Deleting temp directory')
                utils.rmtree_force(tempdir)

    sys.exit(0)
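
# Example invocation of the update function above (a sketch only; the script
# filename "update_layer.py" and the layer name "meta-example" are assumptions,
# not taken from this code). The options shown are the ones defined by the
# option parser above:
#
#   ./update_layer.py --layer meta-example --branch master --dry-run --debug
#
# With --dry-run, DryRunRollbackException is raised inside transaction.atomic()
# and then swallowed, so the whole update is rolled back and nothing is written
# to the database.
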
def main():
    valid_layer_name = re.compile(r'[-\w]+$')

    parser = optparse.OptionParser(usage="""
    %prog [options] <url> [name]""")

    utils.setup_django()

    layer_type_help, layer_type_choices = get_layer_type_choices()

    parser.add_option("-s", "--subdir",
            help="Specify subdirectory",
            action="store", dest="subdir")
    parser.add_option("-t", "--type",
            help=layer_type_help,
            choices=layer_type_choices,
            action="store", dest="layer_type", default='')
    parser.add_option("-n", "--dry-run",
            help="Don't write any data back to the database",
            action="store_true", dest="dryrun")
    parser.add_option("-d", "--debug",
            help="Enable debug output",
            action="store_const", const=logging.DEBUG, dest="loglevel", default=logging.INFO)
    parser.add_option("", "--github-auth",
            help="Specify github username:password",
            action="store", dest="github_auth")
    parser.add_option("-q", "--quiet",
            help="Hide all output except error messages",
            action="store_const", const=logging.ERROR, dest="loglevel")
    parser.add_option("-a", "--actual-branch",
            help="Set actual branch",
            action="store", dest="actual_branch")

    options, args = parser.parse_args(sys.argv)

    if len(args) < 2:
        print("Please specify URL of repository for layer")
        sys.exit(1)

    layer_url = args[1]

    if len(args) > 2:
        layer_name = args[2]
    else:
        if options.subdir:
            layer_name = options.subdir
        else:
            layer_name = [x for x in layer_url.split('/') if x][-1]
            if layer_name.endswith('.git'):
                layer_name = layer_name[:-4]

    if not valid_layer_name.match(layer_name):
        logger.error('Invalid layer name "%s" - Layer name can only include letters, numbers and dashes.', layer_name)
        sys.exit(1)

    if options.github_auth:
        if ':' not in options.github_auth:
            logger.error('--github-auth value must be specified as username:password')
            sys.exit(1)
        splitval = options.github_auth.split(':')
        github_login = splitval[0]
        github_password = splitval[1]
    else:
        github_login = None
        github_password = None

    import settings
    from layerindex.models import LayerItem, LayerBranch, LayerDependency, LayerMaintainer
    from django.db import transaction

    logger.setLevel(options.loglevel)

    fetchdir = settings.LAYER_FETCH_DIR
    if not fetchdir:
        logger.error("Please set LAYER_FETCH_DIR in settings.py")
        sys.exit(1)

    if not os.path.exists(fetchdir):
        os.makedirs(fetchdir)

    master_branch = utils.get_branch('master')
    core_layer = None
    try:
        with transaction.atomic():
            # Fetch layer
            logger.info('Fetching repository %s' % layer_url)

            layer = LayerItem()
            layer.name = layer_name
            layer.status = 'P'
            layer.summary = 'tempvalue'
            layer.description = layer.summary

            set_vcs_fields(layer, layer_url)

            urldir = layer.get_fetch_dir()
            repodir = os.path.join(fetchdir, urldir)
            out = None
            try:
                if not os.path.exists(repodir):
                    out = utils.runcmd("git clone %s %s" % (layer.vcs_url, urldir), fetchdir, logger=logger)
                else:
                    out = utils.runcmd("git fetch", repodir, logger=logger)
            except Exception as e:
                logger.error("Fetch failed: %s" % str(e))
                sys.exit(1)

            actual_branch = 'master'
            if options.actual_branch:
                actual_branch = options.actual_branch
            try:
                out = utils.runcmd("git checkout origin/%s" % actual_branch, repodir, logger=logger)
            except subprocess.CalledProcessError:
                actual_branch = None
                branches = utils.runcmd("git branch -r", repodir, logger=logger)
                for line in branches.splitlines():
                    if 'origin/HEAD ->' in line:
                        actual_branch = line.split('-> origin/')[-1]
                        break
                if not actual_branch:
                    logger.error("Repository has no master branch nor origin/HEAD")
                    sys.exit(1)
                out = utils.runcmd("git checkout origin/%s" % actual_branch, repodir, logger=logger)

            layer_paths = []
            if options.subdir:
                layerdir = os.path.join(repodir, options.subdir)
                if not os.path.exists(layerdir):
                    logger.error("Subdirectory %s does not exist in repository for master branch" % options.subdir)
                    sys.exit(1)
                if not os.path.exists(os.path.join(layerdir, 'conf/layer.conf')):
                    logger.error("conf/layer.conf not found in subdirectory %s" % options.subdir)
                    sys.exit(1)
                layer_paths.append(layerdir)
            else:
                if os.path.exists(os.path.join(repodir, 'conf/layer.conf')):
                    layer_paths.append(repodir)
                # Find subdirs with a conf/layer.conf
                for subdir in os.listdir(repodir):
                    subdir_path = os.path.join(repodir, subdir)
                    if os.path.isdir(subdir_path):
                        if os.path.exists(os.path.join(subdir_path, 'conf/layer.conf')):
                            layer_paths.append(subdir_path)
                if not layer_paths:
                    logger.error("conf/layer.conf not found in repository or first level subdirectories - is subdirectory set correctly?")
                    sys.exit(1)

            if 'github.com' in layer.vcs_url:
                json_data, owner_json_data = get_github_layerinfo(layer.vcs_url, github_login, github_password)

            for layerdir in layer_paths:
                layer.pk = None
                if layerdir != repodir:
                    subdir = os.path.relpath(layerdir, repodir)
                    if len(layer_paths) > 1:
                        layer.name = subdir
                else:
                    subdir = ''
                if LayerItem.objects.filter(name=layer.name).exists():
                    if LayerItem.objects.filter(name=layer.name).exclude(vcs_url=layer.vcs_url).exists():
                        conflict_list = LayerItem.objects.filter(name=layer.name).exclude(vcs_url=layer.vcs_url)
                        conflict_list_urls = []
                        for conflict in conflict_list:
                            conflict_list_urls.append(conflict.vcs_url)
                        cln = ', '.join(conflict_list_urls)
                        logger.error('A layer named "%s" already exists in the database. Possible name collision with %s.vcs_url = %s' % (layer.name, layer.name, cln))
                        sys.exit(1)
                    else:
                        logger.info('The layer named "%s" already exists in the database. Skipping this layer with same vcs_url' % layer.name)
                        layer_paths = [x for x in layer_paths if x != layerdir]
                        continue

                logger.info('Creating layer %s' % layer.name)
                # Guess layer type if not specified
                if options.layer_type:
                    layer.layer_type = options.layer_type
                elif layer.name in ['openembedded-core', 'meta-oe']:
                    layer.layer_type = 'A'
                elif glob.glob(os.path.join(layerdir, 'conf/distro/*.conf')):
                    layer.layer_type = 'D'
                elif glob.glob(os.path.join(layerdir, 'conf/machine/*.conf')):
                    layer.layer_type = 'B'
                else:
                    layer.layer_type = 'M'

                layer.save()
                layerbranch = LayerBranch()
                layerbranch.layer = layer
                layerbranch.branch = master_branch
                if layerdir != repodir:
                    layerbranch.vcs_subdir = subdir
                if actual_branch:
                    layerbranch.actual_branch = actual_branch
                layerbranch.save()

                if layer.name != settings.CORE_LAYER_NAME:
                    if not core_layer:
                        core_layer = utils.get_layer(settings.CORE_LAYER_NAME)
                    if core_layer:
                        logger.debug('Adding dep %s to %s' % (core_layer.name, layer.name))
                        layerdep = LayerDependency()
                        layerdep.layerbranch = layerbranch
                        layerdep.dependency = core_layer
                        layerdep.save()
                    layerconfparser = LayerConfParse(logger=logger)
                    try:
                        config_data = layerconfparser.parse_layer(layerdir)
                        if config_data:
                            utils.add_dependencies(layerbranch, config_data, logger=logger)
                            utils.add_recommends(layerbranch, config_data, logger=logger)
                    finally:
                        layerconfparser.shutdown()

                # Get some extra meta-information
                readme_files = glob.glob(os.path.join(layerdir, 'README*'))
                if (not readme_files) and subdir:
                    readme_files = glob.glob(os.path.join(repodir, 'README*'))
                maintainer_files = glob.glob(os.path.join(layerdir, 'MAINTAINERS'))
                if (not maintainer_files) and subdir:
                    maintainer_files = glob.glob(os.path.join(repodir, 'MAINTAINERS'))

                maintainers = []
                if readme_files:
                    (desc, maintainers, deps) = readme_extract(readme_files[0])
                    if desc:
                        layer.summary = layer.name
                        layer.description = desc
                if maintainer_files:
                    maintainers.extend(maintainers_extract(maintainer_files[0]))

                if (not maintainers) and 'github.com' in layer.vcs_url:
                    if json_data:
                        layer.summary = json_data['description']
                        layer.description = layer.summary
                    if owner_json_data:
                        owner_name = owner_json_data.get('name', None)
                        owner_email = owner_json_data.get('email', None)
                        if owner_name and owner_email:
                            maintainers.append('%s <%s>' % (owner_name, owner_email))

                if layer.name == 'openembedded-core':
                    layer.summary = 'Core metadata'
                elif layer.name == 'meta-oe':
                    layer.summary = 'Additional shared OE metadata'
                    layer.description = layer.summary

                if maintainers:
                    maint_re = re.compile(r'^"?([^"@$<>]+)"? *<([^<> ]+)>[ -]*(.+)?$')
                    for maintentry in maintainers:
                        res = maint_re.match(maintentry)
                        if res:
                            maintainer = LayerMaintainer()
                            maintainer.layerbranch = layerbranch
                            maintainer.name = res.group(1).strip()
                            maintainer.email = res.group(2)
                            if res.group(3):
                                maintainer.responsibility = res.group(3).strip()
                            maintainer.save()

                layer.save()

            if not layer_paths:
                logger.error('No layers added.')
                sys.exit(1)

            if options.dryrun:
                raise DryRunRollbackException()
    except DryRunRollbackException:
        pass

    sys.exit(0)
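
# Example invocation of the import function above (a sketch only; the script
# filename "import_layer.py" and the repository URL/name are assumptions, not
# taken from this code). Per the option parser, usage is
# "%prog [options] <url> [name]":
#
#   ./import_layer.py --subdir meta-example --actual-branch main \
#       https://git.example.com/example/repo.git meta-example
#
# As with the update script, --dry-run raises DryRunRollbackException inside
# the transaction so the imported layer is rolled back rather than committed.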