def load(self, basedir, usecache=True, rebuildcache=False):
    """Load all package groups found under basedir into self.package_groups.

    basedir      : Root directory to search for package specification files.
    usecache     : If True, load from (and refresh) the pickled cache file
                   when it exists; otherwise any existing cache is removed
                   and the tree is rescanned.
    rebuildcache : If True, delete any existing cache before scanning.

    Package specifications are files named 'PKG.yml' or 'pkg.yml'. A sibling
    '<name>.disabled' file in the same directory causes the spec to be
    skipped. Specs that raise OnlPackageError are logged and skipped.
    """
    pkgspec = ['PKG.yml', 'pkg.yml']
    import cPickle as pickle

    # Cache is per-distribution codename.
    CACHE = os.path.join(basedir, '.PKGs.cache.%s' % g_dist_codename)

    # Serialize all cache access across concurrent invocations.
    with onlu.Lock(CACHE + ".lock"):
        if usecache:
            if os.path.exists(CACHE):
                if rebuildcache:
                    logger.debug("Removing package cache %s" % CACHE)
                    os.unlink(CACHE)
                else:
                    logger.debug("Loading from package cache %s" % CACHE)
                    # Context managers close the cache file handles promptly;
                    # the previous open(...) calls leaked them.
                    with open(CACHE, "rb") as cf:
                        self.package_groups = pickle.load(cf)
                    # Validate and update the cache
                    for pg in self.package_groups:
                        pg.reload()
                    # Update cache and return
                    with open(CACHE, "wb") as cf:
                        pickle.dump(self.package_groups, cf)
                    return
        else:
            if os.path.exists(CACHE):
                logger.debug("Removing package cache %s" % CACHE)
                os.unlink(CACHE)

        # NOTE(review): after a fresh scan the cache file is not (re)written
        # here -- presumably handled elsewhere; confirm before relying on it.
        for root, dirs, files in os.walk(basedir):
            for f in files:
                if f in pkgspec:
                    # Hoisted: the spec path was recomputed for every message.
                    path = os.path.join(root, f)
                    if "%s.disabled" % f in files:
                        logger.warn("Skipping %s due to .disabled file." % path)
                    else:
                        pg = OnlPackageGroup()
                        try:
                            logger.debug('Loading package file %s...' % path)
                            pg.load(path)
                            logger.debug(' Loaded package file %s' % path)
                            self.package_groups.append(pg)
                        # 'as e' is valid on Python 2.6+ and 3.x; the old
                        # comma form is Python-2-only syntax.
                        except OnlPackageError as e:
                            logger.error("%s" % e)
                            logger.warn("Skipping %s due to errors." % path)
def __init__(self, root, packagedir='packages'):
    """Initialize a repo object.

    root       : The root directory that should be used for this repository;
                 the distribution codename is appended beneath it.
    packagedir : Name of the subdirectory that holds the packages.
    """
    # Everything for this repository lives under <root>/<codename>.
    base = os.path.join(root, g_dist_codename)
    if not os.path.exists(base):
        os.makedirs(base)

    # Package subdirectory.
    self.repo = os.path.join(base, packagedir)
    # Cache of extracted package contents.
    self.extracts = os.path.join(base, 'extracts')
    # Repository-wide lock; all access goes through it.
    self.lock = onlu.Lock(os.path.join(base, '.lock'))
def build(self, dir_=None):
    """Build all packages in the current group.

    dir_ : The output directory for the package group. The default is the
           package group parent directory.

    Returns a list with one build product per package in the group.

    The option to build individual packages is not provided. The assumption
    is that the packages defined in the group are related and should always
    be built together. It is also assumed that all packages in the group
    have a common build step. That build step is performed once, and all
    packages are then built from the artifacts as defined in the package
    specifications. This assures there are no mismatches in the contents of
    packages from the same group and that there are no unnecessary
    invocations of the build steps.
    """
    products = []

    with onlu.Lock(os.path.join(self._pkgs['__directory'], '.lock')):
        # Common build step, performed exactly once for the whole group.
        self.gmake_locked("", 'Build')

        for p in self.packages:
            products.append(p.build(dir_=dir_))

        if 'release' in self._pkgs:
            release_list = onlu.validate_src_dst_file_tuples(
                self._pkgs['__directory'],
                self._pkgs['release'],
                dict(),
                OnlPackageError)

            # Hoisted out of the loop: the release directory does not vary
            # per file.
            # NOTE(review): os.path.join() with a single argument is a
            # no-op; this looks like it intended something like
            # os.path.join(os.environ.get('ONL', '.'), 'RELEASE') --
            # confirm intent before changing the fallback.
            release_dir = os.environ.get(
                'ONLPM_OPTION_RELEASE_DIR',
                os.path.join(os.environ.get('ONL', 'RELEASE')))

            for f in release_list:
                # f is a (src, dst-subdir) tuple.
                dst = os.path.join(release_dir, g_dist_codename, f[1])
                if not os.path.exists(dst):
                    os.makedirs(dst)
                logger.info("Releasing %s -> %s" % (os.path.basename(f[0]), dst))
                shutil.copy(f[0], dst)

    return products
def clean(self, dir_=None):
    """Run the group's 'clean' make target under the group lock.

    dir_ : Accepted for interface symmetry with build(); not referenced
           in this method's body.
    """
    lockfile = os.path.join(self._pkgs['__directory'], '.lock')
    with onlu.Lock(lockfile):
        self.gmake_locked("clean", 'Clean')