def ResolveDuplicateUploads(self, pkgindexes):
    """Point packages at files that have already been uploaded.

    For each package in our index, look for a previously uploaded package
    with the same SHA1 under the same base URI that is no older than two
    weeks. When one exists, reuse it so the file need not be uploaded again.

    Args:
        pkgindexes: A list of PackageIndex objects containing info about
            packages that have already been uploaded.

    Returns:
        A list of the packages that still need to be uploaded.
    """
    sha1_db = {}
    now = int(time.time())
    expires = now - TWO_WEEKS
    canonical_uri = gs.CanonicalizeURL(self.header['URI'])
    for pkgindex in pkgindexes:
        if gs.CanonicalizeURL(pkgindex.header['URI']) == canonical_uri:
            # pylint: disable=protected-access
            pkgindex._PopulateDuplicateDB(sha1_db, expires)

    uploads = []
    base_uri = self.header['URI']
    for pkg in self.packages:
        sha1 = pkg.get('SHA1')
        dup = sha1_db.get(sha1)
        # Reuse the existing file only if it lives under our base URI and,
        # when this package has debug symbols available locally, the remote
        # copy has them too; otherwise re-upload.
        # Note: a remote copy without debug symbols should never happen, as
        # we would have pulled the debug symbols from said binhost.
        reusable = (
            sha1 and dup and dup.uri.startswith(base_uri) and
            (pkg.get('DEBUG_SYMBOLS') != 'yes' or dup.debug_symbols))
        if reusable:
            pkg['PATH'] = dup.uri[len(base_uri):].lstrip('/')
            pkg['MTIME'] = str(dup.mtime)
            if dup.debug_symbols:
                pkg['DEBUG_SYMBOLS'] = 'yes'
        else:
            pkg['MTIME'] = str(now)
            uploads.append(pkg)
    return uploads
def _PopulateDuplicateDB(self, db, expires):
    """Record this index's packages in db as a SHA1 -> _Package mapping.

    Args:
        db: Dictionary to populate, in place, with SHA1 -> _Package
            (mtime, uri) entries for packages in this index.
        expires: The time at which prebuilts expire from the binhost;
            packages at or below this MTIME are skipped.
    """
    base = gs.CanonicalizeURL(self.header['URI'])
    for pkg in self.packages:
        cpv = pkg['CPV']
        sha1 = pkg.get('SHA1')
        mtime = pkg.get('MTIME')
        prior = db.get(sha1, _Package(0, None))
        # Keep only the newest unexpired copy for each SHA1.
        if sha1 and mtime and int(mtime) > max(expires, prior.mtime):
            path = pkg.get('PATH', cpv + '.tbz2')
            db[sha1] = _Package(int(mtime), '%s/%s' % (base.rstrip('/'), path))
def ResolveDuplicateUploads(self, pkgindexes):
    """Point packages at files that have already been uploaded.

    For each package in our index, look for a previously uploaded package
    with the same SHA1 under the same base URI that is no older than two
    weeks. When one exists, reuse it so the file need not be uploaded again.

    Args:
        pkgindexes: A list of PackageIndex objects containing info about
            packages that have already been uploaded.

    Returns:
        A list of the packages that still need to be uploaded.
    """
    duplicate_db = {}
    now = int(time.time())
    expires = now - TWO_WEEKS
    my_uri = gs.CanonicalizeURL(self.header['URI'])
    for index in pkgindexes:
        if gs.CanonicalizeURL(index.header['URI']) != my_uri:
            continue
        # pylint: disable=protected-access
        index._PopulateDuplicateDB(duplicate_db, expires)

    uploads = []
    prefix = self.header['URI']
    for pkg in self.packages:
        sha1 = pkg.get('SHA1')
        match = duplicate_db.get(sha1)
        if sha1 and match and match.uri.startswith(prefix):
            # A fresh enough duplicate exists remotely; point at it.
            pkg['PATH'] = match.uri[len(prefix):].lstrip('/')
            pkg['MTIME'] = str(match.mtime)
        else:
            pkg['MTIME'] = str(now)
            uploads.append(pkg)
    return uploads
def NormalizeGSPath(value):
    """Expand paths and make them absolute."""
    canonical = gs.CanonicalizeURL(value, strict=True)
    return canonical.rstrip('/')
def NormalizeGSPath(value):
    """Normalize GS paths."""
    url = gs.CanonicalizeURL(value, strict=True)
    # Normalize only the part after the gs:// base, then re-attach it.
    suffix = os.path.normpath(url[len(gs.BASE_GS_URL):])
    return gs.BASE_GS_URL + suffix