import os
import shutil
from os.path import join, dirname, abspath

# Imports assumed by this section: SafeException, model and tasks come from the
# zeroinstall library; 'paths' and 'urltest' are assumed to be sibling modules
# of this one.
from zeroinstall import SafeException
from zeroinstall.injector import model
from zeroinstall.support import tasks

from repo import paths, urltest


def process_archives(config, incoming_dir, feed):
    """feed is the parsed XML being processed. Any archives are in 'incoming_dir'."""

    # Pick a digest to check (maybe we should check all of them?)

    # Find required archives and check they're in 'incoming'
    archives = []
    for impl in feed.implementations.values():
        required_digest = pick_digest(impl)

        for method in impl.download_sources:
            archives += process_method(config, incoming_dir, impl, method, required_digest)

    # Copy to archives directory and upload
    config.upload_archives(archives)

    # Test uploads (skipped when running against the testing placeholder URL)
    if not config.ARCHIVES_BASE_URL.startswith('http://TESTING/'):
        for archive in archives:
            url = config.ARCHIVES_BASE_URL + archive.rel_url
            actual_size = urltest.get_size(url)
            if actual_size != archive.size:
                raise SafeException("Archive {url} has size {actual}, but expected {expected} bytes".format(
                    url = url, actual = actual_size, expected = archive.size))

    # Record each uploaded archive in the database, keyed by its basename
    for archive in archives:
        sha1 = get_sha1(archive.source_path)
        config.archive_db.add(archive.basename, config.ARCHIVES_BASE_URL + archive.rel_url, sha1)

    return archives
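# 'Archive' is constructed in process_method() below and consumed above, but its
# definition is not part of this section. Judging from the attributes used
# (source_path, rel_url, size, basename), a minimal sketch might be:
from collections import namedtuple

Archive = namedtuple('Archive', ['source_path', 'rel_url', 'size', 'basename'])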
def process_method(config, incoming_dir, impl, method, required_digest):
    archives = []

    if not isinstance(method, model.Recipe):
        # Turn an individual method into a single-step Recipe
        step = method
        method = model.Recipe()
        method.steps.append(step)

    has_external_archives = False
    for step in method.steps:
        if not hasattr(step, 'url'): continue
        archive = step.url

        if '/' in archive:
            # Hosted externally: just check the advertised size matches
            has_external_archives = True
            url = archive
            actual_size = urltest.get_size(url)
            if actual_size != step.size:
                raise SafeException("External archive {url} has size {actual}, but expected {expected} bytes".format(
                    url = url, actual = actual_size, expected = step.size))
            continue

        if not valid_simple_name.match(archive):
            raise SafeException("Illegal archive name '{name}'".format(name = archive))

        archive_path = join(incoming_dir, archive)
        if not os.path.isfile(archive_path):
            raise SafeException("Referenced upload '{path}' not found".format(path = archive_path))

        existing = config.archive_db.entries.get(archive, None)
        if existing is not None:
            new_sha1 = get_sha1(archive_path)
            if new_sha1 != existing.sha1:
                raise SafeException("A different archive with basename '{name}' is "
                        "already in the repository: {archive}".format(name = archive, archive = existing))
        else:
            archive_rel_url = paths.get_archive_rel_url(config, archive, impl)

            # Copy to archives directory
            backup_dir = config.LOCAL_ARCHIVES_BACKUP_DIR    # note: may be relative; that's OK
            backup_target_dir = join(backup_dir, dirname(archive_rel_url))
            paths.ensure_dir(backup_target_dir)
            copy_path = join(backup_dir, archive_rel_url)
            shutil.copyfile(archive_path, copy_path)

            # Last field is the basename (the archive database is keyed on it),
            # not the full incoming path
            stored_archive = Archive(abspath(copy_path), archive_rel_url, step.size, archive)

            actual_size = os.path.getsize(stored_archive.source_path)
            if step.size != actual_size:
                raise SafeException("Archive '{archive}' has size {actual}, but XML says size should be {expected} bytes".format(
                    archive = archive, actual = actual_size, expected = step.size))

            archives.append(stored_archive)

        step.url = abspath(archive_path)    # (just used by the dry-run cook() check below)

    if not has_external_archives:
        # Check the archives unpack to give the correct digest
        impl.feed.local_path = "/is-local-hack.xml"
        try:
            blocker = config.zconfig.fetcher.cook(required_digest, method, config.zconfig.stores,
                    impl_hint = impl, dry_run = True, may_use_mirror = False)
            tasks.wait_for_blocker(blocker)
        finally:
            impl.feed.local_path = None

    return archives
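# The helpers below are referenced above but not defined in this section.
# These are minimal sketches under assumptions: the exact regexp, digest
# preference and chunk size in the real module may differ.
import hashlib
import re

# Accept only plain basenames: no path separators, no leading '.' or '-'
valid_simple_name = re.compile(r'^[A-Za-z0-9_][A-Za-z0-9_.+~-]*$')


def get_sha1(path):
    """Return the hex SHA-1 digest of the file at 'path', read in chunks."""
    digest = hashlib.sha1()
    with open(path, 'rb') as stream:
        for block in iter(lambda: stream.read(65536), b''):
            digest.update(block)
    return digest.hexdigest()


def pick_digest(impl):
    """Pick one of the implementation's digests for cook() to verify.
    Naively takes the first; the real version presumably prefers the
    strongest available algorithm."""
    if not impl.digests:
        raise SafeException("Implementation {id} has no digests".format(id = impl.id))
    return impl.digests[0]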
def _default_archive_test(archive, url):
    # Size check for an uploaded archive; mirrors the inline test in
    # process_archives() above.
    actual_size = urltest.get_size(url)
    if actual_size != archive.size:
        raise SafeException("Archive {url} has size {actual}, but expected {expected} bytes".format(
            url = url, actual = actual_size, expected = archive.size))