import hashlib
import os
import shutil

from xpkg import util  # project-local helper module (import path assumed)


def setup_package(name, source_dir, file_dir, tree_dir):
    # Package the source directory into a tarball
    tar_path = os.path.join(file_dir, '%s.tar.gz' % name)
    util.make_tarball(tar_path, source_dir)

    # Get the hash of the tarball
    md5sum = util.hash_file(open(tar_path), hashlib.md5)

    # Template the package file
    source_xpd = os.path.join(source_dir, '%s.xpd.pyt' % name)
    package_xpd = os.path.join(tree_dir, '%s.xpd' % name)

    args = {
        'filehash' : md5sum,
        'filepath' : tar_path,
        # These are not being modified by us, so re-emit the placeholders
        'prefix' : '%(prefix)s',
        'jobs' : '%(jobs)s',
    }

    # Find all patch files
    patch_files = [f for f in os.listdir(source_dir) if f.endswith('.patch')]
    patch_paths = [os.path.join(source_dir, f) for f in patch_files]

    # Compute their hashes
    patch_hashes = ['md5-' + util.hash_file(open(p), hashlib.md5)
                    for p in patch_paths]

    # Build a set of keys to put their hashes back into the template
    hash_keys = {'hash-' + f: hash_val
                 for (f, hash_val) in zip(patch_files, patch_hashes)}

    args.update(hash_keys)

    # Fill in the template to produce the final package file
    util.template_file(source_xpd, package_xpd, args)

    # Copy the patch files into the tree directory
    for patch_path in patch_paths:
        shutil.copy(patch_path, tree_dir)
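# For reference, a minimal sketch of the util helpers assumed above. These
# are guesses at what the project's own module does (hash_file, make_tarball,
# and template_file are not standard-library functions), not its actual code.
import tarfile


def _sketch_hash_file(f, hash_type=hashlib.md5, block_size=2 ** 20):
    # Hash a file object in fixed-size chunks and return the hex digest
    h = hash_type()
    while True:
        data = f.read(block_size)
        if not data:
            break
        h.update(data)
    return h.hexdigest()


def _sketch_make_tarball(tar_path, source_dir):
    # Pack source_dir into a gzipped tarball at tar_path
    tar = tarfile.open(tar_path, 'w:gz')
    try:
        tar.add(source_dir, arcname=os.path.basename(source_dir))
    finally:
        tar.close()


def _sketch_template_file(source_path, dest_path, args):
    # Fill %(key)s placeholders in source_path using Python string
    # formatting and write the result to dest_path
    contents = open(source_path).read()
    open(dest_path, 'w').write(contents % args)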
def fetch_file(filehash, url):
    """
    Download the desired URL, caching it under the given hash.
    """
    # Make sure the cache directory exists
    cache_dir = os.path.expanduser(os.path.join('~', '.xpkg', 'cache'))
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)

    # Get the path where the file will be placed in our cache
    cache_path = os.path.join(cache_dir, filehash)

    # See if we need to download the file
    download_file = not os.path.exists(cache_path)

    if os.path.exists(cache_path):
        # Verify the hash of the existing file. This is mostly free, because
        # we have to read the file off disk to unpack it anyway, and the OS
        # will cache it.

        # The hash algorithms we accept
        valid_types = set(['md5', 'sha1', 'sha224', 'sha256', 'sha384',
                           'sha512'])

        # Get the information we need to do the hashing; the hash string is
        # of the form '<type>-<hexdigest>'
        hash_typename, hex_hash = filehash.split('-')

        if hash_typename not in valid_types:
            raise Exception('Invalid hash type: %s' % hash_typename)

        hash_type = getattr(hashlib, hash_typename)

        current_hash = util.hash_file(open(cache_path), hash_type=hash_type)

        # Re-download the file if the hashes don't match
        if current_hash != hex_hash:
            download_file = True

    # Download if needed
    if download_file:
        local_path = util.fetch_url(url, cache_path)
        print url, local_path

    return cache_path
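# A minimal sketch of util.fetch_url as assumed by fetch_file() above:
# download url to dest_path and return the local path. This signature is
# inferred from the call site, not taken from the project.
import urllib


def _sketch_fetch_url(url, dest_path):
    # urlretrieve returns (filename, headers); we only need the filename
    filename, headers = urllib.urlretrieve(url, dest_path)
    return filename


# Hypothetical usage (the hash and URL below are made up for illustration):
#
#   path = fetch_file('md5-9e107d9d372bb6826bd81d3542a419d6',
#                     'http://example.com/files/foo.tar.gz')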