def local_distribution(self):
    """Fetch the requirement into self.tmpdir and return a Distribution.

    The result is cached in ``self._local_dist``. If the fetched location
    is not already a distribution, try unpacking it as an archive and
    scanning the result; a plain ``.py`` file is wrapped via
    ``Distribution.from_filename``.
    """
    if self._local_dist is None:
        ensure_directory(os.path.join(self.tmpdir, "dummy"))
        info("Fetching %s from %s..." % (str(self.distribution),
                                         self.location))
        dist = self.repository.environment.fetch_distribution(
            self.requirement, self.tmpdir,
            source=self.source, develop_ok=self.develop)
        location = dist.location
        distros = list(find_distributions(location))
        if distros:
            self._local_dist = distros[0]
        # BUG FIX: os.path.splitext() returns a (root, ext) tuple, so the
        # original comparison against ".py" was always true; compare the
        # extension component instead.
        elif os.path.isfile(location) and \
                os.path.splitext(location)[1] != ".py":
            # try to unpack the file
            unpack_dir = os.path.join(self.tmpdir, "unpack")
            info("Unpacking to %s..." % unpack_dir)
            unpack_archive(location, unpack_dir)
            distros = list(find_distributions(unpack_dir))
            if distros:
                self._local_dist = distros[0]
            else:
                # No distribution at the top level; scan each unpacked
                # subdirectory until one is found.
                for path in glob(os.path.join(unpack_dir, "*")):
                    distros = list(find_distributions(path))
                    if distros:
                        self._local_dist = distros[0]
                        break
        else:
            self._local_dist = Distribution.from_filename(location)
    return self._local_dist
def _extract_file_to_file_server(cls, file_server_root, archive_target_path):
    """Unpack a blueprint archive and move its single top-level directory
    into the file server root under a unique generated name.

    Returns the generated directory name.
    """
    # extract application to file server; work in a throwaway directory
    # so a failed upload leaves no residue behind.
    staging_dir = tempfile.mkdtemp("-blueprint-submit")
    try:
        try:
            archive_util.unpack_archive(archive_target_path, staging_dir)
        except archive_util.UnrecognizedFormat:
            raise manager_exceptions.BadParametersError(
                "Blueprint archive is of an unrecognized format. "
                "Supported formats are: {0}".format(SUPPORTED_ARCHIVE_TYPES))
        entries = os.listdir(staging_dir)
        if len(entries) != 1 or not path.isdir(
                path.join(staging_dir, entries[0])):
            raise manager_exceptions.BadParametersError(
                "archive must contain exactly 1 directory")
        app_dir_base = entries[0]
        # generating temporary unique name for app dir, to allow multiple
        # uploads of apps with the same name (as it appears in the file
        # system, not the app name field inside the blueprint.
        # the latter is guaranteed to be unique).
        unique_dir_name = "{0}-{1}".format(app_dir_base, uuid.uuid4())
        shutil.move(path.join(staging_dir, app_dir_base),
                    path.join(staging_dir, unique_dir_name))
        shutil.move(path.join(staging_dir, unique_dir_name),
                    file_server_root)
        return unique_dir_name
    finally:
        shutil.rmtree(staging_dir)
def _get_new(self, allow_redirects=True, chunk_size=512):
    """Retrieves the new archive and extracts it to self.updatedir."""
    self.log.info("Retrieving new version")
    archive_url = self.url + self.newfiles
    # Stream the archive to disk chunk by chunk.
    response = requests.get(archive_url, stream=True,
                            allow_redirects=allow_redirects)
    response.raise_for_status()
    with open(self.newfiles, 'wb') as out:
        for chunk in response.iter_content(chunk_size=chunk_size):
            if chunk:
                out.write(chunk)
    # Unpack archive and remove it after extraction
    try:
        self.log.info("Unpacking downloaded archive")
        unpack_archive(self.newfiles, self.updatedir)
    except UnrecognizedFormat:
        self.log.error("Retrieved version archive is invalid!\n"
                       "Please contact the software authors.\n"
                       "Please include the invalid archive "
                       "in a bug report.")
        # Keep the bad archive around (renamed) for the bug report.
        os.rename(self.newfiles, self.newfiles + ".dump")
    else:
        # Remove archive only if unpack operation succeeded
        self.log.info("Removing archive after extraction")
        os.remove(self.newfiles)
    # Signal that update is ready
    self.log.debug("Creating downloaded file marker")
    open(self.queue_replace, "w").close()
def download_src(self, url, source=None, archive=True):
    """
    Download source and return path to it.

    :param url: url to source distribution
    :type url: string
    :param source: source directory name after unpacking (optional)
    :type source: string
    :param archive: is source archive file or not
    :type archive: boolean
    :return: path to source directory
    """
    tmp_dir = tempfile.gettempdir()
    source_file = os.path.join(tmp_dir, url.split('/')[-1])
    urllib.request.urlretrieve(url, source_file)
    if source is None:
        source = source_file
    if archive:
        unpack_archive(source_file, tmp_dir)
        # Strip the archive extension. For ".tar.gz"/".tar.bz2" the first
        # splitext removes only the compression suffix, so strip the
        # remaining ".tar" too.
        # BUG FIX: the original tested `'tar' in source`, which also
        # matched any path merely containing "tar"; test the actual
        # extension instead.
        source = os.path.splitext(source)[0]
        if source.endswith('.tar'):
            source = os.path.splitext(source)[0]
    return os.path.join(tmp_dir, source)
def extract_gz(self, archive_path, archivedir_write_path, file_name=None,
               open_archive_file=None, archive=None):
    """Extract gz files.

    Extracts a given file name or all the files in the gz.

    :param archive_path: path to the .gz archive on disk
    :param archivedir_write_path: directory to extract into
    :param file_name: if given, extract only under this member name
    :param open_archive_file: kept for interface compatibility; rebound
        internally when a single member is extracted
    :param archive: optional already-open archive object, closed when done
    :return: list of extracted (relative) file names
    """
    if file_name:
        # Single-member extraction: stream the gzip contents out.
        open_archive_file = gzip.open(archive_path, 'r')
        try:
            self.write_fileobject(archivedir_write_path, file_name,
                                  file_obj=open_archive_file,
                                  open_object=False)
        finally:
            # BUG FIX: the gzip handle was previously leaked.
            open_archive_file.close()
        # `archive` is a parameter, so the original
        # `'archive' in locals()` guard was always true.
        if archive:
            archive.close()
        return [file_name]
    # Whole-archive extraction: diff the tree before/after the unpack to
    # report exactly which files were produced.
    files_before = set(walk_relative_path(archivedir_write_path))
    archive_util.unpack_archive(archive_path, archivedir_write_path)
    files_after = set(walk_relative_path(archivedir_write_path))
    return list(files_after - files_before)
def fixed_unpack_and_compile(self, egg_path, destination):
    """Unpack egg_path into destination, byte-compile the Python sources
    and fix permissions on compiled/shared-object files."""
    from setuptools.archive_util import unpack_archive
    needs_compile = []
    needs_chmod = []

    def progress(src, dst):
        # Collect files needing post-processing while reporting progress.
        if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
            needs_compile.append(dst)
            needs_chmod.append(dst)
        elif dst.endswith('.dll') or dst.endswith('.so'):
            needs_chmod.append(dst)
        self.unpack_progress(src, dst)
        # Dry-run mode returns None so nothing is actually written.
        return not self.dry_run and dst or None

    unpack_archive(egg_path, destination, progress)
    self.byte_compile(needs_compile)
    if not self.dry_run:
        for fname in needs_chmod:
            # mode = ((os.stat(f)[stat.ST_MODE]) | 0555) & 07755
            mode = ((os.stat(fname)[stat.ST_MODE]) | 0o444) & 0o7755
            chmod(fname, mode)
    return
def get_filelist(self):
    '''
    Unpack the archive if it is an archive, and return a list of file
    names that should be examined by OGR to determine if they are OGR
    supported file types. Cache the result to speed up subsequent calls.
    '''
    if not hasattr(self, '_files'):
        name, extension = os.path.splitext(self.filename)
        # only handle a few types, since some of the others might mess
        # up some processing...like xlsx (which this will unpack.)
        logger.debug('Extension is %s', extension)
        if extension.lower() in ('.zip', '.gz', '.tgz'):
            try:
                # Ensure that the files are output in the working dir, and
                # subdirectories are omitted (so it's a flat dir structure)
                archive_util.unpack_archive(
                    self.filename, self.working_dir,
                    progress_filter=self._progress_filter)
                logger.debug('Unpacked archive %s to %s',
                             self.filename, self.working_dir)
                files = [fn for fn in
                         map(lambda dir: os.path.join(self.working_dir,
                                                      dir),
                             os.listdir(self.working_dir))
                         if not os.path.isdir(fn)]
                self._files = files
            # FIX: "except X, e" is a syntax error on Python 3; the "as"
            # form below is valid on Python 2.6+ as well.
            except archive_util.UnrecognizedFormat as e:
                logger.debug(
                    'Specified file (%s) is not a recognized archive',
                    self.filename)
                self._files = [self.filename, ]
        else:
            self._files = [self.filename, ]
    # FIX: return the cached list (the sibling implementation does; this
    # variant previously returned None).
    return self._files
def extract_directory(directory):
    """Extracts bz2 compressed directory at `directory` if directory is
    compressed.
    """
    # Nothing to do when the directory already exists on disk.
    if os.path.isdir(directory):
        return
    parent, _ = os.path.split(os.path.abspath(directory))
    print('Unzipping...', end=' ', flush=True)
    unpack_archive(directory + '.tar.bz2', extract_dir=parent)
def _unpack_eggs(egg_list):
    """For every required package, make directory eggs importable via
    sys.path and unpack zipped eggs next to where they live."""
    import os
    import pkg_resources
    from setuptools.archive_util import unpack_archive
    for pkg in egg_list:
        for egg in pkg_resources.require(pkg):
            if os.path.isdir(egg.location):
                # Already a directory egg: just make it importable.
                sys.path.insert(0, egg.location)
                continue
            unpack_archive(egg.location,
                           os.path.abspath(os.path.dirname(egg.location)))
def cache_package(spec, own_url):
    """Download the source distribution for `spec`, then register and
    upload it to the local index.

    :param spec: requirement specifier string, e.g. ``"foo==1.0"``
    :param own_url: URL of this index, used when generating ~/.pypirc
    :raises ArgumentError: if spec is not a valid requirement
    :raises ValidationError: if zero or several setup.py scripts are found
    """
    try:
        spec = Requirement.parse(spec)
    except ValueError:
        raise ArgumentError(
            "Not a URL, existing file, or requirement spec: %r" % (spec, ))
    try:
        # download and unpack source package
        path = tempfile.mkdtemp('.spynepi')
        logger.info("Downloading %r" % spec)
        dist = PackageIndex().fetch_distribution(spec, path, force_scan=True,
                                                 source=True)
        archive_path = dist.location
        logger.info("Unpacking %r" % archive_path)
        unpack_archive(dist.location, path)
        # generate pypirc if possible
        # FIX: dict.has_key() was removed in Python 3; the "in" operator
        # is equivalent and works on Python 2 as well.
        if 'HOME' in os.environ:
            _generate_pypirc(own_url)
        else:
            # FIXME: ??? No idea. Hopefully setuptools knows better.
            pass
            # raise NotImplementedError("$HOME not defined, .pypirc not found.")
        # find setup.py in package. plagiarized from setuptools.
        setups = glob(os.path.join(path, '*', 'setup.py'))
        if not setups:
            raise ValidationError(
                "Couldn't find a setup script in %r editable distribution: %r"
                % (spec, os.path.join(path, '*', 'setup.py')))
        if len(setups) > 1:
            raise ValidationError(
                "Multiple setup scripts found in %r editable distribution: %r"
                % (spec, setups))
        # self-register the package.
        lib_dir = os.path.dirname(setups[0])
        command = ["python", "setup.py", "register", "-r", REPO_NAME]
        logger.info('calling %r', command)
        subprocess.call(command, cwd=lib_dir, stdout=sys.stdout)
        # self-upload the package
        command = ["python", "-m", "spynepi.util.pypi.upload", archive_path]
        logger.info('calling %r', command)
        subprocess.call(command, cwd=lib_dir, stdin=sys.stdin,
                        stdout=sys.stdout)
    finally:
        shutil.rmtree(path)
def copytree(self):
    # Copy the .egg-info tree to site-packages
    def skimmer(src, dst):
        # filter out source-control directories; note that 'src' is always
        # a '/'-separated path, regardless of platform. 'dst' is a
        # platform-specific path.
        for vcs_dir in ('.svn/', 'CVS/'):
            if src.startswith(vcs_dir) or '/' + vcs_dir in src:
                return None
        self.outputs.append(dst)
        log.debug("Copying %s to %s", src, dst)
        return dst

    unpack_archive(self.source, self.target, skimmer)
def cache_package(spec, own_url):
    """Download the source distribution for `spec`, then register and
    upload it to the local index.

    :param spec: requirement specifier string, e.g. ``"foo==1.0"``
    :param own_url: URL of this index, used when generating ~/.pypirc
    :raises ArgumentError: if spec is not a valid requirement
    :raises ValidationError: if zero or several setup.py scripts are found
    """
    try:
        spec = Requirement.parse(spec)
    except ValueError:
        raise ArgumentError("Not a URL, existing file, or requirement spec: %r" % (spec,))
    try:
        # download and unpack source package
        path = tempfile.mkdtemp('.spynepi')
        logger.info("Downloading %r" % spec)
        dist = PackageIndex().fetch_distribution(spec, path, force_scan=True,
                                                 source=True)
        archive_path = dist.location
        logger.info("Unpacking %r" % archive_path)
        unpack_archive(dist.location, path)
        # generate pypirc if possible
        # FIX: dict.has_key() was removed in Python 3; the "in" operator
        # is equivalent and works on Python 2 as well.
        if 'HOME' in os.environ:
            _generate_pypirc(own_url)
        else:
            # FIXME: ??? No idea. Hopefully setuptools knows better.
            pass
            # raise NotImplementedError("$HOME not defined, .pypirc not found.")
        # find setup.py in package. plagiarized from setuptools.
        setups = glob(os.path.join(path, '*', 'setup.py'))
        if not setups:
            raise ValidationError(
                "Couldn't find a setup script in %r editable distribution: %r"
                % (spec, os.path.join(path, '*', 'setup.py'))
            )
        if len(setups) > 1:
            raise ValidationError(
                "Multiple setup scripts found in %r editable distribution: %r"
                % (spec, setups)
            )
        # self-register the package.
        lib_dir = os.path.dirname(setups[0])
        command = ["python", "setup.py", "register", "-r", REPO_NAME]
        logger.info('calling %r', command)
        subprocess.call(command, cwd=lib_dir, stdout=sys.stdout)
        # self-upload the package
        command = ["python", "-m", "spynepi.util.pypi.upload", archive_path]
        logger.info('calling %r', command)
        subprocess.call(command, cwd=lib_dir, stdin=sys.stdin,
                        stdout=sys.stdout)
    finally:
        shutil.rmtree(path)
def checkout_extension(name):
    """Download extension `name` into a fresh folder under tdir, unpack it
    and return the unpacked top-level directory path."""
    log('Downloading extension %s to temporary folder', name)
    root = os.path.join(tdir, name)
    os.mkdir(root)
    downloaded = PackageIndex().download(name, root)
    unpack_archive(downloaded, root)
    # The first directory entry found in root is the unpacked extension.
    path = None
    for entry in os.listdir(root):
        path = os.path.join(root, entry)
        if os.path.isdir(path):
            break
    log('Downloaded to %s', path)
    return path
def download_and_extract(self, url, md5sum, dest, extract_filter='*',
                         strip_dirs=1):
    """Fetch an archive and unpack entries matching extract_filter into
    dest, stripping the first strip_dirs path components; return the
    list of extracted paths."""
    path, is_temp = Download(self.buildout['buildout'])(url, md5sum)
    extracted = []

    def progress_filter(src, dst):
        if fnmatch(src, extract_filter):
            remainder = os.path.normpath(src).split(os.sep)[strip_dirs:]
            if remainder:
                extracted.append(
                    os.path.join(dest, os.path.join(*remainder)))
                return extracted[-1]

    archive_util.unpack_archive(path, dest, progress_filter)
    return extracted
def setup_cmmi_process_test():
    """Setup for the CMMI processing."""
    suffix = str(uuid.uuid1())
    data_dir = os.getcwd() + "/setuptools_cmmi/tests/data"
    # Unique per-run paths so concurrent/repeated runs don't collide.
    temp_root_test_dest = data_dir + "/temp_test_dest-" + suffix
    dest_dir = temp_root_test_dest + "/usr"
    temp_work_dir = data_dir + "/workdir-" + suffix
    src_dist = data_dir + "/freetds-1.00.15.tar.gz"
    unpack_archive(src_dist, temp_work_dir)
    return temp_work_dir, dest_dir, temp_root_test_dest
def unpackEgg(modulo):
    """Unpack every egg required by `modulo` into the current directory
    and write an eggs.pth file listing them."""
    eggs = pkg_resources.require(modulo)
    for egg in eggs:
        if os.path.isdir(egg.location):
            # Directory egg: make it importable directly.
            sys.path.insert(0, egg.location)
            continue
        unpack_archive(egg.location, ".")
    eggpacks = set()
    with open("./eggs.pth", "w") as pth:
        for egg in eggs:
            pth.write(os.path.basename(egg.location))
            pth.write("\n")
            eggpacks.update(egg.get_metadata_lines("top_level.txt"))
    eggpacks.clear()
def _mk_zips(self):
    """Collect workingset entries into self.location (optionally zipped
    as .egg files) and record the resulting paths in self.options."""
    from setuptools import archive_util
    from setuptools.command.bdist_egg import make_zipfile
    if os.path.isdir(self.location):
        shutil.rmtree(self.location)
    os.mkdir(self.location)
    path = []
    for src, names in self.ws.entry_keys.items():
        if self.src_exclude.match(src):
            continue
        # BUG FIX: the original tested `filter(src.startswith, ...)` as a
        # boolean; on Python 3 a filter object is always truthy, so every
        # entry would be treated as a develop path. any() is equivalent on
        # Python 2 and correct on Python 3.
        if not self.zip and any(src.startswith(p)
                                for p in self.develop_paths):
            path.append(src)
            continue
        log.debug("Adding archive %r %r" % (src, names))
        archive_util.unpack_archive(
            src, self.location,
            progress_filter=self.progress_filter(names))
    # let us put the things in seperate paths so we dont have to
    # care if we are zipped or not, we just have to add any
    # subitem in the packcage directory to the paht, not the
    # package directory itself
    tmp = os.path.join(self.location, '.tmp')
    for name in os.listdir(self.location):
        if name == '.tmp':
            continue
        # Rename <location>/<name> to <location>/<name>/<name> via .tmp.
        os.mkdir(tmp)
        d = os.path.join(self.location, name)
        td = os.path.join(tmp, name)
        os.rename(d, td)
        os.rename(tmp, d)
    if self.zip:
        for name in os.listdir(self.location):
            d = os.path.join(self.location, name)
            # hm we need to call this .egg because of
            # pkg_resources.resource_filename
            z = os.path.join(self.location, name + '.egg')
            make_zipfile(z, d)
            shutil.rmtree(d)
    for name in os.listdir(self.location):
        path.append(os.path.join(self.location, name))
    self.options['path'] = '\n'.join(path)
    self.path = path
def copytree(self):
    # Copy the .egg-info tree to site-packages
    def skimmer(src, dst):
        # filter out source-control directories; note that 'src' is always
        # a '/'-separated path, regardless of platform. 'dst' is a
        # platform-specific path.
        for vcs_dir in (".svn/", "CVS/"):
            if src.startswith(vcs_dir) or "/" + vcs_dir in src:
                return None
        # Debian layout ships without SOURCES.txt.
        if (self.install_layout
                and self.install_layout in ["deb"]
                and src.startswith("SOURCES.txt")):
            log.info("Skipping SOURCES.txt")
            return None
        self.outputs.append(dst)
        log.debug("Copying %s to %s", src, dst)
        return dst

    unpack_archive(self.source, self.target, skimmer)
def copytree(self):
    # Copy the .egg-info tree to site-packages
    def skimmer(src, dst):
        # filter out source-control directories; note that 'src' is always
        # a '/'-separated path, regardless of platform. 'dst' is a
        # platform-specific path.
        if any(src.startswith(s) or "/" + s in src
               for s in (".svn/", "CVS/")):
            return None
        # SOURCES.txt is not shipped in the Debian install layout.
        skip_sources = (self.install_layout
                        and self.install_layout in ["deb"]
                        and src.startswith("SOURCES.txt"))
        if skip_sources:
            log.info("Skipping SOURCES.txt")
            return None
        self.outputs.append(dst)
        log.debug("Copying %s to %s", src, dst)
        return dst

    unpack_archive(self.source, self.target, skimmer)
def get_filelist(self):
    '''
    Unpack the archive if it is an archive, and return a list of file
    names that should be examined by OGR to determine if they are OGR
    supported file types. Cache the result to speed up subsequent calls.
    '''
    if not hasattr(self, '_files'):
        name, extension = os.path.splitext(self.filename)
        # only handle a few types, since some of the others might mess
        # up some processing...like xlsx (which this will unpack.)
        self.logger.debug('Extension is %s', extension)
        if extension.lower() not in ('.zip', '.gz', '.tgz'):
            self._files = [self.filename, ]
        else:
            try:
                # Ensure that the files are output in the working dir, and
                # subdirectories are omitted (so it's a flat dir structure)
                archive_util.unpack_archive(
                    self.filename, self.working_dir,
                    progress_filter=self._progress_filter)
                self.logger.debug('Unpacked archive %s to %s',
                                  self.filename, self.working_dir)
                self._files = [
                    os.path.join(self.working_dir, entry)
                    for entry in os.listdir(self.working_dir)
                    if not os.path.isdir(
                        os.path.join(self.working_dir, entry))
                ]
            except archive_util.UnrecognizedFormat:
                self.logger.debug(
                    'Specified file (%s) is not a recognized archive',
                    self.filename)
                self._files = [self.filename, ]
    self.logger.debug('File list is %s', self._files)
    return self._files
def _extract_file_to_file_server(cls, archive_path, destination_root):
    """
    Extracting a package.

    :param destination_root: the root destination for the unzipped archive
    :param archive_path: the archive path
    :return: the full path for the extracted archive
    """
    # Importing this archives in the global scope causes import loop
    from manager_rest.resources import SUPPORTED_ARCHIVE_TYPES
    # extract application to file server
    scratch_dir = tempfile.mkdtemp('-blueprint-submit')
    try:
        try:
            archive_util.unpack_archive(archive_path, scratch_dir)
        except archive_util.UnrecognizedFormat:
            raise manager_exceptions.BadParametersError(
                'Blueprint archive is of an unrecognized format. '
                'Supported formats are: {0}'.format(
                    SUPPORTED_ARCHIVE_TYPES))
        entries = os.listdir(scratch_dir)
        if len(entries) != 1 or not os.path.isdir(
                os.path.join(scratch_dir, entries[0])):
            raise manager_exceptions.BadParametersError(
                'archive must contain exactly 1 directory')
        base_name = entries[0]
        # generating temporary unique name for app dir, to allow multiple
        # uploads of apps with the same name (as it appears in the file
        # system, not the app name field inside the blueprint.
        # the latter is guaranteed to be unique).
        unique_name = '{0}-{1}'.format(base_name, uuid.uuid4())
        shutil.move(os.path.join(scratch_dir, base_name),
                    os.path.join(scratch_dir, unique_name))
        shutil.move(os.path.join(scratch_dir, unique_name),
                    destination_root)
        return unique_name
    finally:
        shutil.rmtree(scratch_dir)
def extract_blueprint_archive_to_mgr(archive_path, destination_root):
    """
    Extracting a package.

    :param destination_root: the root destination for the unzipped archive
    :param archive_path: the archive path
    :return: the full path for the extracted archive
    """
    # Importing this archives in the global scope causes import loop
    from manager_rest.resources import SUPPORTED_ARCHIVE_TYPES
    # extract application to file server
    tempdir = tempfile.mkdtemp('-blueprint-submit')
    try:
        try:
            archive_util.unpack_archive(archive_path, tempdir)
        except archive_util.UnrecognizedFormat:
            raise manager_exceptions.BadParametersError(
                'Blueprint archive is of an unrecognized format. '
                'Supported formats are: {0}'.format(
                    SUPPORTED_ARCHIVE_TYPES))
        contents = listdir(tempdir)
        single_dir = (len(contents) == 1 and
                      path.isdir(path.join(tempdir, contents[0])))
        if not single_dir:
            raise manager_exceptions.BadParametersError(
                'archive must contain exactly 1 directory')
        application_dir_base_name = contents[0]
        # generating temporary unique name for app dir, to allow multiple
        # uploads of apps with the same name (as it appears in the file
        # system, not the app name field inside the blueprint.
        # the latter is guaranteed to be unique).
        generated_app_dir_name = '{0}-{1}'.format(
            application_dir_base_name, uuid.uuid4())
        shutil.move(path.join(tempdir, application_dir_base_name),
                    path.join(tempdir, generated_app_dir_name))
        shutil.move(path.join(tempdir, generated_app_dir_name),
                    destination_root)
        return generated_app_dir_name
    finally:
        shutil.rmtree(tempdir)
def install_eggs(self, spec, dist_filename, tmpdir):
    """Unpack dist_filename (archive or directory), locate its setup
    script and hand off to build_and_install."""
    # Anything else, try to extract and build
    setup_base = tmpdir
    if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
        unpack_archive(dist_filename, tmpdir, self.unpack_progress)
    elif os.path.isdir(dist_filename):
        setup_base = os.path.abspath(dist_filename)
    # Find the setup.py file
    setup_script = os.path.join(setup_base, 'setup.py')
    if not os.path.exists(setup_script):
        candidates = glob(os.path.join(setup_base, '*', 'setup.py'))
        if not candidates:
            raise DistutilsError("Couldn't find a setup script in %s" %
                                 os.path.abspath(dist_filename))
        if len(candidates) > 1:
            raise DistutilsError("Multiple setup scripts in %s" %
                                 os.path.abspath(dist_filename))
        setup_script = candidates[0]
    return self.build_and_install(setup_script, setup_base)
def fixed_unpack_and_compile(self, egg_path, destination):
    """Unpack egg_path into destination, byte-compile the Python sources
    and fix permissions of compiled and shared-object files."""
    from setuptools.archive_util import unpack_archive
    to_compile = []
    to_chmod = []

    def pf(src, dst):
        # Collect files needing post-processing while reporting progress.
        if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
            to_compile.append(dst)
            to_chmod.append(dst)
        elif dst.endswith('.dll') or dst.endswith('.so'):
            to_chmod.append(dst)
        self.unpack_progress(src, dst)
        # Dry-run returns None so nothing is actually written.
        return not self.dry_run and dst or None

    unpack_archive(egg_path, destination, pf)
    self.byte_compile(to_compile)
    if not self.dry_run:
        for f in to_chmod:
            # mode = ((os.stat(f)[stat.ST_MODE]) | 0555) & 07755
            # FIX: 0444/07755 are Python-2-only octal literals (a syntax
            # error on Python 3); the 0o prefix is accepted since 2.6 and
            # matches the sibling implementation of this method.
            mode = ((os.stat(f)[stat.ST_MODE]) | 0o444) & 0o7755
            chmod(f, mode)
    to_compile = []
    to_chmod = []
    return
def update_self():
    """Download the latest package and overwrite our own files with it,
    then restart (a --updated flag means we already did, so just strip
    the flag and continue)."""
    if len(sys.argv) > 1 and sys.argv[1] == '--updated':
        del sys.argv[1]
        return
    from setuptools.package_index import PackageIndex
    from setuptools.archive_util import unpack_archive
    workdir = tempfile.mkdtemp(prefix=TEMP_DIR_PREFIX)
    print('Downloading %s' % DEFAULT_URL)
    download = PackageIndex().download(DEFAULT_URL, workdir)
    print('Downloaded.')
    unpack_archive(download, workdir)
    unpack_dir = os.path.join(workdir, PACK_FILE_ROOT_DIR)
    move_files(unpack_dir, os.curdir,
               shutil.ignore_patterns('.*', '*.sln', '*.pyproj', '*.sample'))
    shutil.rmtree(workdir)
    print('Self updated.')
    if len(sys.argv) == 1:
        # only update self.
        sys.exit(0)
    else:
        restart(with_virtualenv=False)
def install_eggs(self, spec, dist_filename, tmpdir):
    """Unpack an sdist (archive or directory), find its setup script and
    delegate to build_and_install."""
    # Anything else, try to extract and build
    setup_base = tmpdir
    if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
        unpack_archive(dist_filename, tmpdir, self.unpack_progress)
    elif os.path.isdir(dist_filename):
        setup_base = os.path.abspath(dist_filename)
    # Find the setup.py file
    setup_script = os.path.join(setup_base, 'setup.py')
    if not os.path.exists(setup_script):
        setups = glob(os.path.join(setup_base, '*', 'setup.py'))
        if len(setups) == 0:
            raise DistutilsError(
                "Couldn't find a setup script in %s"
                % os.path.abspath(dist_filename)
            )
        elif len(setups) > 1:
            raise DistutilsError(
                "Multiple setup scripts in %s"
                % os.path.abspath(dist_filename)
            )
        setup_script = setups[0]
    return self.build_and_install(setup_script, setup_base)
import pkg_resources eggs = pkg_resources.require("TurboGears") from setuptools.archive_util import unpack_archive for egg in eggs: if os.path.isdir(egg.location): sys.path.insert(0, egg.location) continue unpack_archive(egg.location, eggdir) eggpacks = set() eggspth = open("build/eggs.pth", "w") for egg in eggs: print egg eggspth.write(os.path.basename(egg.location)) eggspth.write("\n") eggpacks.update(egg.get_metadata_lines("top_level.txt")) eggspth.close() eggpacks.remove("pkg_resources") import zipfile oldzipfile = "dist/exe/library.zip" newzipfile = "dist/exe/small-library.zip" oldzip = zipfile.ZipFile(oldzipfile, "r") newzip = zipfile.ZipFile(newzipfile, "w", zipfile.ZIP_STORED) for entry in oldzip.infolist(): delim = entry.filename.find("/") if delim == -1: delim = entry.filename.find(".") if delim > -1:
def download_unpack_file(url, temp_work_dir):
    """Download and unpack the specified file."""
    LOG.info("Starting download for {0}".format(url))
    # urlretrieve returns (local_filename, headers); headers are unused.
    local_file, _ = req.urlretrieve(url)
    unpack_archive(local_file, temp_work_dir)
def smart_archive(args, dist, unpackdir):
    # Set pkgpath, pkgfile, pkgdir, unpackpath, pkgtype.
    # setup_path is optional.
    """Inspect dist.location (tar or zip), classify the package layout and
    unpack it under unpackdir, recording the results in `args`.

    :param args: dict populated with pkgpath/pkgfile/pkgdir/unpackpath/
        pkgtype (and setup_path when a top-level setup.py exists)
    :param dist: distribution object with location/project_name/version
    :param unpackdir: directory to unpack into
    :raises RuntimeError: unrecognized/empty/unsupported archives
    """
    def check_filename(fname, isfile, leading_dir, single_py, setup_path):
        # Fold one archive member into the running
        # (leading_dir, single_py, setup_path) classification state.
        def first_dir(path, isfile):
            # Walk up to the first path component; False means the entry
            # lives at the archive root.
            while True:
                path, fname = os.path.split(path)
                if path == '':
                    if isfile:
                        return False
                    else:
                        return fname
                elif path == '/':
                    if isfile:
                        return '/'
                    else:
                        return os.path.join('/', fname)
                isfile = False

        if leading_dir is None:
            leading_dir = first_dir(fname, isfile)
        elif leading_dir is False:
            pass
        elif leading_dir != first_dir(fname, isfile):
            # Members disagree on the top directory -> no common prefix.
            leading_dir = False
        if isfile and os.path.splitext(fname)[-1] == '.py':
            if single_py is None:
                single_py = fname
            elif single_py is False:
                pass
            else:
                # More than one .py file seen.
                single_py = False
        if isfile and os.path.basename(fname) == 'setup.py':
            if setup_path is None:
                setup_path = fname
            elif len(fname) < len(setup_path):
                # Prefer the shallowest setup.py.
                setup_path = fname
        return leading_dir, single_py, setup_path

    def unpackpath_cleanup(unpackpath):
        # Remove any stale link/tree at the target unpack location.
        if os.path.islink(unpackpath):
            os.unlink(unpackpath)
        elif os.path.exists(unpackpath):
            shutil.rmtree(unpackpath)

    args['pkgpath'] = dist.location
    args['pkgfile'] = os.path.basename(dist.location)
    leading_dir = None
    single_py = None
    setup_path = None
    if tarfile.is_tarfile(dist.location):
        tf = tarfile.open(dist.location, 'r:*')
        for tfi in tf.getmembers():
            leading_dir, single_py, setup_path = \
                check_filename(tfi.name, tfi.isfile(),
                               leading_dir, single_py, setup_path)
        tf.close()
    elif zipfile.is_zipfile(dist.location):
        zf = zipfile.ZipFile(dist.location)
        for zfn in zf.namelist():
            leading_dir, single_py, setup_path = \
                check_filename(zfn, True,
                               leading_dir, single_py, setup_path)
        zf.close()
    else:
        # FIX: converted Python-2-only "raise Type, msg" statements to the
        # call form, which is valid on both Python 2 and 3.
        raise RuntimeError('Unrecognized archive format: %s' % dist.location)
    if leading_dir is None or single_py is None:
        raise RuntimeError('Empty package encountered: %s' % dist.location)
    elif leading_dir is False:
        # No common top directory: synthesize one from name-version and
        # unpack directly into it.
        if dist.version == '':
            args['pkgdir'] = dist.project_name
        else:
            args['pkgdir'] = '%s-%s' % (dist.project_name, dist.version)
        args['unpackpath'] = os.path.join(unpackdir, args['pkgdir'])
        unpackpath_cleanup(args['unpackpath'])
        unpack_archive(dist.location, args['unpackpath'])
    else:
        args['pkgdir'] = leading_dir
        args['unpackpath'] = os.path.join(unpackdir, args['pkgdir'])
        unpackpath_cleanup(args['unpackpath'])
        unpack_archive(dist.location, unpackdir)
    if setup_path is not None:
        # Only a setup.py directly under the leading dir counts.
        if setup_path == os.path.join(leading_dir, 'setup.py'):
            setup_path = 'setup.py'
        else:
            setup_path = None
    if setup_path is not None:
        args['setup_path'] = setup_path
        args['pkgtype'] = 'setup.py'
    elif single_py is not False:
        args['pkgtype'] = 'single.py'
    else:
        raise RuntimeError('Unsupported archive type')
def upload(ctx, **kwargs):
    """Upload (or validate) a blueprint: fetch the archive, extract it,
    parse the DSL plan and update the blueprint record in the DB.

    Expected kwargs: blueprint_id, app_file_name, url, file_server_root,
    validate_only, and optionally labels.
    """
    client = get_rest_client()
    # extract the execution parameters
    blueprint_id = kwargs['blueprint_id']
    app_file_name = kwargs['app_file_name']
    url = kwargs['url']
    file_server_root = kwargs['file_server_root']
    validate_only = kwargs['validate_only']
    labels = kwargs.get('labels')

    # Download the archive, one way or the other
    archive_target_path = tempfile.mkdtemp()
    try:
        if url:
            # download the blueprint archive from URL using requests:
            if not validate_only:
                client.blueprints.update(
                    blueprint_id,
                    update_dict={'state': BlueprintUploadState.UPLOADING})
            with requests.get(url, stream=True, timeout=(5, None)) as resp:
                resp.raise_for_status()
                archive_file_path = os.path.join(archive_target_path,
                                                 os.path.basename(url))
                with open(archive_file_path, 'wb') as f:
                    for chunk in resp.iter_content(chunk_size=8192):
                        if chunk:
                            f.write(chunk)
            # Upload the downloaded archive to the manager
            if not validate_only:
                client.blueprints.upload_archive(
                    blueprint_id, archive_path=archive_file_path)
        else:
            # download the blueprint archive using REST
            archive_file_path = client.blueprints.download(
                blueprint_id, output_file=archive_target_path)
    except Exception as e:
        client.blueprints.update(blueprint_id, update_dict={
            'state': BlueprintUploadState.FAILED_UPLOADING,
            'error': str(e),
            'error_traceback': traceback.format_exc()
        })
        remove(archive_target_path)
        raise

    ctx.logger.info('Blueprint archive uploaded. Extracting...')

    # Extract the archive so we can parse it
    if not validate_only:
        client.blueprints.update(
            blueprint_id,
            update_dict={'state': BlueprintUploadState.EXTRACTING})
    try:
        archive_util.unpack_archive(archive_file_path, archive_target_path)
    except archive_util.UnrecognizedFormat:
        error_msg = 'Blueprint archive is of an unrecognized format. ' \
                    'Supported formats are: ' \
                    '{0}'.format(SUPPORTED_ARCHIVE_TYPES)
        handle_failed_extracting(ctx, client, blueprint_id, error_msg,
                                 archive_target_path)
    except Exception as e:
        handle_failed_extracting(ctx, client, blueprint_id, str(e),
                                 archive_target_path)

    archive_file_list = os.listdir(archive_target_path)
    # ignore the archive file for now
    archive_file_list.remove(os.path.basename(archive_file_path))
    # the other item in the archive dir is the extracted app, which is
    # supposed to consist of one folder in a properly-structured archive
    # BUG FIX: check the list length *before* indexing it; an archive
    # that extracted to nothing used to raise IndexError here instead of
    # reporting the intended error message.
    if len(archive_file_list) != 1 or not os.path.isdir(
            os.path.join(archive_target_path, archive_file_list[0])):
        error_msg = 'Archive must contain exactly 1 directory'
        handle_failed_extracting(ctx, client, blueprint_id, error_msg,
                                 archive_target_path)
    app_dir = os.path.join(archive_target_path, archive_file_list[0])

    # get actual app file name
    if app_file_name:
        app_file_name = unquote(app_file_name)
        application_file = os.path.join(app_dir, app_file_name)
        if not os.path.isfile(application_file):
            error_msg = '{0} does not exist in the application ' \
                        'directory'.format(app_file_name)
            handle_failed_extracting(ctx, client, blueprint_id, error_msg,
                                     archive_target_path)
    else:
        app_file_name = CONVENTION_APPLICATION_BLUEPRINT_FILE
        application_file = os.path.join(app_dir, app_file_name)
        if not os.path.isfile(application_file):
            error_msg = 'Application directory is missing blueprint.yaml ' \
                        'and application_file_name query parameter was ' \
                        'not passed'
            handle_failed_extracting(ctx, client, blueprint_id, error_msg,
                                     archive_target_path)

    ctx.logger.info('Blueprint archive extracted. Parsing...')

    # Parse plan
    if not validate_only:
        client.blueprints.update(
            blueprint_id,
            update_dict={'state': BlueprintUploadState.PARSING})
    dsl_location = os.path.join(app_dir, app_file_name)
    provider_context = client.manager.get_context()['context']
    try:
        parser_context = extract_parser_context(
            provider_context,
            resolver_parameters={'file_server_root': file_server_root,
                                 'client': client})
    except dsl_parser_utils.ResolverInstantiationError as e:
        ctx.logger.critical(str(e))
        client.blueprints.update(blueprint_id, update_dict={
            'state': BlueprintUploadState.FAILED_PARSING,
            'error': str(e),
            'error_traceback': traceback.format_exc()
        })
        raise
    try:
        plan = tasks.parse_dsl(dsl_location, file_server_root,
                               **parser_context)
    except (InvalidBlueprintImport, DSLParsingException) as e:
        error_msg = 'Invalid blueprint - {}'.format(e)
        ctx.logger.critical(error_msg)
        client.blueprints.update(blueprint_id, update_dict={
            'state': BlueprintUploadState.INVALID,
            'error': error_msg,
            'error_traceback': traceback.format_exc()
        })
        raise
    except Exception as e:
        error_msg = 'Failed parsing blueprint - {}'.format(e)
        ctx.logger.critical(error_msg)
        client.blueprints.update(blueprint_id, update_dict={
            'state': BlueprintUploadState.FAILED_PARSING,
            'error': error_msg,
            'error_traceback': traceback.format_exc()
        })
        raise
    finally:
        remove(archive_target_path)

    if validate_only:
        ctx.logger.info('Blueprint validated.')
    else:
        ctx.logger.info('Blueprint parsed. Updating DB with blueprint plan.')
        # Update DB with parsed plan
        update_dict = {
            'plan': plan,
            'main_file_name': app_file_name,
            'state': BlueprintUploadState.UPLOADED,
        }
        if plan.get('description'):
            update_dict['description'] = plan['description']
        if labels:
            update_dict['labels'] = labels
        client.blueprints.update(blueprint_id, update_dict=update_dict)
def unzip(src, dest, zip_ext=None, create_own_folder=False, tree=False):
    """Extract an archive file into a destination directory.

    Arguments
    ---------
    src: str
        Absolute path to the archive file ('/path/to/archive_filename.zip')
    dest: str
        Absolute path to extract all content to ('/path/to/extract/')

    Keyword Arguments
    -----------------
    zip_ext: list
        Valid archive file extensions. Default: ['.zip', '.gz']
    create_own_folder: bool
        When True, extract into a sub-folder of ``dest`` named after the
        archive ('/path/to/extract/archive_filename/'). Default: False
    tree: bool
        When True, also extract archive files found inside the extracted
        content, each into its own sub-directory. Default: False

    Raises
    ------
    ValueError
        If ``src`` does not carry one of the recognized extensions.
    OSError
        If ``dest`` is missing or not writable.
    """
    zip_ext = list(zip_ext or ['.zip', '.gz'])
    stem, ext = os.path.splitext(os.path.basename(src))
    if ext not in zip_ext:
        raise ValueError("Invalid archive file extension {}: {}".format(
            ext, src))
    if not check_directory(dest, write=True, execute=True):
        raise OSError("Directory not found or unwritable: {}".format(dest))

    if create_own_folder:
        # second splitext so 'pkg.tar.gz' names the folder 'pkg', not 'pkg.tar'
        inner_stem, inner_ext = os.path.splitext(os.path.basename(stem))
        if inner_ext == '.tar':
            stem = inner_stem
        dest = os.path.join(dest, stem)
        if not os.path.isdir(dest):
            os.makedirs(dest)

    unpack_archive(src, dest, drivers=(unpack_zipfile, unpack_tarfile))

    # flat extraction: leave any nested archive files untouched
    if not tree:
        return

    def pending_archives(done):
        # all archive files under dest that have not been processed yet
        return [
            os.path.join(root, name)
            for root, _, names in os.walk(dest)
            for name in names
            if os.path.splitext(name)[1] in zip_ext
            and os.path.join(root, name) not in done
        ]

    done = []
    pending = pending_archives(done)
    # keep re-scanning dest until a pass turns up no new archive files
    while pending:
        for archive in pending:
            # extract flat here; the surrounding loop handles deeper nesting
            unzip(archive, os.path.split(archive)[0], zip_ext=zip_ext,
                  create_own_folder=True, tree=False)
            done.append(archive)
        pending = pending_archives(done)
def test_unicode_files(tarfile_with_unicode, tmpdir):
    """Archive members with non-ASCII names unpack without error.

    ``str`` replaces the legacy ``six.text_type`` shim: on Python 3 the two
    are the same type, so the six dependency is unnecessary here (the other
    copy of this test in this file already uses ``str``).
    """
    target = tmpdir / 'out'
    archive_util.unpack_archive(tarfile_with_unicode, str(target))
def unzip(src, dest, zip_ext=None, create_own_folder=False, tree=False):
    """Unpack an archive, optionally recursing into nested archives.

    Arguments
    ---------
    src: str
        Absolute path to the archive file ('/path/to/archive_filename.zip')
    dest: str
        Absolute path to extract all content to ('/path/to/extract/')

    Keyword Arguments
    -----------------
    zip_ext: list
        Valid archive file extensions. Default: ['.zip', '.gz']
    create_own_folder: bool
        Create a sub-folder in 'dest' named after the archive file if True
        ('/path/to/extract/archive_filename/'). Default: False
    tree: bool
        Extract archive files found within the extracted content (each into
        its own sub-directory) if True. Default: False

    Raises
    ------
    ValueError
        When ``src`` has an unrecognized extension.
    OSError
        When ``dest`` does not exist or is not writable.
    """
    zip_ext = list(zip_ext or ['.zip', '.gz'])
    base, extension = os.path.splitext(os.path.basename(src))
    if extension not in zip_ext:
        raise ValueError(
            "Invalid archive file extension {}: {}".format(extension, src))
    if not check_directory(dest, write=True, execute=True):
        raise OSError("Directory not found or unwritable: {}".format(dest))
    if create_own_folder:
        # double splitext so '.tar.gz' archives drop the '.tar' part as well
        head, tail = os.path.splitext(os.path.basename(base))
        if tail == '.tar':
            base = head
        dest = os.path.join(dest, base)
        if not os.path.isdir(dest):
            os.makedirs(dest)
    unpack_archive(src, dest, drivers=(unpack_zipfile, unpack_tarfile))
    if not tree:
        # caller asked for a flat extraction only
        return

    def scan(seen):
        # collect archive files under dest that are not in 'seen'
        hits = []
        for folder, _, names in os.walk(dest):
            for name in names:
                candidate = os.path.join(folder, name)
                if os.path.splitext(name)[1] in zip_ext and candidate not in seen:
                    hits.append(candidate)
        return hits

    seen = []
    batch = scan(seen)
    # repeat until a full walk of dest finds no unprocessed archives
    while batch:
        for candidate in batch:
            # flat unpack; the outer while-loop picks up deeper nesting
            unzip(candidate, os.path.split(candidate)[0],
                  zip_ext=zip_ext, create_own_folder=True, tree=False)
            seen.append(candidate)
        batch = scan(seen)
def test_unicode_files(tarfile_with_unicode, tmpdir):
    """Unpacking an archive with non-ASCII member names must succeed."""
    extract_dir = str(tmpdir / 'out')
    archive_util.unpack_archive(tarfile_with_unicode, extract_dir)
for p in m.__path__[1:]: modulefinder.AddPackagePath(extra, p) except ImportError: # no build path setup, no worries. pass # hack to include simplejson egg in the build if using_simplejson: import pkg_resources eggs = pkg_resources.require("simplejson") from setuptools.archive_util import unpack_archive for egg in eggs: if os.path.isdir(egg.location): copytree(egg.location, ".") else: unpack_archive(egg.location, ".") rmtree("EGG-INFO") # windows specific options options = { "script": app + ".py", "icon_resources": [(1, os.path.join("resources", "main.ico"))], } resources = [ 'resources', ] # horrible monkey patch to make sdl mixer include work (say what?) # http://www.python-forum.org/pythonforum/viewtopic.php?f=3&t=19455&start=0 origIsSystemDLL = py2exe.build_exe.isSystemDLL
def smart_archive(args, dist, unpackdir):
    # Inspect and unpack a source archive (tar or zip), filling the `args`
    # dict in place with: pkgpath, pkgfile, pkgdir, unpackpath, pkgtype.
    # setup_path is optional (only set when the archive ships a setup.py
    # at its top level).
    # NOTE(review): uses Python 2 `raise E, msg` syntax — this module is
    # Python 2 only.
    def check_filename(fname, isfile, leading_dir, single_py, setup_path):
        # Fold one archive member into the three accumulators:
        #   leading_dir: common top-level directory of all members, or False
        #                once two members disagree (or a file sits at top level)
        #   single_py:   the sole top-level .py file, or False once a second
        #                .py member is seen
        #   setup_path:  shortest member path ending in 'setup.py', if any
        def first_dir(path, isfile):
            # Walk up `path` to its first component. Returns False for a
            # bare top-level file, otherwise the leading directory name
            # ('/'-rooted paths keep their root).
            while True:
                path, fname = os.path.split(path)
                if path == '':
                    if isfile:
                        return False
                    else:
                        return fname
                elif path == '/':
                    if isfile:
                        return '/'
                    else:
                        return os.path.join('/', fname)
                # anything above the last component is a directory
                isfile = False
        if leading_dir is None:
            # first member seen: adopt its leading directory
            leading_dir = first_dir(fname, isfile)
        elif leading_dir is False:
            pass
        elif leading_dir != first_dir(fname, isfile):
            # members disagree on the top directory -> no common leading dir
            leading_dir = False
        if isfile and os.path.splitext(fname)[-1] == '.py':
            if single_py is None:
                single_py = fname
            elif single_py is False:
                pass
            else:
                # more than one .py file -> not a single-module package
                single_py = False
        if isfile and os.path.basename(fname) == 'setup.py':
            if setup_path is None:
                setup_path = fname
            elif len(fname) < len(setup_path):
                # prefer the shallowest setup.py (shortest path)
                setup_path = fname
        return leading_dir, single_py, setup_path

    def unpackpath_cleanup(unpackpath):
        # Remove any previous extraction target (link, file tree) in the way.
        if os.path.islink(unpackpath):
            os.unlink(unpackpath)
        elif os.path.exists(unpackpath):
            shutil.rmtree(unpackpath)

    args['pkgpath'] = dist.location
    args['pkgfile'] = os.path.basename(dist.location)
    leading_dir = None; single_py = None; setup_path = None
    # Scan every member name without extracting, accumulating the three facts.
    if tarfile.is_tarfile(dist.location):
        tf = tarfile.open(dist.location, 'r:*')
        for tfi in tf.getmembers():
            leading_dir, single_py, setup_path = \
                check_filename(tfi.name, tfi.isfile(), leading_dir, single_py, setup_path)
        tf.close()
    elif zipfile.is_zipfile(dist.location):
        zf = zipfile.ZipFile(dist.location)
        for zfn in zf.namelist():
            # zip namelist gives no file/dir flag; treat every entry as a file
            leading_dir, single_py, setup_path = \
                check_filename(zfn, True, leading_dir, single_py, setup_path)
        zf.close()
    else:
        raise RuntimeError, 'Unrecognized archive format: %s' % dist.location
    # NOTE(review): an archive containing no .py files leaves single_py as
    # None and is reported as "empty" here — presumably intentional for a
    # Python package tool; confirm against callers.
    if leading_dir is None or single_py is None:
        raise RuntimeError, 'Empty package encountered: %s' % dist.location
    elif leading_dir is False:
        # No common top directory: synthesize 'name-version' and unpack
        # into that dedicated directory.
        if dist.version == '':
            args['pkgdir'] = dist.project_name
        else:
            args['pkgdir'] = '%s-%s' % (dist.project_name, dist.version)
        args['unpackpath'] = os.path.join(unpackdir, args['pkgdir'])
        unpackpath_cleanup(args['unpackpath'])
        unpack_archive(dist.location, args['unpackpath'])
    else:
        # Archive already wraps everything in one directory: unpack in place
        # and let that directory become pkgdir.
        args['pkgdir'] = leading_dir
        args['unpackpath'] = os.path.join(unpackdir, args['pkgdir'])
        unpackpath_cleanup(args['unpackpath'])
        unpack_archive(dist.location, unpackdir)
    # Only a setup.py sitting directly inside the leading directory counts.
    if setup_path is not None:
        if setup_path == os.path.join(leading_dir, 'setup.py'):
            setup_path = 'setup.py'
        else:
            setup_path = None
    if setup_path is not None:
        args['setup_path'] = setup_path
        args['pkgtype'] = 'setup.py'
    elif single_py is not False:
        # exactly one .py file and no usable setup.py -> single-module package
        args['pkgtype'] = 'single.py'
    else:
        raise RuntimeError, 'Unsupported archive type'
# cx_Freeze can't handle 3rd-party packages packed in .egg files, so we have to extract them for it dependency_eggs_to_unpack = [ 'uavcan', 'qtpy', 'qtconsole', ] unpacked_eggs_dir = os.path.join('build', 'hatched_eggs') sys.path.insert(0, unpacked_eggs_dir) try: shutil.rmtree(unpacked_eggs_dir) except Exception: pass for dep in dependency_eggs_to_unpack: for egg in pkg_resources.require(dep): if not os.path.isdir(egg.location): unpack_archive(egg.location, unpacked_eggs_dir) import qtawesome import qtconsole import PyQt5 import zmq import pygments import IPython import ipykernel import jupyter_client import traitlets import numpy # Oh, Windows, never change. missing_dlls = glob.glob(os.path.join(os.path.dirname(numpy.core.__file__), '*.dll')) print('Missing DLL:', missing_dlls)
for p in m.__path__[1:]: modulefinder.AddPackagePath(extra, p) except ImportError: # no build path setup, no worries. pass # hack to include simplejson egg in the build if using_simplejson: import pkg_resources eggs = pkg_resources.require("simplejson") from setuptools.archive_util import unpack_archive for egg in eggs: if os.path.isdir(egg.location): copytree(egg.location, ".") else: unpack_archive(egg.location, ".") rmtree("EGG-INFO") # windows specific options options = { "script": app + ".py", "icon_resources": [(1, os.path.join("resources", "main.ico"))], } resources = ['resources',] # horrible monkey patch to make sdl mixer include work (say what?) # http://www.python-forum.org/pythonforum/viewtopic.php?f=3&t=19455&start=0 origIsSystemDLL = py2exe.build_exe.isSystemDLL def isSystemDLL(pathname): if os.path.basename(pathname).lower() in ("libogg-0.dll", "sdl_ttf.dll"): return 0