def extract_archive(from_path, to_path):
    assert os.path.isfile(from_path)
    archive.extract(path=from_path, to_path=to_path)
    return to_path

def unpack(self):
    log.info("unpacking", self.path_archive, "to", str(self.rootdir))
    archive.extract(str(self.path_archive), to_path=str(self.rootdir))
    pkgname, version = verlib.guess_pkgname_and_version(self.link.basename)
    subdir = "%s-%s" % (pkgname, version)
    inpkgdir = self.rootdir.join(subdir)
    assert inpkgdir.check(), inpkgdir
    self.path_unpacked = inpkgdir

def extract_archive(self, archive_path):
    from glob import glob
    from archive import extract
    from .. import temporary_directory_context, chdir
    with temporary_directory_context() as tempdir:
        extract(archive_path, tempdir)
        # the archive is expected to contain a single top-level directory
        [extracted_dir] = glob('*')
        with chdir(extracted_dir):
            yield extracted_dir

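# The relative imports in the snippet above aren't shown. A minimal sketch of
# what the two helpers might look like, assuming they are ordinary context
# managers; these bodies are stand-ins, not the original implementations:
import contextlib
import os
import shutil
import tempfile

@contextlib.contextmanager
def temporary_directory_context():
    """Create a temp directory, chdir into it, and clean up on exit."""
    tempdir = tempfile.mkdtemp()
    old_cwd = os.getcwd()
    os.chdir(tempdir)
    try:
        yield tempdir
    finally:
        os.chdir(old_cwd)
        shutil.rmtree(tempdir)

@contextlib.contextmanager
def chdir(path):
    """Temporarily change the working directory."""
    old_cwd = os.getcwd()
    os.chdir(path)
    try:
        yield path
    finally:
        os.chdir(old_cwd)
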
def test_extract_function(self):
    endianzip = pathjoin(TEST_DIR, 'files', 'endian-4.0.0.zip')
    extract(endianzip, self.tmpdir)
    filepath = pathjoin(self.tmpdir, 'endian-4.0.0', 'waf')
    self.assertTrue(isfile(filepath))
    self.assertTrue(os.access(filepath, os.X_OK))

def download_tdesk():
    global dir
    global icon
    layout = [[sg.InputCombo(['Telegram Desktop', 'Telegram Desktop Alpha'],
                             readonly=True)],
              [sg.OK()]]
    window = sg.Window('Telegram Desktop version', icon=icon).Layout(layout)
    event, number = window.Read()
    version = number[0]
    window.Close()
    if version is None:
        return 'exit'
    if version == 'Telegram Desktop':
        if os.name == 'nt':
            link = 'https://telegram.org/dl/desktop/win_portable'
            file_name = dir + 'telegram.zip'
        else:
            link = 'https://telegram.org/dl/desktop/linux'
            file_name = dir + 'telegram.tar.xz'
    if version == 'Telegram Desktop Alpha':
        if os.name == 'nt':
            link = 'https://telegram.org/dl/desktop/win_portable?beta=1'
            file_name = dir + 'telegram.zip'
        else:
            link = 'https://telegram.org/dl/desktop/linux?beta=1'
            file_name = dir + 'telegram.tar.xz'
    layout = [[sg.Text('Downloading Telegram Desktop...')],
              [sg.ProgressBar(100, orientation='h', size=(20, 20),
                              key='progressbar')]]
    window = sg.Window('Downloading Telegram Desktop...',
                       icon=icon).Layout(layout)
    progress_bar = window.FindElement('progressbar')
    event, values = window.Read(timeout=0)
    with open(file_name, 'wb') as f:
        response = requests.get(link, stream=True)
        total_length = response.headers.get('content-length')
        if total_length is None:
            f.write(response.content)
        else:
            dl = 0
            total_length = int(total_length)
            for data in response.iter_content(chunk_size=4096):
                dl += len(data)
                f.write(data)
                percentage = int(100 * dl / total_length)
                progress_bar.UpdateBar(percentage)
                event, values = window.Read(timeout=0)
    extract(file_name, dir + 'bin/', method='insecure')
    os.remove(file_name)
    window.Close()

def create(self):
    """POST /photos: Create a new item"""
    f = self.request.params.get("files[]", None)
    if not isinstance(f, cgi.FieldStorage):
        return HTTPBadRequest()
    done = []
    settings = self.request.registry.settings
    # extract to a tmpdir that we should delete immediately
    # after import is done.
    tmpdir = mkdtemp(dir=settings["upload_dir"])
    try:
        try:
            fn = f.filename
            extract(f.file, tmpdir, safe=True, filename=fn)
            log.debug("file '%s' has been correctly extracted" % fn)
        except UnrecognizedArchiveFormat as e:
            # seems to be a single file, save it
            fdst = None
            try:
                fdst = open(os.path.join(tmpdir, os.path.basename(fn)), "wb")
                shutil.copyfileobj(f.file, fdst)
                log.debug("file '%s' has been correctly copied" % fn)
            finally:
                if fdst:
                    fdst.close()
        # walk in import directory to import all image files
        for dirpath, dirs, files in os.walk(tmpdir, topdown=False):
            for filename in files:
                abspath = os.path.join(dirpath, filename)
                log.debug("Importing image: %s" % abspath)
                try:
                    info = self._import(abspath)
                    result = {"name": f.filename,
                              "size": get_size(f.file),
                              "delete_type": "DELETE"}
                    uri = None
                    if isinstance(info, Photo):
                        uri = info.uri
                        _ = self.request.translate
                        result["error"] = _("File already exists on server")
                    else:
                        uri = info["uri"]
                    result["url"] = self.request.static_path(
                        settings["photos_dir"] + "/orig/" + uri)
                    result["thumbnail_url"] = self.request.static_path(
                        settings["photos_dir"] + "/scaled/" + uri)
                    result["delete_url"] = self.request.route_path(
                        "photos_delete", _query=[("uri", uri)])
                    done.append(result)
                except Exception as e:
                    # TODO: log error in session (flash message)
                    log.exception("Error while importing image, skip "
                                  "file: %s" % abspath)
    except Exception as e:
        # TODO: log error in session (flash message)
        raise e

def extract_to_temp(
        file: FileStorage,
        ignore_filter: IgnoreFilterManager,
        handle_ignore: IgnoreHandling = IgnoreHandling.keep) -> str:
    """Extracts the contents of file into a temporary directory.

    :param file: The archive to extract.
    :param ignore_filter: The files and directories that should be ignored.
    :param handle_ignore: Determines how ignored files should be handled.
    :returns: The pathname of the new temporary directory.
    """
    tmpfd, tmparchive = tempfile.mkstemp()
    try:
        os.remove(tmparchive)
        tmparchive += os.path.basename(
            secure_filename('archive_' + file.filename))
        tmpdir = tempfile.mkdtemp()
        file.save(tmparchive)

        if handle_ignore == IgnoreHandling.error:
            arch = archive.Archive(tmparchive)
            wrong_files = ignore_filter.get_ignored_files_in_archive(arch)
            if wrong_files:
                raise IgnoredFilesException(invalid_files=wrong_files)
            arch.extract(to_path=tmpdir, method='safe')
        else:
            archive.extract(tmparchive, to_path=tmpdir, method='safe')
            if handle_ignore == IgnoreHandling.delete:
                ignore_filter.delete_from_dir(tmpdir)
    except (tarfile.ReadError, zipfile.BadZipFile):
        raise APIException(
            'The given archive could not be extracted',
            "The given archive doesn't seem to be an archive",
            APICodes.INVALID_ARCHIVE,
            400,
        )
    except (InvalidFile, archive.UnsafeArchive) as e:
        raise APIException(
            'The given archive contains invalid files',
            str(e),
            APICodes.INVALID_FILE_IN_ARCHIVE,
            400,
        )
    finally:
        os.close(tmpfd)
        os.remove(tmparchive)
    return tmpdir

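# A small sketch of the failure mode that method='safe' guards against,
# using the archive.UnsafeArchive exception already referenced above; the
# wrapper name extract_strictly is hypothetical:
import archive

def extract_strictly(archive_path, dest):
    try:
        archive.extract(archive_path, to_path=dest, method='safe')
    except archive.UnsafeArchive as exc:
        # raised for member paths that would escape dest, e.g. '../evil'
        raise ValueError('refusing to extract %s: %s' % (archive_path, exc))
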
def extract(archive_filename, output_dirname, sha1=None, verbose=True):
    """Extracts `archive_filename` in `output_dirname`.

    Supported archives:
    -------------------
    * Zip formats and equivalents: .zip, .egg, .jar
    * Tar and compressed tar formats: .tar, .tar.gz, .tgz, .tar.bz2, .tz2
    """
    if verbose:
        print("Extracting '%s' to '%s'" % (archive_filename, output_dirname))
    if sha1 is not None:
        if verbose:
            print(" SHA-1 verification...")
        verify_sha1(archive_filename, sha1)
    archive.extract(archive_filename, output_dirname, verbose=verbose)

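# The snippet above calls a verify_sha1() helper that isn't shown. A minimal
# sketch of such a helper using the standard library, assuming it raises on
# mismatch; the name and exception type are assumptions, not original code:
import hashlib

def verify_sha1(filename, expected_sha1, chunk_size=65536):
    """Raise ValueError if `filename` does not hash to `expected_sha1`."""
    digest = hashlib.sha1()
    with open(filename, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            digest.update(chunk)
    if digest.hexdigest() != expected_sha1:
        raise ValueError('SHA-1 mismatch for %s' % filename)
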
def extract(archive_filename, output_dirname, md5sum=None, verbose=True):
    """This function extracts 'archive_filename' into 'output_dirname'.

    It is adapted from
    https://github.com/ivanov/scikits.data/blob/master/datasets/utils/download_and_extract.py,
    under the BSD 3 clause license.

    Supported archives:
    -------------------
    * Zip formats and equivalents: .zip, .egg, .jar
    * Tar and compressed tar formats: .tar, .tar.gz, .tgz, .tar.bz2, .tz2
    """
    import archive
    if md5sum is not None:
        if verbose:
            print(" MD5 verification...")
        verify_md5sum(archive_filename, md5sum)
    archive.extract(archive_filename, output_dirname)

def update(args):
    archive_cfg = read_subcmd_config('archive')
    rename_cfg = read_subcmd_config('rename')
    args.archive_dir = args.archive_dir or archive_cfg['archive_dir']
    if not os.path.isdir(args.archive_dir):
        raise RuntimeError("archive-dir {} is not a directory".format(
            args.archive_dir))
    files = filter(tarfile.is_tarfile, collect_files(args.archive_dir))
    latest = natsorted(files, reverse=True)[0]
    args.archive = [latest]
    args.extract_dir = None
    archive.extract(args)
    rename.rename(args)
    args.path = [rename_cfg['out_dir']]
    archive.archive(args)

def import_course_from_file(filename, repo_id, user_id):
    """
    Import OLX from .zip or tar.gz.

    Imports from a file and deletes the file.

    Args:
        filename (unicode): Path to archive file (zip or .tar.gz)
        repo_id (int): Primary key of repository course belongs to
        user_id (int): Primary key of user importing the course
    Raises:
        ValueError: Unable to extract or read archive contents.
    Returns:
        None

    A valid OLX archive has a single occurrence of the file course.xml in its
    root directory, or no course.xml in its root and a single occurrence of
    course.xml in one or more of the root directory's children.
    """
    tempdir = mkdtemp()
    try:
        extract(path=filename, to_path=tempdir, method="safe")
    except ArchiveException as ex:
        log.debug("failed to extract: %s", ex)
        remove(filename)
        raise ValueError("Invalid OLX archive, unable to extract.")
    course_imported = False
    if "course.xml" in listdir(tempdir):
        import_course_from_path(tempdir, repo_id, user_id)
        course_imported = True
    else:
        for path in listdir(tempdir):
            if exists(join(tempdir, path, 'course.xml')):
                import_course_from_path(join(tempdir, path), repo_id, user_id)
                course_imported = True
    rmtree(tempdir)
    remove(filename)
    if course_imported is False:
        raise ValueError("Invalid OLX archive, no courses found.")

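# A minimal caller sketch for the function above; the repo and user ids are
# hypothetical, and ValueError is the failure mode the docstring documents:
def import_uploaded_course(path):
    try:
        import_course_from_file(path, repo_id=1, user_id=1)
    except ValueError as ex:
        # covers both "unable to extract" and "no courses found"
        print('course import failed: %s' % ex)
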
def handle_bundle_upload(bundle_id):
    bundle = Bundle.objects.get(pk=bundle_id)
    file = bundle.get_temp_path()

    # Figure out the most likely mime-type
    mime_type = magic.from_file(file, mime=True)
    extension = mimetypes.guess_extension(mime_type)
    if extension in archive_extensions:
        new_path = file + extension
        # Save the new file name (needed for the BundleVersion)
        new_file_name = os.path.basename(new_path)
        # Treat it as an archive. Rename it to that, then extract
        os.rename(file, new_path)
        try:
            # Extract it to a directory, same path as the filename
            archive.extract(new_path, to_path=file)
            # Now go through the extracted files, make BundleFiles from them
            process_files_in_dir(bundle, file, None)
        except archive.ArchiveException:
            pass
    elif mime_type.startswith('text/'):
        # Should be a plain text file - create a CodeFile for it
        bundle_file = BundleFile(bundle=bundle,
                                 name=bundle.file_name,
                                 full_path=bundle.file_name,
                                 file_size=os.path.getsize(file),
                                 version=bundle.latest_version)
        bundle_file.save_file_contents(open(file, 'rt'),
                                       original_filename=bundle.file_name)
        new_file_name = os.path.basename(file)

    # Create the new BundleVersion
    BundleVersion.objects.create(bundle=bundle,
                                 file_name=new_file_name,
                                 version=bundle.latest_version)
    bundle.done_uploading = True
    bundle.save()

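# The archive_extensions collection referenced above isn't shown; a plausible
# stand-in based on what mimetypes.guess_extension() can return for archive
# mime-types (the original project's list may differ):
archive_extensions = ('.zip', '.tar', '.gz', '.tgz', '.bz2', '.egg', '.jar')
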
def enable_local_cached_repository(configuration, filepath):
    """ Enables a local cached repository from an archive.

    Parameters
        configuration
            a valid Craft Configuration object.
        filepath
            absolute filesystem path of the repository's archive
    Raises
        EnableError
            if it was not possible to enable the local cached repository.
    """
    db = configuration.db()
    try:
        mkdir(db + 'available')
    except OSError:
        pass
    if not archive.extract(filepath, db + 'available'):
        raise EnableError(filepath)

def file2struct(fileName, path, table_id, plugin_lookout_fields):
    fileExt = fileName.split('.')[-1]
    filePath = os.path.join(path, fileName)
    if fileExt in ('xls', 'xlsx', ):
        xlsx = load_workbook(filePath)
        first_sheet_name = xlsx.get_sheet_names()[0]
        sheet = xlsx.get_sheet_by_name(first_sheet_name)
        header = sheet.rows[0]
        for idx, cell in enumerate(header):
            values = [i.value for i in sheet.columns[idx]][1:]
            field_type = guess_type(values)
            ret = plugin_lookout_fields.validate_and_insert(
                table_id=table_id,
                field_name=cell.value.lower().replace(' ', ''),
                field_comment=cell.value,
                field_type=field_type
            )
            if ret.get('error'):
                plugin_lookout_fields.insert(
                    table_id=table_id,
                    field_name='field_%s' % idx,
                    field_label=cell.value,
                    field_comment=str(ret.error),
                    field_type=field_type
                )
    elif fileExt in ('zip', 'egg', 'jar', 'tar', 'gz', 'tgz', 'bz2', 'tz2', ):
        # uncompress the archive in new_dir
        new_dir = '.'.join(fileName.split('.')[:-1])
        new_path = os.path.join(path, new_dir)
        try:
            os.mkdir(new_path)
        except OSError:
            pass
        else:
            if fileExt in ('gz', 'bz2', ):
                filePath_new = '.'.join(filePath.split('.')[:-1] + ['tar', fileExt])
                os.rename(filePath, filePath_new)
                filePath = filePath_new
            extract(filePath, new_path)
        main_shp = [i for i in os.listdir(new_path)
                    if i.split('.')[-1] == 'shp'][0]
        shp_path = os.path.join(new_path, main_shp)
        driver = ogr.GetDriverByName('ESRI Shapefile')
        source = driver.Open(shp_path, 0)
        layer = source.GetLayer()
        # inspect field names and types
        ESRITypes = dict(String='string', Real='double', Date='date')
        layer_defn = layer.GetLayerDefn()
        layer_infos = [(layer_defn.GetFieldDefn(i).GetName(),
                        ESRITypes[layer_defn.GetFieldDefn(i).GetTypeName()])
                       for i in range(layer_defn.GetFieldCount())]
        # setup geometry field
        ret = plugin_lookout_fields.validate_and_insert(
            table_id=table_id,
            field_name='the_geom',
            field_label='Geometric feature',
            field_type='geometry'
        )
        if ret.errors:
            raise Exception(str(ret.errors))
        # setup attributes fields
        for field_name, field_type in layer_infos:
            ret = plugin_lookout_fields.validate_and_insert(
                table_id=table_id,
                field_name=field_name.lower(),
                field_label=field_name,
                field_type=field_type
            )
            if ret.errors:
                raise Exception(str(ret.errors))

def initFromFile(fileName, path, table_id, db, ext_table):
    filePath = os.path.join(path, fileName)
    fileExt = fileName.split('.')[-1]
    if fileExt in ('xls', 'xlsx', ):
        xlsx = load_workbook(filePath)
        first_sheet_name = xlsx.get_sheet_names()[0]
        sheet = xlsx.get_sheet_by_name(first_sheet_name)
        if ext_table._db(ext_table).count():
            ext_table.drop()
        error = None
        for index, row in enumerate(sheet.rows[1:]):
            values = [cell.value for cell in row]
            fields = [r.field_name for r in db(
                db.plugin_lookout_fields.table_id == table_id
            ).select(db.plugin_lookout_fields.field_name)]
            kwargs = dict([(k, v) for k, v in zip(fields, values)])
            ret = ext_table.validate_and_insert(**kwargs)
            if ret.errors:
                db.rollback()
                error = dict([(k, (kwargs[k], ret.errors[k]))
                              for k in ret.errors])
                raise Exception(str(error))
    elif fileExt in ('zip', 'egg', 'jar', 'tar', 'gz', 'tgz', 'bz2', 'tz2', ):
        # uncompress the archive in new_dir
        new_dir = '.'.join(fileName.split('.')[:-1])
        new_path = os.path.join(path, new_dir)
        try:
            os.mkdir(new_path)
        except OSError:
            pass
        else:
            if fileExt in ('gz', 'bz2', ):
                filePath_new = '.'.join(filePath.split('.')[:-1] + ['tar', fileExt])
                os.rename(filePath, filePath_new)
                filePath = filePath_new
            extract(filePath, new_path)
        main_shp = [i for i in os.listdir(new_path)
                    if i.split('.')[-1] == 'shp'][0]
        shp_path = os.path.join(new_path, main_shp)
        driver = ogr.GetDriverByName('ESRI Shapefile')
        source = driver.Open(shp_path, 0)
        layer = source.GetLayer()
        # inspect field names and types
        ESRITypes = dict(String='string', Real='double', Date='date')
        layer_defn = layer.GetLayerDefn()
        layer_infos = [(layer_defn.GetFieldDefn(i).GetName(),
                        ESRITypes[layer_defn.GetFieldDefn(i).GetTypeName()])
                       for i in range(layer_defn.GetFieldCount())]
        for index in range(layer.GetFeatureCount()):
            feature = layer.GetFeature(index)
            kwargs = dict([(fn[0].lower(), feature.GetField(fn[0]))
                           for fn in layer_infos
                           if feature.GetField(fn[0]) not in (None, '', '0000/00/00', )])
            if not hasattr(ext_table['the_geom'], 'st_asgeojson'):
                # tested with postgis and web2py 1.99.7
                kwargs['the_geom'] = feature.GetGeometryRef().ExportToWkt()
            else:
                # to be tested with web2py trunk with gis support
                kwargs['the_geom'] = feature.GetGeometryRef().ExportToWkb()
            ret = ext_table.validate_and_insert(**kwargs)
            if ret.errors:
                error = dict([(k, (kwargs[k], ret.errors[k]))
                              for k in ret.errors])
                raise IOError(str(error))

def wix_context(self):
    from archive import extract
    with utils.temporary_directory_context() as tempdir:
        wix_archive = self.get_wix35_binaries_zip_from_the_internet()
        extract(wix_archive)
        yield tempdir

def _install(configuration, installed, package, filepath):
    """ Performs a low-level package installation.

    Parameters
        configuration
            a valid Craft Configuration object.
        installed
            Set having all currently installed units on the system.
        package
            the Package unit to be installed.
        filepath
            absolute filesystem path of the package's archive
            to be installed.
    Raises
        InstallError
            if any error occurs during the installation.
        OSError
            if, in case an operation has failed, it is not possible
            to cleanly recover from it.
    Returns
        True
            if the installation was successfully completed.
    """
    architecture = package.architecture
    name = package.name
    version = package.version
    db = configuration.db()
    package_directory = db+'installed/'+name+'/'+version+'/'+architecture
    craft_directories = [
        db+'installed/',
        db+'installed/'+name,
        db+'installed/'+name+'/'+version
    ]
    for each in craft_directories:
        try:
            mkdir(each)
        except OSError:
            pass
    if package in installed:
        message.warning("'{0}' is already installed. Aborting...".format(package))
        raise InstallError(package)
    try:
        mkdir(package_directory)
    except OSError:
        message.warning("failed to create internal directory while installing '{0}'. Aborting...".format(package))
        raise InstallError(package)
    try:
        chdir(package_directory)
    except OSError:
        message.warning("could not access the directory belonging to package '{0}'. Aborting...".format(package))
        raise InstallError(package)
    sha1 = package.has_checksum('sha1')
    if sha1:
        if not filepath:
            message.warning("missing archive filepath for package '{0}'. Aborting...".format(package))
            raise InstallError(package)
        if not checksum.sha1(filepath, sha1):
            message.warning("inconsistent archive provided for package '{0}'. Aborting...".format(package))
            try:
                rmtree(package_directory)
            except OSError:
                raise
            raise InstallError(package)
    package_files = archive.getfiles(filepath)
    if not package_files:
        message.warning("empty archive provided for package '{0}'. Aborting...".format(package))
        try:
            rmtree(package_directory)
        except OSError:
            raise
        raise InstallError(package)
    try:
        package_files_dump_handle = open('files', 'w')
    except IOError:
        message.warning("could not write the file list for package '{0}'. Aborting...".format(package))
        try:
            rmtree(package_directory)
        except OSError:
            raise
        raise InstallError(package)
    else:
        for each in package_files:
            package_files_dump_handle.write(each+'\n')
        package_files_dump_handle.close()
    if not archive.extract(filepath, configuration.root()):
        message.warning("could not extract the archive provided for package '{0}'. Aborting...".format(package))
        try:
            rmtree(package_directory)
        except OSError:
            raise
        raise InstallError(package)
    try:
        if not dump.package(package, 'metadata.yml'):
            message.warning("failed to write metadata.yml for package '{0}'. Aborting...".format(package))
            raise InstallError(package)
    except IOError:
        raise
    installed.add(package)
    return True

def test_extract_function(self):
    extract(self.archive_path, self.tmpdir)
    self.check_files(self.tmpdir)

def test_extract_function_fileobject(self):
    f = open(self.archive_path, 'rb')
    extract(f, self.tmpdir, ext=self.ext)
    self.check_files(self.tmpdir)

def test_extract_function(self):
    extract(self.archive_path, self.tmpdir, ext=self.ext)
    self.check_files(self.tmpdir)

def test_extract_function_no_to_path(self):
    os.chdir(self.tmpdir)
    extract(self.archive_path, ext=self.ext)
    self.check_files(self.tmpdir)

def test_extract_function_no_to_path(self):
    os.chdir(self.tmpdir)
    extract(self.archive_path)
    self.check_files(self.tmpdir)

def test_extract_function_fileobject(self):
    # archives must be opened in binary mode
    f = open(self.archive_path, 'rb')
    extract(f, self.tmpdir)
    self.check_files(self.tmpdir)

def install(self):
    """Installer"""
    logger = logging.getLogger(self.name)
    options = self.options
    parts = self.buildout['buildout']['parts-directory']
    name = 'buildout-node'
    node_dir = os.path.join(parts, self.name)
    if not os.path.isdir(node_dir):
        os.makedirs(node_dir)

    node_binary = self.get_binary(options)

    if node_binary is None:
        args = {}
        if 'url' not in options:
            args = dict(
                v=self.get_version(options),
                a='x86_64' in os.uname() and 'x64' or 'x86',
            )
            if sys.platform.startswith('linux'):
                args['p'] = 'linux'
            elif sys.platform == 'darwin':
                args['p'] = 'darwin'

        if 'p' in args:
            options['url'] = url = self.binary_format.format(**args)
            logger.info('Using binary distribution at %s', url)
            from zc.buildout.download import Download
            from archive import extract

            # Use the buildout download infrastructure
            manager = Download(options=self.buildout['buildout'])

            # The buildout download utility expects us to know whether or
            # not we have a download cache, which causes fun errors. This
            # is probably a bug, but this test should be safe regardless.
            if manager.download_cache:
                filename = manager.download_cached(url)[0]
            else:
                filename = manager.download(url)[0]

            destination = self.get_node_directory(options)

            # Finally, extract the archive. The binary distribution urls
            # are defined in this file, so we can safely assume they're
            # gzipped tarballs. This prevents an error when downloaded
            # into a temporary file.
            extract(filename, destination, ext=".tar.gz")
        else:
            if 'url' not in options:
                options['url'] = url = self.source_format.format(**args)
            logger.info('Using source distribution at %s', options['url'])
            import zc.recipe.cmmi
            options['environment'] = (
                'PYTHONPATH=tools:deps/v8/tools:../../deps/v8/tools'
            )
            node = zc.recipe.cmmi.Recipe(self.buildout, name, options)
            node.install()

        node_binary = self.get_binary(options)

    node_bin = os.path.dirname(node_binary)

    npms = options.get('npms', '')
    if npms:
        npms = ' '.join([npm.strip() for npm in npms.split()
                         if npm.strip()])
        cmd = (
            'export HOME=%(node_dir)s;'
            'export PATH=%(node_bin)s:$PATH;'
            'echo "prefix=$HOME\n" > $HOME/.npmrc;'
            'echo "cache=%(cache)s\n" >> $HOME/.npmrc;'
            '%(node_bin)s/npm set color false;'
            '%(node_bin)s/npm set unicode false;'
            '%(node_bin)s/npm install -g %(npms)s') % {
                'node_dir': shell_quote(node_dir),
                'node_bin': shell_quote(node_bin),
                'cache': os.path.expanduser('~/.npm'),
                'npms': npms}
        p = subprocess.Popen(cmd, shell=True)
        p.wait()

    return self.install_scripts()

def extract_firmware():
    print("I: Extracting firmware")
    if not os.path.isdir(firmware_target_folder):
        os.mkdir("firmware_" + firmware["product"] + "_" + firmware["version"])
    extract(firmware_target_name, firmware_target_folder)

def install(self):
    """Installer"""
    logger = logging.getLogger(self.name)
    options = self.options
    parts = self.buildout['buildout']['parts-directory']
    name = 'buildout-node'
    node_dir = os.path.join(parts, self.name)
    if not os.path.isdir(node_dir):
        os.makedirs(node_dir)

    node_binary = self.get_binary(options)

    if node_binary is None:
        args = {}
        if 'url' not in options:
            args = dict(
                v=self.get_version(options),
                a='x86_64' in os.uname() and 'x64' or 'x86',
            )
            if sys.platform.startswith('linux'):
                args['p'] = 'linux'
            elif sys.platform == 'darwin':
                args['p'] = 'darwin'

        if 'p' in args:
            binary_url = options.get('binary-url', self.binary_format)
            options['url'] = url = binary_url.format(**args)
            logger.info('Using binary distribution at %s', url)
            from zc.buildout.download import Download
            from archive import extract

            # Use the buildout download infrastructure
            manager = Download(
                options=self.buildout['buildout'],
                offline=self.buildout['buildout'].get('offline') == 'true')

            # The buildout download utility expects us to know whether or
            # not we have a download cache, which causes fun errors. This
            # is probably a bug, but this test should be safe regardless.
            if manager.download_cache:
                filename = manager.download_cached(url)[0]
            else:
                filename = manager.download(url)[0]

            destination = self.get_node_directory(options)

            # Finally, extract the archive. The binary distribution urls
            # are defined in this file, so we can safely assume they're
            # gzipped tarballs. This prevents an error when downloaded
            # into a temporary file.
            extract(filename, destination, ext=".tar.gz")
        else:
            if 'url' not in options:
                options['url'] = url = self.source_format.format(**args)
            logger.info('Using source distribution at %s', options['url'])
            import zc.recipe.cmmi
            options['environment'] = (
                'PYTHONPATH=tools:deps/v8/tools:../../deps/v8/tools'
            )
            node = zc.recipe.cmmi.Recipe(self.buildout, name, options)
            node.install()

        node_binary = self.get_binary(options)

    node_bin = os.path.dirname(node_binary)

    npms = options.get('npms', '')
    if npms:
        npms = ' '.join([npm.strip() for npm in npms.split()
                         if npm.strip()])
        cmd_data = {'node_dir': shell_quote(node_dir),
                    'node_bin': shell_quote(node_bin),
                    'cache': os.path.expanduser('~/.npm'),
                    'npms': npms}
        cmd_prefix = (
            'export HOME=%(node_dir)s;'
            'export PATH=%(node_bin)s:"$PATH";'
            'echo "prefix=$HOME" > $HOME/.npmrc;'
            'echo "cache=%(cache)s" >> $HOME/.npmrc;'
            '%(node_bin)s/npm set color false;'
            '%(node_bin)s/npm set unicode false;') % cmd_data

        if self.buildout['buildout'].get('offline') == 'true':
            cmd = cmd_prefix + \
                '%(node_bin)s/npm ls %(npms)s --global --json' % cmd_data
            import zc.buildout
            try:
                output = subprocess.check_output(cmd, shell=True)
                output_json = json.loads(output)
                installed_npms = output_json.get('dependencies')
                # if npm reports a discrepancy, error out
                if not installed_npms or \
                        len(installed_npms) != len(npms.split()):
                    raise zc.buildout.UserError(
                        "Couldn't install %r npms in offline mode" % npms)
                logger.debug('Using existing npm install for %r' % npms)
            except subprocess.CalledProcessError:
                # npm fails if install has not yet happened
                raise zc.buildout.UserError(
                    "Couldn't install %r npms in offline mode" % npms)
        else:
            cmd = cmd_prefix + \
                '%(node_bin)s/npm install -g %(npms)s' % cmd_data
            p = subprocess.Popen(cmd, shell=True)
            p.wait()

    return self.install_scripts()

def install(self):
    """Installer"""
    logger = logging.getLogger(self.name)
    options = self.options
    parts = self.buildout['buildout']['parts-directory']
    name = 'buildout-node'
    node_dir = os.path.join(parts, self.name)
    if not os.path.isdir(node_dir):
        os.makedirs(node_dir)

    node_binary = self.get_binary(options)

    if node_binary is None:
        args = {}
        if 'url' not in options:
            args = dict(
                v=self.get_version(options),
                a='x86_64' in os.uname() and 'x64' or 'x86',
            )
            if sys.platform.startswith('linux'):
                args['p'] = 'linux'
            elif sys.platform == 'darwin':
                args['p'] = 'darwin'

        if 'p' in args:
            options['url'] = url = self.binary_format.format(**args)
            logger.info('Using binary distribution at %s', url)
            from zc.buildout.download import Download
            from archive import extract

            # Use the buildout download infrastructure
            manager = Download(options=self.buildout['buildout'])

            # The buildout download utility expects us to know whether or
            # not we have a download cache, which causes fun errors. This
            # is probably a bug, but this test should be safe regardless.
            if manager.download_cache:
                filename = manager.download_cached(url)[0]
            else:
                filename = manager.download(url)[0]

            destination = os.path.join(
                self.buildout['buildout']['parts-directory'], name)

            # Finally, extract the archive. The binary distribution urls
            # are defined in this file, so we can safely assume they're
            # gzipped tarballs. This prevents an error when downloaded
            # into a temporary file.
            extract(filename, destination, ext=".tar.gz")
        else:
            if 'url' not in options:
                options['url'] = url = self.source_format.format(**args)
            logger.info('Using source distribution at %s', options['url'])
            import hexagonit.recipe.cmmi
            options['environment'] = (
                'PYTHONPATH=tools:deps/v8/tools:../../deps/v8/tools'
            )
            node = hexagonit.recipe.cmmi.Recipe(self.buildout, name, options)
            node.install()

        node_binary = self.get_binary(options)

    node_bin = os.path.dirname(node_binary)

    scripts = options.get('scripts', '').split()
    scripts = [script.strip() for script in scripts
               if script.strip()]

    npms = options.get('npms', '')
    if npms:
        npms = ' '.join([npm.strip() for npm in npms.split()
                         if npm.strip()])
        p = subprocess.Popen((
            'export HOME=%(node_dir)s;'
            'export PATH=%(node_bin)s:$PATH;'
            'echo "prefix=$HOME\n" > $HOME/.npmrc;'
            '%(node_bin)s/npm set color false;'
            '%(node_bin)s/npm set unicode false;'
            '%(node_bin)s/npm install -sg %(npms)s') % {
                'node_dir': shell_quote(node_dir),
                'node_bin': shell_quote(node_bin),
                'npms': npms},
            shell=True)
        p.wait()

    for script in scripts:
        if script in ['node']:
            continue
        filename = os.path.join(node_bin, script)
        if os.path.isfile(filename):
            fd = open(filename)
            data = fd.read()
            fd.close()
            fd = open(filename, 'w')
            fd.seek(0)
            data = data.split('\n')
            data[0] = '#!%s' % node_binary
            fd.write('\n'.join(data))
            fd.close()

    for script in ('node', 'npm'):
        if script not in scripts:
            scripts.append(script)

    node_path = options.get('node-path', '').split()
    node_path.insert(0, os.path.join(node_dir, 'lib', 'node_modules'))
    node_path = ':'.join(node_path)
    options['initialization'] = (
        'import os;\nos.environ["NODE_PATH"] = %r' % node_path
    )

    paths = [os.path.join(node_dir, 'bin'), node_bin]
    all_scripts = []
    for p in paths:
        if os.path.isdir(p):
            all_scripts.extend(os.listdir(p))

    typos = []
    for script in scripts:
        if script not in all_scripts:
            typos.append(script)
    if typos:
        import zc.buildout
        typos = ', '.join([repr(s) for s in typos])
        all_scripts = [repr(s) for s in all_scripts]
        all_scripts = ', '.join(sorted(all_scripts))
        raise zc.buildout.UserError((
            'Script(s) {0} not found in {1[0]};{1[1]}.\n'
            'You may have a typo in your buildout config.\n'
            'Available scripts are: {2}'
        ).format(typos, paths, all_scripts))

    options['eggs'] = 'gp.recipe.node'
    options['arguments'] = '%r, (%r, %r), sys.argv[0]' % (
        node_binary,
        os.path.join(node_dir, 'bin'),
        node_bin,
    )
    options['scripts'] = '\n'.join(scripts)
    options['entry-points'] = '\n'.join([
        '%s=gp.recipe.node.script:main' % s for s in scripts
    ])
    from zc.recipe.egg import Scripts
    rscripts = Scripts(self.buildout, self.name, options)
    return rscripts.install()

def test_extract_function_filename(self):
    extract(self.archive_path, self.tmpdir, filename=self.filename)
    self.check_files(self.tmpdir)