Esempio n. 1
0
def unpack_to_tmp(url, archive_file, mime_type):
	"""Unpack an archive into a fresh temporary directory.

	The archive is extracted into an "unpacked" subdirectory of the new
	temporary directory, with permissions fixed up ready for importing
	into the cache.
	@param url: URL the archive came from (used to guess its type if needed)
	@param archive_file: path of the archive on disk
	@param mime_type: the archive's MIME type, or a false value to guess it
	@return: the temporary directory containing "unpacked"."""
	if not mime_type:
		mime_type = unpack.type_from_url(url)
		assert mime_type, "Can't guess MIME type from {url}".format(url = url)

	parent = tempfile.mkdtemp('-0template')
	try:
		# Must be readable to helper process running as 'zeroinst'...
		saved_umask = os.umask(0o022)
		try:
			target = os.path.join(parent, 'unpacked')
			os.mkdir(target)

			with open(archive_file, 'rb') as stream:
				unpack.unpack_archive(url, stream, target,
						      type = mime_type, start_offset = 0)
				manifest.fixup_permissions(target)
		finally:
			os.umask(saved_umask)
	except:
		# Clean up on any failure, then let the error propagate.
		support.ro_rmtree(parent)
		raise
	return parent
Esempio n. 2
0
    def add_dir_to_cache(self, required_digest, path, try_helper=False):
        """Copy the contents of path to the cache.
		@param required_digest: the expected digest
		@type required_digest: str
		@param path: the root of the tree to copy
		@type path: str
		@param try_helper: attempt to use privileged helper before user cache (since 0.26)
		@type try_helper: bool
		@raise BadDigest: if the contents don't match the given digest."""
        if self.lookup(required_digest):
            logger.info(_("Not adding %s as it already exists!"),
                        required_digest)
            return

        tmp = self.get_tmp_dir_for(required_digest)
        try:
            _copytree2(path, tmp)
            self.check_manifest_and_rename(required_digest,
                                           tmp,
                                           try_helper=try_helper)
        except:
            logger.warn(_("Error importing directory."))
            logger.warn(_("Deleting %s"), tmp)
            support.ro_rmtree(tmp)
            raise
Esempio n. 3
0
	def testCopy(self):
		"""Exercise 'cli.do_copy': reject bad directory names and unsafe sha1
		sources, then copy a valid sha1new implementation and verify it."""
		sha1 = manifest.get_algorithm('sha1')
		sha1new = manifest.get_algorithm('sha1new')
		source = os.path.join(self.tmp, 'badname')
		os.mkdir(source)

		self.populate_sample(source)

		lines = list(sha1new.generate_manifest(source))
		self.assertEqual(['F f7ff9e8b7bb2e09b70935a5d785e0cc5d9d0abf0 2 5 MyFile',
				   'S 570b0ce957ab43e774c82fca0ea3873fc452278b 19 a symlink',
				   'D /My Dir',
				   'F 0236ef92e1e37c57f0eb161e7e2f8b6a8face705 2 10 !a file!',
				   'X b4ab02f2c791596a980fd35f51f5d92ee0b4705c 2 10 !a file!.exe'],
				lines)
		digest = sha1.getID(manifest.add_manifest_file(source, sha1))

		copy = tempfile.mktemp()
		os.mkdir(copy)
		try:
			# Source must be in the form alg=value
			try:
				cli.do_copy([source, copy])
				assert 0
			except BadDigest as ex:
				assert 'badname' in str(ex)
			source, badname = os.path.join(self.tmp, digest), source
			os.chmod(badname, 0o755)		# can't rename RO directories on MacOS X
			os.rename(badname, source)
			os.chmod(source, 0o555)

			# Can't copy sha1 implementations (unsafe)
			try:
				cli.do_copy([source, copy])
				assert 0	# must raise; without this the check passed silently
			except SafeException as ex:
				assert 'sha1' in str(ex)

			# Already have a .manifest
			try:
				manifest.add_manifest_file(source, sha1new)
				assert 0
			except SafeException as ex:
				assert '.manifest' in str(ex)

			os.chmod(source, 0o700)
			os.unlink(os.path.join(source, '.manifest'))

			# Switch to sha1new
			digest = sha1new.getID(manifest.add_manifest_file(source, sha1new))
			source, badname = os.path.join(self.tmp, digest), source
			os.chmod(badname, 0o755)
			os.rename(badname, source)
			os.chmod(source, 0o555)

			cli.do_copy([source, copy])

			with open(os.path.join(copy, digest, 'MyFile'), 'rt') as stream:
				self.assertEqual('Hello', stream.read())
		finally:
			support.ro_rmtree(copy)
Esempio n. 4
0
	def tearDown(self):
		"""Delete the per-test directory trees and restore the saved $PATH."""
		shutil.rmtree(self.config_home)
		# The cache may contain read-only directories; plain rmtree would fail.
		support.ro_rmtree(self.cache_home)
		for tree in (self.cache_system, self.gnupg_home):
			shutil.rmtree(tree)

		os.environ['PATH'] = self.old_path
Esempio n. 5
0
	def tearDown(self):
		"""Run the base clean-up, remove this test's trees, and reset cli."""
		BaseTest.tearDown(self)

		for tree in (self.store_parent, self.tmp):
			support.ro_rmtree(tree)

		cli.stores = None
Esempio n. 6
0
	def add_archive_to_cache(self, required_digest, data, url, extract = None, type = None, start_offset = 0, try_helper = False, dry_run = False):
		"""Unpack an archive stream into the cache under its expected digest.
		@type required_digest: str
		@type data: file
		@type url: str
		@type extract: str | None
		@type type: str | None
		@type start_offset: int
		@type try_helper: bool
		@type dry_run: bool"""
		from . import unpack

		if self.lookup(required_digest):
			# Already cached; nothing more to do.
			logger.info(_("Not adding %s as it already exists!"), required_digest)
			return

		staging = self.get_tmp_dir_for(required_digest)
		try:
			unpack.unpack_archive(url, data, staging, extract, type = type, start_offset = start_offset)
		except:
			# A failed unpack leaves a useless partial tree; discard it.
			import shutil
			shutil.rmtree(staging)
			raise

		try:
			self.check_manifest_and_rename(required_digest, staging, extract, try_helper = try_helper, dry_run = dry_run)
		except Exception:
			# Digest mismatch (or store failure): remove the read-only tree.
			support.ro_rmtree(staging)
			raise
Esempio n. 7
0
    def setUp(self):
        """Prepare an isolated test environment: fresh XDG directories,
        file-based debug logging, and a default full-network policy."""
        www.HTTP_ROOT = '<HTTP_ROOT>'
        www.REPO_ROOT = '<REPO_ROOT>'

        for name, path in _ENVS.items():
            os.environ[name] = os.path.join(_ROOT, path)
        if os.path.exists(os.environ['XDG_CACHE_HOME']):
            # The cache may contain read-only directories; plain rmtree fails.
            support.ro_rmtree(os.environ['XDG_CACHE_HOME'])
        shutil.rmtree(_ROOT, ignore_errors=True)
        for name, path in _ENVS.items():
            # 0o700 (not the legacy 0700 literal, which is a Python 3 syntax
            # error) works on Python 2.6+ and 3.
            os.makedirs(os.environ[name], mode=0o700)

        reload(basedir)
        assert basedir.xdg_config_home == os.environ['XDG_CONFIG_HOME']
        iface_cache.iface_cache.__init__()
        download._downloads = {}

        logger = logging.getLogger()
        # Iterate a copy: removeHandler mutates logger.handlers, and mutating
        # the list while iterating it skips entries.
        for handler in list(logger.handlers):
            logger.removeHandler(handler)
        logging.basicConfig(filename=os.path.join(_ROOT, 'debug.log'),
                            level=logging.DEBUG)

        self.logger = logging.getLogger('test')

        policy = Policy('')
        policy.network_use = model.network_full
        policy.freshness = 60
        policy.save_config()

        self._child = None
Esempio n. 8
0
    def tearDown(self):
        """Run the base clean-up, remove this test's trees, and reset cli."""
        BaseTest.tearDown(self)

        for tree in (self.store_parent, self.tmp):
            support.ro_rmtree(tree)

        cli.stores = None
Esempio n. 9
0
	def check_manifest_and_rename(self, required_digest, tmp, extract = None, try_helper = False, dry_run = False):
		"""Check that tmp[/extract] has the required_digest.
		On success, rename the checked directory to the digest, and
		make the whole tree read-only.
		@type required_digest: str
		@type tmp: str
		@type extract: str | None
		@param try_helper: attempt to use privileged helper to import to system cache first (since 0.26)
		@type try_helper: bool
		@param dry_run: just print what we would do to stdout (and delete tmp)
		@type dry_run: bool
		@raise BadDigest: if the input directory doesn't match the given digest"""
		if extract:
			# Only the named sub-directory of the unpacked tree is imported.
			extracted = os.path.join(tmp, extract)
			if not os.path.isdir(extracted):
				raise Exception(_('Directory %s not found in archive') % extract)
		else:
			extracted = tmp

		from . import manifest

		# Normalise permissions before hashing, so the digest is reproducible.
		manifest.fixup_permissions(extracted)

		alg, required_value = manifest.splitID(required_digest)
		# add_manifest_file also writes the .manifest file into the tree.
		actual_digest = alg.getID(manifest.add_manifest_file(extracted, alg))
		if actual_digest != required_digest:
			raise BadDigest(_('Incorrect manifest -- archive is corrupted.\n'
					'Required digest: %(required_digest)s\n'
					'Actual digest: %(actual_digest)s\n') %
					{'required_digest': required_digest, 'actual_digest': actual_digest})

		if try_helper:
			# Prefer the privileged helper (system cache); fall back to this store.
			if self._add_with_helper(required_digest, extracted, dry_run = dry_run):
				support.ro_rmtree(tmp)
				return
			logger.info(_("Can't add to system store. Trying user store instead."))

		logger.info(_("Caching new implementation (digest %s) in %s"), required_digest, self.dir)

		final_name = os.path.join(self.dir, required_digest)
		if os.path.isdir(final_name):
			logger.warning(_("Item %s already stored.") % final_name) # not really an error
			return

		if dry_run:
			print(_("[dry-run] would store implementation as {path}").format(path = final_name))
			self.dry_run_names.add(required_digest)
			support.ro_rmtree(tmp)
			return
		else:
			# If we just want a subdirectory then the rename will change
			# extracted/.. and so we'll need write permission on 'extracted'

			os.chmod(extracted, 0o755)
			os.rename(extracted, final_name)
			# Make the cached tree read-only once in its final location.
			os.chmod(final_name, 0o555)

		if extract:
			# Remove the now-empty wrapper directory.
			os.rmdir(tmp)
Esempio n. 10
0
    def check_manifest_and_rename(self,
                                  required_digest,
                                  tmp,
                                  extract=None,
                                  try_helper=False):
        """Check that tmp[/extract] has the required_digest.
		On success, rename the checked directory to the digest, and
		make the whole tree read-only.
		@param try_helper: attempt to use privileged helper to import to system cache first (since 0.26)
		@type try_helper: bool
		@raise BadDigest: if the input directory doesn't match the given digest"""
        if extract:
            # Only the named sub-directory of the unpacked tree is imported.
            extracted = os.path.join(tmp, extract)
            if not os.path.isdir(extracted):
                raise Exception(
                    _('Directory %s not found in archive') % extract)
        else:
            extracted = tmp

        from . import manifest

        # Normalise permissions before hashing so the digest is reproducible.
        manifest.fixup_permissions(extracted)

        alg, required_value = manifest.splitID(required_digest)
        # add_manifest_file also writes a .manifest file into the tree.
        actual_digest = alg.getID(manifest.add_manifest_file(extracted, alg))
        if actual_digest != required_digest:
            raise BadDigest(
                _('Incorrect manifest -- archive is corrupted.\n'
                  'Required digest: %(required_digest)s\n'
                  'Actual digest: %(actual_digest)s\n') % {
                      'required_digest': required_digest,
                      'actual_digest': actual_digest
                  })

        if try_helper:
            # Prefer the privileged helper (system cache) when requested.
            if self._add_with_helper(required_digest, extracted):
                support.ro_rmtree(tmp)
                return
            logger.info(
                _("Can't add to system store. Trying user store instead."))

        logger.info(_("Caching new implementation (digest %s) in %s"),
                    required_digest, self.dir)

        final_name = os.path.join(self.dir, required_digest)
        if os.path.isdir(final_name):
            raise Exception(_("Item %s already stored.") %
                            final_name)  # XXX: not really an error

        # If we just want a subdirectory then the rename will change
        # extracted/.. and so we'll need write permission on 'extracted'

        os.chmod(extracted, 0o755)
        os.rename(extracted, final_name)
        # Make the cached tree read-only once in its final location.
        os.chmod(final_name, 0o555)

        if extract:
            # Remove the now-empty wrapper directory.
            os.rmdir(tmp)
Esempio n. 11
0
    def testCopy(self):
        """Exercise 'cli.do_copy': reject bad directory names and unsafe sha1
        sources, then copy a valid sha1new implementation and verify it."""
        sha1 = manifest.get_algorithm('sha1')
        sha1new = manifest.get_algorithm('sha1new')
        source = os.path.join(self.tmp, 'badname')
        os.mkdir(source)

        self.populate_sample(source)

        lines = list(sha1new.generate_manifest(source))
        # assertEquals is a deprecated alias of assertEqual.
        self.assertEqual([
            'F f7ff9e8b7bb2e09b70935a5d785e0cc5d9d0abf0 2 5 MyFile',
            'S 570b0ce957ab43e774c82fca0ea3873fc452278b 19 a symlink',
            'D /My Dir',
            'F 0236ef92e1e37c57f0eb161e7e2f8b6a8face705 2 10 !a file!',
            'X b4ab02f2c791596a980fd35f51f5d92ee0b4705c 2 10 !a file!.exe'
        ], lines)
        digest = sha1.getID(manifest.add_manifest_file(source, sha1))

        copy = tempfile.mktemp()
        os.mkdir(copy)
        try:
            # Source must be in the form alg=value
            try:
                cli.do_copy([source, copy])
                assert 0
            except BadDigest as ex:
                assert 'badname' in str(ex)
            source, badname = os.path.join(self.tmp, digest), source
            os.rename(badname, source)

            # Can't copy sha1 implementations (unsafe)
            try:
                cli.do_copy([source, copy])
                assert 0  # must raise; without this the check passed silently
            except SafeException as ex:
                assert 'sha1' in str(ex)

            # Already have a .manifest
            try:
                manifest.add_manifest_file(source, sha1new)
                assert 0
            except SafeException as ex:
                assert '.manifest' in str(ex)

            os.chmod(source, 0o700)
            os.unlink(os.path.join(source, '.manifest'))

            # Switch to sha1new
            digest = sha1new.getID(manifest.add_manifest_file(source, sha1new))
            source, badname = os.path.join(self.tmp, digest), source
            os.rename(badname, source)

            cli.do_copy([source, copy])

            # file() is Python 2 only and leaked the handle; use open() with
            # a context manager instead.
            with open(os.path.join(copy, digest, 'MyFile')) as stream:
                self.assertEqual('Hello', stream.read())
        finally:
            support.ro_rmtree(copy)
Esempio n. 12
0
	def tearDown(self):
		"""Fail if the handler recorded an exception, then delete test dirs."""
		assert self.config.handler.ex is None, self.config.handler.ex

		shutil.rmtree(self.config_home)
		# The cache may contain read-only directories.
		support.ro_rmtree(self.cache_home)
		for tree in (self.cache_system, self.gnupg_home):
			shutil.rmtree(tree)

		os.environ['PATH'] = self.old_path
Esempio n. 13
0
    def tearDown(self):
        """Fail if the handler recorded an exception, then delete test dirs."""
        assert self.config.handler.ex is None, self.config.handler.ex

        shutil.rmtree(self.config_home)
        # The cache may contain read-only directories.
        support.ro_rmtree(self.cache_home)
        for tree in (self.cache_system, self.gnupg_home):
            shutil.rmtree(tree)

        os.environ['PATH'] = self.old_path
Esempio n. 14
0
def unpack_archive_over(url, data, destdir, extract = None, type = None, start_offset = 0):
	"""Like unpack_archive, except that we unpack to a temporary directory first and
	then move things over, checking that we're not following symlinks at each stage.
	Use this when you want to unpack an unarchive into a directory which already has
	stuff in it.
	@note: Since 0.49, the leading "extract" component is removed (unlike unpack_archive).
	@since: 0.28"""
	import stat
	# Unpack into a sibling temporary directory so the final moves are simple
	# same-filesystem renames.
	tmpdir = mkdtemp(dir = destdir)
	assert extract is None or os.sep not in extract, extract
	try:
		mtimes = []

		unpack_archive(url, data, tmpdir, extract, type, start_offset)

		if extract is None:
			srcdir = tmpdir
		else:
			srcdir = os.path.join(tmpdir, extract)
			assert not os.path.islink(srcdir)

		stem_len = len(srcdir)
		for root, dirs, files in os.walk(srcdir):
			relative_root = root[stem_len + 1:] or '.'
			target_root = os.path.join(destdir, relative_root)
			try:
				# lstat (not stat) so a symlink at the target is seen, not followed.
				info = os.lstat(target_root)
			except OSError as ex:
				if ex.errno != errno.ENOENT:
					raise	# Some odd error.
				# Doesn't exist. OK.
				os.mkdir(target_root)
			else:
				if stat.S_ISLNK(info.st_mode):
					raise SafeException(_('Attempt to unpack dir over symlink "%s"!') % relative_root)
				elif not stat.S_ISDIR(info.st_mode):
					raise SafeException(_('Attempt to unpack dir over non-directory "%s"!') % relative_root)
			# Record the source directory's mtime so it can be restored after
			# the renames below.  NOTE(review): 'root' is already an absolute
			# path, so os.path.join(srcdir, root) evaluates to 'root' itself --
			# presumably intentional; confirm.
			mtimes.append((relative_root, os.lstat(os.path.join(srcdir, root)).st_mtime))

			for s in dirs:	# Symlinks are counted as directories
				src = os.path.join(srcdir, relative_root, s)
				if os.path.islink(src):
					files.append(s)

			for f in files:
				src = os.path.join(srcdir, relative_root, f)
				dest = os.path.join(destdir, relative_root, f)
				if os.path.islink(dest):
					raise SafeException(_('Attempt to unpack file over symlink "%s"!') %
							os.path.join(relative_root, f))
				os.rename(src, dest)

		# Restore directory mtimes, skipping the first entry ('.'), which
		# corresponds to destdir itself.
		for path, mtime in mtimes[1:]:
			os.utime(os.path.join(destdir, path), (mtime, mtime))
	finally:
		ro_rmtree(tmpdir)
Esempio n. 15
0
def unpack_archive_over(url, data, destdir, extract = None, type = None, start_offset = 0):
	"""Like unpack_archive, except that we unpack to a temporary directory first and
	then move things over, checking that we're not following symlinks at each stage.
	Use this when you want to unpack an unarchive into a directory which already has
	stuff in it.
	@note: Since 0.49, the leading "extract" component is removed (unlike unpack_archive).
	@since: 0.28"""
	import stat
	# Unpack into a sibling temporary directory so the final moves are simple
	# same-filesystem renames.
	tmpdir = mkdtemp(dir = destdir)
	assert extract is None or os.sep not in extract, extract
	try:
		mtimes = []

		unpack_archive(url, data, tmpdir, extract, type, start_offset)

		if extract is None:
			srcdir = tmpdir
		else:
			srcdir = os.path.join(tmpdir, extract)
			assert not os.path.islink(srcdir)

		stem_len = len(srcdir)
		for root, dirs, files in os.walk(srcdir):
			relative_root = root[stem_len + 1:] or '.'
			target_root = os.path.join(destdir, relative_root)
			try:
				# lstat (not stat) so a symlink at the target is seen, not followed.
				info = os.lstat(target_root)
			except OSError as ex:
				if ex.errno != errno.ENOENT:
					raise	# Some odd error.
				# Doesn't exist. OK.
				os.mkdir(target_root)
			else:
				if stat.S_ISLNK(info.st_mode):
					raise SafeException(_('Attempt to unpack dir over symlink "%s"!') % relative_root)
				elif not stat.S_ISDIR(info.st_mode):
					raise SafeException(_('Attempt to unpack dir over non-directory "%s"!') % relative_root)
			# Record the source directory's mtime so it can be restored after
			# the renames below.  NOTE(review): 'root' is already an absolute
			# path, so os.path.join(srcdir, root) evaluates to 'root' itself --
			# presumably intentional; confirm.
			mtimes.append((relative_root, os.lstat(os.path.join(srcdir, root)).st_mtime))

			for s in dirs:	# Symlinks are counted as directories
				src = os.path.join(srcdir, relative_root, s)
				if os.path.islink(src):
					files.append(s)

			for f in files:
				src = os.path.join(srcdir, relative_root, f)
				dest = os.path.join(destdir, relative_root, f)
				if os.path.islink(dest):
					raise SafeException(_('Attempt to unpack file over symlink "%s"!') %
							os.path.join(relative_root, f))
				os.rename(src, dest)

		# Restore directory mtimes, skipping the first entry ('.'), which
		# corresponds to destdir itself.
		for path, mtime in mtimes[1:]:
			os.utime(os.path.join(destdir, path), (mtime, mtime))
	finally:
		ro_rmtree(tmpdir)
Esempio n. 16
0
    def tearDown(self):
        """Re-raise any exception captured by the handler, then clean up."""
        if self.config.handler.ex:
            support.raise_with_traceback(self.config.handler.ex, self.config.handler.tb)

        shutil.rmtree(self.config_home)
        # The cache may contain read-only directories.
        support.ro_rmtree(self.cache_home)
        for tree in (self.cache_system, self.gnupg_home):
            shutil.rmtree(tree)

        os.environ["PATH"] = self.old_path
Esempio n. 17
0
	def tearDown(self):
		"""Re-raise any exception captured by the handler, then clean up."""
		if self.config.handler.ex:
			support.raise_with_traceback(self.config.handler.ex, self.config.handler.tb)

		shutil.rmtree(self.config_home)
		# The cache may contain read-only directories.
		support.ro_rmtree(self.cache_home)
		for tree in (self.cache_system, self.gnupg_home):
			shutil.rmtree(tree)

		os.environ['PATH'] = self.old_path
Esempio n. 18
0
    def cook(self,
             required_digest,
             recipe,
             stores,
             force=False,
             impl_hint=None):
        """Follow a Recipe.
		@param required_digest: digest the finished tree must have
		@param recipe: the sequence of steps to run
		@param stores: stores to add the result to (first store is used)
		@param force: accepted for API compatibility; not referenced in this body
		@param impl_hint: the Implementation this is for (if any) as a hint for the GUI
		@see: L{download_impl} uses this method when appropriate"""
        # Maybe we're taking this metaphor too far?

        # Start a download for each ingredient
        blockers = []
        steps = []
        try:
            for stepdata in recipe.steps:
                # Each recipe step type has a matching StepRunner subclass.
                cls = StepRunner.class_for(stepdata)
                step = cls(stepdata, impl_hint=impl_hint)
                step.prepare(self, blockers)
                steps.append(step)

            # tasks coroutine: yield until every download blocker has fired.
            while blockers:
                yield blockers
                tasks.check(blockers)
                blockers = [b for b in blockers if not b.happened]

            if self.external_store:
                # Note: external_store will not yet work with non-<archive> steps.
                streams = [step.stream for step in steps]
                self._add_to_external_store(required_digest, recipe.steps,
                                            streams)
            else:
                # Create an empty directory for the new implementation
                store = stores.stores[0]
                tmpdir = store.get_tmp_dir_for(required_digest)
                try:
                    # Unpack each of the downloaded archives into it in turn
                    for step in steps:
                        step.apply(tmpdir)
                    # Check that the result is correct and store it in the cache
                    store.check_manifest_and_rename(required_digest, tmpdir)
                    tmpdir = None  # ownership passed to the store
                finally:
                    # If unpacking fails, remove the temporary directory
                    if tmpdir is not None:
                        support.ro_rmtree(tmpdir)
        finally:
            # Always close the steps (releases downloaded streams).
            for step in steps:
                try:
                    step.close()
                except IOError as ex:
                    # Can get "close() called during
                    # concurrent operation on the same file
                    # object." if we're unlucky (Python problem).
                    logger.info("Failed to close: %s", ex)
Esempio n. 19
0
	def cook(self, required_digest, recipe, stores, force = False, impl_hint = None, dry_run = False, may_use_mirror = True):
		"""Follow a Recipe.
		@type required_digest: str
		@type recipe: L{Recipe}
		@type stores: L{zeroinstall.zerostore.Stores}
		@type force: bool
		@param impl_hint: the Implementation this is for (as a hint for the GUI, and to allow local files)
		@type dry_run: bool
		@type may_use_mirror: bool
		@see: L{download_impl} uses this method when appropriate"""
		# Maybe we're taking this metaphor too far?

		# Start a download for each ingredient
		blockers = []
		steps = []
		try:
			for stepdata in recipe.steps:
				# Each recipe step type has a matching StepRunner subclass.
				cls = StepRunner.class_for(stepdata)
				step = cls(stepdata, impl_hint = impl_hint, may_use_mirror = may_use_mirror)
				step.prepare(self, blockers)
				steps.append(step)

			# tasks coroutine: yield until every download blocker has fired.
			while blockers:
				yield blockers
				tasks.check(blockers)
				blockers = [b for b in blockers if not b.happened]

			if self.external_store:
				# Note: external_store will not work with non-<archive> steps.
				streams = [step.stream for step in steps]
				self._add_to_external_store(required_digest, recipe.steps, streams)
			else:
				# Create an empty directory for the new implementation
				store = stores.stores[0]
				tmpdir = store.get_tmp_dir_for(required_digest)
				try:
					# Unpack each of the downloaded archives into it in turn
					for step in steps:
						step.apply(tmpdir)
					# Check that the result is correct and store it in the cache
					stores.check_manifest_and_rename(required_digest, tmpdir, dry_run=dry_run)
					tmpdir = None	# ownership passed to the stores
				finally:
					# If unpacking fails, remove the temporary directory
					if tmpdir is not None:
						support.ro_rmtree(tmpdir)
		finally:
			# Always close the steps (releases downloaded streams).
			for step in steps:
				try:
					step.close()
				except IOError as ex:
					# Can get "close() called during
					# concurrent operation on the same file
					# object." if we're unlucky (Python problem).
					logger.info("Failed to close: %s", ex)
Esempio n. 20
0
def backup_if_exists(name):
	"""Rename 'name' to 'name~', deleting any previous backup of that name.
	Does nothing if 'name' does not exist."""
	if not os.path.exists(name):
		return
	backup = name + '~'
	if os.path.exists(backup):
		# print as a function (not the Python-2-only statement) also parses
		# on Python 3.
		print("(deleting old backup %s)" % backup)
		if os.path.isdir(backup):
			ro_rmtree(backup)
		else:
			os.unlink(backup)
	portable_rename(name, backup)
	print("(renamed old %s as %s; will delete on next run)" % (name, backup))
Esempio n. 21
0
    def cook(self,
             required_digest,
             recipe,
             stores,
             force=False,
             impl_hint=None):
        """Follow a Recipe.
		@param required_digest: digest the finished tree must have
		@param recipe: the sequence of archive steps to download and unpack
		@param stores: stores to add the result to (first store is used)
		@param force: passed through to download_archive
		@param impl_hint: the Implementation this is for (if any) as a hint for the GUI
		@see: L{download_impl} uses this method when appropriate"""
        # Maybe we're taking this metaphor too far?

        # Start downloading all the ingredients.
        streams = {}  # Streams collected from successful downloads

        # Start a download for each ingredient
        blockers = []
        for step in recipe.steps:
            blocker, stream = self.download_archive(step,
                                                    force=force,
                                                    impl_hint=impl_hint)
            assert stream
            blockers.append(blocker)
            streams[step] = stream

        # tasks coroutine: yield until every download blocker has fired.
        while blockers:
            yield blockers
            tasks.check(blockers)
            blockers = [b for b in blockers if not b.happened]

        from zeroinstall.zerostore import unpack

        # Create an empty directory for the new implementation
        store = stores.stores[0]
        tmpdir = store.get_tmp_dir_for(required_digest)
        try:
            # Unpack each of the downloaded archives into it in turn
            for step in recipe.steps:
                stream = streams[step]
                stream.seek(0)  # rewind to the start before unpacking
                unpack.unpack_archive_over(step.url,
                                           stream,
                                           tmpdir,
                                           extract=step.extract,
                                           type=step.type,
                                           start_offset=step.start_offset or 0)
            # Check that the result is correct and store it in the cache
            store.check_manifest_and_rename(required_digest, tmpdir)
            tmpdir = None  # ownership passed to the store
        finally:
            # If unpacking fails, remove the temporary directory
            if tmpdir is not None:
                from zeroinstall import support
                support.ro_rmtree(tmpdir)
Esempio n. 22
0
def backup_if_exists(name):
    """Rename 'name' to 'name~', deleting any previous backup of that name.
    Does nothing if 'name' does not exist."""
    if not os.path.exists(name):
        return
    backup = name + '~'
    if os.path.exists(backup):
        # print as a function (not the Python-2-only statement) also parses
        # on Python 3.
        print("(deleting old backup %s)" % backup)
        if os.path.isdir(backup):
            ro_rmtree(backup)
        else:
            os.unlink(backup)
    portable_rename(name, backup)
    print("(renamed old %s as %s; will delete on next run)" % (name, backup))
Esempio n. 23
0
	def check_manifest_and_rename(self, required_digest, tmp, dry_run = False):
		"""Check that tmp has the required_digest and move it into the stores. On success, tmp no longer exists.
		@since: 2.3"""
		if len(self.stores) > 1:
			system_store = self.get_first_system_store()
			try:
				system_store.add_dir_to_cache(required_digest, tmp, dry_run = dry_run)
			except NonwritableStore:
				# Fall through and let the user store / helper handle it.
				logger.debug(_("%s not-writable. Trying helper instead."), system_store)
			else:
				support.ro_rmtree(tmp)
				return
		self.stores[0].check_manifest_and_rename(required_digest, tmp, dry_run = dry_run, try_helper = True)
Esempio n. 24
0
def extract_gem(stream, destdir, extract = None, start_offset = 0):
	"""Unpack a Ruby gem: pull the inner data.tar.gz out of the outer tar,
	then extract its contents into destdir.
	@param stream: the gem file's stream
	@param destdir: directory to extract into
	@param extract: single sub-directory to extract, or None for everything
	@param start_offset: offset into stream where the gem starts
	@since: 0.53"""
	stream.seek(start_offset)
	payload = 'data.tar.gz'
	tmpdir = mkdtemp(dir = destdir)
	try:
		extract_tar(stream, destdir=tmpdir, extract=payload, decompress=None)
		# Open in binary mode ('rb'): the payload is a gzipped tar, not text.
		# The context manager also guarantees the handle is closed.
		with open(os.path.join(tmpdir, payload), 'rb') as payload_stream:
			extract_tar(payload_stream, destdir=destdir, extract=extract, decompress='gzip')
	finally:
		ro_rmtree(tmpdir)
Esempio n. 25
0
def extract_gem(stream, destdir, extract = None, start_offset = 0):
	"""Unpack a Ruby gem: pull the inner data.tar.gz out of the outer tar,
	then extract its contents into destdir.
	@param stream: the gem file's stream
	@param destdir: directory to extract into
	@param extract: single sub-directory to extract, or None for everything
	@param start_offset: offset into stream where the gem starts
	@since: 0.53"""
	stream.seek(start_offset)
	payload = 'data.tar.gz'
	tmpdir = mkdtemp(dir = destdir)
	try:
		extract_tar(stream, destdir=tmpdir, extract=payload, decompress=None)
		# The 'with' block closes the payload file itself; the old
		# payload_stream/finally bookkeeping closed it a second time.
		with open(os.path.join(tmpdir, payload), 'rb') as payload_stream:
			extract_tar(payload_stream, destdir=destdir, extract=extract, decompress='gzip')
	finally:
		ro_rmtree(tmpdir)
Esempio n. 26
0
    def add_archive_to_cache(self,
                             required_digest,
                             data,
                             url,
                             extract=None,
                             type=None,
                             start_offset=0,
                             try_helper=False,
                             dry_run=False):
        """@type required_digest: str
		@type data: file
		@type url: str
		@type extract: str | None
		@type type: str | None
		@type start_offset: int
		@type try_helper: bool
		@type dry_run: bool"""
        from . import unpack

        if self.lookup(required_digest):
            logger.info(_("Not adding %s as it already exists!"),
                        required_digest)
            return

        tmp = self.get_tmp_dir_for(required_digest)
        try:
            unpack.unpack_archive(url,
                                  data,
                                  tmp,
                                  extract,
                                  type=type,
                                  start_offset=start_offset)
        except:
            import shutil
            shutil.rmtree(tmp)
            raise

        try:
            self.check_manifest_and_rename(required_digest,
                                           tmp,
                                           extract,
                                           try_helper=try_helper,
                                           dry_run=dry_run)
        except Exception:
            #warn(_("Leaving extracted directory as %s"), tmp)
            support.ro_rmtree(tmp)
            raise
Esempio n. 27
0
    def check_manifest_and_rename(self, required_digest, tmp, dry_run=False):
        """Check that tmp has the required_digest and move it into the stores. On success, tmp no longer exists.
		@since: 2.3"""
        if len(self.stores) > 1:
            store = self.get_first_system_store()
            try:
                store.add_dir_to_cache(required_digest, tmp, dry_run=dry_run)
                support.ro_rmtree(tmp)
                return
            except NonwritableStore:
                logger.debug(_("%s not-writable. Trying helper instead."),
                             store)
                pass
        self.stores[0].check_manifest_and_rename(required_digest,
                                                 tmp,
                                                 dry_run=dry_run,
                                                 try_helper=True)
Esempio n. 28
0
def extract_gem(stream, destdir, extract=None, start_offset=0):
    """Unpack a Ruby gem: pull out the inner data.tar.gz and extract it into destdir.
	@type stream: file
	@type destdir: str
	@type start_offset: int
	@since: 0.53"""
    stream.seek(start_offset)
    # A gem is an uncompressed tar whose payload lives in data.tar.gz;
    # extract that to a scratch directory first, then unpack the payload.
    payload = "data.tar.gz"
    tmpdir = mkdtemp(dir=destdir)
    try:
        extract_tar(stream, destdir=tmpdir, extract=payload, decompress=None)
        # The 'with' block closes payload_stream itself; the old code also
        # tracked it and closed it again in 'finally', which was redundant.
        with open(os.path.join(tmpdir, payload), "rb") as payload_stream:
            extract_tar(payload_stream, destdir=destdir, extract=extract, decompress="gzip")
    finally:
        ro_rmtree(tmpdir)
Esempio n. 29
0
	def check_manifest_and_rename(self, required_digest, tmp, extract = None, try_helper = False):
		"""Check that tmp[/extract] has the required_digest.
		On success, rename the checked directory to the digest, and
		make the whole tree read-only.
		@param required_digest: the digest the tree must hash to
		@param tmp: temporary directory containing the unpacked tree
		@param extract: if given, only this subdirectory of tmp is checked and kept
		@param try_helper: attempt to use privileged helper to import to system cache first (since 0.26)
		@type try_helper: bool
		@raise BadDigest: if the input directory doesn't match the given digest"""
		# Work out which directory actually gets hashed and stored.
		if extract:
			extracted = os.path.join(tmp, extract)
			if not os.path.isdir(extracted):
				raise Exception(_('Directory %s not found in archive') % extract)
		else:
			extracted = tmp

		from . import manifest

		# Fix up permissions before computing the manifest.
		manifest.fixup_permissions(extracted)

		alg, required_value = manifest.splitID(required_digest)
		actual_digest = alg.getID(manifest.add_manifest_file(extracted, alg))
		if actual_digest != required_digest:
			raise BadDigest(_('Incorrect manifest -- archive is corrupted.\n'
					'Required digest: %(required_digest)s\n'
					'Actual digest: %(actual_digest)s\n') %
					{'required_digest': required_digest, 'actual_digest': actual_digest})

		if try_helper:
			# The privileged helper imports into the system cache; on success
			# the local copy is no longer needed.
			if self._add_with_helper(required_digest, extracted):
				support.ro_rmtree(tmp)
				return
			info(_("Can't add to system store. Trying user store instead."))

		final_name = os.path.join(self.dir, required_digest)
		if os.path.isdir(final_name):
			raise Exception(_("Item %s already stored.") % final_name) # XXX: not really an error

		# If we just want a subdirectory then the rename will change
		# extracted/.. and so we'll need write permission on 'extracted'

		os.chmod(extracted, 0o755)
		os.rename(extracted, final_name)
		# Lock the stored tree down again once it is in place.
		os.chmod(final_name, 0o555)

		if extract:
			# The wrapper directory is now empty; remove it.
			os.rmdir(tmp)
Esempio n. 30
0
	def cook(self, required_digest, recipe, stores, force = False, impl_hint = None):
		"""Follow a Recipe.
		This is a tasks coroutine: callers must iterate it and wait on the
		yielded blockers until all downloads have completed.
		@param required_digest: digest the assembled result must match
		@param recipe: the recipe whose steps are downloaded and unpacked in order
		@param stores: stores providing the temporary directory and final import
		@param impl_hint: the Implementation this is for (if any) as a hint for the GUI
		@see: L{download_impl} uses this method when appropriate"""
		# Maybe we're taking this metaphor too far?

		# Start downloading all the ingredients.
		streams = {}	# Streams collected from successful downloads

		# Start a download for each ingredient
		blockers = []
		for step in recipe.steps:
			blocker, stream = self.download_archive(step, force = force, impl_hint = impl_hint)
			assert stream
			blockers.append(blocker)
			streams[step] = stream

		# Yield until every download has finished (tasks framework).
		while blockers:
			yield blockers
			tasks.check(blockers)
			blockers = [b for b in blockers if not b.happened]

		from zeroinstall.zerostore import unpack

		# Create an empty directory for the new implementation
		store = stores.stores[0]
		tmpdir = store.get_tmp_dir_for(required_digest)
		try:
			# Unpack each of the downloaded archives into it in turn
			for step in recipe.steps:
				stream = streams[step]
				stream.seek(0)
				unpack.unpack_archive_over(step.url, stream, tmpdir,
						extract = step.extract,
						type = step.type,
						start_offset = step.start_offset or 0)
			# Check that the result is correct and store it in the cache
			store.check_manifest_and_rename(required_digest, tmpdir)
			tmpdir = None
		finally:
			# If unpacking fails, remove the temporary directory
			if tmpdir is not None:
				from zeroinstall import support
				support.ro_rmtree(tmpdir)
Esempio n. 31
0
def process_archives(parent):
    """Recursively process each <archive> and <file> child of parent:
    download it if missing, then fill in its 'size' attribute and (for
    archives) guess a missing 'extract' attribute by unpacking it.
    NOTE(review): relies on module-level template_dir -- confirm it is set
    before this is called."""
    for elem in parent.childNodes:
        if elem.namespaceURI != namespaces.XMLNS_IFACE:
            continue

        if elem.localName in ("archive", "file"):
            # Download the archive if missing
            href = elem.getAttribute("href")
            assert href, "missing href on <archive>"
            local_copy = os.path.join(template_dir, os.path.basename(href))
            if not os.path.exists(local_copy):
                print("Downloading {href} to {local_copy}".format(**locals()))
                req = request.urlopen(href)
                # Download to a .part file first so an interrupted transfer
                # never leaves a truncated file under the final name.
                with open(local_copy + ".part", "wb") as local_stream:
                    shutil.copyfileobj(req, local_stream)
                support.portable_rename(local_copy + ".part", local_copy)
                req.close()

            # Set the size attribute
            elem.setAttribute("size", str(os.stat(local_copy).st_size))

            if elem.localName == "archive":
                if not elem.hasAttribute("extract"):
                    # Unpack (a rather inefficient way to guess the 'extract' attribute)
                    tmpdir = unpack.unpack_to_tmp(href, local_copy, elem.getAttribute("type"))
                    try:
                        unpack_dir = os.path.join(tmpdir, "unpacked")

                        # Set the extract attribute
                        extract = unpack.guess_extract(unpack_dir)
                        if extract:
                            elem.setAttribute("extract", extract)
                            unpack_dir = os.path.join(unpack_dir, extract)
                            assert os.path.isdir(unpack_dir), "Not a directory: {dir}".format(dir=unpack_dir)
                    finally:
                        support.ro_rmtree(tmpdir)
                else:
                    extract = elem.getAttribute("extract")
                    if extract == "":
                        # Remove empty element
                        elem.removeAttribute("extract")

        elif elem.localName == "recipe":
            # Recipes nest further <archive> steps; recurse into them.
            process_archives(elem)
Esempio n. 32
0
	def cook(self, required_digest, recipe, stores, force = False, impl_hint = None):
		"""Follow a Recipe.
		This is a tasks coroutine: callers must iterate it and wait on the
		yielded blockers until all step downloads have completed.
		@param required_digest: digest the assembled result must match
		@param recipe: the recipe whose steps are prepared and applied in order
		@param stores: stores providing the temporary directory and final import
		@param impl_hint: the Implementation this is for (if any) as a hint for the GUI
		@see: L{download_impl} uses this method when appropriate"""
		# Maybe we're taking this metaphor too far?

		# Start a download for each ingredient
		blockers = []
		steps = []
		try:
			for stepdata in recipe.steps:
				cls = StepRunner.class_for(stepdata)
				step = cls(stepdata, impl_hint=impl_hint)
				step.prepare(self, blockers)
				steps.append(step)

			# Yield until every download has finished (tasks framework).
			while blockers:
				yield blockers
				tasks.check(blockers)
				blockers = [b for b in blockers if not b.happened]


			if self.external_store:
				# Note: external_store will not yet work with non-<archive> steps.
				streams = [step.stream for step in steps]
				self._add_to_external_store(required_digest, recipe.steps, streams)
			else:
				# Create an empty directory for the new implementation
				store = stores.stores[0]
				tmpdir = store.get_tmp_dir_for(required_digest)
				try:
					# Unpack each of the downloaded archives into it in turn
					for step in steps:
						step.apply(tmpdir)
					# Check that the result is correct and store it in the cache
					store.check_manifest_and_rename(required_digest, tmpdir)
					tmpdir = None
				finally:
					# If unpacking fails, remove the temporary directory
					if tmpdir is not None:
						support.ro_rmtree(tmpdir)
		finally:
			# Always release step resources (open streams etc), even on error.
			for step in steps:
				step.close()
Esempio n. 33
0
def handle(config, options, args):
	"""Print the manifest and/or digest of a directory or archive.
	@type args: [str]
	@raise UsageError: if the wrong number of arguments is given
	@raise SafeException: if the algorithm is unknown or extract is used with a directory"""
	if len(args) == 1:
		extract = None
	elif len(args) == 2:
		extract = args[1]
	else:
		raise UsageError()

	source = args[0]
	alg = manifest.algorithms.get(options.algorithm or 'sha1new', None)
	if alg is None:
		# Bug fix: report the name the user asked for; 'alg' is always None here.
		raise SafeException(_('Unknown algorithm "%s"') % options.algorithm)

	show_manifest = bool(options.manifest)
	show_digest = bool(options.digest) or not show_manifest

	def do_manifest(d):
		# Print the manifest lines and/or the digest ID for directory d.
		if extract is not None:
			d = os.path.join(d, extract)
		digest = alg.new_digest()
		for line in alg.generate_manifest(d):
			if show_manifest:
				print(line)
			digest.update((line + '\n').encode('utf-8'))
		if show_digest:
			print(alg.getID(digest))

	if os.path.isdir(source):
		if extract is not None:
			raise SafeException("Can't use extract with a directory")
		do_manifest(source)
	else:
		# Unpack the archive to a temporary directory, then manifest that.
		tmpdir = tempfile.mkdtemp()
		try:
			# Context manager replaces the manual data/close bookkeeping.
			with open(source, 'rb') as data:
				unpack.unpack_archive(source, data, tmpdir, extract)
			do_manifest(tmpdir)
		finally:
			support.ro_rmtree(tmpdir)
Esempio n. 34
0
def handle(config, options, args):
    """Print the manifest and/or digest of a directory or archive.
    @type args: [str]
    @raise UsageError: if the wrong number of arguments is given
    @raise SafeException: if the algorithm is unknown or extract is used with a directory"""
    if len(args) == 1:
        extract = None
    elif len(args) == 2:
        extract = args[1]
    else:
        raise UsageError()

    source = args[0]
    alg = manifest.algorithms.get(options.algorithm or 'sha1new', None)
    if alg is None:
        # Bug fix: report the name the user asked for; 'alg' is always None here.
        raise SafeException(_('Unknown algorithm "%s"') % options.algorithm)

    show_manifest = bool(options.manifest)
    show_digest = bool(options.digest) or not show_manifest

    def do_manifest(d):
        # Print the manifest lines and/or the digest ID for directory d.
        if extract is not None:
            d = os.path.join(d, extract)
        digest = alg.new_digest()
        for line in alg.generate_manifest(d):
            if show_manifest:
                print(line)
            digest.update((line + '\n').encode('utf-8'))
        if show_digest:
            print(alg.getID(digest))

    if os.path.isdir(source):
        if extract is not None:
            raise SafeException("Can't use extract with a directory")
        do_manifest(source)
    else:
        # Unpack the archive to a temporary directory, then manifest that.
        tmpdir = tempfile.mkdtemp()
        try:
            # Context manager replaces the manual data/close bookkeeping.
            with open(source, 'rb') as data:
                unpack.unpack_archive(source, data, tmpdir, extract)
            do_manifest(tmpdir)
        finally:
            support.ro_rmtree(tmpdir)
Esempio n. 35
0
def extract_gem(stream, destdir, extract=None, start_offset=0):
    """Unpack a Ruby gem: pull out the inner data.tar.gz and extract it into destdir.
	@type stream: file
	@type destdir: str
	@type start_offset: int
	@since: 0.53"""
    stream.seek(start_offset)
    # A gem is an uncompressed tar whose payload lives in data.tar.gz;
    # extract that to a scratch directory first, then unpack the payload.
    payload = 'data.tar.gz'
    tmpdir = mkdtemp(dir=destdir)
    try:
        extract_tar(stream, destdir=tmpdir, extract=payload, decompress=None)
        # The 'with' block closes payload_stream itself; the old code also
        # tracked it and closed it again in 'finally', which was redundant.
        with open(os.path.join(tmpdir, payload), 'rb') as payload_stream:
            extract_tar(payload_stream,
                        destdir=destdir,
                        extract=extract,
                        decompress='gzip')
    finally:
        ro_rmtree(tmpdir)
Esempio n. 36
0
	def add_archive_to_cache(self, required_digest, data, url, extract = None, type = None, start_offset = 0, try_helper = False):
		"""Unpack the archive stream 'data' and import the result under required_digest."""
		from . import unpack

		# Nothing to do if this digest is already cached somewhere.
		if self.lookup(required_digest):
			info(_("Not adding %s as it already exists!"), required_digest)
			return

		tmp = self.get_tmp_dir_for(required_digest)
		unpacked = False
		try:
			unpack.unpack_archive(url, data, tmp, extract, type = type, start_offset = start_offset)
			unpacked = True
		finally:
			if not unpacked:
				# A partial unpack is useless; discard it and let the error propagate.
				import shutil
				shutil.rmtree(tmp)

		try:
			self.check_manifest_and_rename(required_digest, tmp, extract, try_helper = try_helper)
		except Exception:
			# Remove the rejected (possibly read-only) tree before re-raising.
			support.ro_rmtree(tmp)
			raise
Esempio n. 37
0
	def add_archive_to_cache(self, required_digest, data, url, extract = None, type = None, start_offset = 0, try_helper = False):
		"""Unpack 'data' into a fresh temporary directory, verify its digest, and store it."""
		from . import unpack

		if self.lookup(required_digest):
			info(_("Not adding %s as it already exists!"), required_digest)
			return

		tmpdir = self.get_tmp_dir_for(required_digest)
		try:
			unpack.unpack_archive(url, data, tmpdir, extract,
					type = type, start_offset = start_offset)
		except:
			# Unpacking failed: nothing worth keeping, so delete and re-raise.
			import shutil
			shutil.rmtree(tmpdir)
			raise

		try:
			self.check_manifest_and_rename(required_digest, tmpdir, extract,
					try_helper = try_helper)
		except Exception:
			# Digest check or import failed; clean up the (read-only) tree.
			support.ro_rmtree(tmpdir)
			raise
Esempio n. 38
0
	def add_dir_to_cache(self, required_digest, path, try_helper = False):
		"""Copy the tree at path into the cache under required_digest.
		@param required_digest: the expected digest
		@type required_digest: str
		@param path: the root of the tree to copy
		@type path: str
		@param try_helper: attempt to use privileged helper before user cache (since 0.26)
		@type try_helper: bool
		@raise BadDigest: if the contents don't match the given digest."""
		if self.lookup(required_digest):
			info(_("Not adding %s as it already exists!"), required_digest)
			return

		tmp = self.get_tmp_dir_for(required_digest)
		imported = False
		try:
			_copytree2(path, tmp)
			self.check_manifest_and_rename(required_digest, tmp, try_helper = try_helper)
			imported = True
		finally:
			if not imported:
				# Warn and clean up the half-imported copy; the original
				# exception still propagates.
				warn(_("Error importing directory."))
				warn(_("Deleting %s"), tmp)
				support.ro_rmtree(tmp)
Esempio n. 39
0
 def tearDown(self):
     """Leave the scratch directory, then delete it."""
     # Step out first: the current directory can't be removed on some platforms.
     parent = os.path.join(self.tmpdir, os.path.pardir)
     os.chdir(parent)
     ro_rmtree(self.tmpdir)
Esempio n. 40
0
    def testBackgroundApp(self):
        """End-to-end test of background app updating: create an app, then
        drive download_selections() through the no-update, metadata-changed,
        GUI-needed, no-DISPLAY, untrusted-key and rate-limited cases."""
        my_dbus.system_services = {
            "org.freedesktop.NetworkManager": {
                "/org/freedesktop/NetworkManager": NetworkManager()
            }
        }

        trust.trust_db.trust_key('DE937DD411906ACF7C263B396FCF121BE2390E0B',
                                 'example.com:8000')

        global ran_gui

        with output_suppressed():
            # Select a version of Hello
            run_server('Hello.xml', '6FCF121BE2390E0B.gpg', 'HelloWorld.tgz')
            r = Requirements('http://example.com:8000/Hello.xml')
            driver = Driver(requirements=r, config=self.config)
            tasks.wait_for_blocker(driver.solve_with_downloads())
            assert driver.solver.ready
            kill_server_process()

            # Save it as an app
            app = self.config.app_mgr.create_app('test-app', r)
            app.set_selections(driver.solver.selections)
            timestamp = os.path.join(app.path, 'last-checked')
            last_check_attempt = os.path.join(app.path, 'last-check-attempt')
            selections_path = os.path.join(app.path, 'selections.xml')

            def reset_timestamps():
                # Make the app look stale (1970) so a background check triggers.
                global ran_gui
                ran_gui = False
                os.utime(timestamp, (1, 1))  # 1970
                os.utime(selections_path, (1, 1))
                if os.path.exists(last_check_attempt):
                    os.unlink(last_check_attempt)

            # Download the implementation
            sels = app.get_selections(may_update=True)
            run_server('HelloWorld.tgz')
            tasks.wait_for_blocker(app.download_selections(sels))
            kill_server_process()

            # Not time for a background update yet
            self.config.freshness = 100
            dl = app.download_selections(app.get_selections(may_update=True))
            assert dl == None
            assert not ran_gui

            # Trigger a background update - no updates found
            reset_timestamps()
            run_server('Hello.xml')
            with trapped_exit(1):
                dl = app.download_selections(
                    app.get_selections(may_update=True))
                assert dl == None
            assert not ran_gui
            self.assertNotEqual(1, os.stat(timestamp).st_mtime)
            self.assertEqual(1, os.stat(selections_path).st_mtime)
            kill_server_process()

            # Change the selections
            sels_path = os.path.join(app.path, 'selections.xml')
            with open(sels_path) as stream:
                old = stream.read()
            with open(sels_path, 'w') as stream:
                stream.write(old.replace('Hello', 'Goodbye'))

            # Trigger another background update - metadata changes found
            reset_timestamps()
            run_server('Hello.xml')
            with trapped_exit(1):
                dl = app.download_selections(
                    app.get_selections(may_update=True))
                assert dl == None
            assert not ran_gui
            self.assertNotEqual(1, os.stat(timestamp).st_mtime)
            self.assertNotEqual(1, os.stat(selections_path).st_mtime)
            kill_server_process()

            # Trigger another background update - GUI needed now

            # Delete cached implementation so we need to download it again
            stored = sels.selections[
                'http://example.com:8000/Hello.xml'].get_path(
                    self.config.stores)
            assert os.path.basename(stored).startswith('sha1')
            ro_rmtree(stored)

            # Replace with a valid local feed so we don't have to download immediately
            with open(sels_path, 'w') as stream:
                stream.write(local_hello)

            os.environ['DISPLAY'] = 'dummy'
            reset_timestamps()
            run_server('Hello.xml')
            with trapped_exit(1):
                dl = app.download_selections(
                    app.get_selections(may_update=True))
                assert dl == None
            assert ran_gui  # (so doesn't actually update)
            kill_server_process()

            # Now again with no DISPLAY
            reset_timestamps()
            del os.environ['DISPLAY']
            run_server('Hello.xml', 'HelloWorld.tgz')
            with trapped_exit(1):
                dl = app.download_selections(
                    app.get_selections(may_update=True))
                assert dl == None
            assert not ran_gui  # (so doesn't actually update)

            self.assertNotEqual(1, os.stat(timestamp).st_mtime)
            self.assertNotEqual(1, os.stat(selections_path).st_mtime)
            kill_server_process()

            sels = app.get_selections()
            sel, = sels.selections.values()
            self.assertEqual("sha1=3ce644dc725f1d21cfcf02562c76f375944b266a",
                             sel.id)

            # Untrust the key
            trust.trust_db.untrust_key(
                'DE937DD411906ACF7C263B396FCF121BE2390E0B', 'example.com:8000')

            os.environ['DISPLAY'] = 'dummy'
            reset_timestamps()
            run_server('Hello.xml')
            with trapped_exit(1):
                #import logging; logging.getLogger().setLevel(logging.INFO)
                dl = app.download_selections(
                    app.get_selections(may_update=True))
                assert dl == None
            assert ran_gui
            kill_server_process()

            # Update not triggered because of last-check-attempt
            ran_gui = False
            os.utime(timestamp, (1, 1))  # 1970
            os.utime(selections_path, (1, 1))
            dl = app.download_selections(app.get_selections(may_update=True))
            assert dl == None
            assert not ran_gui
Esempio n. 41
0
	def tearDown(self):
		"""Run base cleanup, remove the scratch tree, and verify the umask."""
		BaseTest.tearDown(self)

		support.ro_rmtree(self.tmpdir)

		# Bug fix: the umask call must not live inside 'assert' -- asserts are
		# stripped under 'python -O', which silently skipped the side effect.
		old_umask = os.umask(0o022)
		assert old_umask == 0o022
Esempio n. 42
0
	def compile_and_register(self, sels, forced_iface_uri = None):
		"""Compile the program described by 'sels' and register the built feed.
		If forced_iface_uri, register as an implementation of this interface,
		ignoring the any <feed-for>, etc.
		NOTE: this is a tasks coroutine; callers must iterate it and wait on
		the yielded blockers."""

		buildenv = BuildEnv(need_config = False)
		buildenv.config.set('compile', 'interface', sels.interface)
		buildenv.config.set('compile', 'selections', 'selections.xml')

		# Download any required packages now, so we can use the GUI to request confirmation, etc
		download_missing = sels.download_missing(self.config, include_packages = True)
		if download_missing:
			yield download_missing
			tasks.check(download_missing)

		tmpdir = tempfile.mkdtemp(prefix = '0compile-')
		try:
			os.chdir(tmpdir)

			# Write configuration for build...

			buildenv.save()

			sel_file = open('selections.xml', 'w')
			try:
				doc = sels.toDOM()
				doc.writexml(sel_file)
				sel_file.write('\n')
			finally:
				sel_file.close()

			# Do the build...

			build = self.spawn_build(buildenv.iface_name)
			if build:
				yield build
				tasks.check(build)

			# Register the result...
			dom = minidom.parse(buildenv.local_iface_file)

			feed_for_elem, = dom.getElementsByTagNameNS(namespaces.XMLNS_IFACE, 'feed-for')
			claimed_iface = feed_for_elem.getAttribute('interface')

			if forced_iface_uri is not None:
				if forced_iface_uri != claimed_iface:
					self.note("WARNING: registering as feed for {forced}, though feed claims to be for {claimed}".format(
						forced = forced_iface_uri,
						claimed = claimed_iface))
			else:
				forced_iface_uri = claimed_iface		# (the top-level interface being built)

			version = sels.selections[sels.interface].version

			site_package_versions_dir = basedir.save_data_path('0install.net', 'site-packages',
						*model.escape_interface_uri(forced_iface_uri))
			leaf =  '%s-%s' % (version, uname[4])
			site_package_dir = os.path.join(site_package_versions_dir, leaf)
			self.note("Storing build in %s" % site_package_dir)

			# 1. Copy new version in under a temporary name. Names starting with '.' are ignored by 0install.
			tmp_distdir = os.path.join(site_package_versions_dir, '.new-' + leaf)
			shutil.copytree(buildenv.distdir, tmp_distdir, symlinks = True)

			# 2. Rename the previous build to .old-VERSION (deleting that if it already existed)
			if os.path.exists(site_package_dir):
				self.note("(moving previous build out of the way)")
				previous_build_dir = os.path.join(site_package_versions_dir, '.old-' + leaf)
				if os.path.exists(previous_build_dir):
					shutil.rmtree(previous_build_dir)
				os.rename(site_package_dir, previous_build_dir)
			else:
				previous_build_dir = None

			# 3. Rename the new version immediately after renaming away the old one to minimise time when there's
			# no version.
			os.rename(tmp_distdir, site_package_dir)

			# 4. Delete the old version.
			if previous_build_dir:
				self.note("(deleting previous build)")
				shutil.rmtree(previous_build_dir)

			local_feed = os.path.join(site_package_dir, '0install', 'feed.xml')
			assert os.path.exists(local_feed), "Feed %s not found!" % local_feed

			# Reload - our 0install will detect the new feed automatically
			iface = self.config.iface_cache.get_interface(forced_iface_uri)
			reader.update_from_cache(iface, iface_cache = self.config.iface_cache)
			self.config.iface_cache.get_feed(local_feed, force = True)

			# Write it out - 0install will add the feed so that older 0install versions can find it
			writer.save_interface(iface)
		except:
			self.note("\nBuild failed: leaving build directory %s for inspection...\n" % tmpdir)
			raise
		else:
			# Can't delete current directory on Windows, so move to parent first
			os.chdir(os.path.join(tmpdir, os.path.pardir))

			ro_rmtree(tmpdir)
Esempio n. 43
0
 def tearDown(self):
     """Return to the original directory and delete the scratch tree."""
     # Can't delete the current directory on some platforms, so leave it first.
     os.chdir(mydir)
     ro_rmtree(self.tmp)
Esempio n. 44
0
	def testBackgroundApp(self):
		"""End-to-end test of background app updating via the OCaml CLI:
		create an app, then drive 'download' through the no-update,
		metadata-changed, GUI-needed, no-DISPLAY, untrusted-key and
		rate-limited cases, checking the -vv log output each time."""
		my_dbus.system_services = {"org.freedesktop.NetworkManager": {"/org/freedesktop/NetworkManager": NetworkManager()}}

		trust.trust_db.trust_key('DE937DD411906ACF7C263B396FCF121BE2390E0B', 'example.com:8000')

		global ran_gui

		with output_suppressed():

			# Create an app, downloading a version of Hello
			run_server('Hello.xml', '6FCF121BE2390E0B.gpg', 'HelloWorld.tgz')
			out, err = self.run_ocaml(['add', 'test-app', 'http://example.com:8000/Hello.xml'])
			assert not out, out
			assert not err, err
			kill_server_process()
			app = basedir.load_first_config(namespaces.config_site, "apps", 'test-app')
			timestamp = os.path.join(app, 'last-checked')
			last_check_attempt = os.path.join(app, 'last-check-attempt')
			selections_path = os.path.join(app, 'selections.xml')

			def reset_timestamps():
				# Make the app look stale (1970) so a background check triggers.
				global ran_gui
				ran_gui = False
				os.utime(timestamp, (1, 1))		# 1970
				os.utime(selections_path, (1, 1))
				if os.path.exists(last_check_attempt):
					os.unlink(last_check_attempt)

			# Not time for a background update yet
			self.config.freshness = 100
			self.run_ocaml(['download', 'test-app'])
			assert not ran_gui

			# Trigger a background update - no updates found
			os.environ['ZEROINSTALL_TEST_BACKGROUND'] = 'true'
			reset_timestamps()
			run_server('Hello.xml')
			# (-vv mode makes us wait for the background process to finish)
			out, err = self.run_ocaml(['download', '-vv', 'test-app'])
			assert not out, out
			assert 'Background update: no updates found for test-app' in err, err
			self.assertNotEqual(1, os.stat(timestamp).st_mtime)
			self.assertEqual(1, os.stat(selections_path).st_mtime)
			kill_server_process()

			# Change the selections
			sels_path = os.path.join(app, 'selections.xml')
			with open(sels_path) as stream:
				old = stream.read()
			with open(sels_path, 'w') as stream:
				stream.write(old.replace('Hello', 'Goodbye'))

			# Trigger another background update - metadata changes found
			reset_timestamps()
			run_server('Hello.xml')

			out, err = self.run_ocaml(['download', '-vv', 'test-app'])
			assert not out, out
			assert 'Quick solve succeeded; saving new selections' in err, err

			self.assertNotEqual(1, os.stat(timestamp).st_mtime)
			self.assertNotEqual(1, os.stat(selections_path).st_mtime)
			kill_server_process()

			# Trigger another background update - GUI needed now

			# Delete cached implementation so we need to download it again
			out, err = self.run_ocaml(['select', '--xml', 'test-app'], binary = True)
			sels = selections.Selections(qdom.parse(BytesIO(out)))
			stored = sels.selections['http://example.com:8000/Hello.xml'].get_path(self.config.stores)
			assert os.path.basename(stored).startswith('sha1')
			ro_rmtree(stored)

			# Replace with a valid local feed so we don't have to download immediately
			with open(sels_path, 'w') as stream:
				stream.write(local_hello)

			os.environ['DISPLAY'] = 'dummy'
			reset_timestamps()
			run_server('Hello.xml')
			out, err = self.run_ocaml(['download', '-vv', 'test-app'])
			assert not out, out
			assert 'get-selections-gui' in err, err
			kill_server_process()

			# Now again with no DISPLAY
			reset_timestamps()
			del os.environ['DISPLAY']
			run_server('Hello.xml', 'HelloWorld.tgz')
			out, err = self.run_ocaml(['download', '-vv', 'test-app'])
			assert not out, out
			assert 'GUI unavailable; downloading with no UI' in err, err

			self.assertNotEqual(1, os.stat(timestamp).st_mtime)
			self.assertNotEqual(1, os.stat(selections_path).st_mtime)
			kill_server_process()

			out, err = self.run_ocaml(['select', '--xml', 'test-app'], binary = True)
			sels = selections.Selections(qdom.parse(BytesIO(out)))
			sel, = sels.selections.values()
			self.assertEqual("sha1=3ce644dc725f1d21cfcf02562c76f375944b266a", sel.id)

			# Untrust the key
			trust.trust_db.untrust_key('DE937DD411906ACF7C263B396FCF121BE2390E0B', 'example.com:8000')

			os.environ['DISPLAY'] = 'dummy'
			reset_timestamps()
			run_server('Hello.xml')
			out, err = self.run_ocaml(['download', '-vv', 'test-app'])
			assert not out, out
			assert 'need to switch to GUI to confirm keys' in err, err
			assert 'get-selections-gui' in err, err
			kill_server_process()

			# Update not triggered because of last-check-attempt
			ran_gui = False
			os.utime(timestamp, (1, 1))		# 1970
			os.utime(selections_path, (1, 1))
			out, err = self.run_ocaml(['download', '-vv', 'test-app'])
			assert not out, out
			assert 'Tried to check within last hour; not trying again now' in err, err
Esempio n. 45
0
def copy_tree_with_verify(source, target, manifest_data, required_digest):
	"""Copy directory source to be a subdirectory of target if it matches the required_digest.
	manifest_data is normally source/.manifest. source and manifest_data are not trusted
	(will typically be under the control of another user).
	The copy is first done to a temporary directory in target, then renamed to the final name
	only if correct. Therefore, an invalid 'target/required_digest' will never exist.
	A successful return means than target/required_digest now exists (whether we created it or not).
	@param source: untrusted directory to copy from
	@param target: store directory to copy into
	@param manifest_data: manifest bytes describing the expected tree contents
	@param required_digest: digest string the manifest (and copy) must hash to
	@raise BadDigest: if manifest_data doesn't hash to required_digest
	@raise SafeException: if the algorithm doesn't support copying, files are
	missing from source, or the re-check of the copy fails"""
	import tempfile

	alg, digest_value = splitID(required_digest)

	if isinstance(alg, OldSHA1):
		raise SafeException(_("Sorry, the 'sha1' algorithm does not support copying."))

	# First verify that the manifest itself is what the digest claims;
	# everything else is checked against this trusted manifest.
	digest = alg.new_digest()
	digest.update(manifest_data)
	manifest_digest = alg.getID(digest)

	if manifest_digest != required_digest:
		raise BadDigest(_("Manifest has been tampered with!\n"
						  "Manifest digest: %(actual_digest)s\n"
						  "Directory name : %(required_digest)s")
						% {'actual_digest': manifest_digest, 'required_digest': required_digest})

	target_impl = os.path.join(target, required_digest)
	if os.path.isdir(target_impl):
		logger.info(_("Target directory '%s' already exists"), target_impl)
		return

	# We've checked that the source's manifest matches required_digest, so it
	# is what we want. Make a list of all the files we need to copy...

	wanted = _parse_manifest(manifest_data.decode('utf-8'))

	tmpdir = tempfile.mkdtemp(prefix = 'tmp-copy-', dir = target)
	try:
		# _copy_files removes each copied entry from 'wanted'.
		_copy_files(alg, wanted, source, tmpdir)

		if wanted:
			raise SafeException(_('Copy failed; files missing from source:') + '\n- ' +
					    '\n- '.join(wanted.keys()))

		# Make directories read-only (files are already RO)
		for root, dirs, files in os.walk(tmpdir):
			for d in dirs:
				path = os.path.join(root, d)
				mode = os.stat(path).st_mode
				os.chmod(path, mode & 0o555)

		# Check that the copy is correct
		actual_digest = alg.getID(add_manifest_file(tmpdir, alg))
		if actual_digest != required_digest:
			raise SafeException(_("Copy failed; double-check of target gave the wrong digest.\n"
					     "Unless the target was modified during the copy, this is a BUG\n"
					     "in 0store and should be reported.\n"
					     "Expected: %(required_digest)s\n"
					     "Actual:   %(actual_digest)s") % {'required_digest': required_digest, 'actual_digest': actual_digest})
		try:
			os.chmod(tmpdir, 0o755)		# need write permission to rename on MacOS X
			os.rename(tmpdir, target_impl)
			os.chmod(target_impl, 0o555)
			tmpdir = None
		except OSError:
			if not os.path.isdir(target_impl):
				raise
			# else someone else installed it already - return success
	finally:
		if tmpdir is not None:
			logger.info(_("Deleting tmpdir '%s'") % tmpdir)
			from zeroinstall.support import ro_rmtree
			ro_rmtree(tmpdir)
Esempio n. 46
0
                if os.path.islink(src):
                    files.append(s)

            for f in files:
                src = os.path.join(tmpdir, relative_root, f)
                dest = os.path.join(destdir, relative_root, f)
                if os.path.islink(dest):
                    raise SafeException(
                        _('Attempt to unpack file over symlink "%s"!') %
                        os.path.join(relative_root, f))
                os.rename(src, dest)

        for path, mtime in mtimes[1:]:
            os.utime(os.path.join(destdir, path), (mtime, mtime))
    finally:
        ro_rmtree(tmpdir)


def unpack_archive(url,
                   data,
                   destdir,
                   extract=None,
                   type=None,
                   start_offset=0):
    """Unpack stream 'data' into directory 'destdir'.

    If 'extract' is given, only that sub-directory is taken from the archive
    (i.e. destdir/extract will exist afterwards).  When no MIME type is
    supplied, the format is guessed from the URL's file extension.
    """
    mime = type_from_url(url) if type is None else type
    if mime is None:
        raise SafeException(
            _("Unknown extension (and no MIME type given) in '%s'") % url)
Esempio n. 47
0
    def tearDown(self):
        """Tear down the base fixture, remove the temp tree and verify the umask.

        The umask call is kept outside the assert: os.umask() both SETS the
        new mask and returns the previous one, so doing it inside an assert
        meant the side effect silently disappeared under ``python -O``.
        """
        BaseTest.tearDown(self)

        support.ro_rmtree(self.tmpdir)

        # os.umask() returns the previous mask while installing the new one;
        # check that the test left the expected 0o022 in place.
        old_mask = os.umask(0o022)
        assert old_mask == 0o022, old_mask
Esempio n. 48
0
	def delete(self):
		"""Remove this implementation's directory from the store.

		In SAFE_MODE the path is only printed instead of being deleted."""
		if not SAFE_MODE:
			support.ro_rmtree(self.impl_path)
		else:
			print("Delete", self.impl_path)
Esempio n. 49
0
 def delete(self):
     """Recursively delete this implementation's directory tree."""
     support.ro_rmtree(self.impl_path)
Esempio n. 50
0
    def compile_and_register(self, sels, forced_iface_uri=None):
        """Compile the given source selections and register the result.

        Runs the build in a fresh temporary directory, installs the built
        distdir into the user's site-packages area (using renames to minimise
        the window in which no version exists), then registers the new feed
        with 0install.  This is a tasks coroutine: it yields blockers while
        downloads and the build are in progress.

        @param sels: selections for the source implementation to build
        @param forced_iface_uri: if given, register as an implementation of this
        interface, ignoring any <feed-for>, etc.
        """

        buildenv = BuildEnv(need_config=False)
        buildenv.config.set('compile', 'interface', sels.interface)
        buildenv.config.set('compile', 'selections', 'selections.xml')

        # Download any required packages now, so we can use the GUI to request confirmation, etc
        download_missing = sels.download_missing(self.config,
                                                 include_packages=True)
        if download_missing:
            yield download_missing
            tasks.check(download_missing)

        tmpdir = tempfile.mkdtemp(prefix='0compile-')
        try:
            os.chdir(tmpdir)

            # Write configuration for build...

            buildenv.save()

            # 'with' guarantees the file is closed even if writexml fails.
            with open('selections.xml', 'w') as sel_file:
                doc = sels.toDOM()
                doc.writexml(sel_file)
                sel_file.write('\n')

            # Do the build...

            build = self.spawn_build(buildenv.iface_name)
            if build:
                yield build
                tasks.check(build)

            # Register the result...
            dom = minidom.parse(buildenv.local_iface_file)

            feed_for_elem, = dom.getElementsByTagNameNS(
                namespaces.XMLNS_IFACE, 'feed-for')
            claimed_iface = feed_for_elem.getAttribute('interface')

            if forced_iface_uri is not None:
                if forced_iface_uri != claimed_iface:
                    self.note(
                        "WARNING: registering as feed for {forced}, though feed claims to be for {claimed}"
                        .format(forced=forced_iface_uri,
                                claimed=claimed_iface))
            else:
                forced_iface_uri = claimed_iface  # (the top-level interface being built)

            version = sels.selections[sels.interface].version

            site_package_versions_dir = basedir.save_data_path(
                '0install.net', 'site-packages',
                *model.escape_interface_uri(forced_iface_uri))
            leaf = '%s-%s' % (version, build_target_machine_type)
            site_package_dir = os.path.join(site_package_versions_dir, leaf)
            self.note("Storing build in %s" % site_package_dir)

            # 1. Copy new version in under a temporary name. Names starting with '.' are ignored by 0install.
            tmp_distdir = os.path.join(site_package_versions_dir,
                                       '.new-' + leaf)
            shutil.copytree(buildenv.distdir, tmp_distdir, symlinks=True)

            # 2. Rename the previous build to .old-VERSION (deleting that if it already existed)
            if os.path.exists(site_package_dir):
                self.note("(moving previous build out of the way)")
                previous_build_dir = os.path.join(site_package_versions_dir,
                                                  '.old-' + leaf)
                if os.path.exists(previous_build_dir):
                    shutil.rmtree(previous_build_dir)
                os.rename(site_package_dir, previous_build_dir)
            else:
                previous_build_dir = None

            # 3. Rename the new version immediately after renaming away the old one to minimise time when there's
            # no version.
            os.rename(tmp_distdir, site_package_dir)

            # 4. Delete the old version.
            if previous_build_dir:
                self.note("(deleting previous build)")
                shutil.rmtree(previous_build_dir)

            local_feed = os.path.join(site_package_dir, '0install', 'feed.xml')
            assert os.path.exists(
                local_feed), "Feed %s not found!" % local_feed

            # Reload - our 0install will detect the new feed automatically
            iface = self.config.iface_cache.get_interface(forced_iface_uri)
            reader.update_from_cache(iface,
                                     iface_cache=self.config.iface_cache)
            self.config.iface_cache.get_feed(local_feed, force=True)

            # Write it out - 0install will add the feed so that older 0install versions can find it
            writer.save_interface(iface)

            seen_key = (forced_iface_uri, sels.selections[sels.interface].id)
            assert seen_key not in self.seen, seen_key
            self.seen[seen_key] = site_package_dir
        except:
            self.note(
                "\nBuild failed: leaving build directory %s for inspection...\n"
                % tmpdir)
            raise
        else:
            # Can't delete current directory on Windows, so move to parent first
            os.chdir(os.path.join(tmpdir, os.path.pardir))

            ro_rmtree(tmpdir)
Esempio n. 51
0
	def tearDown(self):
		"""Remove the temporary directory created for this test."""
		ro_rmtree(self.tmpdir)
Esempio n. 52
0
	def delete(self):
		"""Delete self.impl_path (in SAFE_MODE, only report what would be removed)."""
		if SAFE_MODE:
			print("Delete", self.impl_path)
			return
		support.ro_rmtree(self.impl_path)
Esempio n. 53
0
#!/usr/bin/env python
"""Test harness setup: sandbox all XDG directories under this script's directory."""

import os, sys, subprocess
from os.path import join
from xml.sax import saxutils
import time

from zeroinstall import support

launcher, = sys.argv[1:]

my_dir = os.path.dirname(os.path.abspath(__file__))

# Start from a clean implementation cache
cache_injector = join(my_dir, "cache", "0install.net", "injector")
if os.path.exists(cache_injector):
    support.ro_rmtree(cache_injector)

# Point every XDG "home" variable at a sandbox directory under my_dir...
for xdg_var, subdir in (('XDG_CONFIG_HOME', 'config'),
                        ('XDG_CACHE_HOME', 'cache'),
                        ('XDG_DATA_HOME', 'data')):
    os.environ[xdg_var] = join(my_dir, subdir)

# ...and disable the system-wide XDG search paths entirely.
for xdg_var in ('XDG_CONFIG_DIRS', 'XDG_CACHE_DIRS', 'XDG_DATA_DIRS'):
    os.environ[xdg_var] = ''

#os.environ['OCAMLRUNPARAM'] = 'b'

os.chdir(my_dir)

with open('selections.xml', 'r') as stream:
    xml = stream.read()
Esempio n. 54
0
 def delete(self):
     """Remove the stored implementation directory at self.impl_path."""
     support.ro_rmtree(self.impl_path)
Esempio n. 55
0
	def testBackgroundUnsolvable(self):
		"""Background-update behaviour when selections become unusable/unsolvable.

		Walks through: creating an app, deleting its cached implementation so
		the missing binary forces an update (first via the GUI, then without
		DISPLAY), and finally requesting a source build of a binary-only feed,
		which must fail with a clear error rather than crash.
		"""
		my_dbus.system_services = {"org.freedesktop.NetworkManager": {"/org/freedesktop/NetworkManager": NetworkManager()}}

		trust.trust_db.trust_key('DE937DD411906ACF7C263B396FCF121BE2390E0B', 'example.com:8000')

		# Create new app
		run_server('Hello.xml', '6FCF121BE2390E0B.gpg', 'HelloWorld.tgz')
		out, err = self.run_ocaml(['add', 'test-app', 'http://example.com:8000/Hello.xml'])
		kill_server_process()
		assert not out, out
		assert not err, err

		# Delete cached implementation so we need to download it again
		out, err = self.run_ocaml(['select', '--xml', 'test-app'], binary = True)
		sels = selections.Selections(qdom.parse(BytesIO(out)))
		stored = sels.selections['http://example.com:8000/Hello.xml'].get_path(self.config.stores)
		assert os.path.basename(stored).startswith('sha1')
		ro_rmtree(stored)

		out, err = self.run_ocaml(['select', '--xml', 'test-app'], binary = True)
		assert not err, err
		sels = selections.Selections(qdom.parse(BytesIO(out)))
		# Replace the selection with a bogus and unusable <package-implementation>
		sel, = sels.selections.values()
		sel.attrs['id'] = "package:dummy:badpackage"
		sel.attrs['from-feed'] = "distribution:http://example.com:8000/Hello.xml"
		sel.attrs['package'] = "badpackage"
		sel.get_command('run').qdom.attrs['path'] = '/i/dont/exist'

		app = basedir.load_first_config(namespaces.config_site, "apps", 'test-app')

		# Overwrite the app's stored selections with the sabotaged ones
		with open(os.path.join(app, 'selections.xml'), 'wt') as stream:
			doc = sels.toDOM()
			doc.writexml(stream, addindent="  ", newl="\n", encoding = 'utf-8')

		# Not time for a background update yet, but the missing binary should trigger
		# an update anyway.
		self.config.freshness = 0

		# Check we try to launch the GUI...
		os.environ['DISPLAY'] = 'dummy'
		run_server('Hello.xml', 'HelloWorld.tgz')
		out, err = self.run_ocaml(['download', '--xml', '-v', 'test-app'], binary = True)
		kill_server_process()
		err = err.decode('utf-8')
		assert 'get new selections; current ones are not usable' in err, err
		assert 'get-selections-gui' in err, err
		sels = selections.Selections(qdom.parse(BytesIO(out)))

		# Check we can also work without the GUI...
		del os.environ['DISPLAY']

		# Delete cached implementation so we need to download it again
		out, err = self.run_ocaml(['select', '--xml', 'test-app'], binary = True)
		sels = selections.Selections(qdom.parse(BytesIO(out)))
		stored = sels.selections['http://example.com:8000/Hello.xml'].get_path(self.config.stores)
		assert os.path.basename(stored).startswith('sha1')
		ro_rmtree(stored)

		run_server('Hello.xml', 'HelloWorld.tgz')
		out, err = self.run_ocaml(['download', '--xml', '-v', 'test-app'], binary = True)
		kill_server_process()
		err = err.decode('utf-8')
		assert 'get new selections; current ones are not usable' in err, err
		assert 'get-selections-gui' not in err, err
		sels = selections.Selections(qdom.parse(BytesIO(out)))

		# Now trigger a background update which discovers that no solution is possible
		timestamp = os.path.join(app, 'last-checked')
		last_check_attempt = os.path.join(app, 'last-check-attempt')
		selections_path = os.path.join(app, 'selections.xml')
		# Helper: make the app look stale so the next access triggers a background update
		def reset_timestamps():
			global ran_gui
			ran_gui = False
			os.utime(timestamp, (1, 1))		# 1970
			os.utime(selections_path, (1, 1))
			if os.path.exists(last_check_attempt):
				os.unlink(last_check_attempt)
		reset_timestamps()

		out, err = self.run_ocaml(['destroy', 'test-app'])
		assert not out, out
		assert not err, err

		run_server('Hello.xml')
		out, err = self.run_ocaml(['add', '--source', 'test-app', 'http://example.com:8000/Hello.xml'])
		assert not out, out
		assert 'We want source and this is a binary' in err, err
Esempio n. 56
0
 def tearDown(self):
     """Step out of the temp dir (cwd can't be deleted on Windows), then remove it."""
     parent = os.path.join(self.tmpdir, os.path.pardir)
     os.chdir(parent)
     ro_rmtree(self.tmpdir)
Esempio n. 57
0
def copy_tree_with_verify(source, target, manifest_data, required_digest):
    """Copy directory source to be a subdirectory of target if it matches the required_digest.

    manifest_data is normally source/.manifest. source and manifest_data are not trusted
    (will typically be under the control of another user).
    The copy is first done to a temporary directory in target, then renamed to the final name
    only if correct. Therefore, an invalid 'target/required_digest' will never exist.
    A successful return means that target/required_digest now exists (whether we created it or not).
    @param source: root of the (untrusted) tree to copy
    @type source: str
    @param target: directory that will contain the verified copy
    @type target: str
    @param manifest_data: manifest contents, hashed to check against required_digest
    @param required_digest: the digest the copied tree must match
    @type required_digest: str
    @raise BadDigest: if manifest_data does not hash to required_digest
    @raise SafeException: if files are missing, the re-check digest differs,
    or the legacy 'sha1' algorithm is requested (copying unsupported)"""
    import tempfile
    from logging import info

    alg, digest_value = splitID(required_digest)

    if isinstance(alg, OldSHA1):
        raise SafeException(
            _("Sorry, the 'sha1' algorithm does not support copying."))

    # The manifest must hash to the directory name; once that holds, the
    # (untrusted) manifest is a trustworthy list of what to copy.
    digest = alg.new_digest()
    digest.update(manifest_data)
    manifest_digest = alg.getID(digest)

    if manifest_digest != required_digest:
        raise BadDigest(
            _("Manifest has been tampered with!\n"
              "Manifest digest: %(actual_digest)s\n"
              "Directory name : %(required_digest)s") % {
                  'actual_digest': manifest_digest,
                  'required_digest': required_digest
              })

    target_impl = os.path.join(target, required_digest)
    if os.path.isdir(target_impl):
        # Already present (possibly installed by someone else) - nothing to do.
        info(_("Target directory '%s' already exists"), target_impl)
        return

    # We've checked that the source's manifest matches required_digest, so it
    # is what we want. Make a list of all the files we need to copy...

    wanted = _parse_manifest(manifest_data)

    tmpdir = tempfile.mkdtemp(prefix='tmp-copy-', dir=target)
    try:
        # _copy_files removes each copied entry from 'wanted' as it goes.
        _copy_files(alg, wanted, source, tmpdir)

        if wanted:
            raise SafeException(
                _('Copy failed; files missing from source:') + '\n- ' +
                '\n- '.join(wanted.keys()))

        # Make directories read-only (files are already RO)
        for root, dirs, files in os.walk(tmpdir):
            for d in dirs:
                path = os.path.join(root, d)
                mode = os.stat(path).st_mode
                os.chmod(path, mode & 0o555)

        # Check that the copy is correct
        actual_digest = alg.getID(add_manifest_file(tmpdir, alg))
        if actual_digest != required_digest:
            raise SafeException(
                _("Copy failed; double-check of target gave the wrong digest.\n"
                  "Unless the target was modified during the copy, this is a BUG\n"
                  "in 0store and should be reported.\n"
                  "Expected: %(required_digest)s\n"
                  "Actual:   %(actual_digest)s") % {
                      'required_digest': required_digest,
                      'actual_digest': actual_digest
                  })
        try:
            os.chmod(tmpdir,
                     0o755)  # need write permission to rename on MacOS X
            os.rename(tmpdir, target_impl)
            os.chmod(target_impl, 0o555)
            # Success: disarm the cleanup in the finally block below.
            tmpdir = None
        except OSError:
            if not os.path.isdir(target_impl):
                raise
            # else someone else installed it already - return success
    finally:
        if tmpdir is not None:
            info(_("Deleting tmpdir '%s'") % tmpdir)
            from zeroinstall.support import ro_rmtree
            ro_rmtree(tmpdir)
Esempio n. 58
0
	def testBackgroundApp(self):
		"""Background-update behaviour for a saved app.

		Covers: no update before the freshness deadline, silent background
		checks (no changes / metadata changes), falling back to the GUI when
		a download is needed and DISPLAY is set, updating without the GUI,
		and suppression of repeated checks via last-check-attempt.
		"""
		my_dbus.system_services = {"org.freedesktop.NetworkManager": {"/org/freedesktop/NetworkManager": NetworkManager()}}

		trust.trust_db.trust_key('DE937DD411906ACF7C263B396FCF121BE2390E0B', 'example.com:8000')

		global ran_gui

		with output_suppressed():
			# Select a version of Hello
			run_server('Hello.xml', '6FCF121BE2390E0B.gpg', 'HelloWorld.tgz')
			r = Requirements('http://example.com:8000/Hello.xml')
			driver = Driver(requirements = r, config = self.config)
			tasks.wait_for_blocker(driver.solve_with_downloads())
			assert driver.solver.ready
			kill_server_process()

			# Save it as an app
			app = self.config.app_mgr.create_app('test-app', r)
			app.set_selections(driver.solver.selections)
			timestamp = os.path.join(app.path, 'last-checked')
			last_check_attempt = os.path.join(app.path, 'last-check-attempt')
			selections_path = os.path.join(app.path, 'selections.xml')

			# Helper: make the app look stale so the next access triggers a background update
			def reset_timestamps():
				global ran_gui
				ran_gui = False
				os.utime(timestamp, (1, 1))		# 1970
				os.utime(selections_path, (1, 1))
				if os.path.exists(last_check_attempt):
					os.unlink(last_check_attempt)

			# Download the implementation
			sels = app.get_selections(may_update = True)
			run_server('HelloWorld.tgz')
			tasks.wait_for_blocker(app.download_selections(sels))
			kill_server_process()

			# Not time for a background update yet
			self.config.freshness = 100
			dl = app.download_selections(app.get_selections(may_update = True))
			assert dl == None
			assert not ran_gui

			# Trigger a background update - no updates found
			reset_timestamps()
			run_server('Hello.xml')
			with trapped_exit(1):
				dl = app.download_selections(app.get_selections(may_update = True))
				assert dl == None
			assert not ran_gui
			self.assertNotEqual(1, os.stat(timestamp).st_mtime)
			self.assertEqual(1, os.stat(selections_path).st_mtime)
			kill_server_process()

			# Change the selections
			sels_path = os.path.join(app.path, 'selections.xml')
			with open(sels_path) as stream:
				old = stream.read()
			with open(sels_path, 'w') as stream:
				stream.write(old.replace('Hello', 'Goodbye'))

			# Trigger another background update - metadata changes found
			reset_timestamps()
			run_server('Hello.xml')
			with trapped_exit(1):
				dl = app.download_selections(app.get_selections(may_update = True))
				assert dl == None
			assert not ran_gui
			self.assertNotEqual(1, os.stat(timestamp).st_mtime)
			self.assertNotEqual(1, os.stat(selections_path).st_mtime)
			kill_server_process()

			# Trigger another background update - GUI needed now

			# Delete cached implementation so we need to download it again
			stored = sels.selections['http://example.com:8000/Hello.xml'].get_path(self.config.stores)
			assert os.path.basename(stored).startswith('sha1')
			ro_rmtree(stored)

			# Replace with a valid local feed so we don't have to download immediately
			with open(sels_path, 'w') as stream:
				stream.write(local_hello)

			os.environ['DISPLAY'] = 'dummy'
			reset_timestamps()
			run_server('Hello.xml')
			with trapped_exit(1):
				dl = app.download_selections(app.get_selections(may_update = True))
				assert dl == None
			assert ran_gui	# (so doesn't actually update)
			kill_server_process()

			# Now again with no DISPLAY
			reset_timestamps()
			del os.environ['DISPLAY']
			run_server('Hello.xml', 'HelloWorld.tgz')
			with trapped_exit(1):
				dl = app.download_selections(app.get_selections(may_update = True))
				assert dl == None
			assert not ran_gui	# (so doesn't actually update)

			self.assertNotEqual(1, os.stat(timestamp).st_mtime)
			self.assertNotEqual(1, os.stat(selections_path).st_mtime)
			kill_server_process()

			sels = app.get_selections()
			sel, = sels.selections.values()
			self.assertEqual("sha1=3ce644dc725f1d21cfcf02562c76f375944b266a", sel.id)

			# Untrust the key
			trust.trust_db.untrust_key('DE937DD411906ACF7C263B396FCF121BE2390E0B', 'example.com:8000')

			os.environ['DISPLAY'] = 'dummy'
			reset_timestamps()
			run_server('Hello.xml')
			with trapped_exit(1):
				#import logging; logging.getLogger().setLevel(logging.INFO)
				dl = app.download_selections(app.get_selections(may_update = True))
				assert dl == None
			assert ran_gui
			kill_server_process()

			# Update not triggered because of last-check-attempt
			ran_gui = False
			os.utime(timestamp, (1, 1))		# 1970
			os.utime(selections_path, (1, 1))
			dl = app.download_selections(app.get_selections(may_update = True))
			assert dl == None
			assert not ran_gui
Esempio n. 59
0
 def apply(self, basedir):
     """Delete the tree named by this step, resolved safely within basedir.
     @type basedir: str"""
     doomed = native_path_within_base(basedir, self.stepdata.path)
     support.ro_rmtree(doomed)
Esempio n. 60
0
	def destroy_tmp(self):
		"""Delete the temporary directory, if one exists. Safe to call repeatedly."""
		if not self.tmpdir:
			return
		ro_rmtree(self.tmpdir)
		self.tmpdir = None