Example #1
	def testNoSig(self):
		stream = tempfile.TemporaryFile()
		stream.write("Hello")
		stream.seek(0)
		try:
			gpg.check_stream(stream)
			assert False
		except model.SafeException:
			pass	# OK
Example #2
	def testNoSig(self):
		with tempfile.TemporaryFile(mode = 'w+b') as stream:
			stream.write(b"Hello")
			stream.seek(0)
			try:
				gpg.check_stream(stream)
				assert False
			except model.SafeException:
				pass	# OK
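The two tests above are the same check in its Python 2 and Python 3 forms: gpg.check_stream() expects a seekable binary stream and raises model.SafeException when the data carries no signature. The sketch below condenses that pattern into a helper; it assumes the zeroinstall.injector module layout used throughout these examples, and the helper name verify_bytes is hypothetical.

	import tempfile

	from zeroinstall.injector import gpg, model	# assumed import path

	def verify_bytes(signed_xml):
		"""Return (data, sigs), or None if the input is unsigned (a sketch, not the library API)."""
		with tempfile.TemporaryFile(mode = 'w+b') as stream:
			stream.write(signed_xml)	# check_stream needs a seekable binary stream
			stream.seek(0)
			try:
				data_stream, sigs = gpg.check_stream(stream)
			except model.SafeException:
				return None	# no signature found
			try:
				return data_stream.read(), sigs
			finally:
				if data_stream is not stream:	# check_stream may hand back a new stream
					data_stream.close()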
Example #3
def check_signature(path):
	data = file(path).read()
	xml_comment = data.rfind('\n<!-- Base64 Signature')
	if xml_comment >= 0:
		data_stream, sigs = gpg.check_stream(file(path))
		sign_fn = sign_xml
		data = data[:xml_comment + 1]
		data_stream.close()
	elif data.startswith('-----BEGIN'):
		warn("Plain GPG signatures are no longer supported - not checking signature!")
		warn("Will save in XML format.")
		child = subprocess.Popen(['gpg', '--decrypt', path], stdout = subprocess.PIPE)
		data, unused = child.communicate()
		import __main__
		__main__.force_save = True
		return data, sign_xml, None
	else:
		return data, sign_unsigned, None
	for sig in sigs:
		if isinstance(sig, gpg.ValidSig):
			return data, sign_fn, sig.fingerprint
	print "ERROR: No valid signatures found!"
	for sig in sigs:
		print "Got:", sig
	ok = raw_input('Ignore and load anyway? (y/N) ').lower()
	if ok and 'yes'.startswith(ok):
		import __main__
		__main__.force_save = True
		return data, sign_unsigned, None
	sys.exit(1)
Example #4
def check_signature(path):
    with open(path, 'rb') as stream:
        data = stream.read()
    xml_comment = data.rfind(b'\n<!-- Base64 Signature')
    if xml_comment >= 0:
        with open(path, 'rb') as stream:
            data_stream, sigs = gpg.check_stream(stream)
        sign_fn = sign_xml
        data = data[:xml_comment + 1]
        data_stream.close()
    elif data.startswith(b'-----BEGIN'):
        warn(
            "Plain GPG signatures are no longer supported - not checking signature!"
        )
        warn("Will save in XML format.")
        child = subprocess.Popen(['gpg', '--decrypt', path],
                                 stdout=subprocess.PIPE)
        data, unused = child.communicate()
        import __main__
        __main__.force_save = True
        return data, sign_xml, None
    else:
        return data, sign_unsigned, None
    for sig in sigs:
        if isinstance(sig, gpg.ValidSig):
            return data, sign_fn, sig.fingerprint
    print("ERROR: No valid signatures found!")
    for sig in sigs:
        print("Got:", sig)
    ok = input('Ignore and load anyway? (y/N) ').lower()
    if ok and 'yes'.startswith(ok):
        import __main__
        __main__.force_save = True
        return data, sign_unsigned, None
    sys.exit(1)
Example #5
def handle(config, options, args):
	if not args:
		raise UsageError()

	for x in args:
		if not os.path.isfile(x):
			raise SafeException(_("File '%s' does not exist") % x)
		logger.info(_("Importing from file '%s'"), x)
		with open(x, 'rb') as signed_data:
			data, sigs = gpg.check_stream(signed_data)
			doc = minidom.parseString(data.read())
			uri = doc.documentElement.getAttribute('uri')
			if not uri:
				raise SafeException(_("Missing 'uri' attribute on root element in '%s'") % x)
			logger.info(_("Importing information about interface %s"), uri)
			signed_data.seek(0)

			pending = PendingFeed(uri, signed_data)

			def run():
				keys_downloaded = tasks.Task(pending.download_keys(config.fetcher), "download keys")
				yield keys_downloaded.finished
				tasks.check(keys_downloaded.finished)
				if not config.iface_cache.update_feed_if_trusted(uri, pending.sigs, pending.new_xml):
					blocker = config.trust_mgr.confirm_keys(pending)
					if blocker:
						yield blocker
						tasks.check(blocker)
					if not config.iface_cache.update_feed_if_trusted(uri, pending.sigs, pending.new_xml):
						raise SafeException(_("No signing keys trusted; not importing"))

			task = tasks.Task(run(), "import feed")

			tasks.wait_for_blocker(task.finished)
Example #6
	def testErrSig(self):
		stream = tempfile.TemporaryFile()
		stream.write(err_sig)
		stream.seek(0)
		data, sigs = gpg.check_stream(stream)
		self.assertEquals("Bad\n", data.read())
		assert len(sigs) == 1
		assert isinstance(sigs[0], gpg.ErrSig)
		assert sigs[0].need_key() == "8C6289C86DBDA68E"
		self.assertEquals("17", sigs[0].status[gpg.ErrSig.ALG])
		assert sigs[0].is_trusted() is False
		assert str(sigs[0]).startswith('ERROR')
Example #7
	def testErrSig(self):
		with tempfile.TemporaryFile(mode = 'w+b') as stream:
			stream.write(err_sig)
			stream.seek(0)
			data, sigs = gpg.check_stream(stream)
			self.assertEqual(err_sig, data.read())
			assert len(sigs) == 1
			assert isinstance(sigs[0], gpg.ErrSig)
			assert sigs[0].need_key() == "7AB89A977DAAA397"
			self.assertEqual("1", sigs[0].status[gpg.ErrSig.ALG])
			assert sigs[0].is_trusted() is False
			assert str(sigs[0]).startswith('ERROR')
Example #8
	def check_bad(self, sig):
		stream = tempfile.TemporaryFile()
		stream.write(sig)
		stream.seek(0)
		data, sigs = gpg.check_stream(stream)
		assert len(sigs) == 1
		assert isinstance(sigs[0], gpg.BadSig)
		self.assertEquals("AE07828059A53CC1",
				  sigs[0].status[gpg.BadSig.KEYID])
		assert sigs[0].is_trusted() is False
		assert sigs[0].need_key() is None
		assert str(sigs[0]).startswith('BAD')
		return data.read()
Example #9
def check_signature(path):
	data = file(path).read()
	xml_comment = data.rfind('\n<!-- Base64 Signature')
	if xml_comment >= 0:
		data_stream, sigs = gpg.check_stream(file(path))
		sign_fn = sign_xml
		data = data[:xml_comment + 1]
		data_stream.close()
	elif data.startswith('-----BEGIN'):
		data_stream, sigs = gpg.check_stream(file(path))
		sign_fn = sign_xml		# Don't support saving as plain
		data = data_stream.read()
	else:
		return data, sign_unsigned, None
	for sig in sigs:
		if isinstance(sig, gpg.ValidSig):
			return data, sign_fn, sig.fingerprint
	error = "ERROR: No valid signatures found!\n"
	for sig in sigs:
		error += "\nGot: %s" % sig
	error += '\n\nTo edit it anyway, remove the signature using a text editor.'
	raise Exception(error)
Example #10
	def check_bad(self, sig):
		with tempfile.TemporaryFile(mode = 'w+b') as stream:
			stream.write(sig)
			stream.seek(0)
			data, sigs = gpg.check_stream(stream)
			assert len(sigs) == 1
			assert isinstance(sigs[0], gpg.BadSig)
			self.assertEqual("AE07828059A53CC1",
					  sigs[0].status[gpg.BadSig.KEYID])
			assert sigs[0].is_trusted() is False
			assert sigs[0].need_key() is None
			assert str(sigs[0]).startswith('BAD')
			return data.read()
Example #11
def handle(config, options, args):
    if not args:
        raise UsageError()

    h = config.handler

    for x in args:
        if not os.path.isfile(x):
            raise SafeException(_("File '%s' does not exist") % x)
        logging.info(_("Importing from file '%s'"), x)
        signed_data = file(x)
        data, sigs = gpg.check_stream(signed_data)
        doc = minidom.parseString(data.read())
        uri = doc.documentElement.getAttribute('uri')
        if not uri:
            raise SafeException(
                _("Missing 'uri' attribute on root element in '%s'") % x)
        logging.info(_("Importing information about interface %s"), uri)
        signed_data.seek(0)

        pending = PendingFeed(uri, signed_data)

        def run():
            keys_downloaded = tasks.Task(pending.download_keys(h),
                                         "download keys")
            yield keys_downloaded.finished
            tasks.check(keys_downloaded.finished)
            if not config.iface_cache.update_feed_if_trusted(
                    uri, pending.sigs, pending.new_xml):
                blocker = config.trust_mgr.confirm_keys(pending)
                if blocker:
                    yield blocker
                    tasks.check(blocker)
                if not config.iface_cache.update_feed_if_trusted(
                        uri, pending.sigs, pending.new_xml):
                    raise SafeException(
                        _("No signing keys trusted; not importing"))

        task = tasks.Task(run(), "import feed")

        errors = tasks.wait_for_blocker(task.finished)
        if errors:
            raise SafeException(
                _("Errors during download: ") + '\n'.join(errors))
Example #12
	def check_good(self, sig):
		stream = tempfile.TemporaryFile()
		stream.write(sig)
		stream.seek(0)
		data, sigs = gpg.check_stream(stream)
		assert len(sigs) == 1
		assert isinstance(sigs[0], gpg.ValidSig)
		self.assertEquals("92429807C9853C0744A68B9AAE07828059A53CC1",
				  sigs[0].fingerprint)
		assert sigs[0].is_trusted() is True
		assert sigs[0].need_key() is None
		assert str(sigs[0]).startswith('Valid')
		for item in sigs[0].get_details():
			if item[0] == 'uid' and len(item) > 9:
				assert item[9] in ["Thomas Leonard <*****@*****.**>"], str(item)
				break
		else:
			self.fail("Missing name")
		return data.read()
Example #13
	def recheck(self):
		"""Set new_xml and sigs by reading signed_data.
		You need to call this when previously-missing keys are added to the GPG keyring."""
		from . import gpg
		try:
			self.signed_data.seek(0)
			stream, sigs = gpg.check_stream(self.signed_data)
			assert sigs

			data = stream.read()
			if stream is not self.signed_data:
				stream.close()

			self.new_xml = data
			self.sigs = sigs
		except:
			self.signed_data.seek(0)
			logger.info(_("Failed to check GPG signature. Data received was:\n") + repr(self.signed_data.read()))
			raise
Example #14
	def check_good(self, sig):
		with tempfile.TemporaryFile(mode = 'w+b') as stream:
			stream.write(sig)
			stream.seek(0)
			data, sigs = gpg.check_stream(stream)

			assert len(sigs) == 1
			assert isinstance(sigs[0], gpg.ValidSig)
			self.assertEqual("92429807C9853C0744A68B9AAE07828059A53CC1",
					  sigs[0].fingerprint)
			assert sigs[0].is_trusted() is True
			assert sigs[0].need_key() is None
			assert str(sigs[0]).startswith('Valid')
			for item in sigs[0].get_details():
				if item[0] == 'uid' and len(item) > 9:
					assert item[9] in ["Thomas Leonard <*****@*****.**>"], str(item)
					break
			else:
				self.fail("Missing name")
			return data.read()
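Examples #6 through #14 exercise the three signature classes returned by check_stream: gpg.ValidSig (with a fingerprint and is_trusted()), gpg.BadSig (key ID available via sig.status[gpg.BadSig.KEYID]) and gpg.ErrSig (missing key reported by need_key()). The small dispatch helper below is a hedged sketch built only from the attributes asserted in those tests; the function name summarise is an assumption, not part of the library.

	from zeroinstall.injector import gpg	# assumed import path

	def summarise(sig):
		# Attribute names below come from the test assertions above;
		# anything beyond them is an assumption, not documented API.
		if isinstance(sig, gpg.ValidSig):
			state = "trusted" if sig.is_trusted() else "untrusted"
			return "valid (%s) signature from %s" % (state, sig.fingerprint)
		if isinstance(sig, gpg.BadSig):
			return "BAD signature from key %s" % sig.status[gpg.BadSig.KEYID]
		if isinstance(sig, gpg.ErrSig):
			return "could not check signature; missing key %s" % sig.need_key()
		return str(sig)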
Example #15
	def get_cached_signatures(self, uri):
		"""Verify the cached interface using GPG.
		Only new-style XML-signed interfaces retain their signatures in the cache.
		@param uri: the feed to check
		@type uri: str
		@return: a list of signatures, or None
		@rtype: [L{gpg.Signature}] or None
		@since: 0.25"""
		import gpg
		if os.path.isabs(uri):
			old_iface = uri
		else:
			old_iface = basedir.load_first_cache(config_site, 'interfaces', escape(uri))
			if old_iface is None:
				return None
		try:
			return gpg.check_stream(file(old_iface))[1]
		except SafeException, ex:
			debug(_("No signatures (old-style interface): %s") % ex)
			return None
Example #16
    def get_cached_signatures(self, uri):
        """Verify the cached interface using GPG.
		Only new-style XML-signed interfaces retain their signatures in the cache.
		@param uri: the feed to check
		@type uri: str
		@return: a list of signatures, or None
		@rtype: [L{gpg.Signature}] or None
		@since: 0.25"""
        import gpg
        if uri.startswith('/'):
            old_iface = uri
        else:
            old_iface = basedir.load_first_cache(config_site, 'interfaces',
                                                 escape(uri))
            if old_iface is None:
                return None
        try:
            return gpg.check_stream(file(old_iface))[1]
        except SafeException, ex:
            debug(_("No signatures (old-style interface): %s") % ex)
            return None
Example #17
    def recheck(self):
        """Set new_xml and sigs by reading signed_data.
		You need to call this when previously-missing keys are added to the GPG keyring."""
        import gpg
        try:
            self.signed_data.seek(0)
            stream, sigs = gpg.check_stream(self.signed_data)
            assert sigs

            data = stream.read()
            if stream is not self.signed_data:
                stream.close()

            self.new_xml = data
            self.sigs = sigs
        except:
            self.signed_data.seek(0)
            info(
                _("Failed to check GPG signature. Data received was:\n") +
                repr(self.signed_data.read()))
            raise
Example #18
def _import_feed(args):
	from zeroinstall.support import tasks
	from zeroinstall.injector import gpg, handler
	from zeroinstall.injector.iface_cache import PendingFeed
	from xml.dom import minidom
	handler = handler.Handler()

	for x in args:
		if not os.path.isfile(x):
			raise SafeException(_("File '%s' does not exist") % x)
		logging.info(_("Importing from file '%s'"), x)
		signed_data = file(x)
		data, sigs = gpg.check_stream(signed_data)
		doc = minidom.parseString(data.read())
		uri = doc.documentElement.getAttribute('uri')
		if not uri:
			raise SafeException(_("Missing 'uri' attribute on root element in '%s'") % x)
		iface = iface_cache.get_interface(uri)
		logging.info(_("Importing information about interface %s"), iface)
		signed_data.seek(0)

		pending = PendingFeed(uri, signed_data)

		def run():
			keys_downloaded = tasks.Task(pending.download_keys(handler), "download keys")
			yield keys_downloaded.finished
			tasks.check(keys_downloaded.finished)
			if not iface_cache.update_interface_if_trusted(iface, pending.sigs, pending.new_xml):
				blocker = handler.confirm_trust_keys(iface, pending.sigs, pending.new_xml)
				if blocker:
					yield blocker
					tasks.check(blocker)
				if not iface_cache.update_interface_if_trusted(iface, pending.sigs, pending.new_xml):
					raise SafeException(_("No signing keys trusted; not importing"))

		task = tasks.Task(run(), "import feed")

		errors = handler.wait_for_blocker(task.finished)
		if errors:
			raise SafeException(_("Errors during download: ") + '\n'.join(errors))
Example #19
def export_feeds(export_dir, feeds, keys_used):
	"""Copy each feed (and icon) in feeds from the cache to export_dir.
	Add all signing key fingerprints to keys_used."""
	for feed in feeds:
		if feed.startswith('/'):
			info("Skipping local feed %s", feed)
			continue
		if feed.startswith('distribution:'):
			info("Skipping distribution feed %s", feed)
			continue
		print "Exporting feed", feed
		# Store feed
		cached = basedir.load_first_cache(namespaces.config_site,
						  'interfaces',
						  model.escape(feed))
		if cached:
			feed_dir = os.path.join(export_dir, get_feed_path(feed))
			feed_dst = os.path.join(feed_dir, 'latest.xml')
			if not os.path.isdir(feed_dir):
				os.makedirs(feed_dir)
			shutil.copyfile(cached, feed_dst)
			info("Exported feed %s", feed)

			icon_path = iface_cache.iface_cache.get_icon_path(iface_cache.iface_cache.get_interface(feed))
			if icon_path:
				icon_dst = os.path.join(feed_dir, 'icon.png')
				shutil.copyfile(icon_path, icon_dst)

			# Get the keys
			stream = file(cached)
			unused, sigs = gpg.check_stream(stream)
			stream.close()
			for x in sigs:
				if isinstance(x, gpg.ValidSig):
					keys_used.add(x.fingerprint)
				else:
					warn("Signature problem: %s" % x)
		else:
			warn("Feed not cached: %s", feed)
Example #20
    def get_cached_signatures(self, uri):
        """Verify the cached interface using GPG.
		Only new-style XML-signed interfaces retain their signatures in the cache.
		@param uri: the feed to check
		@type uri: str
		@return: a list of signatures, or None
		@rtype: [L{gpg.Signature}] or None
		@since: 0.25"""
        from . import gpg
        if os.path.isabs(uri):
            old_iface = uri
        else:
            old_iface = basedir.load_first_cache(config_site, 'interfaces',
                                                 escape(uri))
            if old_iface is None:
                return None
        try:
            with open(old_iface, 'rb') as stream:
                return gpg.check_stream(stream)[1]
        except SafeException as ex:
            logger.info(_("No signatures (old-style interface): %s") % ex)
            return None
Example #21
def process(config, xml_file, delete_on_success):
	# Step 1 : check everything looks sensible, reject if not

	with open(xml_file, 'rb') as stream:
		xml_text = stream.read()
		sig_index = xml_text.rfind('\n<!-- Base64 Signature')
		if sig_index != -1:
			stream.seek(0)
			stream, sigs = gpg.check_stream(stream)
		else:
			sig_index = len(xml_text)
			sigs = []
		root = qdom.parse(BytesIO(xml_text))

	master = get_feed_url(root, xml_file)
	import_master = 'uri' in root.attrs

	if not import_master:
		root.attrs['uri'] = master	# (hack so we can parse it here without setting local_path)

	# Check signatures are valid
	if config.CONTRIBUTOR_GPG_KEYS is not None:
		for sig in sigs:
			if isinstance(sig, gpg.ValidSig) and sig.fingerprint in config.CONTRIBUTOR_GPG_KEYS:
				break
		else:
			raise SafeException("No trusted signatures on feed {path}; signatures were: {sigs}".format(
				path = xml_file,
				sigs = ', '.join([str(s) for s in sigs])))

	feed = model.ZeroInstallFeed(root)

	# Perform custom checks defined by the repository owner
	for impl in feed.implementations.values():
		problem = config.check_new_impl(impl)
		if problem:
			raise SafeException("{problem} in {xml_file}\n(this check was configured in {config}: check_new_impl())".format(
				problem = problem, xml_file = xml_file, config = config.__file__))

	feeds_rel_path = paths.get_feeds_rel_path(config, master)
	feed_path = join("feeds", feeds_rel_path)
	feed_dir = dirname(feed_path)
	if not os.path.isdir(feed_dir):
		os.makedirs(feed_dir)

	scm.ensure_no_uncommitted_changes(feed_path)

	if import_master:
		if os.path.exists(feed_path):
			with open(feed_path, 'rb') as stream:
				existing = stream.read()
			if existing == xml_text[:sig_index]:
				print("Already imported {feed}; skipping".format(feed = feed_path))
				if delete_on_success:
					os.unlink(xml_file)
				return None
			else:
				raise SafeException("Can't import '{url}'; non-identical feed {path} already exists.\n\n"
						    "To ADD new versions to this feed, remove the a 'uri' attribute from "
						    "the root element in {new}.\n\n"
						    "To EDIT the feed, just edit {path} directly rather than trying to add it again.\n\n"
						    "To RE-UPLOAD the archives, do that manually and then edit archives.db."
						    .format(url = feed.url, new = xml_file, path = feed_path))

	# Calculate commit message
	if import_master:
		name = basename(xml_file)
		if name == 'feed.xml':
			name = basename(dirname(xml_file))
		action = 'Imported {file}'.format(file = name)
	else:
		versions = set(i.get_version() for i in feed.implementations.values())
		action = 'Added {name} {versions}'.format(name = feed.get_name(), versions = ', '.join(versions))
	commit_msg = '%s\n\n%s' % (action, xml_text.decode('utf-8'))

	# Calculate new XML
	new_file = not os.path.exists(feed_path)
	git_path = relpath(feed_path, 'feeds')

	if import_master:
		assert new_file
		new_xml = xml_text[:sig_index]
	elif new_file:
		new_xml = create_from_local(master, xml_file)
	else:
		# Merge into existing feed
		try:
			new_doc = merge.merge_files(master, feed_path, xml_file)
		except merge.DuplicateIDException as ex:
			# Did we already import this XML? Compare with the last Git log entry.
			msg, previous_commit_xml = get_last_commit(git_path)
			if previous_commit_xml == xml_text:
				print("Already merged this into {feed}; skipping".format(feed = feed_path))
				return msg
			raise ex

		new_xml = None	# (will regenerate from new_doc below)

	# Step 2 : upload archives to hosting

	processed_archives = archives.process_archives(config, incoming_dir = dirname(xml_file), feed = feed)

	# Step 3 : merge XML into feeds directory

	# Regenerate merged feed
	if new_xml is None:
		new_versions = frozenset(impl.get_version() for impl in feed.implementations.values())
		if len(new_versions) == 1 and getattr(config, 'TRACK_TESTING_IMPLS', True):
			ask_if_previous_still_testing(new_doc, list(new_versions)[0])
		new_xml = formatting.format_doc(new_doc)

	did_git_add = False

	try:
		with open(feed_path + '.new', 'wb') as stream:
			stream.write(new_xml)
		support.portable_rename(feed_path + '.new', feed_path)

		# Commit
		if new_file:
			subprocess.check_call(['git', 'add', git_path], cwd = 'feeds')
			did_git_add = True

		# (this must be last in the try block)
		scm.commit('feeds', [git_path], commit_msg, key = config.GPG_SIGNING_KEY)
	except Exception as ex:
		# Roll-back (we didn't commit to Git yet)
		print(ex)
		print("Error updating feed {feed}; rolling-back...".format(feed = xml_file))
		if new_file:
			if os.path.exists(feed_path):
				os.unlink(feed_path)
			if did_git_add:
				subprocess.check_call(['git', 'rm', '--', git_path], cwd = 'feeds')
		else:
			subprocess.check_call(['git', 'checkout', 'HEAD', '--', git_path], cwd = 'feeds')
		raise

	# Delete XML and archives from incoming directory
	if delete_on_success:
		os.unlink(xml_file)
		for archive in processed_archives:
			os.unlink(archive.incoming_path)

	return commit_msg.split('\n', 1)[0]
Example #22
def process(config, xml_file, delete_on_success):
	# Step 1 : check everything looks sensible, reject if not

	with open(xml_file, 'rb') as stream:
		xml_text = stream.read()
		sig_index = xml_text.rfind('\n<!-- Base64 Signature')
		if sig_index != -1:
			stream.seek(0)
			stream, sigs = gpg.check_stream(stream)
		else:
			sig_index = len(xml_text)
			sigs = []
		root = qdom.parse(BytesIO(xml_text))

	master = get_feed_url(root, xml_file)
	import_master = 'uri' in root.attrs

	if not import_master:
		root.attrs['uri'] = master	# (hack so we can parse it here without setting local_path)

	# Check signatures are valid
	if config.CONTRIBUTOR_GPG_KEYS is not None:
		for sig in sigs:
			if isinstance(sig, gpg.ValidSig) and sig.fingerprint in config.CONTRIBUTOR_GPG_KEYS:
				break
		else:
			raise SafeException("No trusted signatures on feed {path}; signatures were: {sigs}".format(
				path = xml_file,
				sigs = ', '.join([str(s) for s in sigs])))

	feed = model.ZeroInstallFeed(root)

	# Perform custom checks defined by the repository owner
	for impl in feed.implementations.values():
		problem = config.check_new_impl(impl)
		if problem:
			raise SafeException("{problem} in {xml_file}\n(this check was configured in {config}: check_new_impl())".format(
				problem = problem, xml_file = xml_file, config = config.__file__))

	feeds_rel_path = paths.get_feeds_rel_path(config, master)
	feed_path = join("feeds", feeds_rel_path)
	feed_dir = dirname(feed_path)
	if not os.path.isdir(feed_dir):
		os.makedirs(feed_dir)

	scm.ensure_no_uncommitted_changes(feed_path)

	if import_master:
		if os.path.exists(feed_path):
			with open(feed_path, 'rb') as stream:
				existing = stream.read()
			if existing == xml_text[:sig_index]:
				print("Already imported {feed}; skipping".format(feed = feed_path))
				if delete_on_success:
					os.unlink(xml_file)
				return None
			else:
				raise SafeException("Can't import '{url}'; non-identical feed {path} already exists.\n\n"
						    "To ADD new versions to this feed, remove the a 'uri' attribute from "
						    "the root element in {new}.\n\n"
						    "To EDIT the feed, just edit {path} directly rather than trying to add it again.\n\n"
						    "To RE-UPLOAD the archives, do that manually and then edit archives.db."
						    .format(url = feed.url, new = xml_file, path = feed_path))

	# Calculate commit message
	if import_master:
		name = basename(xml_file)
		if name == 'feed.xml':
			name = basename(dirname(xml_file))
		action = 'Imported {file}'.format(file = name)
	else:
		versions = set(i.get_version() for i in feed.implementations.values())
		action = 'Added {name} {versions}'.format(name = feed.get_name(), versions = ', '.join(versions))
	commit_msg = '%s\n\n%s' % (action, xml_text.decode('utf-8'))

	# Calculate new XML
	new_file = not os.path.exists(feed_path)
	git_path = relpath(feed_path, 'feeds')

	if import_master:
		assert new_file
		new_xml = xml_text[:sig_index]
	elif new_file:
		new_xml = create_from_local(master, xml_file)
	else:
		# Merge into existing feed
		try:
			new_doc = merge.merge_files(master, feed_path, xml_file)
		except merge.DuplicateIDException as ex:
			# Did we already import this XML? Compare with the last Git log entry.
			msg, previous_commit_xml = get_last_commit(git_path)
			if previous_commit_xml == xml_text:
				print("Already merged this into {feed}; skipping".format(feed = feed_path))
				return msg
			raise ex

		new_xml = None	# (will regenerate from new_doc below)

	# Step 2 : upload archives to hosting

	processed_archives = archives.process_archives(config, incoming_dir = dirname(xml_file), feed = feed)

	# Step 3 : merge XML into feeds directory

	# Prompt about existing testing versions
	if new_xml is None:
		new_versions = frozenset(impl.get_version() for impl in feed.implementations.values())
		if len(new_versions) == 1:
			ask_if_previous_still_testing(new_doc, list(new_versions)[0])
		new_xml = formatting.format_doc(new_doc)

	did_git_add = False

	try:
		with open(feed_path + '.new', 'wb') as stream:
			stream.write(new_xml)
		support.portable_rename(feed_path + '.new', feed_path)

		# Commit
		if new_file:
			subprocess.check_call(['git', 'add', git_path], cwd = 'feeds')
			did_git_add = True

		# (this must be last in the try block)
		scm.commit('feeds', [git_path], commit_msg, key = config.GPG_SIGNING_KEY)
	except Exception as ex:
		# Roll-back (we didn't commit to Git yet)
		print(ex)
		print("Error updating feed {feed}; rolling-back...".format(feed = xml_file))
		if new_file:
			if os.path.exists(feed_path):
				os.unlink(feed_path)
			if did_git_add:
				subprocess.check_call(['git', 'rm', '--', git_path], cwd = 'feeds')
		else:
			subprocess.check_call(['git', 'checkout', 'HEAD', '--', git_path], cwd = 'feeds')
		raise

	# Delete XML and archives from incoming directory
	if delete_on_success:
		os.unlink(xml_file)
		for archive in processed_archives:
			os.unlink(archive.incoming_path)

	return commit_msg.split('\n', 1)[0]
Example #23
	def do_install(self, archive_stream, progress_bar, archive_offset):
		# Step 1. Import GPG keys

		# Maybe GPG has never been run before. Let it initialise, or we'll get an error code
		# from the first import... (ignore return value here)
		subprocess.call([get_gpg(), '--check-trustdb', '-q'])

		key_dir = os.path.join(mydir, 'keys')
		for key in os.listdir(key_dir):
			check_call([get_gpg(), '--import', '-q', os.path.join(key_dir, key)])

		# Step 2. Import feeds and trust their signing keys
		for root, dirs, files in os.walk(os.path.join(mydir, 'feeds')):
			if 'latest.xml' in files:
				feed_path = os.path.join(root, 'latest.xml')
				icon_path = os.path.join(root, 'icon.png')

				# Get URI
				feed_stream = file(feed_path)
				doc = qdom.parse(feed_stream)
				uri = doc.getAttribute('uri')
				assert uri, "Missing 'uri' attribute on root element in '%s'" % feed_path
				domain = trust.domain_from_url(uri)

				feed_stream.seek(0)
				stream, sigs = gpg.check_stream(feed_stream)
				for s in sigs:
					if not trust.trust_db.is_trusted(s.fingerprint, domain):
						print "Adding key %s to trusted list for %s" % (s.fingerprint, domain)
						trust.trust_db.trust_key(s.fingerprint, domain)
				oldest_sig = min([s.get_timestamp() for s in sigs])
				try:
					config.iface_cache.update_feed_from_network(uri, stream.read(), oldest_sig)
				except iface_cache.ReplayAttack:
					# OK, the user has a newer copy already
					pass
				if feed_stream != stream:
					feed_stream.close()
				stream.close()

				if os.path.exists(icon_path):
					icons_cache = basedir.save_cache_path(namespaces.config_site, 'interface_icons')
					icon_file = os.path.join(icons_cache, model.escape(uri))
					if not os.path.exists(icon_file):
						shutil.copyfile(icon_path, icon_file)

		# Step 3. Solve to find out which implementations we actually need
		archive_stream.seek(archive_offset)

		extract_impls = {}	# Impls we need but which are compressed (ID -> Impl)
		tmp = tempfile.mkdtemp(prefix = '0export-')
		try:
			# Create a "fake store" with the implementation in the archive
			archive = tarfile.open(name=archive_stream.name, mode='r|', fileobj=archive_stream)
			fake_store = FakeStore()
			for tarmember in archive:
				if tarmember.name.startswith('implementations'):
					impl = os.path.basename(tarmember.name).split('.')[0]
					fake_store.impls.add(impl)

			bootstrap_store = zerostore.Store(os.path.join(mydir, 'implementations'))
			stores = config.stores

			toplevel_uris = [uri.strip() for uri in file(os.path.join(mydir, 'toplevel_uris'))]
			ZEROINSTALL_URI = "@ZEROINSTALL_URI@"
			for uri in [ZEROINSTALL_URI] + toplevel_uris:
				# This is so the solver treats versions in the setup archive as 'cached',
				# meaning that it will prefer using them to doing a download
				stores.stores.append(bootstrap_store)
				stores.stores.append(fake_store)

				# Shouldn't need to download anything, but we might not have all feeds
				r = requirements.Requirements(uri)
				d = driver.Driver(config = config, requirements = r)
				config.network_use = model.network_minimal
				download_feeds = d.solve_with_downloads()
				h.wait_for_blocker(download_feeds)
				assert d.solver.ready, d.solver.get_failure_reason()

				# Add anything chosen from the setup store to the main store
				stores.stores.remove(fake_store)
				stores.stores.remove(bootstrap_store)
				for iface, impl in d.get_uncached_implementations():
					print >>sys.stderr, "Need to import", impl
					if impl.id in fake_store.impls:
						# Delay extraction
						extract_impls[impl.id] = impl
					else:
						impl_src = os.path.join(mydir, 'implementations', impl.id)

						if os.path.isdir(impl_src):
							stores.add_dir_to_cache(impl.id, impl_src)
						else:
							print >>sys.stderr, "Required impl %s (for %s) not present" % (impl, iface)

				# Remember where we copied 0launch to, because we'll need it after
				# the temporary directory is deleted.
				if uri == ZEROINSTALL_URI:
					global copied_0launch_in_cache
					impl = d.solver.selections.selections[uri]
					if not impl.id.startswith('package:'):
						copied_0launch_in_cache = impl.get_path(stores = config.stores)
					# (else we selected the distribution version of Zero Install)
		finally:
			shutil.rmtree(tmp)

		# Count total number of bytes to extract
		extract_total = 0
		for impl in extract_impls.values():
			impl_info = archive.getmember('implementations/' + impl.id + '.tar.bz2')
			extract_total += impl_info.size

		self.sent = 0

		# Actually extract+import implementations in archive
		archive_stream.seek(archive_offset)
		archive = tarfile.open(name=archive_stream.name, mode='r|', fileobj=archive_stream)

		for tarmember in archive:
			if not tarmember.name.startswith('implementations'):
				continue
			impl_id = tarmember.name.split('/')[1].split('.')[0]
			if impl_id not in extract_impls:
				print "Skip", impl_id
				continue
			print "Extracting", impl_id
			tmp = tempfile.mkdtemp(prefix = '0export-')
			try:
				impl_stream = archive.extractfile(tarmember)
				self.child = subprocess.Popen('bunzip2|tar xf -', shell = True, stdin = subprocess.PIPE, cwd = tmp)
				mainloop = gobject.MainLoop(gobject.main_context_default())

				def pipe_ready(src, cond):
					data = impl_stream.read(4096)
					if not data:
						mainloop.quit()
						self.child.stdin.close()
						return False
					self.sent += len(data)
					if progress_bar:
						progress_bar.set_fraction(float(self.sent) / extract_total)
					self.child.stdin.write(data)
					return True
				gobject.io_add_watch(self.child.stdin, gobject.IO_OUT | gobject.IO_HUP, pipe_ready, priority = gobject.PRIORITY_LOW)

				mainloop.run()

				self.child.wait()
				if self.child.returncode:
					raise Exception("Failed to unpack archive (code %d)" % self.child.returncode)

				stores.add_dir_to_cache(impl_id, tmp)

			finally:
				shutil.rmtree(tmp)

		return toplevel_uris
Example #24
	def testSimple(self):
		# (do a slow sub-process call here just to check that the top-level
		# wrapper works)
		subprocess.check_call(['0repo', 'create', 'my-repo', 'Test Key for 0repo'])
		os.chdir('my-repo')

		update_config('raise Exception("No upload method specified: edit upload_archives() in 0repo-config.py")',
				'return test0repo.upload(archives)')

		# Regenerate
		out = run_repo([])
		assert "Exported public key as 'public/keys/6D66BDF4F467A18D.gpg'" in out, out
		assert os.path.exists(join('public', 'catalog.xml'))
		assert os.path.exists(join('public', 'resources/catalog.xsl'))
		assert os.path.exists(join('public', 'resources/catalog.css'))
		assert os.path.exists(join('public', 'keys', '6D66BDF4F467A18D.gpg'))

		# Create a new feed (external archive)
		shutil.copyfile(join(mydir, 'test-1.xml'), join('incoming', 'test-1.xml'))
		responses['/downloads/test-1.tar.bz2'] = FakeResponse(419419)
		out = run_repo([])
		assert 'Processing test-1.xml' in out, repr(out)

		assert os.path.exists(join('feeds', 'tests', 'test.xml'))

		assert os.path.exists(join('public', 'tests', 'test.xml'))
		with open(join('public', 'tests', '6D66BDF4F467A18D.gpg')) as stream:
			data = stream.read()
			assert 'BEGIN PGP PUBLIC KEY BLOCK' in data, data

		with open(join('public', 'catalog.xml')) as stream:
			catalog = qdom.parse(stream)
		feeds = catalog.childNodes
		self.assertEqual(1, len(feeds))
		feed, = feeds
		self.assertEqual(XMLNS_IFACE, feed.uri)
		self.assertEqual("http://example.com/myrepo/tests/test.xml", feed.attrs['uri'])

		# Check invalid archives are rejected
		with open(join(mydir, 'test-2.xml'), 'rt') as stream:
			test2_orig = stream.read()
		ex = test_invalid(test2_orig.replace('href="test-2.tar.bz2"', 'href=""'))
		assert "Missing href attribute on <archive>" in ex, ex

		ex = test_invalid(test2_orig.replace('href="test-2.tar.bz2"', 'href=".tar.bz2"'))
		assert ex == "Illegal archive name '.tar.bz2'", ex

		ex = test_invalid(test2_orig.replace('href="test-2.tar.bz2"', 'href="foo bar"'))
		assert ex == "Illegal archive name 'foo bar'", ex

		ex = test_invalid(test2_orig.replace('href="test-2.tar.bz2"', 'href="foo&#xa;bar"'))
		assert ex == "Illegal archive name 'foo\nbar'", ex

		ex = test_invalid(test2_orig)
		assert "test-2.tar.bz2' not found" in ex, ex

		ex = test_invalid(test2_orig.replace('href="test-2.tar.bz2"', 'href="http://example.com/INVALID"'))
		assert "HTTP error: got status code 404" in ex, ex

		shutil.copyfile(join(mydir, 'test-2.tar.bz2'), 'test-2.tar.bz2')

		ex = test_invalid(test2_orig.replace("sha256new='RPUJPV", "sha256new='RPV"))
		assert 'Incorrect manifest -- archive is corrupted' in ex, ex

		# Now add some local archives
		shutil.copyfile(join(mydir, 'test-2.tar.bz2'), join('incoming', 'test-2.tar.bz2'))
		shutil.copyfile(join(mydir, 'test-2.xml'), join('incoming', 'test-2.xml'))
		out = run_repo([], stdin = 'n\n')		# (don't mark 0.1 as stable)
		assert 'Updated public/tests/test.xml' in out, out

		self.assertEqual([], os.listdir('incoming'))
		assert os.path.exists(join('archive-backups', 'test-2.tar.bz2'))

		archive_db = archives.ArchiveDB('archives.db')
		stored_archive = archive_db.lookup('test-2.tar.bz2')
		self.assertEqual('852dda97d7c67e055738de87c27df85c4b6e5707', stored_archive.sha1)
		self.assertEqual('http://example.com/myrepo/archives/test-2.tar.bz2', stored_archive.url)

		with open(join('public', 'tests', 'test.xml'), 'rb') as stream:
			stream, sigs = gpg.check_stream(stream)
			assert isinstance(sigs[0], gpg.ValidSig), sigs[0]

			stream.seek(0)

			feed = model.ZeroInstallFeed(qdom.parse(stream))
		impl2 = feed.implementations['version2']
		self.assertEqual(stored_archive.url, impl2.download_sources[0].url)

		# Check invalid feeds
		with open(join(mydir, 'test-1.xml'), 'rt') as stream:
			orig_data = stream.read()

		ex = test_invalid(orig_data.replace('license', 'LICENSE'))
		assert "Missing 'license' attribute in" in ex, ex

		ex = test_invalid(orig_data.replace('released', 'RELEASED'))
		assert "Missing 'released' attribute in" in ex, ex

		ex = test_invalid(orig_data.replace('version="1"', 'version="1-pre"'))
		assert "Version number must end in a digit (got 1-pre)" in ex, ex

		# Import twice with identical XML
		out = run_repo(['add', join(mydir, 'test-2.xml')])
		assert 'Already merged this into feeds/tests/test.xml; skipping' in out, out

		# Import twice with non-identical XML
		ex = test_invalid(orig_data.replace('only', 'ONLY'))
		assert 'Duplicate ID sha1new=4f860b217bb94723ad6af9062d25dc7faee6a7ae' in ex, ex

		# Re-add the same archive
		with open('test.xml', 'wt') as stream:
			stream.write(test2_orig.replace('version2', 'version3').replace('version="2"', 'version="3"'))
		out = run_repo(['add', 'test.xml'], stdin='y\n')	# (mark 0.2 as stable)
		assert 'Updated public/tests/test.xml' in out, out
		assert "The previous release, version 2, is still marked as 'testing'. Set to stable?" in out, out

		# Re-add a different archive
		with open('test-2.tar.bz2', 'ab') as stream:
			stream.write(b'!')
		ex = test_invalid(test2_orig.replace('version2', 'version4'))
		assert "A different archive with basename 'test-2.tar.bz2' is already in the repository" in ex, ex

		# Test a recipe
		out = run_repo(['add', join(mydir, 'test-4.xml')], stdin = 'n\n')
		assert "Updated public/tests/test.xml" in out, out

		# Import pre-existing feed
		update_config('CONTRIBUTOR_GPG_KEYS = None', 'CONTRIBUTOR_GPG_KEYS = set()')

		try:
			run_repo(['add', join(mydir, 'imported.xml')])
			assert 0
		except SafeException as ex:
			assert 'No trusted signatures on feed' in str(ex)

		update_config('CONTRIBUTOR_GPG_KEYS = set()', 'CONTRIBUTOR_GPG_KEYS = {"3F52282D484EB9401EE3A66A6D66BDF4F467A18D"}')

		responses['/imported-1.tar.bz2'] = FakeResponse(200)
		out = run_repo(['add', join(mydir, 'imported.xml')])
		assert os.path.exists(join('public', 'tests', 'imported.xml'))

		# Check stability levels
		with open(join('public', 'tests', 'test.xml'), 'rb') as stream:
			stream, sigs = gpg.check_stream(stream)
			assert isinstance(sigs[0], gpg.ValidSig), sigs[0]
			stream.seek(0)
			feed = model.ZeroInstallFeed(qdom.parse(stream))
		self.assertEqual(model.testing, feed.implementations["sha1new=4f860b217bb94723ad6af9062d25dc7faee6a7ae"].get_stability())
		self.assertEqual(model.stable, feed.implementations['version2'].get_stability())
		self.assertEqual(model.testing, feed.implementations['version3'].get_stability())
		self.assertEqual(model.testing, feed.implementations['version4'].get_stability())