Example #1
File: apps.py Project: dabrahams/0install
	def set_selections(self, sels, set_last_checked = True):
		"""Store a new set of selections. We include today's date in the filename
		so that we keep a history of previous selections (max one per day), in case
		we want to roll back later."""
		date = time.strftime('%Y-%m-%d')
		sels_file = os.path.join(self.path, 'selections-{date}.xml'.format(date = date))
		dom = sels.toDOM()

		tmp = tempfile.NamedTemporaryFile(prefix = 'selections.xml-', dir = self.path, delete = False, mode = 'wt')
		try:
			dom.writexml(tmp, addindent="  ", newl="\n", encoding = 'utf-8')
		except:
			tmp.close()
			os.unlink(tmp.name)
			raise
		tmp.close()
		portable_rename(tmp.name, sels_file)

		sels_latest = os.path.join(self.path, 'selections.xml')
		if os.path.exists(sels_latest):
			os.unlink(sels_latest)
		os.symlink(os.path.basename(sels_file), sels_latest)

		if set_last_checked:
			self.set_last_checked()
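
Every example on this page follows the same idiom: write the new content to a temporary file in the same directory as the target, then move it over the real file in a single rename, so other readers never see a half-written file. A minimal standalone sketch of that idiom (using the standard library's os.replace where these projects use their portable_rename helper, which also copes with Windows, where a plain os.rename refuses to overwrite an existing file):

import os
import tempfile

def atomic_write(path, data):
	# Create the temporary file next to the target so that the final
	# rename stays on one filesystem (and is atomic on POSIX).
	fd, tmp_name = tempfile.mkstemp(dir = os.path.dirname(path) or '.')
	try:
		with os.fdopen(fd, 'wb') as tmp:
			tmp.write(data)
		os.replace(tmp_name, path)	# replaces any existing file atomically
	except:
		os.unlink(tmp_name)
		raise

atomic_write('selections.xml', b'<?xml version="1.0"?>...')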
Example #2
def write_to_git(feed_path, new_xml, commit_msg, config, new_file = False):
	did_git_add = False
	git_path = relpath(feed_path, 'feeds')

	try:
		with open(feed_path + '.new', 'wb') as stream:
			stream.write(new_xml)
		support.portable_rename(feed_path + '.new', feed_path)

		# Commit
		if new_file:
			subprocess.check_call(['git', 'add', git_path], cwd = 'feeds')
			did_git_add = True

		# (this must be last in the try block)
		scm.commit('feeds', [git_path], commit_msg, key = config.GPG_SIGNING_KEY if getattr(config, 'SIGN_COMMITS', True) else None)
	except Exception as ex:
		# Roll-back (we didn't commit to Git yet)
		print(ex)
		print("Error updating feed {feed}; rolling-back...".format(feed = git_path))
		if new_file:
			if os.path.exists(feed_path):
				os.unlink(feed_path)
			if did_git_add:
				subprocess.check_call(['git', 'rm', '--', git_path], cwd = 'feeds')
		else:
			subprocess.check_call(['git', 'checkout', 'HEAD', '--', git_path], cwd = 'feeds')
		raise
Example #3
    def save(self):
        d = basedir.save_config_path(config_site, config_prog)
        db_file = os.path.join(d, 'trustdb.xml')
        if self._dry_run:
            print(
                _("[dry-run] would update trust database {file}").format(
                    file=db_file))
            return
        from xml.dom import minidom
        import tempfile

        doc = minidom.Document()
        root = doc.createElementNS(XMLNS_TRUST, 'trusted-keys')
        root.setAttribute('xmlns', XMLNS_TRUST)
        doc.appendChild(root)

        for fingerprint in self.keys:
            keyelem = doc.createElementNS(XMLNS_TRUST, 'key')
            root.appendChild(keyelem)
            keyelem.setAttribute('fingerprint', fingerprint)
            for domain in self.keys[fingerprint]:
                domainelem = doc.createElementNS(XMLNS_TRUST, 'domain')
                domainelem.setAttribute('value', domain)
                keyelem.appendChild(domainelem)

        with tempfile.NamedTemporaryFile(dir=d,
                                         prefix='trust-',
                                         delete=False,
                                         mode='wt') as tmp:
            doc.writexml(tmp,
                         indent="",
                         addindent="  ",
                         newl="\n",
                         encoding='utf-8')
        support.portable_rename(tmp.name, db_file)
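
For reference, the trust database this writes out has the following shape (the namespace, fingerprint and domain below are illustrative placeholders; the real namespace is the module's XMLNS_TRUST constant):

<?xml version="1.0" encoding="utf-8"?>
<trusted-keys xmlns="(value of XMLNS_TRUST)">
  <key fingerprint="0123456789ABCDEF0123456789ABCDEF01234567">
    <domain value="example.com"/>
  </key>
</trusted-keys>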
Example #4
    def _import_new_feed(self, feed_url, new_xml, modified_time):
        """Write new_xml into the cache.
		@param feed_url: the URL for the feed being updated
		@param new_xml: the data to write
		@param modified_time: when new_xml was modified
		@raises ReplayAttack: if the new mtime is older than the current one
		"""
        assert modified_time
        assert isinstance(new_xml, bytes), repr(new_xml)

        upstream_dir = basedir.save_cache_path(config_site, 'interfaces')
        cached = os.path.join(upstream_dir, escape(feed_url))

        old_modified = None
        if os.path.exists(cached):
            with open(cached, 'rb') as stream:
                old_xml = stream.read()
            if old_xml == new_xml:
                logger.debug(_("No change"))
                # Update in-memory copy, in case someone else updated the disk copy
                self.get_feed(feed_url, force=True)
                return
            old_modified = int(os.stat(cached).st_mtime)

        # Do we need to write this temporary file now?
        try:
            with open(cached + '.new', 'wb') as stream:
                stream.write(new_xml)
            os.utime(cached + '.new', (modified_time, modified_time))
            new_mtime = reader.check_readable(feed_url, cached + '.new')
            assert new_mtime == modified_time

            old_modified = self._get_signature_date(feed_url) or old_modified

            if old_modified:
                if new_mtime < old_modified:
                    raise ReplayAttack(
                        _("New feed's modification time is "
                          "before old version!\nInterface: %(iface)s\nOld time: %(old_time)s\nNew time: %(new_time)s\n"
                          "Refusing update.") % {
                              'iface': feed_url,
                              'old_time': _pretty_time(old_modified),
                              'new_time': _pretty_time(new_mtime)
                          })
                if new_mtime == old_modified:
                    # You used to have to update the modification time manually.
                    # Now it comes from the signature, this check isn't useful
                    # and often causes problems when the stored format changes
                    # (e.g., when we stopped writing last-modified attributes)
                    pass
                    #raise SafeException("Interface has changed, but modification time "
                    #		    "hasn't! Refusing update.")
        except:
            os.unlink(cached + '.new')
            raise

        portable_rename(cached + '.new', cached)
        logger.debug(_("Saved as %s") % cached)

        self.get_feed(feed_url, force=True)
Example #5
File: trust.py Project: dsqmoore/0install
	def save(self):
		d = basedir.save_config_path(config_site, config_prog)
		db_file = os.path.join(d, 'trustdb.xml')
		if self._dry_run:
			print(_("[dry-run] would update trust database {file}").format(file = db_file))
			return
		from xml.dom import minidom
		import tempfile

		doc = minidom.Document()
		root = doc.createElementNS(XMLNS_TRUST, 'trusted-keys')
		root.setAttribute('xmlns', XMLNS_TRUST)
		doc.appendChild(root)

		for fingerprint in self.keys:
			keyelem = doc.createElementNS(XMLNS_TRUST, 'key')
			root.appendChild(keyelem)
			keyelem.setAttribute('fingerprint', fingerprint)
			for domain in self.keys[fingerprint]:
				domainelem = doc.createElementNS(XMLNS_TRUST, 'domain')
				domainelem.setAttribute('value', domain)
				keyelem.appendChild(domainelem)

		with tempfile.NamedTemporaryFile(dir = d, prefix = 'trust-', delete = False, mode = 'wt') as tmp:
			doc.writexml(tmp, indent = "", addindent = "  ", newl = "\n", encoding = 'utf-8')
		support.portable_rename(tmp.name, db_file)
Example #6
File: apps.py Project: timdiels/0install
    def set_selections(self, sels):
        """Store a new set of selections. We include today's date in the filename
		so that we keep a history of previous selections (max one per day), in case
		we want to roll back later."""
        date = time.strftime('%Y-%m-%d')
        sels_file = os.path.join(self.path,
                                 'selections-{date}.xml'.format(date=date))
        dom = sels.toDOM()

        tmp = tempfile.NamedTemporaryFile(prefix='selections.xml-',
                                          dir=self.path,
                                          delete=False,
                                          mode='wt')
        try:
            dom.writexml(tmp, addindent="  ", newl="\n", encoding='utf-8')
        except:
            tmp.close()
            os.unlink(tmp.name)
            raise
        tmp.close()
        portable_rename(tmp.name, sels_file)

        sels_latest = os.path.join(self.path, 'selections.xml')
        if os.path.exists(sels_latest):
            os.unlink(sels_latest)
        os.symlink(os.path.basename(sels_file), sels_latest)

        self.set_last_checked()
Example #7
File: register.py Project: 0install/0repo
def handle(args):
	cmd.find_config()
	config = cmd.load_config()

	path = join(basedir.save_config_path('0install.net', '0repo'), 'repositories.json')
	if os.path.exists(path):
		with open(path, 'rb') as stream:
			db = json.load(stream)
	else:
		db = {}
	
	existing = db.get(config.REPOSITORY_BASE_URL, None)

	entry = {'type': 'local', 'path': os.getcwd()}

	if existing and existing == entry:
		print("Already registered in {path} (no changes made):\n{base}: {json}".format(
			path = path,
			base = config.REPOSITORY_BASE_URL,
			json = json.dumps(entry)))
		return

	db[config.REPOSITORY_BASE_URL] = entry

	with open(path + '.new', 'w') as stream:	# text mode: json.dump() writes str, not bytes
		json.dump(db, stream)
	support.portable_rename(path + '.new', path)
	
	if existing:
		print("Updated entry in {path} to:".format(path = path))
	else:
		print("Created new entry in {path}:".format(path = path))

	print("{base}: {json}".format(base = config.REPOSITORY_BASE_URL, json = json.dumps(entry)))
Example #8
def build_slave(src_feed, archive_file, archive_dir_public_url, target_feed):
    try:
        COMPILE = [os.environ['0COMPILE']]
    except KeyError:
        # (build slave has an old 0release)
        COMPILE = [
            '0launch', '--not-before=1.2',
            'http://0install.net/2006/interfaces/0compile.xml'
        ]

    feed = support.load_feed(src_feed)

    src_feed = os.path.abspath(src_feed)
    archive_file = os.path.abspath(archive_file)
    target_feed = os.path.abspath(target_feed)

    impl, = feed.implementations.values()

    tmpdir = tempfile.mkdtemp(prefix='0release-')
    try:
        os.chdir(tmpdir)
        depdir = os.path.join(tmpdir, 'dependencies')
        os.mkdir(depdir)

        support.unpack_tarball(archive_file)
        portable_rename(impl.download_sources[0].extract,
                        os.path.join(depdir, impl.id))

        config = ConfigParser.RawConfigParser()
        config.add_section('compile')
        config.set('compile', 'download-base-url', archive_dir_public_url)
        config.set('compile', 'version-modifier', '')
        config.set('compile', 'interface', src_feed)
        config.set('compile', 'selections', '')
        config.set('compile', 'metadir', '0install')
        stream = open(os.path.join(tmpdir, '0compile.properties'), 'w')
        try:
            config.write(stream)
        finally:
            stream.close()

        support.check_call(COMPILE + ['build'], cwd=tmpdir)
        support.check_call(COMPILE + ['publish', '--target-feed', target_feed],
                           cwd=tmpdir)

        # TODO: run unit-tests

        feed = support.load_feed(target_feed)
        impl = support.get_singleton_impl(feed)
        archive_file = support.get_archive_basename(impl)

        shutil.move(archive_file,
                    os.path.join(os.path.dirname(target_feed), archive_file))
    except:
        print "\nLeaving temporary directory %s for inspection...\n" % tmpdir
        raise
    else:
        shutil.rmtree(tmpdir)
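
The 0compile.properties file written above is a plain INI file consumed by the 0compile build invoked just below it; with illustrative values it comes out looking like this:

[compile]
download-base-url = http://example.com/archives/
version-modifier = 
interface = /tmp/0release-xyz/myprog.xml
selections = 
metadir = 0install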
Example #9
File: archives.py Project: 0install/0repo
	def save_all(self):
		with open(self.path + '.new', 'wt') as stream:
			stream.write("# Records the absolute URL of all known archives.\n"
				     "# To relocate archives, edit this file to contain the new addresses and run '0repo'.\n"
				     "# Each line is 'basename SHA1 URL'\n")

			for basename, e in sorted(self.entries.items()):
				stream.write('%s %s %s\n' % (basename, e.sha1, e.url))
		support.portable_rename(self.path + '.new', self.path)
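
The file written above is deliberately easy to parse: a '#' comment header followed by one 'basename SHA1 URL' record per line. A sketch of the reverse operation (a hypothetical loader for illustration, not one of 0repo's own functions):

def load_all(path):
	# Parse the records written by save_all() above, skipping
	# blank lines and the '#' comment header.
	entries = {}
	with open(path, 'rt') as stream:
		for line in stream:
			line = line.strip()
			if not line or line.startswith('#'):
				continue
			basename, sha1, url = line.split(' ', 2)
			entries[basename] = (sha1, url)
	return entries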
Example #10
File: build.py Project: 0install/0repo
def build_public_feeds(config):
	feeds = []
	for dirpath, dirnames, filenames in os.walk('feeds'):
		for f in filenames:
			if f.endswith('.xml') and not f.startswith('.'):
				source_path = join(dirpath, f)
				public_rel_path = paths.get_public_rel_path(config, relpath(source_path, 'feeds'))
				target_path = join("public", public_rel_path)
				new_doc = generate_public_xml(config, source_path)
				changed = True
				if os.path.exists(target_path):
					with open(target_path, 'rb') as stream:
						old_doc = minidom.parse(stream)
					if xmltools.nodes_equal(old_doc.documentElement, new_doc.documentElement):
						#print("%s unchanged" % source_path)
						changed = False
				feeds.append(PublicFeed(abspath(source_path), public_rel_path, new_doc, changed))

	if config.GPG_SIGNING_KEY:
		key_path = export_key(join('public', 'keys'), config.GPG_SIGNING_KEY)
		other_files = [relpath(key_path, 'public')]
	else:
		other_files = []

	for public_feed in feeds:
		target_path = join('public', public_feed.public_rel_path)

		target_dir = dirname(target_path)
		if not os.path.isdir(target_dir):
			os.makedirs(target_dir)

		if config.GPG_SIGNING_KEY and config.GPG_PUBLIC_KEY_DIRECTORY:
			key_symlink_rel_path = join(dirname(public_feed.public_rel_path), config.GPG_PUBLIC_KEY_DIRECTORY, basename(key_path))
			other_files.append(key_symlink_rel_path)
			key_symlink_path = join('public', key_symlink_rel_path)
			if not os.path.exists(key_symlink_path):
				if os.name == 'nt':
					import shutil
					shutil.copyfile(key_path, key_symlink_path)
				else:
					os.symlink(relpath(key_path, dirname(key_symlink_path)), key_symlink_path)
			os.stat(key_symlink_path)

		if not public_feed.changed: continue

		path_to_resources = relpath(join('public', 'resources'), dirname(target_path))
		new_xml = (feed_header % path_to_resources).encode('utf-8') + public_feed.doc.documentElement.toxml('utf-8') + b'\n'

		signed_xml = sign_xml(config, new_xml)

		with open(target_path + '.new', 'wb') as stream:
			stream.write(signed_xml)
		support.portable_rename(target_path + '.new', target_path)
		print("Updated", target_path)

	return feeds, other_files
Example #11
	def _import_new_feed(self, feed_url, new_xml, modified_time):
		"""Write new_xml into the cache.
		@param feed_url: the URL for the feed being updated
		@param new_xml: the data to write
		@param modified_time: when new_xml was modified
		@raises ReplayAttack: if the new mtime is older than the current one
		"""
		assert modified_time
		assert isinstance(new_xml, bytes), repr(new_xml)

		upstream_dir = basedir.save_cache_path(config_site, 'interfaces')
		cached = os.path.join(upstream_dir, escape(feed_url))

		old_modified = None
		if os.path.exists(cached):
			with open(cached, 'rb') as stream:
				old_xml = stream.read()
			if old_xml == new_xml:
				logger.debug(_("No change"))
				# Update in-memory copy, in case someone else updated the disk copy
				self.get_feed(feed_url, force = True)
				return
			old_modified = int(os.stat(cached).st_mtime)

		# Do we need to write this temporary file now?
		try:
			with open(cached + '.new', 'wb') as stream:
				stream.write(new_xml)
			os.utime(cached + '.new', (modified_time, modified_time))
			new_mtime = reader.check_readable(feed_url, cached + '.new')
			assert new_mtime == modified_time

			old_modified = self._get_signature_date(feed_url) or old_modified

			if old_modified:
				if new_mtime < old_modified:
					raise ReplayAttack(_("New feed's modification time is "
						"before old version!\nInterface: %(iface)s\nOld time: %(old_time)s\nNew time: %(new_time)s\n"
						"Refusing update.")
						% {'iface': feed_url, 'old_time': _pretty_time(old_modified), 'new_time': _pretty_time(new_mtime)})
				if new_mtime == old_modified:
					# You used to have to update the modification time manually.
					# Now it comes from the signature, this check isn't useful
					# and often causes problems when the stored format changes
					# (e.g., when we stopped writing last-modified attributes)
					pass
					#raise SafeException("Interface has changed, but modification time "
					#		    "hasn't! Refusing update.")
		except:
			os.unlink(cached + '.new')
			raise

		portable_rename(cached + '.new', cached)
		logger.debug(_("Saved as %s") % cached)

		self.get_feed(feed_url, force = True)
Example #12
File: build.py Project: benjamin-hg/0repo
def build_public_feeds(config):
	feeds = []
	for dirpath, dirnames, filenames in os.walk('feeds'):
		for f in filenames:
			if f.endswith('.xml') and not f.startswith('.'):
				source_path = join(dirpath, f)
				public_rel_path = paths.get_public_rel_path(config, relpath(source_path, 'feeds'))
				target_path = join("public", public_rel_path)
				new_doc = generate_public_xml(config, source_path)
				changed = True
				if os.path.exists(target_path):
					with open(target_path, 'rb') as stream:
						old_doc = minidom.parse(stream)
					if xmltools.nodes_equal(old_doc.documentElement, new_doc.documentElement):
						#print("%s unchanged" % source_path)
						changed = False
				feeds.append(PublicFeed(abspath(source_path), public_rel_path, new_doc, changed))

	key_path = export_key(join('public', 'keys'), config.GPG_SIGNING_KEY)

	other_files = [relpath(key_path, 'public')]

	for public_feed in feeds:
		target_path = join('public', public_feed.public_rel_path)

		target_dir = dirname(target_path)
		if not os.path.isdir(target_dir):
			os.makedirs(target_dir)

		if config.GPG_PUBLIC_KEY_DIRECTORY:
			key_symlink_rel_path = join(dirname(public_feed.public_rel_path), config.GPG_PUBLIC_KEY_DIRECTORY, basename(key_path))
			other_files.append(key_symlink_rel_path)
			key_symlink_path = join('public', key_symlink_rel_path)
			if not os.path.exists(key_symlink_path):
				if os.name == 'nt':
					import shutil
					shutil.copyfile(key_path, key_symlink_path)
				else:
					os.symlink(relpath(key_path, dirname(key_symlink_path)), key_symlink_path)
			os.stat(key_symlink_path)

		if not public_feed.changed: continue

		path_to_resources = relpath(join('public', 'resources'), dirname(target_path))
		new_xml = (feed_header % path_to_resources).encode('utf-8') + public_feed.doc.documentElement.toxml('utf-8') + b'\n'

		signed_xml = sign_xml(config, new_xml)

		with open(target_path + '.new', 'wb') as stream:
			stream.write(signed_xml)
		support.portable_rename(target_path + '.new', target_path)
		print("Updated", target_path)

	return feeds, other_files
Example #13
def backup_if_exists(name):
    if not os.path.exists(name):
        return
    backup = name + '~'
    if os.path.exists(backup):
        print "(deleting old backup %s)" % backup
        if os.path.isdir(backup):
            ro_rmtree(backup)
        else:
            os.unlink(backup)
    portable_rename(name, backup)
    print "(renamed old %s as %s; will delete on next run)" % (name, backup)
Example #14
File: writer.py Project: timdiels/0install
def _atomic_save(doc, parent, uri):
    import tempfile
    tmp_fd, tmp_name = tempfile.mkstemp(dir=parent)
    try:
        tmp_file = os.fdopen(tmp_fd, 'w')
        doc.writexml(tmp_file, addindent=" ", newl='\n')
        tmp_file.close()
        path = os.path.join(parent, model._pretty_escape(uri))
        support.portable_rename(tmp_name, path)
    except:
        os.unlink(tmp_name)
        raise
Example #15
	def save(self):
		tmp_name = release_status_file + '.new'
		tmp = open(tmp_name, 'w')
		try:
			lines = ["%s=%s\n" % (name, getattr(self, name)) for name in self.__slots__ if getattr(self, name)]
			tmp.write(''.join(lines))
			tmp.close()
			portable_rename(tmp_name, release_status_file)
			info("Wrote status to %s", release_status_file)
		except:
			os.unlink(tmp_name)
			raise
Example #16
def backup_if_exists(name):
	if not os.path.exists(name):
		return
	backup = name + '~'
	if os.path.exists(backup):
		print "(deleting old backup %s)" % backup
		if os.path.isdir(backup):
			ro_rmtree(backup)
		else:
			os.unlink(backup)
	portable_rename(name, backup)
	print "(renamed old %s as %s; will delete on next run)" % (name, backup)
Example #17
def _atomic_save(doc, parent, uri):
	import tempfile
	tmp_fd, tmp_name = tempfile.mkstemp(dir = parent)
	try:
		tmp_file = os.fdopen(tmp_fd, 'w')
		doc.writexml(tmp_file, addindent = " ", newl = '\n')
		tmp_file.close()
		path = os.path.join(parent, model._pretty_escape(uri))
		support.portable_rename(tmp_name, path)
	except:
		os.unlink(tmp_name)
		raise
Example #18
def sign_xml(path, data, key):
    tmp = write_tmp(path, data)
    sigtmp = tmp + '.sig'
    try:
        run_gpg(key, '--detach-sign', '--output', sigtmp, tmp)
    finally:
        os.unlink(tmp)
    with open(sigtmp, 'rb') as stream:
        encoded = base64.encodestring(stream.read())
    os.unlink(sigtmp)
    sig = b"<!-- Base64 Signature\n" + encoded + b"\n-->\n"
    support.portable_rename(write_tmp(path, data + sig), path)
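
sign_xml() above embeds the detached GPG signature in the feed itself, as a base64-encoded XML comment appended after the document. A sketch of splitting such a feed back apart (an illustrative helper, not part of 0repo; it mirrors the rfind() used by the process() examples later on this page):

import base64

def split_signed_feed(data):
    # Separate a signed feed into the plain XML and the decoded
    # detached signature; returns (data, None) if it is unsigned.
    marker = b'\n<!-- Base64 Signature'
    idx = data.rfind(marker)
    if idx == -1:
        return data, None
    sig_b64 = data[idx + len(marker):data.rindex(b'-->')]
    return data[:idx], base64.b64decode(sig_b64)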
Example #19
def build_slave(src_feed, archive_file, archive_dir_public_url, target_feed):
	try:
		COMPILE = [os.environ['0COMPILE']]
	except KeyError:
		# (build slave has an old 0release)
		COMPILE = ['0launch', '--not-before=0.30', 'http://0install.net/2006/interfaces/0compile.xml']

	feed = support.load_feed(src_feed)

	src_feed = os.path.abspath(src_feed)
	archive_file = os.path.abspath(archive_file)
	target_feed = os.path.abspath(target_feed)

	impl, = feed.implementations.values()

	tmpdir = tempfile.mkdtemp(prefix = '0release-')
	try:
		os.chdir(tmpdir)
		depdir = os.path.join(tmpdir, 'dependencies')
		os.mkdir(depdir)

		support.unpack_tarball(archive_file)
		portable_rename(impl.download_sources[0].extract, os.path.join(depdir, impl.id))

		config = ConfigParser.RawConfigParser()
		config.add_section('compile')
		config.set('compile', 'download-base-url', archive_dir_public_url)
		config.set('compile', 'version-modifier', '')
		config.set('compile', 'interface', src_feed)
		config.set('compile', 'selections', '')
		config.set('compile', 'metadir', '0install')
		stream = open(os.path.join(tmpdir, '0compile.properties'), 'w')
		try:
			config.write(stream)
		finally:
			stream.close()

		support.check_call(COMPILE + ['build'], cwd = tmpdir)
		support.check_call(COMPILE + ['publish', '--target-feed', target_feed], cwd = tmpdir)

		# TODO: run unit-tests

		feed = support.load_feed(target_feed)
		impl = support.get_singleton_impl(feed)
		archive_file = support.get_archive_basename(impl)

		shutil.move(archive_file, os.path.join(os.path.dirname(target_feed), archive_file))
	except:
		print "\nLeaving temporary directory %s for inspection...\n" % tmpdir
		raise
	else:
		shutil.rmtree(tmpdir)
Example #20
File: apps.py Project: dabrahams/0install
	def set_requirements(self, requirements):
		import json
		tmp = tempfile.NamedTemporaryFile(prefix = 'tmp-requirements-', dir = self.path, delete = False, mode = 'wt')
		try:
			json.dump(dict((key, getattr(requirements, key)) for key in requirements.__slots__), tmp)
		except:
			tmp.close()
			os.unlink(tmp.name)
			raise
		tmp.close()

		reqs_file = os.path.join(self.path, 'requirements.json')
		portable_rename(tmp.name, reqs_file)
Example #21
File: catalog.py Project: 0install/0repo
def write_catalog(config, feeds, dir_rel_path):
	cat_ns = namespace.Namespace()
	cat_ns.register_namespace(XMLNS_CATALOG, "c")

	impl = minidom.getDOMImplementation()
	cat_doc = impl.createDocument(XMLNS_CATALOG, "c:catalog", None)
	cat_root = cat_doc.documentElement
	cat_root.setAttributeNS(XMLNS_NAMESPACE, 'xmlns:c', XMLNS_CATALOG)
	cat_root.setAttributeNS(XMLNS_NAMESPACE, 'xmlns', XMLNS_IFACE)

	custom_tags = {}
	for (name, ns, tags) in getattr(config, 'ADDITIONAL_CATALOG_TAGS', []):
		cat_ns.register_namespace(ns, name)
		cat_root.setAttributeNS(XMLNS_NAMESPACE, 'xmlns:' + name, ns)
		custom_tags[ns] = tags

	feed_roots = [feed.doc.documentElement for feed in feeds]

	def get_name(feed_root):
		return feed_root.getElementsByTagName('name')[0].firstChild.wholeText

	is_excluded_from_catalog = getattr(config, 'is_excluded_from_catalog', _default_is_excluded_from_catalog)

	for feed_root in sorted(feed_roots, key=get_name):
		if is_excluded_from_catalog(feed_root, dir_rel_path): continue
		elem = cat_doc.createElementNS(XMLNS_IFACE, "interface")
		elem.setAttribute('uri', feed_root.getAttribute("uri"))
		for feed_elem in feed_root.childNodes:
			ns = feed_elem.namespaceURI
			if ((ns == XMLNS_IFACE and feed_elem.localName in catalog_names) or
				(ns in custom_tags and feed_elem.localName in custom_tags[ns])):
				elem.appendChild(cat_ns.import_node(cat_doc, feed_elem))
		cat_root.appendChild(elem)

	catalog_file = join('public', dir_rel_path, 'catalog.xml')

	need_update = True
	if os.path.exists(catalog_file):
		with open(catalog_file, 'rb') as stream:
			old_catalog = minidom.parse(stream)
		need_update = not xmltools.nodes_equal(old_catalog.documentElement, cat_doc.documentElement)

	if need_update:
		path_to_resources = relpath('resources', dir_rel_path).replace(os.sep, '/').encode()
		new_data = build.sign_xml(config, (catalog_header % path_to_resources) + cat_doc.documentElement.toxml(encoding = 'utf-8') + b'\n')
		with open(catalog_file + '.new', 'wb') as stream:
			stream.write(new_data)
		support.portable_rename(catalog_file + '.new', catalog_file)
		print("Updated " + catalog_file)

	return join(dir_rel_path, 'catalog.xml')
Example #22
File: catalog.py Project: 0install/0repo
def write_catalog(config, feeds):
	cat_ns = namespace.Namespace()
	cat_ns.register_namespace(XMLNS_CATALOG, "c")

	impl = minidom.getDOMImplementation()
	cat_doc = impl.createDocument(XMLNS_CATALOG, "c:catalog", None)
	cat_root = cat_doc.documentElement
	cat_root.setAttributeNS(XMLNS_NAMESPACE, 'xmlns:c', XMLNS_CATALOG)
	cat_root.setAttributeNS(XMLNS_NAMESPACE, 'xmlns', XMLNS_IFACE)

	custom_tags = {}
	for (name, ns, tags) in getattr(config, 'ADDITIONAL_CATALOG_TAGS', []):
		cat_ns.register_namespace(ns, name)
		cat_root.setAttributeNS(XMLNS_NAMESPACE, 'xmlns:' + name, ns)
		custom_tags[ns] = tags

	feed_roots = [feed.doc.documentElement for feed in feeds]

	def get_name(feed_root):
		return feed_root.getElementsByTagName('name')[0].firstChild.wholeText

	def is_replaced(feed_root):
		return feed_root.getElementsByTagName('replaced-by').length > 0

	for feed_root in sorted(feed_roots, key=get_name):
		if is_replaced(feed_root): continue
		elem = cat_doc.createElementNS(XMLNS_IFACE, "interface")
		elem.setAttribute('uri', feed_root.getAttribute("uri"))
		for feed_elem in feed_root.childNodes:
			ns = feed_elem.namespaceURI
			if ((ns == XMLNS_IFACE and feed_elem.localName in catalog_names) or
				(ns in custom_tags and feed_elem.localName in custom_tags[ns])):
				elem.appendChild(cat_ns.import_node(cat_doc, feed_elem))
		cat_root.appendChild(elem)
	
	catalog_file = join('public', 'catalog.xml')

	need_update = True
	if os.path.exists(catalog_file):
		with open(catalog_file, 'rb') as stream:
			old_catalog = minidom.parse(stream)
		need_update = not xmltools.nodes_equal(old_catalog.documentElement, cat_doc.documentElement)

	if need_update:
		new_data = build.sign_xml(config, catalog_header + cat_doc.documentElement.toxml(encoding = 'utf-8') + '\n')
		with open(catalog_file + '.new', 'wb') as stream:
			stream.write(new_data)
		support.portable_rename(catalog_file + '.new', catalog_file)
		print("Updated catalog.xml")

	return ['catalog.xml']
Example #23
    def build_binaries(self):
        if not self.targets: return

        print "Source package, so generating binaries..."

        archive_file = support.get_archive_basename(self.src_impl)

        for target in self.targets:
            start = self.get('builder-' + target, 'start', None)
            command = self.config.get('builder-' + target, 'build')
            stop = self.get('builder-' + target, 'stop', None)

            binary_feed = 'binary-' + target + '.xml'
            if os.path.exists(binary_feed):
                print "Feed %s already exists; not rebuilding" % binary_feed
            else:
                print "\nBuilding binary with builder '%s' ...\n" % target

                if start: support.show_and_run(start, [])
                try:
                    args = [
                        os.path.basename(self.src_feed_name), archive_file,
                        self.archive_dir_public_url, binary_feed + '.new'
                    ]
                    if not command:
                        assert target == 'host', 'Missing build command'
                        support.check_call(
                            [sys.executable, sys.argv[0], '--build-slave'] +
                            args)
                    else:
                        support.show_and_run(command, args)
                finally:
                    if stop: support.show_and_run(stop, [])

                bin_feed = support.load_feed(binary_feed + '.new')
                bin_impl = support.get_singleton_impl(bin_feed)
                bin_archive_file = support.get_archive_basename(bin_impl)
                bin_size = bin_impl.download_sources[0].size

                assert os.path.exists(bin_archive_file), \
                    "Compiled binary '%s' not found!" % os.path.abspath(bin_archive_file)
                assert os.path.getsize(bin_archive_file) == bin_size, \
                    "Compiled binary '%s' has wrong size!" % os.path.abspath(bin_archive_file)

                portable_rename(binary_feed + '.new', binary_feed)
Example #24
	def save_globals(self):
		"""Write global settings."""
		parser = ConfigParser.ConfigParser()
		parser.add_section('global')

		parser.set('global', 'help_with_testing', str(self.help_with_testing))
		parser.set('global', 'network_use', self.network_use)
		parser.set('global', 'freshness', str(self.freshness))
		parser.set('global', 'auto_approve_keys', str(self.auto_approve_keys))

		path = basedir.save_config_path(config_site, config_prog)
		path = os.path.join(path, 'global')
		with open(path + '.new', 'wt') as stream:
			parser.write(stream)
		support.portable_rename(path + '.new', path)
Example #25
    def save_globals(self):
        """Write global settings."""
        parser = ConfigParser.ConfigParser()
        parser.add_section('global')

        parser.set('global', 'help_with_testing', str(self.help_with_testing))
        parser.set('global', 'network_use', self.network_use)
        parser.set('global', 'freshness', str(self.freshness))
        parser.set('global', 'auto_approve_keys', str(self.auto_approve_keys))

        path = basedir.save_config_path(config_site, config_prog)
        path = os.path.join(path, 'global')
        with open(path + '.new', 'wt') as stream:
            parser.write(stream)
        support.portable_rename(path + '.new', path)
Example #26
 def save(self):
     tmp_name = release_status_file + '.new'
     tmp = open(tmp_name, 'w')
     try:
         lines = [
             "%s=%s\n" % (name, getattr(self, name))
             for name in self.__slots__ if getattr(self, name)
         ]
         tmp.write(''.join(lines))
         tmp.close()
         portable_rename(tmp_name, release_status_file)
         info("Wrote status to %s", release_status_file)
     except:
         os.unlink(tmp_name)
         raise
Example #27
File: apps.py Project: dsqmoore/0install
	def set_requirements(self, requirements):
		reqs_file = os.path.join(self.path, 'requirements.json')
		if self.config.handler.dry_run:
			print(_("[dry-run] would write {file}").format(file = reqs_file))
		else:
			import json
			tmp = tempfile.NamedTemporaryFile(prefix = 'tmp-requirements-', dir = self.path, delete = False, mode = 'wt')
			try:
				json.dump(dict((key, getattr(requirements, key)) for key in requirements.__slots__), tmp)
			except:
				tmp.close()
				os.unlink(tmp.name)
				raise
			tmp.close()

			portable_rename(tmp.name, reqs_file)
Example #28
File: distro.py Project: afb/0install
	def flush(self):
		# Wipe the cache
		try:
			info = os.stat(self.source)
			mtime = int(info.st_mtime)
			size = info.st_size
		except Exception as ex:
			logger.warning("Failed to stat %s: %s", self.source, ex)
			mtime = size = 0
		self.cache = {}
		import tempfile
		tmp = tempfile.NamedTemporaryFile(mode = 'wt', dir = self.cache_dir, delete = False)
		tmp.write("mtime=%d\nsize=%d\nformat=%d\n\n" % (mtime, size, self.format))
		tmp.close()
		portable_rename(tmp.name, os.path.join(self.cache_dir, self.cache_leaf))

		self._load_cache()
Example #29
    def set_selections(self, sels, set_last_checked=True):
        """Store a new set of selections. We include today's date in the filename
		so that we keep a history of previous selections (max one per day), in case
		we want to roll back later.
		@type sels: L{zeroinstall.injector.selections.Selections}
		@type set_last_checked: bool"""
        date = time.strftime('%Y-%m-%d')
        sels_file = os.path.join(self.path,
                                 'selections-{date}.xml'.format(date=date))
        dom = sels.toDOM()

        if self.config.handler.dry_run:
            print(
                _("[dry-run] would write selections to {file}").format(
                    file=sels_file))
        else:
            tmp = tempfile.NamedTemporaryFile(prefix='selections.xml-',
                                              dir=self.path,
                                              delete=False,
                                              mode='wt')
            try:
                dom.writexml(tmp, addindent="  ", newl="\n", encoding='utf-8')
            except:
                tmp.close()
                os.unlink(tmp.name)
                raise
            tmp.close()
            portable_rename(tmp.name, sels_file)

        sels_latest = os.path.join(self.path, 'selections.xml')
        if self.config.handler.dry_run:
            print(
                _("[dry-run] would update {link} to point to new selections file"
                  ).format(link=sels_latest))
        else:
            if os.path.exists(sels_latest):
                os.unlink(sels_latest)
            if os.name == "nt":
                import shutil
                shutil.copyfile(sels_file, sels_latest)
            else:
                os.symlink(os.path.basename(sels_file), sels_latest)

        if set_last_checked:
            self.set_last_checked()
Example #30
File: apps.py Project: timdiels/0install
    def set_requirements(self, requirements):
        import json
        tmp = tempfile.NamedTemporaryFile(prefix='tmp-requirements-',
                                          dir=self.path,
                                          delete=False,
                                          mode='wt')
        try:
            json.dump(
                dict((key, getattr(requirements, key))
                     for key in requirements.__slots__), tmp)
        except:
            tmp.close()
            os.unlink(tmp.name)
            raise
        tmp.close()

        reqs_file = os.path.join(self.path, 'requirements.json')
        portable_rename(tmp.name, reqs_file)
Example #31
def process_archives(parent):
    for elem in parent.childNodes:
        if elem.namespaceURI != namespaces.XMLNS_IFACE:
            continue

        if elem.localName in ("archive", "file"):
            # Download the archive if missing
            href = elem.getAttribute("href")
            assert href, "missing href on <archive>"
            local_copy = os.path.join(template_dir, os.path.basename(href))
            if not os.path.exists(local_copy):
                print("Downloading {href} to {local_copy}".format(**locals()))
                req = request.urlopen(href)
                with open(local_copy + ".part", "wb") as local_stream:
                    shutil.copyfileobj(req, local_stream)
                support.portable_rename(local_copy + ".part", local_copy)
                req.close()

            # Set the size attribute
            elem.setAttribute("size", str(os.stat(local_copy).st_size))

            if elem.localName == "archive":
                if not elem.hasAttribute("extract"):
                    # Unpack (a rather inefficient way to guess the 'extract' attribute)
                    tmpdir = unpack.unpack_to_tmp(href, local_copy, elem.getAttribute("type"))
                    try:
                        unpack_dir = os.path.join(tmpdir, "unpacked")

                        # Set the extract attribute
                        extract = unpack.guess_extract(unpack_dir)
                        if extract:
                            elem.setAttribute("extract", extract)
                            unpack_dir = os.path.join(unpack_dir, extract)
                            assert os.path.isdir(unpack_dir), "Not a directory: {dir}".format(dir=unpack_dir)
                    finally:
                        support.ro_rmtree(tmpdir)
                else:
                    extract = elem.getAttribute("extract")
                    if extract == "":
                        # Remove empty element
                        elem.removeAttribute("extract")

        elif elem.localName == "recipe":
            process_archives(elem)
Example #32
File: fetch.py Project: timdiels/0install
		def download_and_add_icon():
			stream = dl.tempfile
			try:
				yield dl.downloaded
				tasks.check(dl.downloaded)
				if dl.unmodified: return
				stream.seek(0)

				import shutil, tempfile
				icons_cache = basedir.save_cache_path(config_site, 'interface_icons')

				tmp_file = tempfile.NamedTemporaryFile(dir = icons_cache, delete = False)
				shutil.copyfileobj(stream, tmp_file)
				tmp_file.close()

				icon_file = os.path.join(icons_cache, escape(interface.uri))
				portable_rename(tmp_file.name, icon_file)
			finally:
				stream.close()
Example #33
    def _write_cache(self, cache):
        #cache.sort() 	# Might be useful later; currently we don't care
        import tempfile
        fd, tmpname = tempfile.mkstemp(prefix='zeroinstall-cache-tmp',
                                       dir=self.cache_dir)
        try:
            stream = os.fdopen(fd, 'wt')
            stream.write('version: 2\n')
            stream.write('mtime: %d\n' % int(self._status_details.st_mtime))
            stream.write('size: %d\n' % self._status_details.st_size)
            stream.write('\n')
            for line in cache:
                stream.write(line + '\n')
            stream.close()

            portable_rename(tmpname,
                            os.path.join(self.cache_dir, self.cache_leaf))
        except:
            os.unlink(tmpname)
            raise
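
A sketch of the matching read side (illustrative only; the real loader in 0install differs) that accepts the cached lines only while the recorded version/mtime/size headers still describe the current status file:

def read_cache(path, expected_mtime, expected_size):
    # Parse the header block written by _write_cache() above; a blank
    # line separates it from the cached entries that follow.
    with open(path, 'rt') as stream:
        headers = {}
        for line in stream:
            line = line.rstrip('\n')
            if not line:
                break
            key, value = line.split(': ', 1)
            headers[key] = value
        if (headers.get('version') != '2'
                or headers.get('mtime') != str(expected_mtime)
                or headers.get('size') != str(expected_size)):
            return None	# stale: caller should rebuild the cache
        return [line.rstrip('\n') for line in stream]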
Example #34
    def flush(self):
        # Wipe the cache
        try:
            info = os.stat(self.source)
            mtime = int(info.st_mtime)
            size = info.st_size
        except Exception as ex:
            logger.warn("Failed to stat %s: %s", self.source, ex)
            mtime = size = 0
        self.cache = {}
        import tempfile
        tmp = tempfile.NamedTemporaryFile(mode='wt',
                                          dir=self.cache_dir,
                                          delete=False)
        tmp.write("mtime=%d\nsize=%d\nformat=%d\n\n" %
                  (mtime, size, self.format))
        tmp.close()
        portable_rename(tmp.name, os.path.join(self.cache_dir,
                                               self.cache_leaf))

        self._load_cache()
Example #35
File: distro.py Project: dsqmoore/0install
	def _write_cache(self, cache):
		#cache.sort() 	# Might be useful later; currently we don't care
		import tempfile
		fd, tmpname = tempfile.mkstemp(prefix = 'zeroinstall-cache-tmp',
					       dir = self.cache_dir)
		try:
			stream = os.fdopen(fd, 'wt')
			stream.write('version: 2\n')
			stream.write('mtime: %d\n' % int(self._status_details.st_mtime))
			stream.write('size: %d\n' % self._status_details.st_size)
			stream.write('\n')
			for line in cache:
				stream.write(line + '\n')
			stream.close()

			portable_rename(tmpname,
				  os.path.join(self.cache_dir,
					       self.cache_leaf))
		except:
			os.unlink(tmpname)
			raise
Example #36
    def set_requirements(self, requirements):
        """@type requirements: L{zeroinstall.injector.requirements.Requirements}"""
        reqs_file = os.path.join(self.path, 'requirements.json')
        if self.config.handler.dry_run:
            print(_("[dry-run] would write {file}").format(file=reqs_file))
        else:
            import json
            tmp = tempfile.NamedTemporaryFile(prefix='tmp-requirements-',
                                              dir=self.path,
                                              delete=False,
                                              mode='wt')
            try:
                json.dump(
                    dict((key, getattr(requirements, key))
                         for key in requirements.__slots__), tmp)
            except:
                tmp.close()
                os.unlink(tmp.name)
                raise
            tmp.close()

            portable_rename(tmp.name, reqs_file)
Example #37
	def build_binaries(self):
		if not self.targets: return

		print "Source package, so generating binaries..."

		archive_file = support.get_archive_basename(self.src_impl)

		for target in self.targets:
			start = self.get('builder-' + target, 'start', None)
			command = self.config.get('builder-' + target, 'build')
			stop = self.get('builder-' + target, 'stop', None)

			binary_feed = 'binary-' + target + '.xml'
			if os.path.exists(binary_feed):
				print "Feed %s already exists; not rebuilding" % binary_feed
			else:
				print "\nBuilding binary with builder '%s' ...\n" % target

				if start: support.show_and_run(start, [])
				try:
					args = [os.path.basename(self.src_feed_name), archive_file, self.archive_dir_public_url, binary_feed + '.new']
					if not command:
						assert target == 'host', 'Missing build command'
						support.check_call([sys.executable, sys.argv[0], '--build-slave'] + args)
					else:
						support.show_and_run(command, args)
				finally:
					if stop: support.show_and_run(stop, [])

				bin_feed = support.load_feed(binary_feed + '.new')
				bin_impl = support.get_singleton_impl(bin_feed)
				bin_archive_file = support.get_archive_basename(bin_impl)
				bin_size = bin_impl.download_sources[0].size

				assert os.path.exists(bin_archive_file), "Compiled binary '%s' not found!" % os.path.abspath(bin_archive_file)
				assert os.path.getsize(bin_archive_file) == bin_size, "Compiled binary '%s' has wrong size!" % os.path.abspath(bin_archive_file)

				portable_rename(binary_feed + '.new', binary_feed)
Example #38
	def set_selections(self, sels, set_last_checked = True):
		"""Store a new set of selections. We include today's date in the filename
		so that we keep a history of previous selections (max one per day), in case
		we want to roll back later.
		@type sels: L{zeroinstall.injector.selections.Selections}
		@type set_last_checked: bool"""
		date = time.strftime('%Y-%m-%d')
		sels_file = os.path.join(self.path, 'selections-{date}.xml'.format(date = date))
		dom = sels.toDOM()

		if self.config.handler.dry_run:
			print(_("[dry-run] would write selections to {file}").format(file = sels_file))
		else:
			tmp = tempfile.NamedTemporaryFile(prefix = 'selections.xml-', dir = self.path, delete = False, mode = 'wt')
			try:
				dom.writexml(tmp, addindent="  ", newl="\n", encoding = 'utf-8')
			except:
				tmp.close()
				os.unlink(tmp.name)
				raise
			tmp.close()
			portable_rename(tmp.name, sels_file)

		sels_latest = os.path.join(self.path, 'selections.xml')
		if self.config.handler.dry_run:
			print(_("[dry-run] would update {link} to point to new selections file").format(link = sels_latest))
		else:
			if os.path.exists(sels_latest):
				os.unlink(sels_latest)
			if os.name == "nt":
				import shutil
				shutil.copyfile(sels_file, sels_latest)
			else:
				os.symlink(os.path.basename(sels_file), sels_latest)

		if set_last_checked:
			self.set_last_checked()
Example #39
File: register.py Project: 0install/0repo
def handle(args):
    cmd.find_config()
    config = cmd.load_config()

    path = join(basedir.save_config_path('0install.net', '0repo'),
                'repositories.json')
    if os.path.exists(path):
        with open(path, 'rb') as stream:
            db = json.load(stream)
    else:
        db = {}

    existing = db.get(config.REPOSITORY_BASE_URL, None)

    entry = {'type': 'local', 'path': os.getcwd()}

    if existing and existing == entry:
        print(
            "Already registered in {path} (no changes made):\n{base}: {json}".
            format(path=path,
                   base=config.REPOSITORY_BASE_URL,
                   json=json.dumps(entry)))
        return

    db[config.REPOSITORY_BASE_URL] = entry

    with open(path + '.new', 'w') as stream:
        json.dump(db, stream)
    support.portable_rename(path + '.new', path)

    if existing:
        print("Updated entry in {path} to:".format(path=path))
    else:
        print("Created new entry in {path}:".format(path=path))

    print("{base}: {json}".format(base=config.REPOSITORY_BASE_URL,
                                  json=json.dumps(entry)))
Example #40
def sign_unsigned(path, data, key):
    support.portable_rename(write_tmp(path, data), path)
Example #41
    def _import_new_feed(self, feed_url, new_xml, modified_time, dry_run):
        """Write new_xml into the cache.
		@param feed_url: the URL for the feed being updated
		@type feed_url: str
		@param new_xml: the data to write
		@type new_xml: bytes
		@param modified_time: when new_xml was modified
		@type modified_time: int
		@type dry_run: bool
		@raises ReplayAttack: if the new mtime is older than the current one"""
        assert modified_time
        assert isinstance(new_xml, bytes), repr(new_xml)

        upstream_dir = basedir.save_cache_path(config_site, "interfaces")
        cached = os.path.join(upstream_dir, escape(feed_url))

        old_modified = None
        if os.path.exists(cached):
            with open(cached, "rb") as stream:
                old_xml = stream.read()
            if old_xml == new_xml:
                logger.debug(_("No change"))
                # Update in-memory copy, in case someone else updated the disk copy
                self.get_feed(feed_url, force=True)
                return
            old_modified = int(os.stat(cached).st_mtime)

        if dry_run:
            print(_("[dry-run] would cache feed {url} as {cached}").format(url=feed_url, cached=cached))
            from io import BytesIO
            from zeroinstall.injector import qdom

            root = qdom.parse(BytesIO(new_xml), filter_for_version=True)
            feed = model.ZeroInstallFeed(root)
            reader.update_user_feed_overrides(feed)
            self._feeds[feed_url] = feed
            return

        # Do we need to write this temporary file now?
        try:
            with open(cached + ".new", "wb") as stream:
                stream.write(new_xml)
            os.utime(cached + ".new", (modified_time, modified_time))
            new_mtime = reader.check_readable(feed_url, cached + ".new")
            assert new_mtime == modified_time

            old_modified = self._get_signature_date(feed_url) or old_modified

            if old_modified:
                if new_mtime < old_modified:
                    raise ReplayAttack(
                        _(
                            "New feed's modification time is "
                            "before old version!\nInterface: %(iface)s\nOld time: %(old_time)s\nNew time: %(new_time)s\n"
                            "Refusing update."
                        )
                        % {
                            "iface": feed_url,
                            "old_time": _pretty_time(old_modified),
                            "new_time": _pretty_time(new_mtime),
                        }
                    )
                if new_mtime == old_modified:
                    # You used to have to update the modification time manually.
                    # Now it comes from the signature, this check isn't useful
                    # and often causes problems when the stored format changes
                    # (e.g., when we stopped writing last-modified attributes)
                    pass
                    # raise SafeException("Interface has changed, but modification time "
                    # 		    "hasn't! Refusing update.")
        except:
            os.unlink(cached + ".new")
            raise

        portable_rename(cached + ".new", cached)
        logger.debug(_("Saved as %s") % cached)

        self.get_feed(feed_url, force=True)
Example #42
def process(config, xml_file, delete_on_success):
	# Step 1 : check everything looks sensible, reject if not

	with open(xml_file, 'rb') as stream:
		xml_text = stream.read()
		sig_index = xml_text.rfind(b'\n<!-- Base64 Signature')	# xml_text is bytes
		if sig_index != -1:
			stream.seek(0)
			stream, sigs = gpg.check_stream(stream)
		else:
			sig_index = len(xml_text)
			sigs = []
		root = qdom.parse(BytesIO(xml_text))

	master = get_feed_url(root, xml_file)
	import_master = 'uri' in root.attrs

	if not import_master:
		root.attrs['uri'] = master	# (hack so we can parse it here without setting local_path)

	# Check signatures are valid
	if config.CONTRIBUTOR_GPG_KEYS is not None:
		for sig in sigs:
			if isinstance(sig, gpg.ValidSig) and sig.fingerprint in config.CONTRIBUTOR_GPG_KEYS:
				break
		else:
			raise SafeException("No trusted signatures on feed {path}; signatures were: {sigs}".format(
				path = xml_file,
				sigs = ', '.join([str(s) for s in sigs])))

	feed = model.ZeroInstallFeed(root)

	# Perform custom checks defined by the repository owner
	for impl in feed.implementations.values():
		problem = config.check_new_impl(impl)
		if problem:
			raise SafeException("{problem} in {xml_file}\n(this check was configured in {config}: check_new_impl())".format(
				problem = problem, xml_file = xml_file, config = config.__file__))

	feeds_rel_path = paths.get_feeds_rel_path(config, master)
	feed_path = join("feeds", feeds_rel_path)
	feed_dir = dirname(feed_path)
	if not os.path.isdir(feed_dir):
		os.makedirs(feed_dir)

	scm.ensure_no_uncommitted_changes(feed_path)

	if import_master:
		if os.path.exists(feed_path):
			with open(feed_path, 'rb') as stream:
				existing = stream.read()
			if existing == xml_text[:sig_index]:
				print("Already imported {feed}; skipping".format(feed = feed_path))
				if delete_on_success:
					os.unlink(xml_file)
				return None
			else:
				raise SafeException("Can't import '{url}'; non-identical feed {path} already exists.\n\n"
						    "To ADD new versions to this feed, remove the a 'uri' attribute from "
						    "the root element in {new}.\n\n"
						    "To EDIT the feed, just edit {path} directly rather than trying to add it again.\n\n"
						    "To RE-UPLOAD the archives, do that manually and then edit archives.db."
						    .format(url = feed.url, new = xml_file, path = feed_path))

	# Calculate commit message
	if import_master:
		name = basename(xml_file)
		if name == 'feed.xml':
			name = basename(dirname(xml_file))
		action = 'Imported {file}'.format(file = name)
	else:
		versions = set(i.get_version() for i in feed.implementations.values())
		action = 'Added {name} {versions}'.format(name = feed.get_name(), versions = ', '.join(versions))
	commit_msg = '%s\n\n%s' % (action, xml_text.decode('utf-8'))

	# Calculate new XML
	new_file = not os.path.exists(feed_path)
	git_path = relpath(feed_path, 'feeds')

	if import_master:
		assert new_file
		new_xml = xml_text[:sig_index]
	elif new_file:
		new_xml = create_from_local(master, xml_file)
	else:
		# Merge into existing feed
		try:
			new_doc = merge.merge_files(master, feed_path, xml_file)
		except merge.DuplicateIDException as ex:
			# Did we already import this XML? Compare with the last Git log entry.
			msg, previous_commit_xml = get_last_commit(git_path)
			if previous_commit_xml == xml_text:
				print("Already merged this into {feed}; skipping".format(feed = feed_path))
				return msg
			raise ex

		new_xml = None	# (will regenerate from new_doc below)

	# Step 2 : upload archives to hosting

	processed_archives = archives.process_archives(config, incoming_dir = dirname(xml_file), feed = feed)

	# Step 3 : merge XML into feeds directory

	# Prompt about existing testing versions
	if new_xml is None:
		new_versions = frozenset(impl.get_version() for impl in feed.implementations.values())
		if len(new_versions) == 1:
			ask_if_previous_still_testing(new_doc, list(new_versions)[0])
		new_xml = formatting.format_doc(new_doc)

	did_git_add = False

	try:
		with open(feed_path + '.new', 'wb') as stream:
			stream.write(new_xml)
		support.portable_rename(feed_path + '.new', feed_path)

		# Commit
		if new_file:
			subprocess.check_call(['git', 'add', git_path], cwd = 'feeds')
			did_git_add = True

		# (this must be last in the try block)
		scm.commit('feeds', [git_path], commit_msg, key = config.GPG_SIGNING_KEY)
	except Exception as ex:
		# Roll-back (we didn't commit to Git yet)
		print(ex)
		print("Error updating feed {feed}; rolling-back...".format(feed = xml_file))
		if new_file:
			if os.path.exists(feed_path):
				os.unlink(feed_path)
			if did_git_add:
				subprocess.check_call(['git', 'rm', '--', git_path], cwd = 'feeds')
		else:
			subprocess.check_call(['git', 'checkout', 'HEAD', '--', git_path], cwd = 'feeds')
		raise

	# Delete XML and archives from incoming directory
	if delete_on_success:
		os.unlink(xml_file)
		for archive in processed_archives:
			os.unlink(archive.incoming_path)

	return commit_msg.split('\n', 1)[0]
Example #43
def process(config, xml_file, delete_on_success):
	# Step 1 : check everything looks sensible, reject if not

	with open(xml_file, 'rb') as stream:
		xml_text = stream.read()
		sig_index = xml_text.rfind(b'\n<!-- Base64 Signature')	# xml_text is bytes
		if sig_index != -1:
			stream.seek(0)
			stream, sigs = gpg.check_stream(stream)
		else:
			sig_index = len(xml_text)
			sigs = []
		root = qdom.parse(BytesIO(xml_text))

	master = get_feed_url(root, xml_file)
	import_master = 'uri' in root.attrs

	if not import_master:
		root.attrs['uri'] = master	# (hack so we can parse it here without setting local_path)

	# Check signatures are valid
	if config.CONTRIBUTOR_GPG_KEYS is not None:
		for sig in sigs:
			if isinstance(sig, gpg.ValidSig) and sig.fingerprint in config.CONTRIBUTOR_GPG_KEYS:
				break
		else:
			raise SafeException("No trusted signatures on feed {path}; signatures were: {sigs}".format(
				path = xml_file,
				sigs = ', '.join([str(s) for s in sigs])))

	feed = model.ZeroInstallFeed(root)

	# Perform custom checks defined by the repository owner
	for impl in feed.implementations.values():
		problem = config.check_new_impl(impl)
		if problem:
			raise SafeException("{problem} in {xml_file}\n(this check was configured in {config}: check_new_impl())".format(
				problem = problem, xml_file = xml_file, config = config.__file__))

	feeds_rel_path = paths.get_feeds_rel_path(config, master)
	feed_path = join("feeds", feeds_rel_path)
	feed_dir = dirname(feed_path)
	if not os.path.isdir(feed_dir):
		os.makedirs(feed_dir)

	scm.ensure_no_uncommitted_changes(feed_path)

	if import_master:
		if os.path.exists(feed_path):
			with open(feed_path, 'rb') as stream:
				existing = stream.read()
			if existing == xml_text[:sig_index]:
				print("Already imported {feed}; skipping".format(feed = feed_path))
				if delete_on_success:
					os.unlink(xml_file)
				return None
			else:
				raise SafeException("Can't import '{url}'; non-identical feed {path} already exists.\n\n"
						    "To ADD new versions to this feed, remove the a 'uri' attribute from "
						    "the root element in {new}.\n\n"
						    "To EDIT the feed, just edit {path} directly rather than trying to add it again.\n\n"
						    "To RE-UPLOAD the archives, do that manually and then edit archives.db."
						    .format(url = feed.url, new = xml_file, path = feed_path))

	# Calculate commit message
	if import_master:
		name = basename(xml_file)
		if name == 'feed.xml':
			name = basename(dirname(xml_file))
		action = 'Imported {file}'.format(file = name)
	else:
		versions = set(i.get_version() for i in feed.implementations.values())
		action = 'Added {name} {versions}'.format(name = feed.get_name(), versions = ', '.join(versions))
	commit_msg = '%s\n\n%s' % (action, xml_text.decode('utf-8'))

	# Calculate new XML
	new_file = not os.path.exists(feed_path)
	git_path = relpath(feed_path, 'feeds')

	if import_master:
		assert new_file
		new_xml = xml_text[:sig_index]
	elif new_file:
		new_xml = create_from_local(master, xml_file)
	else:
		# Merge into existing feed
		try:
			new_doc = merge.merge_files(master, feed_path, xml_file)
		except merge.DuplicateIDException as ex:
			# Did we already import this XML? Compare with the last Git log entry.
			msg, previous_commit_xml = get_last_commit(git_path)
			if previous_commit_xml == xml_text:
				print("Already merged this into {feed}; skipping".format(feed = feed_path))
				return msg
			raise ex

		new_xml = None	# (will regenerate from new_doc below)

	# Step 2 : upload archives to hosting

	processed_archives = archives.process_archives(config, incoming_dir = dirname(xml_file), feed = feed)

	# Step 3 : merge XML into feeds directory

	# Regenerate merged feed
	if new_xml is None:
		new_versions = frozenset(impl.get_version() for impl in feed.implementations.values())
		if len(new_versions) == 1 and getattr(config, 'TRACK_TESTING_IMPLS', True):
			ask_if_previous_still_testing(new_doc, list(new_versions)[0])
		new_xml = formatting.format_doc(new_doc)

	did_git_add = False

	try:
		with open(feed_path + '.new', 'wb') as stream:
			stream.write(new_xml)
		support.portable_rename(feed_path + '.new', feed_path)

		# Commit
		if new_file:
			subprocess.check_call(['git', 'add', git_path], cwd = 'feeds')
			did_git_add = True

		# (this must be last in the try block)
		scm.commit('feeds', [git_path], commit_msg, key = config.GPG_SIGNING_KEY)
	except Exception as ex:
		# Roll-back (we didn't commit to Git yet)
		print(ex)
		print("Error updating feed {feed}; rolling-back...".format(feed = xml_file))
		if new_file:
			if os.path.exists(feed_path):
				os.unlink(feed_path)
			if did_git_add:
				subprocess.check_call(['git', 'rm', '--', git_path], cwd = 'feeds')
		else:
			subprocess.check_call(['git', 'checkout', 'HEAD', '--', git_path], cwd = 'feeds')
		raise

	# Delete XML and archives from incoming directory
	if delete_on_success:
		os.unlink(xml_file)
		for archive in processed_archives:
			os.unlink(archive.incoming_path)

	return commit_msg.split('\n', 1)[0]