Example no. 1
0
def write_to_git(feed_path, new_xml, commit_msg, config, new_file = False):
	"""Atomically replace feed_path with new_xml and commit the change to Git.

	The new XML is written to a temporary file and renamed into place, then
	(for a brand-new feed) staged with 'git add' before committing.  If any
	step fails, the working tree is rolled back to its pre-call state and
	the exception is re-raised.
	"""
	staged = False
	rel_path = relpath(feed_path, 'feeds')

	try:
		# Write to a sibling temp file, then rename over the target so the
		# feed file is never left half-written.
		tmp_path = feed_path + '.new'
		with open(tmp_path, 'wb') as stream:
			stream.write(new_xml)
		support.portable_rename(tmp_path, feed_path)

		if new_file:
			subprocess.check_call(['git', 'add', rel_path], cwd = 'feeds')
			staged = True

		# Committing must remain the final statement of this try block:
		# everything before it can be undone by the handler below.
		signing_key = config.GPG_SIGNING_KEY if getattr(config, 'SIGN_COMMITS', True) else None
		scm.commit('feeds', [rel_path], commit_msg, key = signing_key)
	except Exception as ex:
		# Roll-back (nothing has reached Git history yet)
		print(ex)
		print("Error updating feed {feed}; rolling-back...".format(feed = rel_path))
		if not new_file:
			# Restore the previously committed version of the feed
			subprocess.check_call(['git', 'checkout', 'HEAD', '--', rel_path], cwd = 'feeds')
		else:
			if os.path.exists(feed_path):
				os.unlink(feed_path)
			if staged:
				subprocess.check_call(['git', 'rm', '--', rel_path], cwd = 'feeds')
		raise
Example no. 2
0
def handle(args):
    """Create and initialise a new repository at args.path.

    args.key names the GPG key used to sign commits; '-' means "no key".
    Resolves the key ID to a full fingerprint (verifying the secret key
    exists), builds the directory skeleton, writes the configuration file
    from the bundled template, and makes an initial empty Git commit.
    """
    if args.key == '-':
        key = None
    else:
        # Resolve the key ID to a fingerprint (and check we hold the secret key)
        try:
            listing = subprocess.check_output([
                'gpg', '-q', '--fixed-list-mode', '--fingerprint',
                '--with-colons', '--list-secret-keys', args.key
            ],
                                              encoding='utf-8')
        except subprocess.CalledProcessError as ex:
            raise SafeException("GPG key '{key}' not found ({ex})".format(
                key=args.key, ex=ex))

        fingerprint = None
        in_ssb = False
        for record in listing.split('\n'):
            fields = record.split(':')
            tag = fields[0]
            if tag == 'ssb':
                in_ssb = True
            elif tag == 'sec':
                in_ssb = False
            elif tag == 'fpr':
                if in_ssb and fingerprint is not None:
                    # Ignore sub-key fingerprints once a primary is known
                    continue
                if fingerprint is not None:
                    raise SafeException(
                        "Multiple GPG keys match '{key}':\n{output}".format(
                            key=args.key, output=listing))
                fingerprint = fields[9]

        if fingerprint is None:
            raise SafeException(
                "GPG key not found '{key}'".format(key=args.key))
        key = '0x' + fingerprint

    # Create the directory structure
    os.mkdir(args.path)
    os.chdir(args.path)
    for subdir in ('incoming', 'feeds', 'public'):
        os.mkdir(subdir)

    # Write the configuration file, with the GPG key filled in
    template_path = join(topdir, 'resources', '0repo-config.py.template')
    with open(template_path, 'rt') as stream:
        template = stream.read()
    config_text = template.replace(
        '"{{GPGKEY}}"', '"' + key + '"' if key else "None")
    with open('0repo-config.py', 'wt') as stream:
        stream.write(config_text)

    # Initialise the Git repository
    subprocess.check_call(['git', 'init', '-q', 'feeds'])
    scm.commit('feeds', [],
               'Created new repository',
               key,
               extra_options=['--allow-empty'])
Example no. 3
0
def handle(args):
	"""Create and initialise a new repository at args.path.

	args.key names the GPG key used to sign commits; '-' means "no key".
	Resolves the key ID to a full fingerprint (checking the secret key
	exists), creates the directory layout, writes the configuration file
	from the bundled template, and makes an initial empty Git commit.
	"""
	if args.key == '-':
		key = None
	else:
		# Get the fingerprint from the key ID (and check we have the secret key)
		try:
			# universal_newlines=True makes check_output return str, not bytes;
			# without it, keys.split('\n') below raises TypeError on Python 3.
			keys = subprocess.check_output(['gpg', '-q', '--fixed-list-mode', '--fingerprint', '--with-colons', '--list-secret-keys', args.key],
					universal_newlines = True)
		except subprocess.CalledProcessError as ex:
			raise SafeException("GPG key '{key}' not found ({ex})".format(key = args.key, ex = ex))

		in_ssb = False
		fingerprint = None
		for line in keys.split('\n'):
			bits = line.split(':')
			if bits[0] == 'ssb': in_ssb = True
			elif bits[0] == 'sec': in_ssb = False
			elif bits[0] == 'fpr':
				if in_ssb and fingerprint is not None:
					pass	# Ignore sub-keys (unless we don't have a primary - can that happen?)
				elif fingerprint is None:
					fingerprint = bits[9]
				else:
					raise SafeException("Multiple GPG keys match '{key}':\n{output}".format(
						key = args.key, output = keys))

		if fingerprint is None:
			raise SafeException("GPG key not found '{key}'".format(key = args.key))
		key = '0x' + fingerprint

	# Create the directory structure
	os.mkdir(args.path)
	os.chdir(args.path)
	os.mkdir('incoming')
	os.mkdir('feeds')
	os.mkdir('public')

	# Write the configuration file, with the GPG key filled in
	with open(join(topdir, 'resources', '0repo-config.py.template'), 'rt') as stream:
		data = stream.read()
	data = data.replace('"{{GPGKEY}}"', '"' + key + '"' if key else "None")
	with open('0repo-config.py', 'wt') as stream:
		stream.write(data)

	# Initialise the Git repository
	subprocess.check_call(['git', 'init', '-q', 'feeds'])
	scm.commit('feeds', [], 'Created new repository', key, extra_options = ['--allow-empty'])
Example no. 4
0
def handle(args):
	"""Create and initialise a new repository at args.path, signing with args.key.

	Resolves args.key to a full GPG fingerprint (checking the secret key
	exists and is unambiguous), creates the directory layout, writes the
	configuration file from the bundled template, and makes an initial
	empty Git commit signed with that key.
	"""
	# Get the fingerprint from the key ID (and check we have the secret key)
	try:
		# universal_newlines=True makes check_output return str, not bytes;
		# without it, keys.split('\n') below raises TypeError on Python 3.
		keys = subprocess.check_output(['gpg', '-q', '--fixed-list-mode', '--fingerprint', '--with-colons', '--list-secret-keys', args.key],
				universal_newlines = True)
	except subprocess.CalledProcessError as ex:
		raise SafeException("GPG key '{key}' not found ({ex})".format(key = args.key, ex = ex))

	fingerprint = None
	for line in keys.split('\n'):
		bits = line.split(':')
		if bits[0] == 'fpr':
			if fingerprint is None:
				fingerprint = bits[9]
			else:
				raise SafeException("Multiple GPG keys match '{key}':\n{output}".format(
					key = args.key, output = keys))

	if fingerprint is None:
		raise SafeException("GPG key not found '{key}'".format(key = args.key))

	# Create the directory structure
	os.mkdir(args.path)
	os.chdir(args.path)
	os.mkdir('incoming')
	os.mkdir('feeds')
	os.mkdir('public')

	# Write the configuration file, with the GPG key filled in
	with open(join(topdir, 'resources', '0repo-config.py.template'), 'rt') as stream:
		data = stream.read()
	data = data.replace('{{GPGKEY}}', '0x' + fingerprint)
	with open('0repo-config.py', 'wt') as stream:
		stream.write(data)

	# Initialise the Git repository
	subprocess.check_call(['git', 'init', '-q', 'feeds'])
	scm.commit('feeds', [], 'Created new repository', '0x' + fingerprint, extra_options = ['--allow-empty'])
Example no. 5
0
def process(config, xml_file, delete_on_success):
	"""Import the submitted feed in xml_file into the Git-managed 'feeds' tree.

	Validates the XML and any GPG signatures, runs the repository owner's
	custom checks, uploads referenced archives, then merges the XML into
	the feeds directory and commits.  On failure the working tree is
	rolled back and the exception re-raised.

	Returns the first line of the commit message; returns the previous
	commit's message if this exact XML was already merged, or None if an
	identical master feed was already imported.
	"""
	# Step 1 : check everything looks sensible, reject if not

	with open(xml_file, 'rb') as stream:
		xml_text = stream.read()
		# xml_text is bytes (the file is opened 'rb'), so the signature
		# marker must be a bytes pattern: a str pattern here raises
		# TypeError on Python 3.
		sig_index = xml_text.rfind(b'\n<!-- Base64 Signature')
		if sig_index != -1:
			stream.seek(0)
			stream, sigs = gpg.check_stream(stream)
		else:
			sig_index = len(xml_text)
			sigs = []
		root = qdom.parse(BytesIO(xml_text))

	master = get_feed_url(root, xml_file)
	import_master = 'uri' in root.attrs

	if not import_master:
		root.attrs['uri'] = master	# (hack so we can parse it here without setting local_path)

	# Check signatures are valid (None means "don't check")
	if config.CONTRIBUTOR_GPG_KEYS is not None:
		for sig in sigs:
			if isinstance(sig, gpg.ValidSig) and sig.fingerprint in config.CONTRIBUTOR_GPG_KEYS:
				break
		else:
			raise SafeException("No trusted signatures on feed {path}; signatures were: {sigs}".format(
				path = xml_file,
				sigs = ', '.join([str(s) for s in sigs])))

	feed = model.ZeroInstallFeed(root)

	# Perform custom checks defined by the repository owner
	for impl in feed.implementations.values():
		problem = config.check_new_impl(impl)
		if problem:
			raise SafeException("{problem} in {xml_file}\n(this check was configured in {config}: check_new_impl())".format(
				problem = problem, xml_file = xml_file, config = config.__file__))

	feeds_rel_path = paths.get_feeds_rel_path(config, master)
	feed_path = join("feeds", feeds_rel_path)
	feed_dir = dirname(feed_path)
	if not os.path.isdir(feed_dir):
		os.makedirs(feed_dir)

	scm.ensure_no_uncommitted_changes(feed_path)

	if import_master:
		if os.path.exists(feed_path):
			with open(feed_path, 'rb') as stream:
				existing = stream.read()
			# Compare against the unsigned part of the submission
			if existing == xml_text[:sig_index]:
				print("Already imported {feed}; skipping".format(feed = feed_path))
				if delete_on_success:
					os.unlink(xml_file)
				return None
			else:
				raise SafeException("Can't import '{url}'; non-identical feed {path} already exists.\n\n"
						    "To ADD new versions to this feed, remove the 'uri' attribute from "
						    "the root element in {new}.\n\n"
						    "To EDIT the feed, just edit {path} directly rather than trying to add it again.\n\n"
						    "To RE-UPLOAD the archives, do that manually and then edit archives.db."
						    .format(url = feed.url, new = xml_file, path = feed_path))

	# Calculate commit message
	if import_master:
		name = basename(xml_file)
		if name == 'feed.xml':
			name = basename(dirname(xml_file))
		action = 'Imported {file}'.format(file = name)
	else:
		versions = set(i.get_version() for i in feed.implementations.values())
		action = 'Added {name} {versions}'.format(name = feed.get_name(), versions = ', '.join(versions))
	commit_msg = '%s\n\n%s' % (action, xml_text.decode('utf-8'))

	# Calculate new XML
	new_file = not os.path.exists(feed_path)
	git_path = relpath(feed_path, 'feeds')

	if import_master:
		assert new_file
		new_xml = xml_text[:sig_index]
	elif new_file:
		new_xml = create_from_local(master, xml_file)
	else:
		# Merge into existing feed
		try:
			new_doc = merge.merge_files(master, feed_path, xml_file)
		except merge.DuplicateIDException as ex:
			# Did we already import this XML? Compare with the last Git log entry.
			msg, previous_commit_xml = get_last_commit(git_path)
			if previous_commit_xml == xml_text:
				print("Already merged this into {feed}; skipping".format(feed = feed_path))
				return msg
			raise ex

		new_xml = None	# (will regenerate from new_doc below)

	# Step 2 : upload archives to hosting

	processed_archives = archives.process_archives(config, incoming_dir = dirname(xml_file), feed = feed)

	# Step 3 : merge XML into feeds directory

	# Regenerate merged feed
	if new_xml is None:
		new_versions = frozenset(impl.get_version() for impl in feed.implementations.values())
		if len(new_versions) == 1 and getattr(config, 'TRACK_TESTING_IMPLS', True):
			ask_if_previous_still_testing(new_doc, list(new_versions)[0])
		new_xml = formatting.format_doc(new_doc)

	did_git_add = False

	try:
		# Write atomically: temp file, then rename over the target
		with open(feed_path + '.new', 'wb') as stream:
			stream.write(new_xml)
		support.portable_rename(feed_path + '.new', feed_path)

		# Commit
		if new_file:
			subprocess.check_call(['git', 'add', git_path], cwd = 'feeds')
			did_git_add = True

		# (this must be last in the try block)
		scm.commit('feeds', [git_path], commit_msg, key = config.GPG_SIGNING_KEY)
	except Exception as ex:
		# Roll-back (we didn't commit to Git yet)
		print(ex)
		print("Error updating feed {feed}; rolling-back...".format(feed = xml_file))
		if new_file:
			if os.path.exists(feed_path):
				os.unlink(feed_path)
			if did_git_add:
				subprocess.check_call(['git', 'rm', '--', git_path], cwd = 'feeds')
		else:
			subprocess.check_call(['git', 'checkout', 'HEAD', '--', git_path], cwd = 'feeds')
		raise

	# Delete XML and archives from incoming directory
	if delete_on_success:
		os.unlink(xml_file)
		for archive in processed_archives:
			os.unlink(archive.incoming_path)

	return commit_msg.split('\n', 1)[0]
Example no. 6
0
def process(config, xml_file, delete_on_success):
	"""Import the submitted feed in xml_file into the Git-managed 'feeds' tree.

	Validates the XML and any GPG signatures, runs the repository owner's
	custom checks, uploads referenced archives, then merges the XML into
	the feeds directory and commits.  On failure the working tree is
	rolled back and the exception re-raised.

	Returns the first line of the commit message; returns the previous
	commit's message if this exact XML was already merged, or None if an
	identical master feed was already imported.
	"""
	# Step 1 : check everything looks sensible, reject if not

	with open(xml_file, 'rb') as stream:
		xml_text = stream.read()
		# xml_text is bytes (the file is opened 'rb'), so the signature
		# marker must be a bytes pattern: a str pattern here raises
		# TypeError on Python 3.
		sig_index = xml_text.rfind(b'\n<!-- Base64 Signature')
		if sig_index != -1:
			stream.seek(0)
			stream, sigs = gpg.check_stream(stream)
		else:
			sig_index = len(xml_text)
			sigs = []
		root = qdom.parse(BytesIO(xml_text))

	master = get_feed_url(root, xml_file)
	import_master = 'uri' in root.attrs

	if not import_master:
		root.attrs['uri'] = master	# (hack so we can parse it here without setting local_path)

	# Check signatures are valid (None means "don't check")
	if config.CONTRIBUTOR_GPG_KEYS is not None:
		for sig in sigs:
			if isinstance(sig, gpg.ValidSig) and sig.fingerprint in config.CONTRIBUTOR_GPG_KEYS:
				break
		else:
			raise SafeException("No trusted signatures on feed {path}; signatures were: {sigs}".format(
				path = xml_file,
				sigs = ', '.join([str(s) for s in sigs])))

	feed = model.ZeroInstallFeed(root)

	# Perform custom checks defined by the repository owner
	for impl in feed.implementations.values():
		problem = config.check_new_impl(impl)
		if problem:
			raise SafeException("{problem} in {xml_file}\n(this check was configured in {config}: check_new_impl())".format(
				problem = problem, xml_file = xml_file, config = config.__file__))

	feeds_rel_path = paths.get_feeds_rel_path(config, master)
	feed_path = join("feeds", feeds_rel_path)
	feed_dir = dirname(feed_path)
	if not os.path.isdir(feed_dir):
		os.makedirs(feed_dir)

	scm.ensure_no_uncommitted_changes(feed_path)

	if import_master:
		if os.path.exists(feed_path):
			with open(feed_path, 'rb') as stream:
				existing = stream.read()
			# Compare against the unsigned part of the submission
			if existing == xml_text[:sig_index]:
				print("Already imported {feed}; skipping".format(feed = feed_path))
				if delete_on_success:
					os.unlink(xml_file)
				return None
			else:
				raise SafeException("Can't import '{url}'; non-identical feed {path} already exists.\n\n"
						    "To ADD new versions to this feed, remove the 'uri' attribute from "
						    "the root element in {new}.\n\n"
						    "To EDIT the feed, just edit {path} directly rather than trying to add it again.\n\n"
						    "To RE-UPLOAD the archives, do that manually and then edit archives.db."
						    .format(url = feed.url, new = xml_file, path = feed_path))

	# Calculate commit message
	if import_master:
		name = basename(xml_file)
		if name == 'feed.xml':
			name = basename(dirname(xml_file))
		action = 'Imported {file}'.format(file = name)
	else:
		versions = set(i.get_version() for i in feed.implementations.values())
		action = 'Added {name} {versions}'.format(name = feed.get_name(), versions = ', '.join(versions))
	commit_msg = '%s\n\n%s' % (action, xml_text.decode('utf-8'))

	# Calculate new XML
	new_file = not os.path.exists(feed_path)
	git_path = relpath(feed_path, 'feeds')

	if import_master:
		assert new_file
		new_xml = xml_text[:sig_index]
	elif new_file:
		new_xml = create_from_local(master, xml_file)
	else:
		# Merge into existing feed
		try:
			new_doc = merge.merge_files(master, feed_path, xml_file)
		except merge.DuplicateIDException as ex:
			# Did we already import this XML? Compare with the last Git log entry.
			msg, previous_commit_xml = get_last_commit(git_path)
			if previous_commit_xml == xml_text:
				print("Already merged this into {feed}; skipping".format(feed = feed_path))
				return msg
			raise ex

		new_xml = None	# (will regenerate from new_doc below)

	# Step 2 : upload archives to hosting

	processed_archives = archives.process_archives(config, incoming_dir = dirname(xml_file), feed = feed)

	# Step 3 : merge XML into feeds directory

	# Prompt about existing testing versions
	if new_xml is None:
		new_versions = frozenset(impl.get_version() for impl in feed.implementations.values())
		if len(new_versions) == 1:
			ask_if_previous_still_testing(new_doc, list(new_versions)[0])
		new_xml = formatting.format_doc(new_doc)

	did_git_add = False

	try:
		# Write atomically: temp file, then rename over the target
		with open(feed_path + '.new', 'wb') as stream:
			stream.write(new_xml)
		support.portable_rename(feed_path + '.new', feed_path)

		# Commit
		if new_file:
			subprocess.check_call(['git', 'add', git_path], cwd = 'feeds')
			did_git_add = True

		# (this must be last in the try block)
		scm.commit('feeds', [git_path], commit_msg, key = config.GPG_SIGNING_KEY)
	except Exception as ex:
		# Roll-back (we didn't commit to Git yet)
		print(ex)
		print("Error updating feed {feed}; rolling-back...".format(feed = xml_file))
		if new_file:
			if os.path.exists(feed_path):
				os.unlink(feed_path)
			if did_git_add:
				subprocess.check_call(['git', 'rm', '--', git_path], cwd = 'feeds')
		else:
			subprocess.check_call(['git', 'checkout', 'HEAD', '--', git_path], cwd = 'feeds')
		raise

	# Delete XML and archives from incoming directory
	if delete_on_success:
		os.unlink(xml_file)
		for archive in processed_archives:
			os.unlink(archive.incoming_path)

	return commit_msg.split('\n', 1)[0]