def create_from_local(master_feed_url, new_impls_feed):
	"""Convert a local feed into text suitable for publishing as the master feed.

	Sets the 'uri' attribute on the root to master_feed_url and removes any
	<feed-for> elements (together with the comments / whitespace-only text
	nodes immediately preceding them), then returns the formatted document.

	@param master_feed_url: the public URL this feed will be served from
	@param new_impls_feed: path of the local feed file to read
	@return: the formatted XML (as produced by formatting.format_doc)
	"""
	with open(new_impls_feed, 'rb') as stream:
		document = minidom.parse(stream)

	top = document.documentElement
	top.setAttribute('uri', master_feed_url)

	def _is_filler(n):
		# Comments, or text nodes containing only whitespace
		return n.nodeType == Node.COMMENT_NODE or \
			(n.nodeType == Node.TEXT_NODE and n.nodeValue.strip() == '')

	doomed = []
	for element in top.childNodes:
		if element.namespaceURI == XMLNS_IFACE and element.localName == 'feed-for':
			# Drop the <feed-for> itself...
			doomed.append(element)
			# ...and any comments / blank text immediately before it
			cursor = element.previousSibling
			while cursor is not None and _is_filler(cursor):
				doomed.append(cursor)
				cursor = cursor.previousSibling

	for victim in doomed:
		top.removeChild(victim)

	return formatting.format_doc(document)
def handle(args):
	"""Set the stability of matching implementations in a feed and publish the change.

	Looks up the repository for args.uri (via the registry if no config is found
	in the current directory), rewrites the 'stability' attribute of every
	implementation whose id or version equals args.id, then commits and updates
	the repository. Prints "No changes made." if nothing matched.
	"""
	if not cmd.find_config(missing_ok=True):
		# Not inside a repository checkout: locate it through the registry
		from_registry = registry.lookup(args.uri)
		assert from_registry['type'] == 'local', 'Unsupported registry type in %s' % from_registry
		os.chdir(from_registry['path'])

	config = cmd.load_config()

	rel_uri = args.uri[len(config.REPOSITORY_BASE_URL):]
	feed_path = join('feeds', config.get_feeds_rel_path(rel_uri))

	with open(feed_path, 'rb') as stream:
		doc = minidom.parse(stream)

	messages = []
	for impl in merge.find_impls(doc.documentElement):
		impl_id = impl.getAttribute("id")
		impl_version = impl.getAttribute("version")
		impl_stability = impl.getAttribute("stability")

		matches = impl_id == args.id or impl_version == args.id
		if not matches:
			continue
		if args.stability and impl_stability != args.stability:
			messages.append(
				'Implementation {id} (version {version}) stability set to {stability}'
				.format(id=impl_id, version=impl_version, stability=args.stability))
			impl.setAttribute("stability", args.stability)

	if messages:
		commit_msg = 'Modified {uri}\n\n{messages}'.format(
			uri=args.uri, messages='\n'.join(messages))
		new_xml = formatting.format_doc(doc)
		incoming.write_to_git(feed_path, new_xml, commit_msg, config)
		update.do_update(config)
	else:
		print("No changes made.")
def process(config, xml_file, delete_on_success):
	"""Validate an incoming feed, upload its archives, merge it into the feeds
	directory and commit the result to Git.

	@param config: the repository configuration module
	@param xml_file: path of the incoming (possibly signed) feed XML
	@param delete_on_success: remove xml_file (and processed archives) once committed
	@return: the first line of the commit message, or None if the feed was
		already imported and was skipped
	@raise SafeException: untrusted signature, failed custom check, or a
		conflicting existing master feed
	"""
	# Step 1 : check everything looks sensible, reject if not

	with open(xml_file, 'rb') as stream:
		xml_text = stream.read()
		# xml_text is bytes (file opened in 'rb'), so the needle must be bytes
		# too — a str needle would raise TypeError on Python 3
		sig_index = xml_text.rfind(b'\n<!-- Base64 Signature')
		if sig_index != -1:
			stream.seek(0)
			stream, sigs = gpg.check_stream(stream)
		else:
			sig_index = len(xml_text)
			sigs = []

		root = qdom.parse(BytesIO(xml_text))

	master = get_feed_url(root, xml_file)
	import_master = 'uri' in root.attrs

	if not import_master:
		root.attrs['uri'] = master	# (hack so we can parse it here without setting local_path)

	# Check signatures are valid (for-else: raises when no trusted signature found)
	if config.CONTRIBUTOR_GPG_KEYS is not None:
		for sig in sigs:
			if isinstance(sig, gpg.ValidSig) and sig.fingerprint in config.CONTRIBUTOR_GPG_KEYS:
				break
		else:
			raise SafeException("No trusted signatures on feed {path}; signatures were: {sigs}".format(
				path = xml_file,
				sigs = ', '.join([str(s) for s in sigs])))

	feed = model.ZeroInstallFeed(root)

	# Perform custom checks defined by the repository owner
	for impl in feed.implementations.values():
		problem = config.check_new_impl(impl)
		if problem:
			raise SafeException("{problem} in {xml_file}\n(this check was configured in {config}: check_new_impl())".format(
				problem = problem, xml_file = xml_file, config = config.__file__))

	feeds_rel_path = paths.get_feeds_rel_path(config, master)
	feed_path = join("feeds", feeds_rel_path)
	feed_dir = dirname(feed_path)
	if not os.path.isdir(feed_dir):
		os.makedirs(feed_dir)

	scm.ensure_no_uncommitted_changes(feed_path)

	if import_master:
		if os.path.exists(feed_path):
			with open(feed_path, 'rb') as stream:
				existing = stream.read()
			if existing == xml_text[:sig_index]:
				print("Already imported {feed}; skipping".format(feed = feed_path))
				if delete_on_success:
					os.unlink(xml_file)
				return None
			else:
				# (fixed typo: was "remove the a 'uri' attribute")
				raise SafeException("Can't import '{url}'; non-identical feed {path} already exists.\n\n"
						"To ADD new versions to this feed, remove the 'uri' attribute from "
						"the root element in {new}.\n\n"
						"To EDIT the feed, just edit {path} directly rather than trying to add it again.\n\n"
						"To RE-UPLOAD the archives, do that manually and then edit archives.db."
						.format(url = feed.url, new = xml_file, path = feed_path))

	# Calculate commit message
	if import_master:
		name = basename(xml_file)
		if name == 'feed.xml':
			name = basename(dirname(xml_file))
		action = 'Imported {file}'.format(file = name)
	else:
		versions = set(i.get_version() for i in feed.implementations.values())
		action = 'Added {name} {versions}'.format(name = feed.get_name(), versions = ', '.join(versions))
	commit_msg = '%s\n\n%s' % (action, xml_text.decode('utf-8'))

	# Calculate new XML
	new_file = not os.path.exists(feed_path)
	git_path = relpath(feed_path, 'feeds')

	if import_master:
		assert new_file
		new_xml = xml_text[:sig_index]
	elif new_file:
		new_xml = create_from_local(master, xml_file)
	else:
		# Merge into existing feed
		try:
			new_doc = merge.merge_files(master, feed_path, xml_file)
		except merge.DuplicateIDException as ex:
			# Did we already import this XML? Compare with the last Git log entry.
			msg, previous_commit_xml = get_last_commit(git_path)
			if previous_commit_xml == xml_text:
				print("Already merged this into {feed}; skipping".format(feed = feed_path))
				return msg
			raise ex

		new_xml = None		# (will regenerate from new_doc below)

	# Step 2 : upload archives to hosting

	processed_archives = archives.process_archives(config, incoming_dir = dirname(xml_file), feed = feed)

	# Step 3 : merge XML into feeds directory

	# Regenerate merged feed
	if new_xml is None:
		new_versions = frozenset(impl.get_version() for impl in feed.implementations.values())
		if len(new_versions) == 1 and getattr(config, 'TRACK_TESTING_IMPLS', True):
			ask_if_previous_still_testing(new_doc, list(new_versions)[0])
		new_xml = formatting.format_doc(new_doc)

	did_git_add = False

	try:
		# Write atomically: stage to '.new' then rename over the target
		with open(feed_path + '.new', 'wb') as stream:
			stream.write(new_xml)
		support.portable_rename(feed_path + '.new', feed_path)

		# Commit
		if new_file:
			subprocess.check_call(['git', 'add', git_path], cwd = 'feeds')
			did_git_add = True

		# (this must be last in the try block)
		scm.commit('feeds', [git_path], commit_msg, key = config.GPG_SIGNING_KEY)
	except Exception as ex:
		# Roll-back (we didn't commit to Git yet)
		print(ex)
		print("Error updating feed {feed}; rolling-back...".format(feed = xml_file))
		if new_file:
			if os.path.exists(feed_path):
				os.unlink(feed_path)
			if did_git_add:
				subprocess.check_call(['git', 'rm', '--', git_path], cwd = 'feeds')
		else:
			subprocess.check_call(['git', 'checkout', 'HEAD', '--', git_path], cwd = 'feeds')
		raise

	# Delete XML and archives from incoming directory
	if delete_on_success:
		os.unlink(xml_file)
		for archive in processed_archives:
			os.unlink(archive.incoming_path)

	return commit_msg.split('\n', 1)[0]
def process(config, xml_file, delete_on_success):
	"""Validate an incoming feed, upload its archives, merge it into the feeds
	directory and commit the result to Git.

	@param config: the repository configuration module
	@param xml_file: path of the incoming (possibly signed) feed XML
	@param delete_on_success: remove xml_file (and processed archives) once committed
	@return: the first line of the commit message, or None if the feed was
		already imported and was skipped
	@raise SafeException: untrusted signature, failed custom check, or a
		conflicting existing master feed
	"""
	# Step 1 : check everything looks sensible, reject if not

	with open(xml_file, 'rb') as stream:
		xml_text = stream.read()
		# xml_text is bytes (file opened in 'rb'), so the needle must be bytes
		# too — a str needle would raise TypeError on Python 3
		sig_index = xml_text.rfind(b'\n<!-- Base64 Signature')
		if sig_index != -1:
			stream.seek(0)
			stream, sigs = gpg.check_stream(stream)
		else:
			sig_index = len(xml_text)
			sigs = []

		root = qdom.parse(BytesIO(xml_text))

	master = get_feed_url(root, xml_file)
	import_master = 'uri' in root.attrs

	if not import_master:
		root.attrs['uri'] = master	# (hack so we can parse it here without setting local_path)

	# Check signatures are valid (for-else: raises when no trusted signature found)
	if config.CONTRIBUTOR_GPG_KEYS is not None:
		for sig in sigs:
			if isinstance(sig, gpg.ValidSig) and sig.fingerprint in config.CONTRIBUTOR_GPG_KEYS:
				break
		else:
			raise SafeException("No trusted signatures on feed {path}; signatures were: {sigs}".format(
				path = xml_file,
				sigs = ', '.join([str(s) for s in sigs])))

	feed = model.ZeroInstallFeed(root)

	# Perform custom checks defined by the repository owner
	for impl in feed.implementations.values():
		problem = config.check_new_impl(impl)
		if problem:
			raise SafeException("{problem} in {xml_file}\n(this check was configured in {config}: check_new_impl())".format(
				problem = problem, xml_file = xml_file, config = config.__file__))

	feeds_rel_path = paths.get_feeds_rel_path(config, master)
	feed_path = join("feeds", feeds_rel_path)
	feed_dir = dirname(feed_path)
	if not os.path.isdir(feed_dir):
		os.makedirs(feed_dir)

	scm.ensure_no_uncommitted_changes(feed_path)

	if import_master:
		if os.path.exists(feed_path):
			with open(feed_path, 'rb') as stream:
				existing = stream.read()
			if existing == xml_text[:sig_index]:
				print("Already imported {feed}; skipping".format(feed = feed_path))
				if delete_on_success:
					os.unlink(xml_file)
				return None
			else:
				# (fixed typo: was "remove the a 'uri' attribute")
				raise SafeException("Can't import '{url}'; non-identical feed {path} already exists.\n\n"
						"To ADD new versions to this feed, remove the 'uri' attribute from "
						"the root element in {new}.\n\n"
						"To EDIT the feed, just edit {path} directly rather than trying to add it again.\n\n"
						"To RE-UPLOAD the archives, do that manually and then edit archives.db."
						.format(url = feed.url, new = xml_file, path = feed_path))

	# Calculate commit message
	if import_master:
		name = basename(xml_file)
		if name == 'feed.xml':
			name = basename(dirname(xml_file))
		action = 'Imported {file}'.format(file = name)
	else:
		versions = set(i.get_version() for i in feed.implementations.values())
		action = 'Added {name} {versions}'.format(name = feed.get_name(), versions = ', '.join(versions))
	commit_msg = '%s\n\n%s' % (action, xml_text.decode('utf-8'))

	# Calculate new XML
	new_file = not os.path.exists(feed_path)
	git_path = relpath(feed_path, 'feeds')

	if import_master:
		assert new_file
		new_xml = xml_text[:sig_index]
	elif new_file:
		new_xml = create_from_local(master, xml_file)
	else:
		# Merge into existing feed
		try:
			new_doc = merge.merge_files(master, feed_path, xml_file)
		except merge.DuplicateIDException as ex:
			# Did we already import this XML? Compare with the last Git log entry.
			msg, previous_commit_xml = get_last_commit(git_path)
			if previous_commit_xml == xml_text:
				print("Already merged this into {feed}; skipping".format(feed = feed_path))
				return msg
			raise ex

		new_xml = None		# (will regenerate from new_doc below)

	# Step 2 : upload archives to hosting

	processed_archives = archives.process_archives(config, incoming_dir = dirname(xml_file), feed = feed)

	# Step 3 : merge XML into feeds directory

	# Prompt about existing testing versions
	if new_xml is None:
		new_versions = frozenset(impl.get_version() for impl in feed.implementations.values())
		if len(new_versions) == 1:
			ask_if_previous_still_testing(new_doc, list(new_versions)[0])
		new_xml = formatting.format_doc(new_doc)

	did_git_add = False

	try:
		# Write atomically: stage to '.new' then rename over the target
		with open(feed_path + '.new', 'wb') as stream:
			stream.write(new_xml)
		support.portable_rename(feed_path + '.new', feed_path)

		# Commit
		if new_file:
			subprocess.check_call(['git', 'add', git_path], cwd = 'feeds')
			did_git_add = True

		# (this must be last in the try block)
		scm.commit('feeds', [git_path], commit_msg, key = config.GPG_SIGNING_KEY)
	except Exception as ex:
		# Roll-back (we didn't commit to Git yet)
		print(ex)
		print("Error updating feed {feed}; rolling-back...".format(feed = xml_file))
		if new_file:
			if os.path.exists(feed_path):
				os.unlink(feed_path)
			if did_git_add:
				subprocess.check_call(['git', 'rm', '--', git_path], cwd = 'feeds')
		else:
			subprocess.check_call(['git', 'checkout', 'HEAD', '--', git_path], cwd = 'feeds')
		raise

	# Delete XML and archives from incoming directory
	if delete_on_success:
		os.unlink(xml_file)
		for archive in processed_archives:
			os.unlink(archive.incoming_path)

	return commit_msg.split('\n', 1)[0]