def merge_wikitext(ui_, repo, base_dir, tmp_file, in_stream):
    """ Merge changes from a submission zip file into the repository. """
    # HgFileOverlay to read bundle files with.
    prev_overlay = HgFileOverlay(ui_, repo, base_dir, tmp_file)
    # Direct overlay to write updates into the repo.
    head_overlay = DirectFiles(os.path.join(repo.root, base_dir))
    arch = ZipFile(in_stream, 'r')
    try:
        base_ver, dummy = unpack_info(arch.read('__INFO__'))
        if not has_version(repo, base_ver):
            # REDFLAG: Think. What about 000000000000?
            #          It is always legal. hmmmm...
            raise SubmitError("Base version: %s not in repository." %
                              base_ver[:12], True)
        # Still need to check for illegal submissions.
        prev_overlay.version = base_ver

        # REDFLAG: revisit.
        # just assert in forking_extract_wikitext and
        # get rid of extra checking / exception raising?
        check_base_shas(arch, prev_overlay)
        # Hmmmm... checking against a version of readonly.txt
        # which may be later than the one that the submitter used.
        check_writable(head_overlay, arch)
        check_merges([name for name in arch.namelist()
                      if name != '__INFO__'],
                     # pylint gives spurious E1101 here ???
                     #pylint: disable-msg=E1101
                     prev_overlay.list_pages(os.path.join(prev_overlay.base_path,
                                                          'wikitext')),
                     ArchiveHasher(arch).hexdigest)

        # created, modified, removed, skipped, forked
        op_lut = (set([]), set([]), set([]), set([]), set([]))
        for name in arch.namelist():
            # check_base_sha validates wikinames.
            if name == '__INFO__':
                continue
            action, versioned_name = forking_extract_wikitext(arch,
                                                              prev_overlay,
                                                              head_overlay,
                                                              name)
            op_lut[action].add(versioned_name)
        return op_lut
    finally:
        arch.close()
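# NOTE: illustrative sketch only, not part of the original module. It shows one
# way a caller might report the five-set op_lut returned by merge_wikitext();
# the label order follows the "created, modified, removed, skipped, forked"
# comment above, and 'ui_' is assumed to be the same Mercurial ui object.
def report_merge_results(ui_, op_lut):
    """ Print the result sets returned by merge_wikitext(). (sketch) """
    labels = ('CREATED', 'MODIFIED', 'REMOVED', 'SKIPPED', 'FORKED')
    for label, names in zip(labels, op_lut):
        if names:
            ui_.status('%s:\n%s\n' % (label, '\n'.join(sorted(names))))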
def execute_wiki_apply(ui_, repo, params, stored_cfg):
    """ Fetch a wiki change submission CHK and apply it to a local
        directory. """
    update_sm = None
    try:
        assert 'REQUEST_URI' in params
        # Get version, i.e. just the hg parent == hg head
        version = get_hg_version(repo)

        # Get target directory.
        params['ISWIKI'] = True
        read_freesite_cfg(ui_, repo, params, stored_cfg)

        update_sm = setup(ui_, repo, params, stored_cfg)

        # Make an FCP download request which will run on the
        # state machine.
        request = StatefulRequest(update_sm)
        request.tag = 'submission_zip_request'
        request.in_params.definition = GET_DEF # To RAM.
        request.in_params.fcp_params = update_sm.params.copy()
        request.in_params.fcp_params['URI'] = params['REQUEST_URI']
        # Knee high barrier against abuse.
        request.in_params.fcp_params['MaxSize'] = FREENET_BLOCK_LEN

        ui_.status("Requesting wiki submission from...\n%s\n" %
                   params['REQUEST_URI'])
        update_sm.start_single_request(request)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING, ))):
            raw_bytes = update_sm.get_state(RUNNING_SINGLE_REQUEST).\
                        final_msg[2]
            assert request.response[0] == 'AllData'
            ui_.status("Fetched %i byte submission.\n" % len(raw_bytes))
            base_ver, submitter = get_info(StringIO.StringIO(raw_bytes))
            ui_.status("Base version: %s, Submitter: %s (unverifiable!)\n"
                       % (base_ver[:12], submitter))

            #print "H_ACKING base_ver to test exception!"
            #base_ver = 'da2f653c5c47b7ee7a814e668aa1d63c50c3a4f3'
            if not has_version(repo, base_ver):
                ui_.warn("That version isn't in the local repo.\n" +
                         "Try running hg fn-pull --aggressive.\n")
                raise util.Abort("%s not in local repo" % base_ver[:12])

            if base_ver != version:
                ui_.warn("Version mismatch! You might have to " +
                         "manually merge.\n")

            # Set up an IFileFunctions that reads the correct versions of
            # the unpatched files out of Mercurial.
            overlay = HgFileOverlay(ui_, repo,
                                    # i.e. "<>/wiki_root" NOT
                                    # "<>/wiki_root/wikitext"
                                    os.path.join(repo.root,
                                                 params['WIKI_ROOT']),
                                    # cleanup() in finally deletes this.
                                    make_temp_file(update_sm.ctx.
                                                   bundle_cache.base_dir))
            overlay.version = base_ver
            validate_wikitext(overlay)
            updates = unbundle_wikitext(overlay,
                                        StringIO.StringIO(raw_bytes))
            for index, label in enumerate(('CREATED', 'MODIFIED', 'REMOVED',
                                           'ALREADY PATCHED')):
                if len(updates[index]) > 0:
                    values = list(updates[index])
                    values.sort()
                    ui_.status('%s:\n%s\n' % (label, '\n'.join(values)))
    finally:
        cleanup(update_sm)
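# NOTE: illustrative sketch only (an assumed call site, not from the original
# module). execute_wiki_apply() reads at least the params keys shown here; the
# URI string, wiki root, and poll interval values are placeholders, and
# 'ISWIKI' is set inside the function itself.
def apply_submission_sketch(ui_, repo, stored_cfg):
    """ Fetch and apply a single wiki submission CHK. (sketch) """
    params = {
        'REQUEST_URI': 'CHK@...',   # submission zip CHK (placeholder)
        'WIKI_ROOT': 'wiki_root',   # directory under repo.root (assumed)
        'POLL_SECS': 0.25,          # polling interval for run_until_quiescent()
    }
    execute_wiki_apply(ui_, repo, params, stored_cfg)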
def get_hg_overlay(self, repo):
    """ Return an HgFileOverlay for the default wiki root, backed by a
        shared temp file under self.tmp_dir. """
    return HgFileOverlay(self.ui_, repo, DEFAULT_WIKI_ROOT,
                         os.path.join(self.tmp_dir,
                                      '_tmp_shared_hg_overlay_tmp'))
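# NOTE: illustrative sketch only, not part of the original module. A caller
# that owns the overlay returned by get_hg_overlay() would typically pin it to
# a repo version before reading, mirroring how merge_wikitext() uses
# prev_overlay above; 'os' is assumed to be imported as elsewhere in the file.
def list_pages_at_version(overlay, version):
    """ Return the wikitext page names visible at the given repo version.
        (sketch) """
    overlay.version = version
    return overlay.list_pages(os.path.join(overlay.base_path, 'wikitext'))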