def execute_insert_patch(ui_, repo, params, stored_cfg):
    """ Create an hg bundle containing all changes not already in the
        infocalypse repo in Freenet and insert it to a CHK.

        Returns a machine readable patch notification message.

        Raises util.Abort if the CHK insert fails.
    """
    # BUG FIX: must be bound before the try block.  If setup() raises,
    # the finally clause below would otherwise hit a NameError on
    # update_sm, masking the original exception.  This matches the
    # pattern used by execute_wiki_apply (cleanup() tolerates None).
    update_sm = None
    try:
        update_sm = setup(ui_, repo, params, stored_cfg)
        out_file = make_temp_file(update_sm.ctx.bundle_cache.base_dir)

        ui_.status("Reading repo state from Freenet...\n")
        freenet_heads = read_freenet_heads(params, update_sm,
                                           params['REQUEST_URI'])

        # This may eventually change to support other patch types.
        create_patch_bundle(ui_, repo, freenet_heads, out_file)

        # Make an FCP file insert request which will run on the
        # state machine.
        request = StatefulRequest(update_sm)
        request.tag = 'patch_bundle_insert'
        request.in_params.definition = PUT_FILE_DEF
        request.in_params.fcp_params = update_sm.params.copy()
        request.in_params.fcp_params['URI'] = 'CHK@'
        request.in_params.file_name = out_file
        request.in_params.send_data = True

        # Must read the size here because the file gets deleted.
        chk_len = os.path.getsize(out_file)

        ui_.status("Inserting %i byte patch bundle...\n" % chk_len)
        update_sm.start_single_request(request)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        freenet_heads = sorted(freenet_heads)
        heads = sorted([hexlify(head) for head in repo.heads()])

        if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING, ))):
            chk = update_sm.get_state(RUNNING_SINGLE_REQUEST).\
                  final_msg[1]['URI']
            ui_.status("Patch CHK:\n%s\n" % chk)
            # ':', '|' not in freenet base64
            ret = ':'.join(('B', normalize(params['REQUEST_URI']),
                            str(chk_len),
                            ':'.join([base[:12]
                                      for base in freenet_heads]),
                            '|',
                            ':'.join([head[:12] for head in heads]),
                            chk))

            ui_.status("\nNotification:\n%s\n" % ret + '\n')
            return ret

        raise util.Abort("Patch CHK insert failed.")
    finally:
        # Cleans up out file.
        cleanup(update_sm)
def execute_insert_patch(ui_, repo, params, stored_cfg):
    """ Create an hg bundle containing all changes not already in the
        infocalypse repo in Freenet and insert it to a CHK.

        Returns a machine readable patch notification message.

        Raises util.Abort if the CHK insert fails.
    """
    # Predefine so the finally clause can't raise NameError when
    # setup() fails before update_sm is assigned (BUG FIX; the
    # sibling execute_wiki_apply already uses this pattern and
    # cleanup() handles None).
    update_sm = None
    try:
        update_sm = setup(ui_, repo, params, stored_cfg)
        out_file = make_temp_file(update_sm.ctx.bundle_cache.base_dir)

        ui_.status("Reading repo state from Freenet...\n")
        freenet_heads = read_freenet_heads(params, update_sm,
                                           params['REQUEST_URI'])

        # This may eventually change to support other patch types.
        create_patch_bundle(ui_, repo, freenet_heads, out_file)

        # Make an FCP file insert request which will run on the
        # state machine.
        request = StatefulRequest(update_sm)
        request.tag = 'patch_bundle_insert'
        request.in_params.definition = PUT_FILE_DEF
        request.in_params.fcp_params = update_sm.params.copy()
        request.in_params.fcp_params['URI'] = 'CHK@'
        request.in_params.file_name = out_file
        request.in_params.send_data = True

        # Must do this here because the file gets deleted later.
        chk_len = os.path.getsize(out_file)

        ui_.status("Inserting %i byte patch bundle...\n" % chk_len)
        update_sm.start_single_request(request)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        freenet_heads = sorted(freenet_heads)
        heads = sorted([hexlify(head) for head in repo.heads()])

        if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING, ))):
            chk = update_sm.get_state(RUNNING_SINGLE_REQUEST).\
                  final_msg[1]['URI']
            ui_.status("Patch CHK:\n%s\n" % chk)
            # ':', '|' not in freenet base64
            ret = ':'.join(
                ('B', normalize(params['REQUEST_URI']),
                 str(chk_len),
                 ':'.join([base[:12] for base in freenet_heads]),
                 '|',
                 ':'.join([head[:12] for head in heads]),
                 chk))

            ui_.status("\nNotification:\n%s\n" % ret + '\n')
            return ret

        raise util.Abort("Patch CHK insert failed.")
    finally:
        # Cleans up out file.
        cleanup(update_sm)
def _handle_submission(self, msg_id, submission_tuple, msg): """ INTERNAL: Handle incoming submission bundles.""" self.debug("handle_submission -- %s" % msg_id) self.trace("handle_submission -- %s" % str(submission_tuple)) tmp_file = make_temp_file(self.params['TMP_DIR']) try: self.applier.apply_submission(msg_id, submission_tuple, msg[2], tmp_file) finally: if os.path.exists(tmp_file): os.remove(tmp_file)
def _get_bundle(self, edge, pad): """ Returns a (temp_file, mime_type) tuple for the hg bundle file corresponding to edge. """ original_len = self.graph.get_length(edge) expected_len = original_len if pad: expected_len += 1 # Hmmmm... misuse of bundle cache dir? tmp_file = make_temp_file(self.parent.ctx.bundle_cache.base_dir) raised = False try: bundle = self.parent.ctx.bundle_cache.make_bundle(self.graph, self.parent.ctx. version_table, edge[:2], tmp_file) if bundle[0] != original_len: raise BundleException("Wrong size. Expected: %i. Got: %i" % (original_len, bundle[0])) assert bundle[0] == original_len if pad: out_file = open(tmp_file, 'ab') try: out_file.seek(0, os.SEEK_END) out_file.write(PAD_BYTE) finally: out_file.close() assert expected_len == os.path.getsize(tmp_file) raised = False finally: if raised and os.path.exists(tmp_file): os.remove(tmp_file) if expected_len <= FREENET_BLOCK_LEN: mime_type = None else: assert edge[2] > -1 and edge[2] < 2 mime_type = HG_MIME_TYPE_FMT % edge[2] return (tmp_file, mime_type)
def _get_bundle(self, edge, pad): """ Returns a (temp_file, mime_type) tuple for the hg bundle file corresponding to edge. """ original_len = self.graph.get_length(edge) expected_len = original_len if pad: expected_len += 1 # Hmmmm... misuse of bundle cache dir? tmp_file = make_temp_file(self.parent.ctx.bundle_cache.base_dir) raised = False try: bundle = self.parent.ctx.bundle_cache.make_bundle( self.graph, self.parent.ctx.version_table, edge[:2], tmp_file) if bundle[0] != original_len: raise BundleException("Wrong size. Expected: %i. Got: %i" % (original_len, bundle[0])) assert bundle[0] == original_len if pad: out_file = open(tmp_file, 'ab') try: out_file.seek(0, os.SEEK_END) out_file.write(PAD_BYTE) finally: out_file.close() assert expected_len == os.path.getsize(tmp_file) raised = False finally: if raised and os.path.exists(tmp_file): os.remove(tmp_file) if expected_len <= FREENET_BLOCK_LEN: mime_type = None else: assert edge[2] > -1 and edge[2] < 2 mime_type = HG_MIME_TYPE_FMT % edge[2] return (tmp_file, mime_type)
def make_request(self, candidate): """ Implementation of RetryingRequestList virtual. """ #print "CANDIDATE: ", candidate assert len(candidate) >= 7 candidate[ 1] += 1 # Keep track of the number of times it has been tried # tag == edge, but what if we don't have an edge yet? request = CandidateRequest(self.parent) request.in_params.fcp_params = self.parent.params.copy() uri = candidate[0] if candidate[2]: uri = clear_control_bytes(uri) request.in_params.fcp_params['URI'] = uri request.in_params.definition = GET_DEF request.in_params.file_name = (make_temp_file( self.parent.ctx.bundle_cache.base_dir)) self.parent.ctx.set_cancel_time(request) # Set tag if not candidate[3] is None: request.tag = candidate[3] # Edge else: # REDFLAG: Do better! # Some random digit string. request.tag = request.in_params.file_name[-12:] # Set candidate request.candidate = candidate #print "make_request --", request.tag, candidate[0] # Tags must be unique or we will loose requests! assert not request.tag in self.pending #request.in_params.fcp_params['MaxSize'] = ??? return request
def make_request(self, candidate): """ Implementation of RetryingRequestList virtual. """ #print "CANDIDATE: ", candidate assert len(candidate) >= 7 candidate[1] += 1 # Keep track of the number of times it has been tried # tag == edge, but what if we don't have an edge yet? request = CandidateRequest(self.parent) request.in_params.fcp_params = self.parent.params.copy() uri = candidate[0] if candidate[2]: uri = clear_control_bytes(uri) request.in_params.fcp_params['URI'] = uri request.in_params.definition = GET_DEF request.in_params.file_name = ( make_temp_file(self.parent.ctx.bundle_cache.base_dir)) self.parent.ctx.set_cancel_time(request) # Set tag if not candidate[3] is None: request.tag = candidate[3] # Edge else: # REDFLAG: Do better! # Some random digit string. request.tag = request.in_params.file_name[-12:] # Set candidate request.candidate = candidate #print "make_request --", request.tag, candidate[0] # Tags must be unique or we will loose requests! assert not request.tag in self.pending #request.in_params.fcp_params['MaxSize'] = ??? return request
def execute_wiki_apply(ui_, repo, params, stored_cfg):
    """ Fetch a wiki change submission CHK and apply it to a local
        directory. """
    update_sm = None
    try:
        assert 'REQUEST_URI' in params
        # Get version, i.e. just the hg parent == hg head
        local_version = get_hg_version(repo)

        # Get target directory.
        params['ISWIKI'] = True
        read_freesite_cfg(ui_, repo, params, stored_cfg)

        update_sm = setup(ui_, repo, params, stored_cfg)

        # Build an FCP download request to run on the state machine.
        fetch_request = StatefulRequest(update_sm)
        fetch_request.tag = 'submission_zip_request'
        fetch_request.in_params.definition = GET_DEF  # To RAM.
        fetch_request.in_params.fcp_params = update_sm.params.copy()
        fetch_request.in_params.fcp_params['URI'] = params['REQUEST_URI']
        # Knee high barrier against abuse.
        fetch_request.in_params.fcp_params['MaxSize'] = FREENET_BLOCK_LEN

        ui_.status("Requesting wiki submission from...\n%s\n" %
                   params['REQUEST_URI'])
        update_sm.start_single_request(fetch_request)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        # NOTE(review): a failed fetch falls through here silently with
        # no error message -- confirm that is intended.
        if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING, ))):
            submission_bytes = update_sm.\
                               get_state(RUNNING_SINGLE_REQUEST).\
                               final_msg[2]
            assert fetch_request.response[0] == 'AllData'
            ui_.status("Fetched %i byte submission.\n" %
                       len(submission_bytes))
            base_ver, submitter = get_info(
                StringIO.StringIO(submission_bytes))
            ui_.status("Base version: %s, Submitter: %s (unverifiable!)\n"
                       % (base_ver[:12], submitter))

            #print "H_ACKING base_ver to test exception!"
            #base_ver = 'da2f653c5c47b7ee7a814e668aa1d63c50c3a4f3'
            if not has_version(repo, base_ver):
                ui_.warn("That version isn't in the local repo.\n" +
                         "Try running hg fn-pull --aggressive.\n")
                raise util.Abort("%s not in local repo" % base_ver[:12])

            if base_ver != local_version:
                ui_.warn("Version mismatch! You might have to " +
                         "manually merge.\n")

            # Set up an IFileFunctions that reads the correct versions of
            # the unpatched files out of Mercurial.
            overlay = HgFileOverlay(ui_, repo,
                                    # i.e. "<>/wiki_root" NOT
                                    # "<>/wiki_root/wikitext"
                                    os.path.join(repo.root,
                                                 params['WIKI_ROOT']),
                                    # cleanup() in finally deletes this.
                                    make_temp_file(update_sm.ctx.
                                                   bundle_cache.base_dir))
            overlay.version = base_ver
            validate_wikitext(overlay)
            updates = unbundle_wikitext(
                overlay, StringIO.StringIO(submission_bytes))
            for index, label in enumerate(('CREATED', 'MODIFIED',
                                           'REMOVED',
                                           'ALREADY PATCHED')):
                if updates[index]:
                    ui_.status('%s:\n%s\n' %
                               (label, '\n'.join(sorted(updates[index]))))
    finally:
        cleanup(update_sm)
def execute_wiki_apply(ui_, repo, params, stored_cfg):
    """ Fetch a wiki change submission CHK and apply it to a local
        directory. """
    update_sm = None
    try:
        assert 'REQUEST_URI' in params
        # Get version, i.e. just the hg parent == hg head
        head_version = get_hg_version(repo)

        # Get target directory.
        params['ISWIKI'] = True
        read_freesite_cfg(ui_, repo, params, stored_cfg)

        update_sm = setup(ui_, repo, params, stored_cfg)

        # Make an FCP download request which will run on the
        # state machine.
        request = StatefulRequest(update_sm)
        request.tag = 'submission_zip_request'
        request.in_params.definition = GET_DEF  # To RAM.
        request.in_params.fcp_params = update_sm.params.copy()
        request.in_params.fcp_params['URI'] = params['REQUEST_URI']
        # Knee high barrier against abuse.
        request.in_params.fcp_params['MaxSize'] = FREENET_BLOCK_LEN

        ui_.status("Requesting wiki submission from...\n%s\n" %
                   params['REQUEST_URI'])
        update_sm.start_single_request(request)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        # NOTE(review): nothing is reported when the fetch fails; the
        # function just returns -- verify this is the desired behavior.
        if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING, ))):
            raw_bytes = update_sm.get_state(RUNNING_SINGLE_REQUEST).\
                        final_msg[2]
            assert request.response[0] == 'AllData'
            ui_.status("Fetched %i byte submission.\n" % len(raw_bytes))
            base_ver, submitter = get_info(StringIO.StringIO(raw_bytes))
            ui_.status("Base version: %s, Submitter: %s (unverifiable!)\n"
                       % (base_ver[:12], submitter))

            #print "H_ACKING base_ver to test exception!"
            #base_ver = 'da2f653c5c47b7ee7a814e668aa1d63c50c3a4f3'
            if not has_version(repo, base_ver):
                ui_.warn("That version isn't in the local repo.\n" +
                         "Try running hg fn-pull --aggressive.\n")
                raise util.Abort("%s not in local repo" % base_ver[:12])

            if base_ver != head_version:
                ui_.warn("Version mismatch! You might have to " +
                         "manually merge.\n")

            # Set up an IFileFunctions that reads the correct versions of
            # the unpatched files out of Mercurial.
            overlay = HgFileOverlay(
                ui_, repo,
                # i.e. "<>/wiki_root" NOT "<>/wiki_root/wikitext"
                os.path.join(repo.root, params['WIKI_ROOT']),
                # cleanup() in finally deletes this.
                make_temp_file(update_sm.ctx.bundle_cache.base_dir))
            overlay.version = base_ver
            validate_wikitext(overlay)
            updates = unbundle_wikitext(overlay,
                                        StringIO.StringIO(raw_bytes))
            labels = ('CREATED', 'MODIFIED', 'REMOVED', 'ALREADY PATCHED')
            for index, label in enumerate(labels):
                if len(updates[index]) > 0:
                    values = sorted(updates[index])
                    ui_.status('%s:\n%s\n' % (label, '\n'.join(values)))
    finally:
        cleanup(update_sm)