def next_runnable(self):
    """ Implementation of RequestQueueState virtual.

        Queues a single GetCHKOnly request to invert the insert URI,
        returning None on subsequent calls. """
    if self.queued:
        return None
    self.queued = True

    target = self.insert_uri
    if is_usk(target):
        # Hack to keep freenet from doing a USK search.
        target = get_ssk_for_usk_version(target, 0)

    # Filler payload; GetCHKOnly is set, so only the key calculation
    # should matter.
    dummy_payload = '@' * 9

    req = StatefulRequest(self.parent)
    req.in_params.definition = GET_REQUEST_URI_DEF
    req.in_params.fcp_params = {'URI': target,
                                'MaxRetries': 1,
                                'PriorityClass': 1,
                                'UploadFrom': 'direct',
                                'GetCHKOnly': True,
                                'DataLength': len(dummy_payload)}
    req.in_params.send_data = dummy_payload
    req.tag = 'only_invert' # Hmmmm...
    self.parent.ctx.set_cancel_time(req)
    return req
def execute_insert_patch(ui_, repo, params, stored_cfg):
    """ Create an hg bundle containing all changes not already in the
        infocalypse repo in Freenet and insert it to a CHK.

        Returns a machine readable patch notification message.

        params must contain 'REQUEST_URI' and 'POLL_SECS'.
        Raises util.Abort if the insert fails. """
    # Initialize before the try so the finally clause can't raise a
    # NameError if setup() itself fails (matches execute_wiki_apply).
    update_sm = None
    try:
        update_sm = setup(ui_, repo, params, stored_cfg)
        out_file = make_temp_file(update_sm.ctx.bundle_cache.base_dir)
        ui_.status("Reading repo state from Freenet...\n")
        # Heads already present in the Freenet repo; the bundle only
        # needs the changesets on top of these.
        freenet_heads = read_freenet_heads(params, update_sm,
                                           params['REQUEST_URI'])

        # This may eventually change to support other patch types.
        create_patch_bundle(ui_, repo, freenet_heads, out_file)

        # Make an FCP file insert request which will run on
        # the state machine.
        request = StatefulRequest(update_sm)
        request.tag = 'patch_bundle_insert'
        request.in_params.definition = PUT_FILE_DEF
        request.in_params.fcp_params = update_sm.params.copy()
        # 'CHK@' asks the node to compute the content hash key.
        request.in_params.fcp_params['URI'] = 'CHK@'
        request.in_params.file_name = out_file
        request.in_params.send_data = True

        # Must do this here because file gets deleted.
        chk_len = os.path.getsize(out_file)

        ui_.status("Inserting %i byte patch bundle...\n" % chk_len)
        update_sm.start_single_request(request)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        freenet_heads = list(freenet_heads)
        freenet_heads.sort()
        heads = [hexlify(head) for head in repo.heads()]
        heads.sort()
        # Arriving at QUIESCENT from FINISHING means the insert succeeded.
        if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING, ))):
            chk = update_sm.get_state(RUNNING_SINGLE_REQUEST).\
                  final_msg[1]['URI']
            ui_.status("Patch CHK:\n%s\n" % chk)
            # ':', '|' not in freenet base64
            ret = ':'.join(('B', normalize(params['REQUEST_URI']),
                            str(chk_len),
                            ':'.join([base[:12] for base in freenet_heads]),
                            '|',
                            ':'.join([head[:12] for head in heads]),
                            chk))

            ui_.status("\nNotification:\n%s\n" % ret + '\n')
            return ret

        raise util.Abort("Patch CHK insert failed.")
    finally:
        # Cleans up out file.
        cleanup(update_sm)
def execute_insert_patch(ui_, repo, params, stored_cfg):
    """ Create an hg bundle containing all changes not already in the
        infocalypse repo in Freenet and insert it to a CHK.

        Returns a machine readable patch notification message.

        params must contain 'REQUEST_URI' and 'POLL_SECS'.
        Raises util.Abort if the insert fails. """
    try:
        update_sm = setup(ui_, repo, params, stored_cfg)
        out_file = make_temp_file(update_sm.ctx.bundle_cache.base_dir)
        ui_.status("Reading repo state from Freenet...\n")
        # Heads already present in the Freenet repo; the bundle only
        # needs the changesets on top of these.
        freenet_heads = read_freenet_heads(params, update_sm,
                                           params['REQUEST_URI'])

        # This may eventually change to support other patch types.
        create_patch_bundle(ui_, repo, freenet_heads, out_file)

        # Make an FCP file insert request which will run on
        # the state machine.
        request = StatefulRequest(update_sm)
        request.tag = 'patch_bundle_insert'
        request.in_params.definition = PUT_FILE_DEF
        request.in_params.fcp_params = update_sm.params.copy()
        # 'CHK@' asks the node to compute the content hash key.
        request.in_params.fcp_params['URI'] = 'CHK@'
        request.in_params.file_name = out_file
        request.in_params.send_data = True

        # Must do this here because file gets deleted.
        chk_len = os.path.getsize(out_file)

        ui_.status("Inserting %i byte patch bundle...\n" %
                   os.path.getsize(out_file))
        update_sm.start_single_request(request)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        freenet_heads = list(freenet_heads)
        freenet_heads.sort()
        heads = [hexlify(head) for head in repo.heads()]
        heads.sort()
        # Arriving at QUIESCENT from FINISHING means the insert succeeded.
        if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING, ))):
            chk = update_sm.get_state(RUNNING_SINGLE_REQUEST).\
                  final_msg[1]['URI']
            ui_.status("Patch CHK:\n%s\n" % chk)
            # ':', '|' not in freenet base64
            ret = ':'.join(
                ('B', normalize(params['REQUEST_URI']), str(chk_len),
                 ':'.join([base[:12] for base in freenet_heads]), '|',
                 ':'.join([head[:12] for head in heads]), chk))

            ui_.status("\nNotification:\n%s\n" % ret + '\n')
            return ret

        raise util.Abort("Patch CHK insert failed.")
    finally:
        # Cleans up out file.
        cleanup(update_sm)
def make_splitfile_metadata_request(self, edge, tag):
    """ Makes a StatefulRequest for the Freenet metadata for the
        CHK corresponding to an edge in the update graph.

        Helper function used by InsertingBundles state.
    """
    # TRICKY: Clear control bytes to get the raw CHK contents,
    #         disabling Freenet metadata handling.
    raw_chk_uri = clear_control_bytes(self.parent.ctx.graph.get_chk(edge))

    fcp_params = self.parent.params.copy()
    fcp_params['URI'] = raw_chk_uri

    req = StatefulRequest(self.parent)
    req.tag = tag
    req.in_params.definition = GET_DEF
    req.in_params.fcp_params = fcp_params
    self.set_cancel_time(req)
    return req
def make_edge_insert_request(self, edge, tag, salted_metadata_cache):
    """ Makes a StatefulRequest to insert the hg bundle
        corresponding to an edge in the update graph.

        Helper function used by InsertingBundles state.

        salted_metadata_cache maps edges to their previously fetched
        raw CHK metadata bytes (used for INSERT_SALTED_METADATA).
    """
    request = StatefulRequest(self.parent)
    request.tag = tag
    request.in_params.definition = PUT_FILE_DEF
    request.in_params.fcp_params = self.parent.params.copy()
    request.in_params.fcp_params['URI'] = 'CHK@'
    kind = self.graph.insert_type(edge)
    if kind == INSERT_SALTED_METADATA:
        #print "make_edge_insert_request -- salted"
        assert edge[2] == 1
        # Start from the raw metadata fetched for the ordinal-0 edge.
        raw_bytes = salted_metadata_cache[(edge[0], edge[1], 0)]
        pos = raw_bytes.find(METADATA_MARKER)
        if pos == -1 or len(raw_bytes) < pos + len(METADATA_MARKER) + 1:
            raise Exception("Couldn't read marker string.")
        salted_pos = pos + len(METADATA_MARKER)
        old_salt = raw_bytes[salted_pos]
        if old_salt != '0':
            raise Exception("Unexpected salt byte: %s" % old_salt)
        # Flip the single salt byte after the marker from '0' to '1',
        # keeping the length identical — presumably so the re-insert
        # produces a different CHK for otherwise identical metadata.
        twiddled_bytes = raw_bytes[:salted_pos] + '1' \
                         + raw_bytes[salted_pos + 1:]
        assert len(raw_bytes) == len(twiddled_bytes)
        # Twiddled bytes are inserted directly from RAM.
        request.in_params.send_data = twiddled_bytes
        self.set_cancel_time(request)
        return request

    assert (kind == INSERT_NORMAL or kind == INSERT_PADDED or
            kind == INSERT_HUGE)
    pad = (kind == INSERT_PADDED)
    #print "make_edge_insert_request -- from disk: pad"
    # Bundle data comes from a temp file on disk instead of RAM.
    tmp_file, mime_type = self._get_bundle(edge, pad)
    request.in_params.file_name = tmp_file
    request.in_params.send_data = True
    if not mime_type is None:
        request.in_params.fcp_params['Metadata.ContentType'] = mime_type
    self.set_cancel_time(request)
    return request
def _start_freesite_insert(self):
    """ INTERNAL: Start asynchronous insert of Wiki freesite.

        Dumps the wiki to static HTML under TMP_DIR and starts a
        ClientPutComplexDir request on a fresh update state machine.
        Requires that no insert is already running (update_sm is None).
    """
    assert self.update_sm is None
    self.debug("start_freesite_insert -- starting insert of edition: %i" %
               (latest_site_index(self.repo) + 1))
    self.update_sm = setup_sm(self.ui_, self.repo, self.runner, self.params)
    # LATER: Replace UICallbacks and back out dorky chaining?
    # Chain our transition hook after the existing callback.
    self.update_sm.transition_callback = (
        ChainedCallback.chain((self.update_sm.transition_callback,
                               self._freesite_transition)))
    # DCI: try block, with file cleanup
    # DCI: need to check that there are no uncommited files!
    site_root = os.path.join(self.params['TMP_DIR'], HTML_DUMP_DIR)
    dump_wiki_html(os.path.join(self.repo.root, self.params['WIKI_ROOT']),
                   site_root, False)

    infos = get_file_infos(site_root)
    set_index_file(infos, self.params['SITE_DEFAULT_FILE'])
    self.debug('start_freesite_insert -- dumped %i files' % len(infos))
    self.trace('--- files ---')
    for info in infos:
        self.trace('%s %s' % (info[0], info[1]))
    self.trace('---')

    request = StatefulRequest(self.update_sm)
    request.tag = 'freesite_insert'
    request.in_params.definition = PUT_COMPLEX_DIR_DEF
    request.in_params.fcp_params = self.params.copy()
    request.in_params.fcp_params['DontCompress'] = False
    request.in_params.fcp_params['URI'] = self._freesite_insert_uri()

    # dir_data_source() creates an IDataSource which allows
    # the FCPConnection to slurp the files up over the
    # FCP socket as one contiguous blob.
    # Sets up in_params for ClientPutComplexDir as a side effect.
    request.custom_data_source = (
        dir_data_source(infos, request.in_params, 'text/html'))

    # Absolute deadline after which the request is abandoned.
    request.cancel_time_secs = (time.time() +
                                self.params['CANCEL_TIME_SECS'])
    self.update_sm.start_single_request(request)
def _start_freesite_insert(self):
    """ INTERNAL: Kick off the asynchronous insert of the wiki freesite. """
    assert self.update_sm is None
    next_edition = latest_site_index(self.repo) + 1
    self.debug("start_freesite_insert -- starting insert of edition: %i" %
               next_edition)
    self.update_sm = setup_sm(self.ui_, self.repo, self.runner, self.params)
    # LATER: Replace UICallbacks and back out dorky chaining?
    chained = ChainedCallback.chain((self.update_sm.transition_callback,
                                     self._freesite_transition))
    self.update_sm.transition_callback = chained

    # DCI: try block, with file cleanup
    # DCI: need to check that there are no uncommited files!
    dump_dir = os.path.join(self.params['TMP_DIR'], HTML_DUMP_DIR)
    wiki_src = os.path.join(self.repo.root, self.params['WIKI_ROOT'])
    dump_wiki_html(wiki_src, dump_dir, False)

    file_infos = get_file_infos(dump_dir)
    set_index_file(file_infos, self.params['SITE_DEFAULT_FILE'])
    self.debug('start_freesite_insert -- dumped %i files' % len(file_infos))
    self.trace('--- files ---')
    for entry in file_infos:
        self.trace('%s %s' % (entry[0], entry[1]))
    self.trace('---')

    req = StatefulRequest(self.update_sm)
    req.tag = 'freesite_insert'
    req.in_params.definition = PUT_COMPLEX_DIR_DEF
    req.in_params.fcp_params = self.params.copy()
    req.in_params.fcp_params['DontCompress'] = False
    req.in_params.fcp_params['URI'] = self._freesite_insert_uri()

    # dir_data_source() returns an IDataSource so the FCPConnection can
    # stream the site files over the FCP socket as one contiguous blob.
    # It also fills in in_params for ClientPutComplexDir as a side effect.
    req.custom_data_source = dir_data_source(file_infos, req.in_params,
                                             'text/html')

    req.cancel_time_secs = time.time() + self.params['CANCEL_TIME_SECS']
    self.update_sm.start_single_request(req)
def next_runnable(self):
    """ Implementation of RequestQueueState virtual.

        Returns a single request to determine the request URI for
        self.insert_uri, or None once it has already been queued. """
    if self.queued:
        return None
    self.queued = True

    uri = self.insert_uri
    if is_usk(uri):
        # Hack to keep freenet from doing a USK search.
        uri = get_ssk_for_usk_version(uri, 0)

    request = StatefulRequest(self.parent)
    request.in_params.definition = GET_REQUEST_URI_DEF
    request.in_params.fcp_params = {'URI': uri,
                                    'MaxRetries': 1,
                                    'PriorityClass': 1,
                                    'UploadFrom': 'direct',
                                    'GetCHKOnly': True}
    # Filler payload; GetCHKOnly is set, so presumably only the key
    # calculation matters — TODO confirm.
    request.in_params.send_data = '@' * 9
    request.in_params.fcp_params['DataLength'] = (
        len(request.in_params.send_data))
    request.tag = 'only_invert' # Hmmmm...
    self.parent.ctx.set_cancel_time(request)
    return request
def execute_wiki_apply(ui_, repo, params, stored_cfg):
    """ Fetch a wiki change submission CHK and apply it to a local
        directory.

        params must contain 'REQUEST_URI' (the submission CHK) and
        'POLL_SECS'. """
    update_sm = None
    try:
        assert 'REQUEST_URI' in params
        # Get version, i.e. just the hg parent == hg head
        version = get_hg_version(repo)

        # Get target directory.
        params['ISWIKI'] = True
        read_freesite_cfg(ui_, repo, params, stored_cfg)

        update_sm = setup(ui_, repo, params, stored_cfg)

        # Make an FCP download request which will run on
        # the state machine.
        request = StatefulRequest(update_sm)
        request.tag = 'submission_zip_request'
        request.in_params.definition = GET_DEF # To RAM.
        request.in_params.fcp_params = update_sm.params.copy()
        request.in_params.fcp_params['URI'] = params['REQUEST_URI']
        # Knee high barrier against abuse.
        request.in_params.fcp_params['MaxSize'] = FREENET_BLOCK_LEN

        ui_.status("Requesting wiki submission from...\n%s\n" %
                   params['REQUEST_URI'])
        update_sm.start_single_request(request)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        # Arriving at QUIESCENT from FINISHING means the fetch succeeded.
        if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING, ))):
            raw_bytes = update_sm.get_state(RUNNING_SINGLE_REQUEST).\
                        final_msg[2]
            assert request.response[0] == 'AllData'
            ui_.status("Fetched %i byte submission.\n" % len(raw_bytes))
            base_ver, submitter = get_info(StringIO.StringIO(raw_bytes))
            ui_.status("Base version: %s, Submitter: %s (unverifiable!)\n"
                       % (base_ver[:12], submitter))

            #print "H_ACKING base_ver to test exception!"
            #base_ver = 'da2f653c5c47b7ee7a814e668aa1d63c50c3a4f3'
            if not has_version(repo, base_ver):
                ui_.warn("That version isn't in the local repo.\n" +
                         "Try running hg fn-pull --aggressive.\n")
                raise util.Abort("%s not in local repo" % base_ver[:12])

            if base_ver != version:
                ui_.warn("Version mismatch! You might have to " +
                         "manually merge.\n")

            # Set up an IFileFunctions that reads the correct versions of
            # the unpatched files out of Mercurial.
            overlay = HgFileOverlay(ui_, repo,
                                    # i.e. "<>/wiki_root" NOT
                                    # "<>/wiki_root/wikitext"
                                    os.path.join(repo.root,
                                                 params['WIKI_ROOT']),
                                    # cleanup() in finally deletes this.
                                    make_temp_file(update_sm.ctx.
                                                   bundle_cache.base_dir))
            overlay.version = base_ver
            validate_wikitext(overlay)
            # Apply the submission zip and report what changed per category.
            updates = unbundle_wikitext(overlay,
                                        StringIO.StringIO(raw_bytes))
            for index, label in enumerate(('CREATED', 'MODIFIED',
                                           'REMOVED', 'ALREADY PATCHED')):
                if len(updates[index]) > 0:
                    values = list(updates[index])
                    values.sort()
                    ui_.status('%s:\n%s\n' % (label, '\n'.join(values)))
    finally:
        cleanup(update_sm)
def execute_wiki_submit(ui_, repo, params, stored_cfg):
    """ Insert an overlayed wiki change submission CHK into freenet and
        return a notification message string.

        Returns a (notification, fms_group) tuple.

        Raises util.Abort when the config, the edits, or the insert
        make submission impossible. """
    update_sm = None
    try:
        # Read submitter out of stored_cfg
        submitter = stored_cfg.defaults.get('FMS_ID', None)
        assert not submitter is None
        assert submitter.find('@') == -1

        # Get version, i.e. just the hg parent == hg head
        version = get_hg_version(repo)

        params['ISWIKI'] = True
        read_freesite_cfg(ui_, repo, params, stored_cfg)
        if not params.get('OVERLAYED', False):
            raise util.Abort("Can't submit from non-overlayed wiki edits!")
        if not params.get('CLIENT_WIKI_GROUP', None):
            # DCI: test code path
            raise util.Abort("No wiki_group in fnwiki.cfg. Don't " +
                             "know where to post to!")

        ui_.status("\nPreparing to submit to %s FMS group as %s.\n" %
                   (params['CLIENT_WIKI_GROUP'], submitter))

        # Create submission zip file in RAM.
        overlay = get_file_funcs(os.path.join(repo.root,
                                              params['WIKI_ROOT']),
                                 True)
        try:
            raw_bytes = bundle_wikitext(overlay, version, submitter)
        except NoChangesError:
            raise util.Abort("There are no overlayed changes to submit.")

        # Punt if it's too big.
        if len(raw_bytes) >= FREENET_BLOCK_LEN:
            raise util.Abort("Too many changes. Change .zip must be <32K")

        update_sm = setup(ui_, repo, params, stored_cfg)

        # Make an FCP file insert request which will run on
        # the state machine.
        request = StatefulRequest(update_sm)
        request.tag = 'submission_zip_insert'
        request.in_params.definition = PUT_FILE_DEF
        request.in_params.fcp_params = update_sm.params.copy()
        request.in_params.fcp_params['URI'] = 'CHK@'
        request.in_params.send_data = raw_bytes

        ui_.status("Inserting %i byte submission CHK...\n" %
                   len(raw_bytes))
        update_sm.start_single_request(request)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        # Arriving at QUIESCENT from FINISHING means the insert succeeded.
        # (Removed a dead 'heads' list here that was computed but never
        # used by the 'W' notification below.)
        if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING, ))):
            chk = update_sm.get_state(RUNNING_SINGLE_REQUEST).\
                  final_msg[1]['URI']
            # Fixed label: this is the submission zip CHK, not a patch.
            ui_.status("Submission CHK:\n%s\n" % chk)
            # ':', '|' not in freenet base64
            # DCI: why normalize???
            # (usk_hash, base_version, chk, length)
            ret = ':'.join(('W', normalize(params['REQUEST_URI']),
                            version[:12], chk, str(len(raw_bytes))))

            ui_.status("\nNotification:\n%s\n" % ret + '\n')
            return ret, params['CLIENT_WIKI_GROUP']

        raise util.Abort("Submission CHK insert failed.")
    finally:
        # Cleans up out file.
        cleanup(update_sm)
def execute_wiki_submit(ui_, repo, params, stored_cfg):
    """ Insert an overlayed wiki change submission CHK into freenet and
        return a notification message string.

        Returns a (notification, fms_group) tuple.

        Raises util.Abort when the config, the edits, or the insert
        make submission impossible. """
    update_sm = None
    try:
        # Read submitter out of stored_cfg
        submitter = stored_cfg.defaults.get('FMS_ID', None)
        assert not submitter is None
        assert submitter.find('@') == -1

        # Get version, i.e. just the hg parent == hg head
        version = get_hg_version(repo)

        params['ISWIKI'] = True
        read_freesite_cfg(ui_, repo, params, stored_cfg)
        if not params.get('OVERLAYED', False):
            raise util.Abort("Can't submit from non-overlayed wiki edits!")
        if not params.get('CLIENT_WIKI_GROUP', None):
            # DCI: test code path
            raise util.Abort("No wiki_group in fnwiki.cfg. Don't " +
                             "know where to post to!")

        ui_.status("\nPreparing to submit to %s FMS group as %s.\n" %
                   (params['CLIENT_WIKI_GROUP'], submitter))

        # Create submission zip file in RAM.
        overlay = get_file_funcs(os.path.join(repo.root,
                                              params['WIKI_ROOT']),
                                 True)
        try:
            raw_bytes = bundle_wikitext(overlay, version, submitter)
        except NoChangesError:
            raise util.Abort("There are no overlayed changes to submit.")

        # Punt if it's too big.
        if len(raw_bytes) >= FREENET_BLOCK_LEN:
            raise util.Abort("Too many changes. Change .zip must be <32K")

        update_sm = setup(ui_, repo, params, stored_cfg)

        # Make an FCP file insert request which will run on
        # the state machine.
        request = StatefulRequest(update_sm)
        request.tag = 'submission_zip_insert'
        request.in_params.definition = PUT_FILE_DEF
        request.in_params.fcp_params = update_sm.params.copy()
        request.in_params.fcp_params['URI'] = 'CHK@'
        request.in_params.send_data = raw_bytes

        ui_.status("Inserting %i byte submission CHK...\n" %
                   len(raw_bytes))
        update_sm.start_single_request(request)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        # NOTE(review): 'heads' is computed and sorted but never used
        # below — looks like dead code copied from execute_insert_patch.
        heads = [hexlify(head) for head in repo.heads()]
        heads.sort()
        # Arriving at QUIESCENT from FINISHING means the insert succeeded.
        if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING,))):
            chk = update_sm.get_state(RUNNING_SINGLE_REQUEST).\
                  final_msg[1]['URI']
            # NOTE(review): label says "Patch CHK" but this is the
            # submission zip CHK — confirm before changing the message.
            ui_.status("Patch CHK:\n%s\n" % chk)
            # ':', '|' not in freenet base64
            # DCI: why normalize???
            # (usk_hash, base_version, chk, length)
            ret = ':'.join(('W', normalize(params['REQUEST_URI']),
                            version[:12], chk, str(len(raw_bytes))))

            ui_.status("\nNotification:\n%s\n" % ret + '\n')
            return ret, params['CLIENT_WIKI_GROUP']

        raise util.Abort("Submission CHK insert failed.")
    finally:
        # Cleans up out file.
        cleanup(update_sm)
def execute_wiki_apply(ui_, repo, params, stored_cfg):
    """ Fetch a wiki change submission CHK and apply it to a local
        directory.

        params must contain 'REQUEST_URI' (the submission CHK) and
        'POLL_SECS'. """
    update_sm = None
    try:
        assert 'REQUEST_URI' in params
        # Get version, i.e. just the hg parent == hg head
        version = get_hg_version(repo)

        # Get target directory.
        params['ISWIKI'] = True
        read_freesite_cfg(ui_, repo, params, stored_cfg)

        update_sm = setup(ui_, repo, params, stored_cfg)

        # Make an FCP download request which will run on
        # the state machine.
        request = StatefulRequest(update_sm)
        request.tag = 'submission_zip_request'
        request.in_params.definition = GET_DEF # To RAM.
        request.in_params.fcp_params = update_sm.params.copy()
        request.in_params.fcp_params['URI'] = params['REQUEST_URI']
        # Knee high barrier against abuse.
        request.in_params.fcp_params['MaxSize'] = FREENET_BLOCK_LEN

        ui_.status("Requesting wiki submission from...\n%s\n" %
                   params['REQUEST_URI'])
        update_sm.start_single_request(request)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        # Arriving at QUIESCENT from FINISHING means the fetch succeeded.
        if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING,))):
            raw_bytes = update_sm.get_state(RUNNING_SINGLE_REQUEST).\
                        final_msg[2]
            assert request.response[0] == 'AllData'
            ui_.status("Fetched %i byte submission.\n" % len(raw_bytes))
            base_ver, submitter = get_info(StringIO.StringIO(raw_bytes))
            ui_.status("Base version: %s, Submitter: %s (unverifiable!)\n"
                       % (base_ver[:12], submitter))

            #print "H_ACKING base_ver to test exception!"
            #base_ver = 'da2f653c5c47b7ee7a814e668aa1d63c50c3a4f3'
            if not has_version(repo, base_ver):
                ui_.warn("That version isn't in the local repo.\n" +
                         "Try running hg fn-pull --aggressive.\n")
                raise util.Abort("%s not in local repo" % base_ver[:12])

            if base_ver != version:
                ui_.warn("Version mismatch! You might have to " +
                         "manually merge.\n")

            # Set up an IFileFunctions that reads the correct versions of
            # the unpatched files out of Mercurial.
            overlay = HgFileOverlay(ui_, repo,
                                    # i.e. "<>/wiki_root" NOT
                                    # "<>/wiki_root/wikitext"
                                    os.path.join(repo.root,
                                                 params['WIKI_ROOT']),
                                    # cleanup() in finally deletes this.
                                    make_temp_file(update_sm.ctx.
                                                   bundle_cache.base_dir))
            overlay.version = base_ver
            validate_wikitext(overlay)
            # Apply the submission zip and report what changed per category.
            updates = unbundle_wikitext(overlay,
                                        StringIO.StringIO(raw_bytes))
            for index, label in enumerate(('CREATED', 'MODIFIED',
                                           'REMOVED', 'ALREADY PATCHED')):
                if len(updates[index]) > 0:
                    values = list(updates[index])
                    values.sort()
                    ui_.status('%s:\n%s\n' % (label, '\n'.join(values)))
    finally:
        cleanup(update_sm)