def decide_next_state(self, from_state):
    """ DecisionState implementation.

    Caches the top key pulled by the previous state, then routes to
    the needs-blocks state when any referenced CHK blocks are missing
    from the local cache, otherwise to the has-blocks state.

    Raises Exception on a transition from a state that cannot supply
    a top key tuple. """
    if not hasattr(from_state, 'get_top_key_tuple'):
        raise Exception("Illegal Transition from: %s" % from_state.name)

    top_key = from_state.get_top_key_tuple()

    uri = self.parent.ctx['REQUEST_URI'] # WRONG FOR INSERT (+1)
    # Hmmmm... push this into the context? ctx.request_uri()
    insert_uri = self.parent.ctx.get('INSERT_URI', None)
    if insert_uri is not None and insert_uri != 'CHK@':
        # When inserting, prefer whichever URI carries the newer version.
        version = get_version(insert_uri)
        uri = get_usk_for_usk_version(uri,
                                      max(version, get_version(uri)))

    self.parent.ctx.arch_cache_top_key(uri, top_key)
    # NOTE(review): despite the name, these are the blocks still
    # *required* (not yet cached) -- confirm against required_blocks().
    self.cached_blocks = self.parent.ctx.required_blocks(top_key)
    if self.cached_blocks:
        return self.needs_blocks_state

    return self.has_blocks_state
def execute_arc_pull(ui_, params, stored_cfg):
    """ Update from an existing incremental archive in Freenet.

    Pulls remote blocks into the local block cache, then synchronizes
    the local directory from the fetched top key.  Always runs
    arc_cleanup() on the state machine, even on failure. """
    update_sm = None
    top_key_state = None
    try:
        assert 'ARCHIVE_CACHE_DIR' in params
        assert params['REQUEST_URI'] is not None
        if not params['NO_SEARCH'] and is_usk_file(params['REQUEST_URI']):
            index = stored_cfg.get_index(params['REQUEST_URI'])
            if index is not None:
                if index >= get_version(params['REQUEST_URI']):
                    # Update index to the latest known value
                    # for the --uri case.
                    params['REQUEST_URI'] = get_usk_for_usk_version(
                        params['REQUEST_URI'], index)
                else:
                    ui_.status(("Cached index [%i] < index in USK [%i]. "
                                "Using the index from the USK.\n"
                                "You're sure that index exists, right?\n") %
                               (index, get_version(params['REQUEST_URI'])))

        update_sm = setup(ui_, None, params, stored_cfg)
        ui_.status("%sRequest URI:\n%s\n" %
                   (is_redundant(params['REQUEST_URI']),
                    params['REQUEST_URI']))

        # Pull changes into the local block cache.
        ctx = ArchiveUpdateContext(update_sm, ui_)
        ctx.update({'REQUEST_URI': params['REQUEST_URI'],
                    'ARCHIVE_CACHE_DIR': params['ARCHIVE_CACHE_DIR']})

        start_requesting_blocks(update_sm, ctx)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        if update_sm.get_state(QUIESCENT).arrived_from((FINISHING, )):
            uri = update_sm.get_state(ARC_REQUESTING_URI).get_latest_uri()
            blocks = update_sm.get_state(ARC_CACHING_TOPKEY).get_blocks()
            plural = '' if len(blocks) == 1 else 's'
            ui_.status("Fetched %i bytes in %i CHK%s from:\n%s\n" %
                       (sum([block[0] for block in blocks]),
                        len(blocks), plural, uri))
            ui_.status("Updating local directory...\n")
            local_synch(ui_,
                        params['ARCHIVE_CACHE_DIR'],
                        # Use the updated URI below so we get the
                        # right cached topkey.
                        uri,
                        params['TO_DIR'])
            top_key_state = ARC_REQUESTING_URI
        else:
            ui_.status("Synchronize failed.\n")

        arc_handle_updating_config(update_sm, params, stored_cfg, True)
    finally:
        arc_cleanup(update_sm, top_key_state)
def do_key_setup(ui_, update_sm, params, stored_cfg):
    """ INTERNAL: Handle inverting/updating keys before running a command.

    Returns a (request_uri, is_keypair) tuple.  is_keypair is True when
    the request URI was derived from the insert URI by key inversion.

    Side effects: may rewrite params['INSERT_URI'] and set
    params['INVERTED_INSERT_URI'].  Raises util.Abort when the private
    key cannot be inverted. """
    insert_uri = params.get('INSERT_URI')
    if insert_uri is not None and insert_uri.startswith('USK@/'):
        # Expand the 'USK@/name/version' shorthand with the configured
        # default private key.
        insert_uri = ('USK' +
                      stored_cfg.defaults['DEFAULT_PRIVATE_KEY'][3:] +
                      insert_uri[5:])
        ui_.status("Filled in the insert URI using the default private key.\n")

    if insert_uri is None or not (is_usk(insert_uri) or is_ssk(insert_uri)):
        # Nothing to invert; just hand back the request URI as-is.
        return (params.get('REQUEST_URI'), False)

    update_sm.start_inverting(insert_uri)
    run_until_quiescent(update_sm, params['POLL_SECS'], False)
    if update_sm.get_state(QUIESCENT).prev_state != INVERTING_URI:
        raise util.Abort("Couldn't invert private key:\n%s" % insert_uri)

    inverted_uri = update_sm.get_state(INVERTING_URI).get_request_uri()
    params['INVERTED_INSERT_URI'] = inverted_uri

    if is_usk(insert_uri):
        # Determine the highest known index for the insert uri.
        max_index = max(stored_cfg.get_index(inverted_uri),
                        get_version(insert_uri))
        # Update the insert uri to the latest known version.
        params['INSERT_URI'] = get_usk_for_usk_version(insert_uri,
                                                       max_index)
        # Update the inverted insert URI to the latest known version.
        params['INVERTED_INSERT_URI'] = get_usk_for_usk_version(inverted_uri,
                                                                max_index)

    # Update the index of the request uri using the stored config.
    request_uri = params.get('REQUEST_URI')
    if request_uri is not None and is_usk(request_uri):
        assert not params['NO_SEARCH'] or request_uri is not None
        if not params['NO_SEARCH']:
            max_index = max(stored_cfg.get_index(request_uri),
                            get_version(request_uri))
            request_uri = get_usk_for_usk_version(request_uri, max_index)

        if (params['NO_SEARCH'] and
            # Force the insert URI down to the version in the request URI.
            usks_equal(request_uri, params['INVERTED_INSERT_URI'])):
            params['INVERTED_INSERT_URI'] = request_uri
            params['INSERT_URI'] = get_usk_for_usk_version(
                insert_uri, get_version(request_uri))

    # No request URI was given: fall back to the inverted insert URI
    # and report that we are working with a keypair.
    is_keypair = False
    if (request_uri is None and
        params.get('INVERTED_INSERT_URI') is not None):
        request_uri = params['INVERTED_INSERT_URI']
        is_keypair = True

    return (request_uri, is_keypair)
def execute_arc_pull(ui_, params, stored_cfg):
    """ Pull an existing incremental archive from Freenet and bring
        the local directory up to date from it. """
    update_sm = None
    top_key_state = None
    try:
        assert 'ARCHIVE_CACHE_DIR' in params
        assert params['REQUEST_URI'] is not None
        if not params['NO_SEARCH'] and is_usk_file(params['REQUEST_URI']):
            cached_index = stored_cfg.get_index(params['REQUEST_URI'])
            if cached_index is not None:
                usk_index = get_version(params['REQUEST_URI'])
                if cached_index >= usk_index:
                    # Bump the request URI up to the latest known index
                    # (covers the --uri case).
                    params['REQUEST_URI'] = get_usk_for_usk_version(
                        params['REQUEST_URI'], cached_index)
                else:
                    ui_.status(("Cached index [%i] < index in USK [%i]. "
                                "Using the index from the USK.\n"
                                "You're sure that index exists, right?\n") %
                               (cached_index, usk_index))

        update_sm = setup(ui_, None, params, stored_cfg)
        ui_.status("%sRequest URI:\n%s\n" %
                   (is_redundant(params['REQUEST_URI']),
                    params['REQUEST_URI']))

        # Pull changes into the local block cache.
        ctx = ArchiveUpdateContext(update_sm, ui_)
        ctx.update({'REQUEST_URI': params['REQUEST_URI'],
                    'ARCHIVE_CACHE_DIR': params['ARCHIVE_CACHE_DIR']})

        start_requesting_blocks(update_sm, ctx)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        succeeded = update_sm.get_state(QUIESCENT).arrived_from(
            (FINISHING, ))
        if succeeded:
            uri = update_sm.get_state(ARC_REQUESTING_URI).get_latest_uri()
            blocks = update_sm.get_state(ARC_CACHING_TOPKEY).get_blocks()
            plural = 's' if len(blocks) != 1 else ''
            ui_.status("Fetched %i bytes in %i CHK%s from:\n%s\n" %
                       (sum([block[0] for block in blocks]),
                        len(blocks), plural, uri))
            ui_.status("Updating local directory...\n")
            # Use the updated URI below so we get the right cached topkey.
            local_synch(ui_, params['ARCHIVE_CACHE_DIR'], uri,
                        params['TO_DIR'])
            top_key_state = ARC_REQUESTING_URI
        else:
            ui_.status("Synchronize failed.\n")

        arc_handle_updating_config(update_sm, params, stored_cfg, True)
    finally:
        arc_cleanup(update_sm, top_key_state)
def arc_handle_updating_config(update_sm, params, stored_cfg,
                               is_pulling=False):
    """ INTERNAL: Write updates into the config file IFF the
        previous command succeeded. """
    base_dir = params['ARCHIVE_CACHE_DIR']
    quiescent = update_sm.get_state(QUIESCENT)

    if not is_pulling:
        # Insert side: record the insert <-> request key mapping.
        if not quiescent.arrived_from((FINISHING, )):
            return

        if (params['INSERT_URI'] is None or # <- re-insert w/o insert uri
            not is_usk_file(params['INSERT_URI'])):
            return

        inverted_uri = params['INVERTED_INSERT_URI']

        # Cache the request_uri - insert_uri mapping.
        stored_cfg.set_insert_uri(inverted_uri, update_sm.ctx['INSERT_URI'])

        # Cache the updated index for the insert.
        stored_cfg.update_index(inverted_uri,
                                get_version(update_sm.ctx['INSERT_URI']))
        stored_cfg.update_dir(base_dir, inverted_uri)

        # Hmmm... if we wanted to be clever we could update the request
        # uri too when it doesn't match the insert uri. Ok for now.
        # Only for usks and only on success.
        Config.to_file(stored_cfg)
        return

    # Pull side: only record the latest request index.
    # Only finishing required. same. REDFLAG: look at this again
    if not quiescent.arrived_from((FINISHING, )):
        return
    if not is_usk(params['REQUEST_URI']):
        return

    updated_uri = update_sm.get_state(ARC_REQUESTING_URI).get_latest_uri()
    stored_cfg.update_index(updated_uri, get_version(updated_uri))
    stored_cfg.update_dir(base_dir, updated_uri)
    Config.to_file(stored_cfg)
def update_latest_index(self, uri):
    """ Update the latest known version of the stored repo usk. """
    if uri is None:
        return

    # Only move the stored index forward, never backward.
    candidate = get_version(uri)
    if candidate > self.store_info['LATEST_INDEX']:
        self.store_info['LATEST_INDEX'] = candidate
def parse(text, is_lines=False):
    """ Parse updates and announcements from raw text. """
    lines = text if is_lines else text.split('\n')

    announcements = set([])
    updates = set([])

    for line in lines:
        # Handle crlf bs on Windoze.
        fields = line.strip().split(':')
        if parse_updates(fields, updates):
            continue
        if fields[0] != 'A' or len(fields) < 2:
            continue
        try:
            if is_usk_file(fields[1]):
                announcements.add(fields[1])
                # Implicit update.
                updates.add((get_usk_hash(fields[1]),
                             get_version(fields[1])))
        except ValueError:
            continue
        # else, silently fail... hmmmm

    # Sort so a given text always yields the same tuples.
    return (tuple(sorted(updates)), tuple(sorted(announcements)))
def parse(text, is_lines=False):
    """ Parse updates and announcements from raw text. """
    if is_lines:
        lines = text
    else:
        lines = text.split('\n')

    announcements = set([])
    updates = set([])
    for raw_line in lines:
        # Handle crlf bs on Windoze.
        fields = raw_line.strip().split(':')
        if parse_updates(fields, updates):
            continue
        if fields[0] == 'A' and len(fields) >= 2:
            candidate = fields[1]
            try:
                if is_usk_file(candidate):
                    announcements.add(candidate)
                    # An announcement implies an update.
                    updates.add((get_usk_hash(candidate),
                                 get_version(candidate)))
            except ValueError:
                # else, silently fail... hmmmm
                continue

    # Make sure you always get the same tuple for a given text.
    sorted_updates = sorted(updates)
    sorted_announcements = sorted(announcements)
    return (tuple(sorted_updates), tuple(sorted_announcements))
def arc_handle_updating_config(update_sm, params, stored_cfg,
                               is_pulling=False):
    """ INTERNAL: Write updates into the config file IFF the
        previous command succeeded. """
    base_dir = params['ARCHIVE_CACHE_DIR']

    if not is_pulling:
        if not update_sm.get_state(QUIESCENT).arrived_from((FINISHING, )):
            return

        insert_uri = params['INSERT_URI']
        if (insert_uri is None or # <- re-insert w/o insert uri
            not is_usk_file(insert_uri)):
            return

        inverted_uri = params['INVERTED_INSERT_URI']

        # Cache the request_uri - insert_uri mapping.
        stored_cfg.set_insert_uri(inverted_uri, update_sm.ctx['INSERT_URI'])

        # Cache the updated index for the insert.
        version = get_version(update_sm.ctx['INSERT_URI'])
        stored_cfg.update_index(inverted_uri, version)
        stored_cfg.update_dir(base_dir, inverted_uri)

        # Hmmm... if we wanted to be clever we could update the request
        # uri too when it doesn't match the insert uri. Ok for now.
        # Only for usks and only on success.
        Config.to_file(stored_cfg)
    else:
        # Only finishing required. same. REDFLAG: look at this again
        if not update_sm.get_state(QUIESCENT).arrived_from((FINISHING, )):
            return
        if not is_usk(params['REQUEST_URI']):
            return

        state = update_sm.get_state(ARC_REQUESTING_URI)
        updated_uri = state.get_latest_uri()
        stored_cfg.update_index(updated_uri, get_version(updated_uri))
        stored_cfg.update_dir(base_dir, updated_uri)
        Config.to_file(stored_cfg)
def arch_cache_top_key(self, uri, top_key_tuple):
    """ Store top key in local archive cache.

    Writes the serialized top key to a file named by the USK version
    in uri, inside the archive cache directory. """
    full_path = os.path.join(self.arch_cache_dir(),
                             TOP_KEY_NAME_FMT % get_version(uri))
    # 'with' closes the file even if the write raises.
    with open(full_path, 'wb') as out_file:
        out_file.write(top_key_tuple_to_bytes(top_key_tuple))
def add_default_repos(self, default_repos):
    """ Add table entries from a [(fms_id, usk), ...] list. """
    for entry in default_repos:
        fms_id, usk = entry[0], entry[1]
        clean_id = clean_nym(fms_id)
        usk_hash = get_usk_hash(usk)
        self.handle_announcement(clean_id, fms_id, usk)
        # Implicit in announcement
        self.handle_update(clean_id, fms_id, usk_hash, get_version(usk))
def get_request_uri(self, for_dir):
    """ Get the repo USK used to pull changes into for_dir or None. """
    uri = self.request_usks.get(norm_path(for_dir))
    if uri is None:
        return None

    # Bump the URI up to the highest index we have on record.
    known_index = self.get_index(uri)
    if known_index is not None and known_index > get_version(uri):
        uri = get_usk_for_usk_version(uri, known_index)

    return uri
def get_insert_uri(self, for_usk_or_id):
    """ Get the insert USK for the request USK or None. """
    uri = self.insert_usks.get(normalize(for_usk_or_id))
    if uri is None:
        return None

    # Bump the insert URI up to the highest index we have on record.
    known_index = self.get_index(for_usk_or_id)
    if known_index is not None and known_index > get_version(uri):
        uri = get_usk_for_usk_version(uri, known_index)

    return uri
def execute_pull(ui_, repo, params, stored_cfg):
    """ Run the pull command. """
    update_sm = None
    try:
        assert params['REQUEST_URI'] is not None
        if not params['NO_SEARCH'] and is_usk_file(params['REQUEST_URI']):
            cached_index = stored_cfg.get_index(params['REQUEST_URI'])
            if cached_index is not None:
                usk_index = get_version(params['REQUEST_URI'])
                if cached_index >= usk_index:
                    # Update index to the latest known value
                    # for the --uri case.
                    params['REQUEST_URI'] = get_usk_for_usk_version(
                        params['REQUEST_URI'], cached_index)
                else:
                    ui_.status(("Cached index [%i] < index in USK [%i]. "
                                "Using the index from the USK.\n"
                                "You're sure that index exists, right?\n") %
                               (cached_index, usk_index))

        update_sm = setup(ui_, repo, params, stored_cfg)
        ui_.status("%sRequest URI:\n%s\n" %
                   (is_redundant(params['REQUEST_URI']),
                    params['REQUEST_URI']))
        #ui_.status("Current tip: %s\n" % hex_version(repo)[:12])

        update_sm.start_pulling(params['REQUEST_URI'])
        run_until_quiescent(update_sm, params['POLL_SECS'])

        if update_sm.get_state(QUIESCENT).arrived_from((FINISHING, )):
            ui_.status("Pulled from:\n%s\n" %
                       update_sm.get_state('REQUESTING_URI').
                       get_latest_uri())
            #ui_.status("New tip: %s\n" % hex_version(repo)[:12])
        else:
            ui_.status("Pull failed.\n")

        handle_updating_config(repo, update_sm, params, stored_cfg, True)
    finally:
        cleanup(update_sm)
def leave(self, to_state):
    """ Override to update REQUEST_URI in the parent's context. """
    InsertingUri.leave(self, to_state)

    if to_state.name != self.success_state:
        return

    if self.parent.ctx['INSERT_URI'] is None:
        # Assert reinserting???
        return # i.e. for reinserting.

    if not (is_usk(self.parent.ctx['INSERT_URI']) and
            is_usk(self.parent.ctx['REQUEST_URI'])):
        return

    # Propagate a newer insert version onto the request URI.
    insert_version = get_version(self.parent.ctx['INSERT_URI'])
    if insert_version > get_version(self.parent.ctx['REQUEST_URI']):
        self.parent.ctx['REQUEST_URI'] = get_usk_for_usk_version(
            self.parent.ctx['REQUEST_URI'], insert_version)
def leave(self, to_state):
    """ Implementation of State virtual. """
    if to_state.name != self.success_state:
        return

    # Hmmm... what about chks?
    # Update the index in the insert_uri on success
    insert_uri = self.parent.ctx['INSERT_URI']
    if should_increment(self) and is_usk(insert_uri):
        bumped = get_version(insert_uri) + 1
        self.parent.ctx['INSERT_URI'] = get_usk_for_usk_version(insert_uri,
                                                                bumped)
        if self.parent.params.get('DUMP_URIS', False):
            self.parent.ctx.ui_.status(
                ("INSERT UPDATED INSERT "
                 "URI:\n%s\n") % self.parent.ctx['INSERT_URI'])
def execute_pull(ui_, repo, params, stored_cfg):
    """ Run the pull command. """
    update_sm = None
    try:
        assert params['REQUEST_URI'] is not None
        request_uri = params['REQUEST_URI']
        if not params['NO_SEARCH'] and is_usk_file(request_uri):
            index = stored_cfg.get_index(request_uri)
            if index is not None:
                if index >= get_version(request_uri):
                    # Update index to the latest known value
                    # for the --uri case.
                    params['REQUEST_URI'] = get_usk_for_usk_version(
                        request_uri, index)
                else:
                    ui_.status(("Cached index [%i] < index in USK [%i]. "
                                "Using the index from the USK.\n"
                                "You're sure that index exists, right?\n") %
                               (index, get_version(request_uri)))

        update_sm = setup(ui_, repo, params, stored_cfg)
        ui_.status("%sRequest URI:\n%s\n" %
                   (is_redundant(params['REQUEST_URI']),
                    params['REQUEST_URI']))
        #ui_.status("Current tip: %s\n" % hex_version(repo)[:12])

        update_sm.start_pulling(params['REQUEST_URI'])
        run_until_quiescent(update_sm, params['POLL_SECS'])

        succeeded = update_sm.get_state(QUIESCENT).arrived_from(
            (FINISHING, ))
        if succeeded:
            latest = update_sm.get_state('REQUESTING_URI').get_latest_uri()
            ui_.status("Pulled from:\n%s\n" % latest)
            #ui_.status("New tip: %s\n" % hex_version(repo)[:12])
        else:
            ui_.status("Pull failed.\n")

        handle_updating_config(repo, update_sm, params, stored_cfg, True)
    finally:
        cleanup(update_sm)
def leave(self, to_state):
    """ Implementation of State virtual. """
    if to_state.name != self.success_state:
        return

    ctx = self.parent.ctx
    ctx['REQUEST_URI'] = self.get_latest_uri()
    if is_usk(ctx['REQUEST_URI']):
        ctx.ui_.status("Current USK version: %i\n" %
                       get_version(ctx['REQUEST_URI']))

    if (ctx['IS_KEYPAIR'] and
        is_usk(ctx['REQUEST_URI']) and # lose usk checks?
        is_usk(ctx['INSERT_URI'])):
        # Keep the insert URI's version in sync with the request URI.
        version = get_version(ctx['REQUEST_URI'])
        ctx['INSERT_URI'] = get_usk_for_usk_version(ctx['INSERT_URI'],
                                                    version)

    # Allow pending requests to run to completion.
    ctx.orphan_requests(self)
    if self.parent.params.get('DUMP_TOP_KEY', False):
        self.topkey_funcs.dump_top_key_tuple(self.get_top_key_tuple(),
                                             ctx.ui_.status)
def leave(self, to_state):
    """ Implementation of State virtual. """
    if to_state.name == self.success_state:
        # Hmmm... what about chks?
        # Update the index in the insert_uri on success
        ctx = self.parent.ctx
        if should_increment(self) and is_usk(ctx['INSERT_URI']):
            next_version = get_version(ctx['INSERT_URI']) + 1
            ctx['INSERT_URI'] = get_usk_for_usk_version(ctx['INSERT_URI'],
                                                        next_version)
            if self.parent.params.get('DUMP_URIS', False):
                ctx.ui_.status(("INSERT UPDATED INSERT "
                                "URI:\n%s\n") % ctx['INSERT_URI'])
def leave(self, to_state):
    """ Implementation of State virtual. """
    if to_state.name != self.success_state:
        return

    self.parent.ctx['REQUEST_URI'] = self.get_latest_uri()
    request_uri = self.parent.ctx['REQUEST_URI']
    if is_usk(request_uri):
        self.parent.ctx.ui_.status("Current USK version: %i\n" %
                                   get_version(request_uri))

    if (self.parent.ctx['IS_KEYPAIR'] and
        is_usk(request_uri) and # lose usk checks?
        is_usk(self.parent.ctx['INSERT_URI'])):
        self.parent.ctx['INSERT_URI'] = get_usk_for_usk_version(
            self.parent.ctx['INSERT_URI'], get_version(request_uri))

    # Allow pending requests to run to completion.
    self.parent.ctx.orphan_requests(self)
    if self.parent.params.get('DUMP_TOP_KEY', False):
        self.topkey_funcs.dump_top_key_tuple(self.get_top_key_tuple(),
                                             self.parent.ctx.ui_.status)
def load_cached_top_key(cache_dir, uri):
    """ Return a top key tuple from a cached top key.

    Re-raises ValueError when the cached file is corrupt; the corrupt
    file is removed from the cache before re-raising. """
    full_path = os.path.join(cache_dir_name(cache_dir, uri),
                             TOP_KEY_NAME_FMT % get_version(uri))
    with open(full_path, 'rb') as in_file:
        try:
            return archivetop.bytes_to_top_key_tuple(in_file.read())[0]
        except ValueError:
            # Close before removing so the delete works on platforms
            # (e.g. Windows) that can't remove an open file.  The
            # redundant close on 'with' exit is a harmless no-op.
            in_file.close()
            if os.path.exists(full_path):
                os.remove(full_path)
            raise
def cleanup_dirs(ui_, cache_dir, uri, top_key=None):
    """ Remove unneeded files from the archive cache dir.

    Deletes the temp and block scratch directories.  When top_key is
    supplied, also prunes cached top keys and CHK files that the
    current top key no longer references. """
    # Remove temp dir
    tmp_dir = os.path.join(cache_dir, TMP_DIR)
    if os.path.exists(tmp_dir):
        ui_.status("Removing: %s\n" % tmp_dir)
        shutil.rmtree(tmp_dir)

    # Remove block dir
    block_dir = os.path.join(cache_dir, BLOCK_DIR)
    if os.path.exists(block_dir):
        ui_.status("Removing: %s\n" % block_dir)
        shutil.rmtree(block_dir)

    if top_key is None:
        return

    # Remove old cached top keys and unneeded cached CHKs.
    survivors = set()
    survivors.add(TOP_KEY_NAME_FMT % get_version(uri))
    for block in top_key[0]:
        for chk in block[1]:
            survivors.add(chk_file_name(chk))

    archive_dir = os.path.join(cache_dir, get_usk_hash(uri))
    for name in os.listdir(archive_dir):
        if not (name.startswith(CHK_NAME_PREFIX) or
                name.startswith(TOP_KEY_NAME_PREFIX)):
            # Hmmm leave other files alone.  Too paranoid?
            continue
        if name not in survivors:
            full_path = os.path.join(archive_dir, name)
            ui_.status("Removing: %s\n" % full_path)
            os.remove(full_path)

    if survivors:
        ui_.status("Leaving %i file%s in : %s\n" %
                   (len(survivors),
                    choose_word(len(survivors) == 1, '', 's'),
                    archive_dir))
def get_latest_uri(self):
    """ Returns the URI with the version part updated if the URI
        is a USK. """
    if (is_usk(self.parent.ctx['REQUEST_URI']) and
        self.parent.params['NO_SEARCH']):
        return self.parent.ctx['REQUEST_URI']

    # Find the first candidate that returned AllData and take its version.
    max_version = None
    for candidate in self.ordered:
        result = candidate[5]
        if result is None or result[0] != 'AllData':
            continue
        uri = result[1]['URI']
        if not is_usk_file(uri):
            return uri
        # Was max(None, ...): works on Python 2 but raises TypeError on
        # Python 3, and the loop exits after the first hit anyway.
        max_version = abs(get_version(uri))
        break

    assert max_version is not None

    # The .R1 URI is queued first.
    assert (len(self.ordered) < 2 or
            self.ordered[0][0].find('.R1') != -1)
    return get_usk_for_usk_version(self.ordered[0][0], max_version)
def monitor_callback(self, update_sm, client, msg):
    """ FCP message status callback which writes to a ui.

    Silent below verbosity 2; adds a request-id prefix above 2 and a
    pending-request count when the current state tracks pending
    requests. """
    if self.verbosity < 2:
        return

    prefix = ''
    if self.verbosity > 2:
        prefix = client.request_id()[:10] + ':'
    if hasattr(update_sm.current_state, 'pending') and self.verbosity > 1:
        prefix = ("{%i}:" % len(update_sm.runner.running)) + prefix

    if msg[0] == 'SimpleProgress':
        text = str(parse_progress(msg))
    elif msg[0] == 'URIGenerated':
        return # shows up twice
    elif msg[0] == 'ProtocolError':
        text = 'ProtocolError:' + str(msg)
    elif msg[0] == 'AllData':
        # Don't try to print raw data.
        text = 'AllData: length=%s' % msg[1].get('DataLength', '???')
    elif 'Failed' in msg[0]:
        code = get_code(msg) or -1
        redirect = ''
        if (code == 27 and 'RedirectURI' in msg[1] and
            is_usk(msg[1]['RedirectURI'])):
            redirect = (", redirected to version: %i" %
                        get_version(msg[1]['RedirectURI']))
        text = "%s: code=%i%s" % (msg[0], code, redirect)
    else:
        text = msg[0]

    self.ui_.status("%s%s:%s\n" % (prefix, str(client.tag), text))