def parse(text, is_lines=False):
    """ Parse updates and announcements from raw text. """
    # Accept either a pre-split sequence of lines or a raw text blob.
    lines = text if is_lines else text.split('\n')

    announcements = set()
    updates = set()
    for raw_line in lines:
        # strip() drops crlf line endings from Windows.
        fields = raw_line.strip().split(':')
        if parse_updates(fields, updates):
            continue
        if fields[0] != 'A' or len(fields) < 2:
            continue
        try:
            if is_usk_file(fields[1]):
                announcements.add(fields[1])
                # An announcement implies an update to that version.
                updates.add((get_usk_hash(fields[1]),
                             get_version(fields[1])))
        except ValueError:
            # Silently skip unparsable URIs.
            continue

    # Canonical order so identical text always yields identical tuples.
    return (tuple(sorted(updates)), tuple(sorted(announcements)))
def to_msg_string(updates, announcements=None, separator='\n'):
    """ Dump updates and announcements in a format which can
        be read by parse. """
    # Canonical order -> canonical string representation.
    updates = sorted(updates or [])
    announcements = sorted(announcements or [])

    # Paranoia: validate everything before serializing.
    for uri in announcements:
        assert is_usk_file(uri)
    for update in updates:
        assert is_hex_string(update[0], 12)
        assert update[1] >= 0

    sections = [separator.join(["A:%s" % uri for uri in announcements]),
                separator.join(["U:%s:%i" % (update[0], update[1])
                                for update in updates])]
    # Drop empty sections so we don't emit stray separators.
    return separator.join([section for section in sections if section])
def parse(text, is_lines=False):
    """ Parse updates and announcements from raw text. """
    if is_lines:
        lines = text
    else:
        lines = text.split('\n')

    announcements = set()
    updates = set()
    for line in lines:
        # strip() handles crlf line endings on Windows.
        fields = line.strip().split(':')
        if parse_updates(fields, updates):
            continue
        if fields[0] == 'A' and len(fields) >= 2:
            uri = fields[1]
            try:
                if is_usk_file(uri):
                    announcements.add(uri)
                    # An announcement is an implicit update.
                    updates.add((get_usk_hash(uri), get_version(uri)))
            except ValueError:
                # Ignore malformed URIs.
                continue

    # Sort so a given text always maps to the same tuple pair.
    sorted_updates = sorted(updates)
    sorted_announcements = sorted(announcements)
    return (tuple(sorted_updates), tuple(sorted_announcements))
def to_msg_string(updates, announcements=None, separator='\n'):
    """ Dump updates and announcements in a format which can
        be read by parse. """
    if updates is None:
        updates = []
    if announcements is None:
        announcements = []
    # Always emit the same string for the same logical content.
    updates = sorted(updates)
    announcements = sorted(announcements)

    # Sanity-check everything before emitting.
    for value in announcements:
        assert is_usk_file(value)
    for entry in updates:
        assert is_hex_string(entry[0], 12)
        assert entry[1] >= 0

    parts = []
    if announcements:
        parts.append(separator.join("A:%s" % value
                                    for value in announcements))
    if updates:
        parts.append(separator.join("U:%s:%i" % (entry[0], entry[1])
                                    for entry in updates))
    return separator.join(parts)
def execute_info(ui_, repo, params, stored_cfg):
    """ Run the info command. """
    uri = params['REQUEST_URI']
    if uri is None or not is_usk_file(uri):
        ui_.status("Only works with USK file URIs.\n")
        return

    usk_hash = normalize(uri)
    max_index = stored_cfg.get_index(uri)
    if max_index is None:
        ui_.status(NO_INFO_FMT % usk_hash)
        return

    insert_uri = str(stored_cfg.get_insert_uri(usk_hash))
    # Pin the request URI to the highest index we know about.
    uri = get_usk_for_usk_version(uri, max_index)

    notifiers = stored_cfg.trusted_notifiers(usk_hash)
    trusted = (' ' + '\n '.join(notifiers)) if notifiers else ' None'

    ui_.status(INFO_FMT % (usk_hash, max_index or -1, trusted,
                           uri, insert_uri))

    update_sm = setup(ui_, repo, params, stored_cfg)
    try:
        heads = read_freenet_heads(params, update_sm, uri)
        ui_.status('Freenet head(s): %s\n'
                   % ' '.join([head[:12] for head in heads]))
    finally:
        cleanup(update_sm)
def execute_info(ui_, repo, params, stored_cfg):
    """ Run the info command. """
    request_uri = params['REQUEST_URI']
    if request_uri is None or not is_usk_file(request_uri):
        ui_.status("Only works with USK file URIs.\n")
        return

    usk_hash = normalize(request_uri)
    max_index = stored_cfg.get_index(request_uri)
    if max_index is None:
        ui_.status(NO_INFO_FMT % usk_hash)
        return

    insert_uri = str(stored_cfg.get_insert_uri(usk_hash))
    # Rewrite the URI so it carries the highest index we have cached.
    request_uri = get_usk_for_usk_version(request_uri, max_index)

    trusted = stored_cfg.trusted_notifiers(usk_hash)
    if trusted:
        trusted = ' ' + '\n '.join(trusted)
    else:
        trusted = ' None'

    ui_.status(INFO_FMT % (usk_hash, max_index or -1, trusted,
                           request_uri, insert_uri))

    update_sm = setup(ui_, repo, params, stored_cfg)
    try:
        versions = read_freenet_heads(params, update_sm, request_uri)
        short_versions = [version[:12] for version in versions]
        ui_.status('Freenet head(s): %s\n' % ' '.join(short_versions))
    finally:
        cleanup(update_sm)
def execute_arc_pull(ui_, params, stored_cfg):
    """ Update from an existing incremental archive in Freenet.

        Requires params['ARCHIVE_CACHE_DIR'] and params['REQUEST_URI'].
        Pulls blocks into the local block cache, synchs the local
        directory from the cached topkey, and persists config updates
        on success.
    """
    update_sm = None
    top_key_state = None
    try:
        assert 'ARCHIVE_CACHE_DIR' in params
        assert not params['REQUEST_URI'] is None
        if not params['NO_SEARCH'] and is_usk_file(params['REQUEST_URI']):
            # Use the cached index when it is newer than the index
            # embedded in the request URI.
            index = stored_cfg.get_index(params['REQUEST_URI'])
            if not index is None:
                if index >= get_version(params['REQUEST_URI']):
                    # Update index to the latest known value
                    # for the --uri case.
                    params['REQUEST_URI'] = get_usk_for_usk_version(
                        params['REQUEST_URI'], index)
                else:
                    ui_.status(("Cached index [%i] < index in USK [%i]. " +
                                "Using the index from the USK.\n" +
                                "You're sure that index exists, right?\n") %
                               (index, get_version(params['REQUEST_URI'])))

        update_sm = setup(ui_, None, params, stored_cfg)
        ui_.status(
            "%sRequest URI:\n%s\n" % (is_redundant(params['REQUEST_URI']),
                                      params['REQUEST_URI']))

        # Pull changes into the local block cache.
        ctx = ArchiveUpdateContext(update_sm, ui_)
        ctx.update({'REQUEST_URI': params['REQUEST_URI'],
                    'ARCHIVE_CACHE_DIR': params['ARCHIVE_CACHE_DIR']})
        start_requesting_blocks(update_sm, ctx)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING, ))):
            # Success: report what was fetched, then update the local dir.
            uri = update_sm.get_state(ARC_REQUESTING_URI).get_latest_uri()
            blocks = update_sm.get_state(ARC_CACHING_TOPKEY).get_blocks()
            plural = ''
            if len(blocks) != 1:
                plural = 's'
            ui_.status("Fetched %i bytes in %i CHK%s from:\n%s\n" %
                       (sum([block[0] for block in blocks]), len(blocks),
                        plural, uri))
            ui_.status("Updating local directory...\n")
            local_synch(
                ui_,
                params['ARCHIVE_CACHE_DIR'],
                # Use the updated URI below so we get the
                # right cached topkey.
                uri,
                params['TO_DIR'])
            top_key_state = ARC_REQUESTING_URI
        else:
            ui_.status("Synchronize failed.\n")

        arc_handle_updating_config(update_sm, params, stored_cfg, True)
    finally:
        # Always release state machine resources; top_key_state stays
        # None unless the pull succeeded.
        arc_cleanup(update_sm, top_key_state)
def is_redundant(uri):
    """ Return 'Redundant ' if uri is a file USK whose site name ends
        in '.R1', '' otherwise.

        NOTE: despite the name, this returns a display-prefix string
        (truthy/falsy), not a bool, so callers can splice the result
        directly into status messages.
    """
    if not is_usk_file(uri):
        return ''
    fields = uri.split('/')
    # fields[-2] is the site-name component preceding the version index.
    if not fields[-2].endswith('.R1'):
        return ''
    return 'Redundant '
def execute_arc_pull(ui_, params, stored_cfg):
    """ Update from an existing incremental archive in Freenet.

        Requires params['ARCHIVE_CACHE_DIR'] and params['REQUEST_URI'].
        Pulls blocks into the local block cache, synchs the local
        directory from the cached topkey, and persists config updates
        on success.
    """
    update_sm = None
    top_key_state = None
    try:
        assert 'ARCHIVE_CACHE_DIR' in params
        assert not params['REQUEST_URI'] is None
        if not params['NO_SEARCH'] and is_usk_file(params['REQUEST_URI']):
            # Prefer the cached index when it is newer than the index
            # embedded in the request URI.
            index = stored_cfg.get_index(params['REQUEST_URI'])
            if not index is None:
                if index >= get_version(params['REQUEST_URI']):
                    # Update index to the latest known value
                    # for the --uri case.
                    params['REQUEST_URI'] = get_usk_for_usk_version(
                        params['REQUEST_URI'], index)
                else:
                    ui_.status(("Cached index [%i] < index in USK [%i]. " +
                                "Using the index from the USK.\n" +
                                "You're sure that index exists, right?\n") %
                               (index, get_version(params['REQUEST_URI'])))

        update_sm = setup(ui_, None, params, stored_cfg)
        ui_.status("%sRequest URI:\n%s\n" % (
            is_redundant(params['REQUEST_URI']),
            params['REQUEST_URI']))

        # Pull changes into the local block cache.
        ctx = ArchiveUpdateContext(update_sm, ui_)
        ctx.update({'REQUEST_URI': params['REQUEST_URI'],
                    'ARCHIVE_CACHE_DIR': params['ARCHIVE_CACHE_DIR']})
        start_requesting_blocks(update_sm, ctx)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING,))):
            # Success: report what was fetched, then update the local dir.
            uri = update_sm.get_state(ARC_REQUESTING_URI).get_latest_uri()
            blocks = update_sm.get_state(ARC_CACHING_TOPKEY).get_blocks()
            plural = ''
            if len(blocks) != 1:
                plural = 's'
            ui_.status("Fetched %i bytes in %i CHK%s from:\n%s\n" %
                       (sum([block[0] for block in blocks]), len(blocks),
                        plural, uri))
            ui_.status("Updating local directory...\n")
            local_synch(ui_,
                        params['ARCHIVE_CACHE_DIR'],
                        # Use the updated URI below so we get the
                        # right cached topkey.
                        uri,
                        params['TO_DIR'])
            top_key_state = ARC_REQUESTING_URI
        else:
            ui_.status("Synchronize failed.\n")

        arc_handle_updating_config(update_sm, params, stored_cfg, True)
    finally:
        # Always release state machine resources; top_key_state stays
        # None unless the pull succeeded.
        arc_cleanup(update_sm, top_key_state)
def get_request_uris(self):
    """ Return the inserted request uri(s). """
    # If the insert went in under a USK, convert SSK results back
    # to USK form so callers see the expected key type.
    was_usk = is_usk_file(self.parent.ctx['INSERT_URI'])
    uris = []
    for entry in self.ordered:
        candidate_uri = entry[5][1]['URI']
        if was_usk and is_ssk(candidate_uri):
            candidate_uri = ssk_to_usk(candidate_uri)
        uris.append(candidate_uri)
    return uris
def arc_handle_updating_config(update_sm, params, stored_cfg,
                               is_pulling=False):
    """ INTERNAL: Write updates into the config file IFF the
        previous command succeeded.

        is_pulling selects between the insert branch (False) and the
        pull branch (True); both persist via Config.to_file only after
        the state machine reached QUIESCENT from FINISHING.
    """
    base_dir = params['ARCHIVE_CACHE_DIR']
    if not is_pulling:
        # Insert side: only persist when the insert fully finished.
        if not update_sm.get_state(QUIESCENT).arrived_from(((FINISHING, ))):
            return
        if (params['INSERT_URI'] is None or  # <- re-insert w/o insert uri
                not is_usk_file(params['INSERT_URI'])):
            return
        inverted_uri = params['INVERTED_INSERT_URI']

        # Cache the request_uri - insert_uri mapping.
        stored_cfg.set_insert_uri(inverted_uri, update_sm.ctx['INSERT_URI'])

        # Cache the updated index for the insert.
        version = get_version(update_sm.ctx['INSERT_URI'])
        stored_cfg.update_index(inverted_uri, version)
        stored_cfg.update_dir(base_dir, inverted_uri)

        # Hmmm... if we wanted to be clever we could update the request
        # uri too when it doesn't match the insert uri. Ok for now.
        # Only for usks and only on success.
        #print "UPDATED STORED CONFIG(0)"
        Config.to_file(stored_cfg)
    else:
        # Only finishing required. same. REDFLAG: look at this again
        if not update_sm.get_state(QUIESCENT).arrived_from((FINISHING, )):
            return
        if not is_usk(params['REQUEST_URI']):
            return
        # Pull side: cache the latest index we actually fetched.
        state = update_sm.get_state(ARC_REQUESTING_URI)
        updated_uri = state.get_latest_uri()
        version = get_version(updated_uri)
        stored_cfg.update_index(updated_uri, version)
        stored_cfg.update_dir(base_dir, updated_uri)
        #print "UPDATED STORED CONFIG(1)"
        Config.to_file(stored_cfg)
def check_uri(ui_, uri):
    """ INTERNAL: Abort if uri is not supported. """
    # None is allowed; non-USK URIs are not checked here.
    if uri is None or not is_usk(uri):
        return

    if not is_usk_file(uri):
        ui_.status("Only file USKs are allowed." +
                   "\nMake sure the URI ends with '/<number>' " +
                   "with no trailing '/'.\n")
        raise util.Abort("Non-file USK %s\n" % uri)

    # Just fix it instead of doing B&H?
    if is_negative_usk(uri):
        ui_.status("Negative USK index values are not allowed." +
                   "\nUse --aggressive instead. \n")
        raise util.Abort("Negative USK %s\n" % uri)
def arc_handle_updating_config(update_sm, params, stored_cfg,
                               is_pulling=False):
    """ INTERNAL: Write updates into the config file IFF the
        previous command succeeded.

        is_pulling selects between the insert branch (False) and the
        pull branch (True); both persist via Config.to_file only after
        the state machine reached QUIESCENT from FINISHING.
    """
    base_dir = params['ARCHIVE_CACHE_DIR']
    if not is_pulling:
        # Insert side: only persist when the insert fully finished.
        if not update_sm.get_state(QUIESCENT).arrived_from(((FINISHING,))):
            return
        if (params['INSERT_URI'] is None or  # <- re-insert w/o insert uri
                not is_usk_file(params['INSERT_URI'])):
            return
        inverted_uri = params['INVERTED_INSERT_URI']

        # Cache the request_uri - insert_uri mapping.
        stored_cfg.set_insert_uri(inverted_uri, update_sm.ctx['INSERT_URI'])

        # Cache the updated index for the insert.
        version = get_version(update_sm.ctx['INSERT_URI'])
        stored_cfg.update_index(inverted_uri, version)
        stored_cfg.update_dir(base_dir, inverted_uri)

        # Hmmm... if we wanted to be clever we could update the request
        # uri too when it doesn't match the insert uri. Ok for now.
        # Only for usks and only on success.
        #print "UPDATED STORED CONFIG(0)"
        Config.to_file(stored_cfg)
    else:
        # Only finishing required. same. REDFLAG: look at this again
        if not update_sm.get_state(QUIESCENT).arrived_from((FINISHING,)):
            return
        if not is_usk(params['REQUEST_URI']):
            return
        # Pull side: cache the latest index we actually fetched.
        state = update_sm.get_state(ARC_REQUESTING_URI)
        updated_uri = state.get_latest_uri()
        version = get_version(updated_uri)
        stored_cfg.update_index(updated_uri, version)
        stored_cfg.update_dir(base_dir, updated_uri)
        #print "UPDATED STORED CONFIG(1)"
        Config.to_file(stored_cfg)
def get_latest_uri(self):
    """ Returns the URI with the version part update if the URI is a USK."""
    if (is_usk(self.parent.ctx['REQUEST_URI']) and
            self.parent.params['NO_SEARCH']):
        # NO_SEARCH: trust the index already in the request URI.
        return self.parent.ctx['REQUEST_URI']

    # Take the version from the first fully retrieved ('AllData')
    # candidate; bail out with the raw URI for non-USK results.
    max_version = None
    for candidate in self.ordered:
        result = candidate[5]
        if result is None or result[0] != 'AllData':
            continue
        uri = result[1]['URI']
        if not is_usk_file(uri):
            return uri
        # NOTE(review): max(None, x) relies on Python 2 ordering
        # semantics; it would raise TypeError on Python 3 -- confirm
        # the target runtime before porting.
        max_version = max(max_version, abs(get_version(uri)))
        break

    assert not max_version is None
    # The .R1 URI is queued first.
    assert (len(self.ordered) < 2 or
            self.ordered[0][0].find('.R1') != -1)
    return get_usk_for_usk_version(self.ordered[0][0], max_version)
def execute_pull(ui_, repo, params, stored_cfg):
    """ Run the pull command.

        Requires params['REQUEST_URI'].  Pulls the repo from Freenet
        and updates the stored config on success.
    """
    update_sm = None
    try:
        assert not params['REQUEST_URI'] is None
        if not params['NO_SEARCH'] and is_usk_file(params['REQUEST_URI']):
            # Prefer the cached index when it is newer than the index
            # embedded in the request URI.
            index = stored_cfg.get_index(params['REQUEST_URI'])
            if not index is None:
                if index >= get_version(params['REQUEST_URI']):
                    # Update index to the latest known value
                    # for the --uri case.
                    params['REQUEST_URI'] = get_usk_for_usk_version(
                        params['REQUEST_URI'], index)
                else:
                    ui_.status(("Cached index [%i] < index in USK [%i]. " +
                                "Using the index from the USK.\n" +
                                "You're sure that index exists, right?\n") %
                               (index, get_version(params['REQUEST_URI'])))

        update_sm = setup(ui_, repo, params, stored_cfg)
        ui_.status("%sRequest URI:\n%s\n" %
                   (is_redundant(params['REQUEST_URI']),
                    params['REQUEST_URI']))
        #ui_.status("Current tip: %s\n" % hex_version(repo)[:12])

        update_sm.start_pulling(params['REQUEST_URI'])
        run_until_quiescent(update_sm, params['POLL_SECS'])

        if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING,))):
            ui_.status("Pulled from:\n%s\n" %
                       update_sm.get_state('REQUESTING_URI').
                       get_latest_uri())
            #ui_.status("New tip: %s\n" % hex_version(repo)[:12])
        else:
            ui_.status("Pull failed.\n")

        handle_updating_config(repo, update_sm, params, stored_cfg, True)
    finally:
        # Always release state machine resources.
        cleanup(update_sm)
def execute_pull(ui_, repo, params, stored_cfg):
    """ Run the pull command.

        Requires params['REQUEST_URI'].  Pulls the repo from Freenet
        and updates the stored config on success.
    """
    update_sm = None
    try:
        assert not params['REQUEST_URI'] is None
        if not params['NO_SEARCH'] and is_usk_file(params['REQUEST_URI']):
            # Prefer the cached index when it is newer than the index
            # embedded in the request URI.
            index = stored_cfg.get_index(params['REQUEST_URI'])
            if not index is None:
                if index >= get_version(params['REQUEST_URI']):
                    # Update index to the latest known value
                    # for the --uri case.
                    params['REQUEST_URI'] = get_usk_for_usk_version(
                        params['REQUEST_URI'], index)
                else:
                    ui_.status(("Cached index [%i] < index in USK [%i]. " +
                                "Using the index from the USK.\n" +
                                "You're sure that index exists, right?\n") %
                               (index, get_version(params['REQUEST_URI'])))

        update_sm = setup(ui_, repo, params, stored_cfg)
        ui_.status(
            "%sRequest URI:\n%s\n" % (is_redundant(params['REQUEST_URI']),
                                      params['REQUEST_URI']))
        #ui_.status("Current tip: %s\n" % hex_version(repo)[:12])

        update_sm.start_pulling(params['REQUEST_URI'])
        run_until_quiescent(update_sm, params['POLL_SECS'])

        if update_sm.get_state(QUIESCENT).arrived_from(((FINISHING, ))):
            ui_.status("Pulled from:\n%s\n" %
                       update_sm.get_state('REQUESTING_URI').get_latest_uri())
            #ui_.status("New tip: %s\n" % hex_version(repo)[:12])
        else:
            ui_.status("Pull failed.\n")

        handle_updating_config(repo, update_sm, params, stored_cfg, True)
    finally:
        # Always release state machine resources.
        cleanup(update_sm)
def update_dir(self, repo_dir, usk):
    """ Update the repo USK used to pull changes into repo_dir.

        repo_dir -- local repository directory (normalized before use
                    so equivalent paths map to the same entry).
        usk      -- must be a file USK.
    """
    assert is_usk_file(usk)
    repo_dir = norm_path(repo_dir)
    self.request_usks[repo_dir] = usk