def parse(text, is_lines=False):
    """ Extract repo update and announcement records from raw text.

    Returns an (updates, announcements) pair of sorted tuples so that
    the same input text always yields exactly the same value.
    """
    raw_lines = text if is_lines else text.split('\n')
    seen_updates = set()
    seen_announcements = set()
    for raw in raw_lines:
        # strip() also disposes of CR left over from Windows CRLF endings.
        fields = raw.strip().split(':')
        if parse_updates(fields, seen_updates):
            continue
        if len(fields) >= 2 and fields[0] == 'A':
            try:
                if is_usk_file(fields[1]):
                    seen_announcements.add(fields[1])
                    # Announcing a USK implies an update to that version.
                    seen_updates.add((get_usk_hash(fields[1]),
                                      get_version(fields[1])))
            except ValueError:
                # Malformed USK value -- skip the line silently.
                continue
    # Sort before freezing so equal inputs give equal tuples.
    return (tuple(sorted(seen_updates)), tuple(sorted(seen_announcements)))
def parse(text, is_lines=False):
    """ Parse updates and announcements from raw text.

    The result is deterministic: both members of the returned
    (updates, announcements) pair are sorted tuples, so identical
    text always parses to an identical value.
    """
    if is_lines:
        source = text
    else:
        source = text.split('\n')
    updates = set()
    announcements = set()
    for entry in source:
        entry = entry.strip()  # Drop CR left over from CRLF line endings.
        fields = entry.split(':')
        if parse_updates(fields, updates):
            continue
        if not (fields[0] == 'A' and len(fields) >= 2):
            continue
        try:
            if is_usk_file(fields[1]):
                announcements.add(fields[1])
                # An announcement implicitly updates the USK as well.
                updates.add((get_usk_hash(fields[1]),
                             get_version(fields[1])))
        except ValueError:
            # Bad USK value: ignore this line without complaint.
            pass
    update_list = sorted(updates)
    announcement_list = sorted(announcements)
    return (tuple(update_list), tuple(announcement_list))
def add_default_repos(self, default_repos):
    """ Add table entries from a [(fms_id, usk), ...] list. """
    for fms_id, usk in default_repos:
        nym = clean_nym(fms_id)
        self.handle_announcement(nym, fms_id, usk)
        # An announcement implies an update, so record that too.
        self.handle_update(nym, fms_id, get_usk_hash(usk),
                           get_version(usk))
def create_dirs(ui_, cache_dir, uri):
    """ Create cache and temp directories for an archive. """
    # (status message format, directory path) pairs to ensure exist.
    targets = (("Creating cache dir:\n%s\n",
                os.path.join(cache_dir, get_usk_hash(uri))),
               ("Creating temp dir:\n%s\n",
                os.path.join(cache_dir, TMP_DIR)))
    for fmt, path in targets:
        if os.path.exists(path):
            continue
        ui_.status(fmt % path)
        os.makedirs(path)
def build_trust_list(id_usk_list):
    """ INTERNAL: Compile the default trust map from a list of
        (trusted_fms_id, USK) tuples.

    Returns a {fms_id: (usk_hash, ...)} dict; the hashes for each id
    are de-duplicated and kept in first-seen order.
    """
    table = {}
    for fms_id, usk in id_usk_list:
        # setdefault keeps one growing hash list per fms_id.
        hashes = table.setdefault(fms_id, [])
        usk_hash = get_usk_hash(usk)
        if usk_hash not in hashes:
            hashes.append(usk_hash)
    # Freeze the value lists. Only values are rebound, never keys, so
    # iterating the dict directly is safe -- the old keys()[:] slice
    # made a redundant copy of an already-fresh list.
    for fms_id in table:
        table[fms_id] = tuple(table[fms_id])
    return table
def dump_trust_map(ui_, params, trust_map, force=False):
    """ Show verbose trust map information.

    Prints nothing unless force is set or VERBOSITY is at least 2.
    """
    if not force and params['VERBOSITY'] < 2:
        return
    if not force and params['REQUEST_URI'] is not None:
        ui_.status("USK hash for local repository: %s\n" %
                   get_usk_hash(params['REQUEST_URI']))
    ui_.status("Update Trust Map:\n")
    # One entry per notifier id, hashes indented beneath it.
    for fms_id in sorted(trust_map.keys()):
        ui_.status(" %s\n %s\n" % (fms_id,
                                   '\n '.join(trust_map[fms_id])))
    ui_.status("\n")
def show_table(parser, out_func):
    """ Dump the announcements and updates in a human readable format. """
    fms_id_map, announce_map, update_map = parser.invert_table()
    for usk in sorted(announce_map.keys()):
        usk_hash = get_usk_hash(usk)
        out_func("USK Hash: %s\n" % usk_hash)
        out_func("USK: %s\n" % usk)
        out_func("Announced by:\n")
        for nym in announce_map[usk]:
            out_func(" %s\n" % fms_id_map[nym])
        out_func("Updated by:\n")
        for nym in update_map[usk_hash]:
            # parser.table[nym][1] maps usk_hash -> reported version.
            version = parser.table[nym][1][usk_hash]
            out_func(" %i:%s\n" % (version, fms_id_map[nym]))
        out_func("\n")
def show_table(parser, out_func):
    """ Dump the announcements and updates in a human readable format. """
    fms_id_map, announce_map, update_map = parser.invert_table()
    usk_list = list(announce_map.keys())
    usk_list.sort()
    for usk in usk_list:
        usk_hash = get_usk_hash(usk)
        # Header lines for this USK.
        for chunk in ("USK Hash: %s\n" % usk_hash,
                      "USK: %s\n" % usk,
                      "Announced by:\n"):
            out_func(chunk)
        for clean_id in announce_map[usk]:
            out_func(" %s\n" % fms_id_map[clean_id])
        out_func("Updated by:\n")
        for clean_id in update_map[usk_hash]:
            out_func(" %i:%s\n" % (parser.table[clean_id][1][usk_hash],
                                   fms_id_map[clean_id]))
        out_func("\n")
def cleanup_dirs(ui_, cache_dir, uri, top_key=None):
    """ Remove unneeded files from the archive cache dir. """
    # Drop the temp and block scratch directories outright.
    for sub_dir in (TMP_DIR, BLOCK_DIR):
        doomed = os.path.join(cache_dir, sub_dir)
        if os.path.exists(doomed):
            ui_.status("Removing: %s\n" % doomed)
            shutil.rmtree(doomed)
    if top_key is None:
        return
    # Build the set of file names that must be kept: the current
    # top key file plus every CHK referenced from the top key blocks.
    survivors = set()
    survivors.add(TOP_KEY_NAME_FMT % get_version(uri))
    for block in top_key[0]:
        for chk in block[1]:
            survivors.add(chk_file_name(chk))
    archive_dir = os.path.join(cache_dir, get_usk_hash(uri))
    for name in os.listdir(archive_dir):
        if not (name.startswith(CHK_NAME_PREFIX) or
                name.startswith(TOP_KEY_NAME_PREFIX)):
            # Hmmm leave other files alone. Too paranoid?
            continue
        if name not in survivors:
            full_path = os.path.join(archive_dir, name)
            ui_.status("Removing: %s\n" % full_path)
            os.remove(full_path)
    if len(survivors) > 0:
        ui_.status("Leaving %i file%s in : %s\n" % (
            len(survivors),
            choose_word(len(survivors) == 1, '', 's'),
            archive_dir))
def arch_cache_dir(self):
    """ Return the local cache directory. """
    base_dir = self['ARCHIVE_CACHE_DIR']
    repo_hash = get_usk_hash(self['REQUEST_URI'])
    return os.path.join(base_dir, repo_hash)
def get_uri_from_hash(ui_, dummy, params, stored_cfg):
    """ Use FMS to get the URI for a repo hash.

    Rakes through FMS messages for an announcement whose USK hash
    matches params['FMSREAD_HASH'], reports who updated that repo,
    lets the user amend the trust map, and stores any newer trusted
    index in the config.

    Returns the announced USK.
    Raises util.Abort when no announcement matches the hash.
    """
    show_fms_info(ui_, params, stored_cfg)
    parser = USKNotificationParser(get_trust_map(ui_, params, stored_cfg))
    # Seed the parser with well-known repos so they resolve even
    # without a fresh announcement message.
    parser.add_default_repos(KNOWN_REPOS)
    ui_.status("Raking through fms messages. This may take a while...\n")
    recv_msgs(get_connection(stored_cfg.defaults['FMS_HOST'],
                             stored_cfg.defaults['FMS_PORT'],
                             None),
              parser, stored_cfg.fmsread_groups, None, True)
    target_usk = None
    fms_id_map, announce_map, update_map = parser.invert_table()
    # Find URI
    for usk in announce_map:
        if params['FMSREAD_HASH'] == get_usk_hash(usk):
            # We don't care who announced. The hash matches.
            target_usk = usk
            break
    if target_usk is None:
        raise util.Abort(("No announcement found for [%s]. "
                          + "Use --uri to set the URI.")
                         % params['FMSREAD_HASH'])
    if params['VERBOSITY'] >= 2:
        ui_.status("Found URI announcement:\n%s\n" % target_usk)
    trusted_notifiers = stored_cfg.trusted_notifiers(params['FMSREAD_HASH'])
    # Map each notifier's fms_id to the index it reported for this repo.
    notifiers = {}
    for clean_id in update_map[params['FMSREAD_HASH']]:
        notifiers[fms_id_map[clean_id]] = (parser.table[clean_id][1]
                                           [params['FMSREAD_HASH']])
    fms_ids = notifiers.keys()
    fms_ids.sort()
    ui_.status("Found Updates:\n")
    for fms_id in fms_ids:
        if fms_id in trusted_notifiers:
            ui_.status(" [trusted]:%i:%s\n" % (notifiers[fms_id], fms_id))
        else:
            ui_.status(" [untrusted]:%i:%s\n" % (notifiers[fms_id], fms_id))
    # check_trust_map can change the stored trust map; the re-read of
    # trusted_notifiers below picks up any changes.
    check_trust_map(ui_, stored_cfg, params['FMSREAD_HASH'],
                    notifiers, trusted_notifiers)
    # Check for updates against the updated trust map.
    trusted_notifiers = stored_cfg.trusted_notifiers(params['FMSREAD_HASH'])
    for fms_id in fms_ids:
        if fms_id in trusted_notifiers:
            if (notifiers[fms_id] >
                stored_cfg.get_index(params['FMSREAD_HASH'])):
                stored_cfg.update_index(params['FMSREAD_HASH'],
                                        notifiers[fms_id])
    return target_usk
def normalize(usk_or_id):
    """ Returns a USK hash.

    Full USKs are reduced to their hash; anything else is assumed to
    already be a hash and is returned unchanged.
    """
    if not usk_or_id.startswith('USK'):
        return usk_or_id
    return get_usk_hash(usk_or_id)
def cache_dir_name(cache_dir, uri):
    """ Return the name of the cache directory. """
    # The per-archive subdirectory is named after the USK hash.
    sub_dir = get_usk_hash(uri)
    return os.path.join(cache_dir, sub_dir)
# Your repo Request (not Insert!) URIs go here: # # The versions don't matter, they are read from your .infocalpse file. # Hmmm... using request uris means you can broadcast information about # repos you have pulled but didn't insert. REPO_USKS = ('USK@kRM~jJVREwnN2qnA8R0Vt8HmpfRzBZ0j4rHC2cQ-0hw,' + '2xcoQVdQLyqfTpF2DpkdUIbHFCeL4W~2X1phUYymnhM,' + 'AQACAAE/fred_staging.R1/1', 'USK@kRM~jJVREwnN2qnA8R0Vt8HmpfRzBZ0j4rHC2cQ-0hw,' + '2xcoQVdQLyqfTpF2DpkdUIbHFCeL4W~2X1phUYymnhM,' + 'AQACAAE/infocalypse.hgext.R1/12', 'USK@EbQbLWtWLRBgQl4Ly-SjQJvzADdJPfIXNQfCbKzgCFI,' + 'XDLYQTC0nYD4rhIIP~Ff~itkvVVF2u4WU8YVSL2f5RA,' + 'AQACAAE/collaborate.R1/1') USK_HASHES = tuple([get_usk_hash(usk) for usk in REPO_USKS]) def print_updates(): """ Print a sig message with embedded update strings or nothing at all if there's an error. """ try: stored_cfg = Config.from_file(CFG_PATH) updates = [] for usk_hash in USK_HASHES: index = stored_cfg.get_index(usk_hash) if index is None: # Uncomment this and run from the command line if # you get no output. #print "No stored index for usk hash: ", usk_hash continue
def execute_fmsnotify(ui_, repo, params, stored_cfg):
    """ Run fmsnotify command.

    Builds an FMS notification message -- a plain index update, an
    announcement, a bundle patch submission, or a wiki submission
    depending on params -- and sends it, unless --dryrun is set.
    """
    update_sm = None
    try:
        # Insert URI MUST be stored.
        update_sm = setup(ui_, repo, params, stored_cfg)
        request_uri, dummy = do_key_setup(ui_, update_sm, params,
                                          stored_cfg)
        if request_uri is None: # Just assert?
            ui_.warn("Only works for USK file URIs.\n")
            return

        check_fms_cfg(ui_, params, stored_cfg)

        usk_hash = get_usk_hash(request_uri)
        index = stored_cfg.get_index(usk_hash)
        # Bundle/wiki submissions don't need a stored index; plain
        # update notifications do.
        if index is None and not (params.get('SUBMIT_BUNDLE', False) or
                                  params.get('SUBMIT_WIKI', False)):
            ui_.warn("Can't notify because there's no stored index "
                     + "for %s.\n" % usk_hash)
            return

        group = stored_cfg.defaults.get('FMSNOTIFY_GROUP', None)
        subject = 'Update:' + '/'.join(request_uri.split('/')[1:])
        if params['ANNOUNCE']:
            text = to_msg_string(None, (request_uri, ))
        elif params['SUBMIT_BUNDLE']:
            params['REQUEST_URI'] = request_uri # REDFLAG: Think through.
            text = execute_insert_patch(ui_, repo, params, stored_cfg)
            subject = 'Patch:' + '/'.join(request_uri.split('/')[1:])
        elif params['SUBMIT_WIKI']:
            params['REQUEST_URI'] = request_uri # REDFLAG: Think through.
            text, group = execute_wiki_submit(ui_, repo, params,
                                              stored_cfg)
            subject = 'Submit:' + '/'.join(request_uri.split('/')[1:])
        else:
            # Plain index update notification.
            text = to_msg_string(((usk_hash, index), ))

        msg_tuple = (stored_cfg.defaults['FMS_ID'], group, subject, text)

        show_fms_info(ui_, params, stored_cfg, False)

        ui_.status('Sender : %s\nGroup : %s\nSubject: %s\n%s\n' %
                   (stored_cfg.defaults['FMS_ID'], group, subject, text))

        if params['VERBOSITY'] >= 5:
            ui_.status('--- Raw Message ---\n%s\n---\n' % (
                MSG_TEMPLATE % (msg_tuple[0], msg_tuple[1],
                                msg_tuple[2], msg_tuple[3])))

        if params['DRYRUN']:
            ui_.status('Exiting without sending because --dryrun '
                       'was set.\n')
            return

        # REDFLAG: for testing!
        if 'MSG_SPOOL_DIR' in params:
            # Debug path: write the message to a local spool directory
            # via fmsstub instead of a live FMS server.
            ui_.warn("DEBUG HACK!!! Writing fms msg to local spool:\n%s\n"
                     % params['MSG_SPOOL_DIR'])
            import fmsstub
            # LATER: fix config file to store full fmsid?
            # grrrr... hacks piled upon hacks.
            lut = {'djk':'djk@isFiaD04zgAgnrEC5XJt1i4IE7AkNPqhBG5bONi6Yks'}
            fmsstub.FMSStub(params['MSG_SPOOL_DIR'], group,
                            lut).send_msgs(
                get_connection(stored_cfg.defaults['FMS_HOST'],
                               stored_cfg.defaults['FMS_PORT'],
                               None),
                (msg_tuple, ), True)
        else:
            send_msgs(get_connection(stored_cfg.defaults['FMS_HOST'],
                                     stored_cfg.defaults['FMS_PORT'],
                                     None),
                      (msg_tuple, ), True)

        ui_.status('Notification message sent.\n'
                   'Be patient. It may take up to a day to show up.\n')
    finally:
        # Always tear down the update state machine, even on error.
        cleanup(update_sm)
def get_params(base_dir): """ Return the parameters to run a WikiBot. """ # Get working directories. (tmp_dir, # MUST exist repo_dir, # MUST exist and contain wikitext hg repo. bot_storage_dir, # MUST exist ) = get_dirs(base_dir) params = read_fnwiki_cfg(os.path.join(repo_dir, 'fnwiki.cfg')) # MUST contain SSK private key key_file = KEY_FILE_FMT % get_usk_hash(params['WIKI_REPO_USK']) print "Read insert key from: %s" % key_file # Load private key for the repo from a file.. insert_ssk = open(os.path.expanduser(key_file), 'rb').read().strip() assert insert_ssk.startswith('SSK@') # Raw SSK insert key. insert_ssk = insert_ssk.split('/')[0].strip() # Make insert URI from request URI in config file. human = '/'.join(params['WIKI_REPO_USK'].split('/')[1:]) insert_uri = 'U' + insert_ssk[1:] + '/' + human # Then invert the request_uri from it. print "Inverting public key from private one..." request_uri = FCPClient.connect(FCP_HOST, FCP_PORT). \ get_request_uri(insert_uri) print request_uri if get_usk_hash(request_uri) != get_usk_hash(params['WIKI_REPO_USK']): print "The insert SSK doesn't match WIKI_REPO_USK in fnwiki.cfg!" assert False # LATER: Name convention. # USK@/foo.wikitext.R1/0 -- wiki source # USK@/foo/0 -- freesite #print "Reading latest index from Freenet... This can take minutes." #index = prefetch_usk(FCPClient.connect(fcp_host, fcp_port), # request_uri) #insert_uri = get_usk_for_usk_version(insert_uri, index) #request_uri = get_usk_for_usk_version(request_uri, index) # needed? # Hmmmm... freesite index is read from 'I_<n>' tags in # repo. There is no way to set it. params.update({ # FCP 2.0 'MaxRetries':3, 'PriorityClass':1, #'DontCompress':True, 'Verbosity':1023, # MUST set this to get progress messages. 
# FCPConnection / RequestRunner 'FCP_HOST':FCP_HOST, 'FCP_PORT':FCP_PORT, 'FCP_POLL_SECS':0.25, 'N_CONCURRENT':4, 'CANCEL_TIME_SECS': 15 * 60, # FMSBotRunner 'FMS_HOST':FMS_HOST, 'FMS_PORT':FMS_PORT, 'FMS_POLL_SECS': 3 * 60, 'BOT_STORAGE_DIR':bot_storage_dir, # WikiBot 'FMS_NOTIFY_GROUP': ('infocalypse.notify' if POST_TO_INFOCALYPSE_NOTIFY else ''), # extra group to notify. 'LATEST_INDEX':INDEX_HINT, # Just a hint, it is also stored in shelve db 'SITE_KEY':insert_ssk, 'INSERT_URI':insert_uri, 'REQUEST_URI':request_uri, 'VERBOSITY':VERBOSITY, 'TMP_DIR':tmp_dir, 'NO_SEARCH':False, # REQUIRED 'USK_HASH':get_usk_hash(request_uri), 'FNPUSH_COALESCE_SECS':60, # Time to wait before pushing 'SITE_COALESCE_SECS':60, # Time to wait before inserting. 'NOTIFY_COALESCE_SECS':60, # Time 2w8b4 sending fms repo update msg 'COMMIT_COALESCE_SECS':-1, # Hack to force immediate commit 'FMS_TRUST_CACHE_SECS': 1 * 60 * 60, 'FMS_MIN_TRUST':55, # peer message trust 'NONE_TRUST':49, # i.e. disable posting for 'None' peer msg trust 'REPO_DIR':repo_dir, # Only uncomment for testing. #'MSG_SPOOL_DIR':'/tmp/fake_msgs', }) return params
# # The versions don't matter, they are read from your .infocalpse file. # Hmmm... using request uris means you can broadcast information about # repos you have pulled but didn't insert. REPO_USKS = ('USK@kRM~jJVREwnN2qnA8R0Vt8HmpfRzBZ0j4rHC2cQ-0hw,' + '2xcoQVdQLyqfTpF2DpkdUIbHFCeL4W~2X1phUYymnhM,' + 'AQACAAE/fred_staging.R1/1', 'USK@kRM~jJVREwnN2qnA8R0Vt8HmpfRzBZ0j4rHC2cQ-0hw,' + '2xcoQVdQLyqfTpF2DpkdUIbHFCeL4W~2X1phUYymnhM,' + 'AQACAAE/infocalypse.hgext.R1/12', 'USK@EbQbLWtWLRBgQl4Ly-SjQJvzADdJPfIXNQfCbKzgCFI,' + 'XDLYQTC0nYD4rhIIP~Ff~itkvVVF2u4WU8YVSL2f5RA,' +'AQACAAE/collaborate.R1/1' ) USK_HASHES = tuple([get_usk_hash(usk) for usk in REPO_USKS]) def print_updates(): """ Print a sig message with embedded update strings or nothing at all if there's an error. """ try: stored_cfg = Config.from_file(CFG_PATH) updates = [] for usk_hash in USK_HASHES: index = stored_cfg.get_index(usk_hash) if index is None: # Uncomment this and run from the command line if # you get no output. #print "No stored index for usk hash: ", usk_hash continue updates.append((usk_hash, index))
def execute_fmsread(ui_, params, stored_cfg):
    """ Run the fmsread command.

    Rakes through FMS messages from the configured groups and records
    any newer repo USK indices reported by trusted notifiers in the
    config file (unless --dryrun is set).
    """
    if handled_trust_cmd(ui_, params, stored_cfg):
        return
    show_fms_info(ui_, params, stored_cfg)
    # Listing announced Repo USKs
    if handled_list(ui_, params, stored_cfg):
        return
    # Updating Repo USK indices for repos which are
    # listed in the fmsread_trust_map section of the
    # config file.
    trust_map = stored_cfg.fmsread_trust_map.copy() # paranoid copy
    dump_trust_map(ui_, params, trust_map)
    ui_.status("Raking through fms messages. This may take a while...\n")
    parser = USKNotificationParser()
    recv_msgs(get_connection(stored_cfg.defaults['FMS_HOST'],
                             stored_cfg.defaults['FMS_PORT'],
                             None),
              parser, stored_cfg.fmsread_groups)
    # IMPORTANT: Must include versions that are in the trust map
    # but which we haven't seen before.
    full_version_table = stored_cfg.version_table.copy()
    for usk_hash in known_hashes(trust_map):
        if usk_hash not in full_version_table:
            full_version_table[usk_hash] = None # works
    changed, untrusted = parser.get_updated(trust_map, full_version_table)
    if params['VERBOSITY'] >= 2 and len(untrusted) > 0:
        text = 'Skipped untrusted updates:\n'
        for usk_hash in untrusted:
            text += " %i:%s\n" % (untrusted[usk_hash][0], usk_hash)
        text += '\n'
        ui_.status(text)
    if len(changed) == 0:
        ui_.status('No updates found.\n')
        return
    # Back map to uris ? Can't always do it.
    # changed is non-empty here: the empty case returned above, so the
    # old "if len(changed) > 0" re-check was dead code and is gone.
    text = 'Updates:\n'
    for usk_hash in changed:
        text += '%s:%i\n' % (usk_hash, changed[usk_hash])
    ui_.status(text)
    if (params['REQUEST_URI'] is not None and
        get_usk_hash(params['REQUEST_URI']) in changed):
        ui_.status("Current repo has update to index %s.\n" %
                   changed[get_usk_hash(params['REQUEST_URI'])])
    if params['DRYRUN']:
        ui_.status('Exiting without saving because --dryrun was set.\n')
        return
    for usk_hash in changed:
        stored_cfg.update_index(usk_hash, changed[usk_hash])
    Config.to_file(stored_cfg)
    ui_.status('Saved updated indices.\n')