def execute_arc_pull(ui_, params, stored_cfg):
    """ Update from an existing incremental archive in Freenet. """
    update_sm = None
    top_key_state = None
    try:
        assert 'ARCHIVE_CACHE_DIR' in params
        assert params['REQUEST_URI'] is not None
        if not params['NO_SEARCH'] and is_usk_file(params['REQUEST_URI']):
            index = stored_cfg.get_index(params['REQUEST_URI'])
            if index is not None:
                if index >= get_version(params['REQUEST_URI']):
                    # Update index to the latest known value
                    # for the --uri case.
                    params['REQUEST_URI'] = get_usk_for_usk_version(
                        params['REQUEST_URI'], index)
                else:
                    ui_.status(("Cached index [%i] < index in USK [%i]. "
                                + "Using the index from the USK.\n"
                                + "You're sure that index exists, right?\n")
                               % (index, get_version(params['REQUEST_URI'])))

        update_sm = setup(ui_, None, params, stored_cfg)
        ui_.status("%sRequest URI:\n%s\n" %
                   (is_redundant(params['REQUEST_URI']),
                    params['REQUEST_URI']))

        # Pull changes into the local block cache.
        ctx = ArchiveUpdateContext(update_sm, ui_)
        ctx.update({'REQUEST_URI': params['REQUEST_URI'],
                    'ARCHIVE_CACHE_DIR': params['ARCHIVE_CACHE_DIR']})
        start_requesting_blocks(update_sm, ctx)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        if update_sm.get_state(QUIESCENT).arrived_from((FINISHING, )):
            uri = update_sm.get_state(ARC_REQUESTING_URI).get_latest_uri()
            blocks = update_sm.get_state(ARC_CACHING_TOPKEY).get_blocks()
            plural = ''
            if len(blocks) != 1:
                plural = 's'
            ui_.status("Fetched %i bytes in %i CHK%s from:\n%s\n" %
                       (sum([block[0] for block in blocks]),
                        len(blocks), plural, uri))
            ui_.status("Updating local directory...\n")
            local_synch(ui_,
                        params['ARCHIVE_CACHE_DIR'],
                        # Use the updated URI below so we get the
                        # right cached topkey.
                        uri,
                        params['TO_DIR'])
            top_key_state = ARC_REQUESTING_URI
        else:
            ui_.status("Synchronize failed.\n")

        arc_handle_updating_config(update_sm, params, stored_cfg, True)
    finally:
        arc_cleanup(update_sm, top_key_state)

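# Usage sketch (illustrative, not part of the original module): the dict
# below lists the keys execute_arc_pull() actually reads. The URI, paths,
# and poll interval are placeholder values.
#
#     params = {'REQUEST_URI': 'USK@.../archive.top/3',
#               'ARCHIVE_CACHE_DIR': '/path/to/block_cache',
#               'TO_DIR': '/path/to/local_copy',
#               'NO_SEARCH': False,  # False enables the cached-index check.
#               'POLL_SECS': 0.25}   # Passed to run_until_quiescent().
#     execute_arc_pull(ui_, params, stored_cfg)
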
def execute_arc_push(ui_, params, stored_cfg):
    """ Push an update into an incremental archive in Freenet. """
    assert params.get('REQUEST_URI', None) is None # REDFLAG: why?
    update_sm = None
    top_key_state = None
    try:
        update_sm = setup(ui_, None, params, stored_cfg)
        request_uri, dummy_is_keypair = do_key_setup(ui_, update_sm, params,
                                                     stored_cfg)
        create_dirs(ui_, params['ARCHIVE_CACHE_DIR'], request_uri)
        # Note: the insert URI contains the private key, so it is only
        # echoed at debug verbosity.
        ui_.debug("%sInsert URI:\n%s\n" %
                  (is_redundant(params['INSERT_URI']),
                   params['INSERT_URI']))

        # Update the local archive.
        files, top_key = local_update(params['ARCHIVE_CACHE_DIR'],
                                      request_uri,
                                      params['FROM_DIR'])
        if files is None:
            raise util.Abort("There are no local changes to add.")

        for block in top_key[0]:
            if block[1][0] == 'CHK@':
                ui_.status("Created new %i byte block.\n" % block[0])

        # Insert them into Freenet.
        ctx = ArchiveUpdateContext(update_sm, ui_)
        ctx.update({'REQUEST_URI': request_uri,
                    'INSERT_URI': params['INSERT_URI'],
                    'ARCHIVE_CACHE_DIR': params['ARCHIVE_CACHE_DIR'],
                    'PROVISIONAL_TOP_KEY': top_key,
                    'ARCHIVE_BLOCK_FILES': files})
        start_inserting_blocks(update_sm, ctx)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        if update_sm.get_state(QUIESCENT).arrived_from((FINISHING, )):
            ui_.status("Inserted to:\n%s\n" %
                       '\n'.join(update_sm.get_state(ARC_INSERTING_URI).
                                 get_request_uris()))
            top_key_state = ARC_INSERTING_URI
        else:
            ui_.status("Push to archive failed.\n")

        arc_handle_updating_config(update_sm, params, stored_cfg)
    finally:
        arc_cleanup(update_sm, top_key_state)

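# Shape note (inferred from the loops above and in execute_arc_pull(), not
# a documented API): each entry in top_key[0] appears to look like
# (length_in_bytes, (uri, ...)), and a block whose first URI is the bare
# 'CHK@' placeholder has not been inserted into Freenet yet.
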
def execute_arc_reinsert(ui_, params, stored_cfg):
    """ Reinsert the archive into Freenet. """
    assert params.get('REQUEST_URI', None) is not None
    assert params.get('REINSERT_LEVEL', 0) > 0
    update_sm = None
    try:
        update_sm = setup(ui_, None, params, stored_cfg)
        request_uri, dummy_is_keypair = do_key_setup(ui_, update_sm, params,
                                                     stored_cfg)
        create_dirs(ui_, params['ARCHIVE_CACHE_DIR'], request_uri)
        ui_.status("%sRequest URI:\n%s\n" % (is_redundant(request_uri),
                                             request_uri))

        # Get the blocks to re-insert.
        files, top_key = local_reinsert(params['ARCHIVE_CACHE_DIR'],
                                        request_uri)

        # Tell the user about them.
        for block in top_key[0]:
            if block[1][0] == 'CHK@':
                ui_.status("Re-inserting %i byte block.\n" % block[0])

        # Start re-inserting them.
        ctx = ArchiveUpdateContext(update_sm, ui_)
        ctx.update({'REQUEST_URI': request_uri,
                    'INSERT_URI': params['INSERT_URI'],
                    'ARCHIVE_CACHE_DIR': params['ARCHIVE_CACHE_DIR'],
                    'PROVISIONAL_TOP_KEY': top_key,
                    'ARCHIVE_BLOCK_FILES': files,
                    'REINSERT': params['REINSERT_LEVEL']})
        start_inserting_blocks(update_sm, ctx)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        if update_sm.get_state(QUIESCENT).arrived_from((FINISHING, )):
            ui_.status("Re-insert finished.\n")
        else:
            ui_.status("Re-insert failed.\n")

        arc_handle_updating_config(update_sm, params, stored_cfg)
    finally:
        arc_cleanup(update_sm, None) # Don't prune cache.

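# Usage sketch (illustrative): re-insertion requires an existing
# REQUEST_URI and a positive REINSERT_LEVEL, per the asserts above. The
# meaning of individual level values is defined by local_reinsert() and
# the insert states, which are not shown here.
#
#     params.update({'REQUEST_URI': 'USK@.../archive.top/3',
#                    'REINSERT_LEVEL': 1})
#     execute_arc_reinsert(ui_, params, stored_cfg)
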
def execute_arc_create(ui_, params, stored_cfg):
    """ Create a new incremental archive. """
    update_sm = None
    top_key_state = None
    try:
        assert 'ARCHIVE_CACHE_DIR' in params
        assert 'FROM_DIR' in params
        update_sm = setup(ui_, None, params, stored_cfg)
        request_uri, dummy = do_key_setup(ui_, update_sm, params, stored_cfg)
        create_dirs(ui_, params['ARCHIVE_CACHE_DIR'], request_uri)
        # As in execute_arc_push(), the insert URI is only echoed at
        # debug verbosity because it contains the private key.
        ui_.debug("%sInsert URI:\n%s\n" %
                  (is_redundant(params['INSERT_URI']),
                   params['INSERT_URI']))

        # Create the local blocks.
        files, top_key = local_create(params['ARCHIVE_CACHE_DIR'],
                                      request_uri,
                                      params['FROM_DIR'])
        for block in top_key[0]:
            if block[1][0] == 'CHK@':
                ui_.status("Created new %i byte block.\n" % block[0])

        # Insert them into Freenet.
        ctx = ArchiveUpdateContext(update_sm, ui_)
        ctx.update({'REQUEST_URI': request_uri,
                    'INSERT_URI': params['INSERT_URI'],
                    'ARCHIVE_CACHE_DIR': params['ARCHIVE_CACHE_DIR'],
                    'PROVISIONAL_TOP_KEY': top_key,
                    'ARCHIVE_BLOCK_FILES': files})
        start_inserting_blocks(update_sm, ctx)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        if update_sm.get_state(QUIESCENT).arrived_from((FINISHING, )):
            ui_.status("Inserted to:\n%s\n" %
                       '\n'.join(update_sm.get_state(ARC_INSERTING_URI).
                                 get_request_uris()))
            top_key_state = ARC_INSERTING_URI
        else:
            ui_.status("Archive create failed.\n")

        arc_handle_updating_config(update_sm, params, stored_cfg)
    finally:
        arc_cleanup(update_sm, top_key_state)

def execute_wiki_apply(ui_, repo, params, stored_cfg):
    """ Fetch a wiki change submission CHK and apply it to a local
        directory. """
    update_sm = None
    try:
        assert 'REQUEST_URI' in params
        # Get version, i.e. just the hg parent == hg head
        version = get_hg_version(repo)

        # Get target directory.
        params['ISWIKI'] = True
        read_freesite_cfg(ui_, repo, params, stored_cfg)

        update_sm = setup(ui_, repo, params, stored_cfg)

        # Make an FCP download request which will run on the
        # state machine.
        request = StatefulRequest(update_sm)
        request.tag = 'submission_zip_request'
        request.in_params.definition = GET_DEF # To RAM.
        request.in_params.fcp_params = update_sm.params.copy()
        request.in_params.fcp_params['URI'] = params['REQUEST_URI']
        # Knee-high barrier against abuse.
        request.in_params.fcp_params['MaxSize'] = FREENET_BLOCK_LEN

        ui_.status("Requesting wiki submission from...\n%s\n" %
                   params['REQUEST_URI'])
        update_sm.start_single_request(request)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        if update_sm.get_state(QUIESCENT).arrived_from((FINISHING, )):
            raw_bytes = update_sm.get_state(RUNNING_SINGLE_REQUEST). \
                        final_msg[2]
            assert request.response[0] == 'AllData'
            ui_.status("Fetched %i byte submission.\n" % len(raw_bytes))
            base_ver, submitter = get_info(StringIO.StringIO(raw_bytes))
            ui_.status("Base version: %s, Submitter: %s (unverifiable!)\n"
                       % (base_ver[:12], submitter))

            #print "H_ACKING base_ver to test exception!"
            #base_ver = 'da2f653c5c47b7ee7a814e668aa1d63c50c3a4f3'
            if not has_version(repo, base_ver):
                ui_.warn("That version isn't in the local repo.\n" +
                         "Try running hg fn-pull --aggressive.\n")
                raise util.Abort("%s not in local repo" % base_ver[:12])

            if base_ver != version:
                ui_.warn("Version mismatch! You might have to " +
                         "manually merge.\n")

            # Set up an IFileFunctions that reads the correct versions of
            # the unpatched files out of Mercurial.
            overlay = HgFileOverlay(ui_, repo,
                                    # i.e. "<>/wiki_root", NOT
                                    # "<>/wiki_root/wikitext"
                                    os.path.join(repo.root,
                                                 params['WIKI_ROOT']),
                                    # cleanup() in finally deletes this.
                                    make_temp_file(update_sm.ctx.
                                                   bundle_cache.base_dir))
            overlay.version = base_ver
            validate_wikitext(overlay)
            updates = unbundle_wikitext(overlay,
                                        StringIO.StringIO(raw_bytes))
            for index, label in enumerate(('CREATED', 'MODIFIED', 'REMOVED',
                                           'ALREADY PATCHED')):
                if len(updates[index]) > 0:
                    values = list(updates[index])
                    values.sort()
                    ui_.status('%s:\n%s\n' % (label, '\n'.join(values)))
    finally:
        cleanup(update_sm)

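# Result shape (inferred from the enumerate() loop above, not a documented
# API): unbundle_wikitext() appears to return a 4-tuple of file-name
# collections, indexed in the order CREATED, MODIFIED, REMOVED,
# ALREADY PATCHED.
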
def execute_wiki_submit(ui_, repo, params, stored_cfg):
    """ Insert an overlayed wiki change submission CHK into Freenet
        and return a notification message string. """
    update_sm = None
    try:
        # Read submitter out of stored_cfg.
        submitter = stored_cfg.defaults.get('FMS_ID', None)
        assert submitter is not None
        assert submitter.find('@') == -1

        # Get version, i.e. just the hg parent == hg head
        version = get_hg_version(repo)

        params['ISWIKI'] = True
        read_freesite_cfg(ui_, repo, params, stored_cfg)
        if not params.get('OVERLAYED', False):
            raise util.Abort("Can't submit from non-overlayed wiki edits!")
        if not params.get('CLIENT_WIKI_GROUP', None):
            # DCI: test code path
            raise util.Abort("No wiki_group in fnwiki.cfg. Don't " +
                             "know where to post to!")

        ui_.status("\nPreparing to submit to %s FMS group as %s.\n" %
                   (params['CLIENT_WIKI_GROUP'], submitter))

        # Create the submission zip file in RAM.
        overlay = get_file_funcs(os.path.join(repo.root,
                                              params['WIKI_ROOT']),
                                 True)
        try:
            raw_bytes = bundle_wikitext(overlay, version, submitter)
        except NoChangesError:
            raise util.Abort("There are no overlayed changes to submit.")

        # Punt if it's too big.
        if len(raw_bytes) >= FREENET_BLOCK_LEN:
            raise util.Abort("Too many changes. Change .zip must be <32K")

        update_sm = setup(ui_, repo, params, stored_cfg)

        # Make an FCP file insert request which will run on the
        # state machine.
        request = StatefulRequest(update_sm)
        request.tag = 'submission_zip_insert'
        request.in_params.definition = PUT_FILE_DEF
        request.in_params.fcp_params = update_sm.params.copy()
        request.in_params.fcp_params['URI'] = 'CHK@'
        request.in_params.send_data = raw_bytes

        ui_.status("Inserting %i byte submission CHK...\n" % len(raw_bytes))
        update_sm.start_single_request(request)
        run_until_quiescent(update_sm, params['POLL_SECS'])

        heads = [hexlify(head) for head in repo.heads()]
        heads.sort()

        if update_sm.get_state(QUIESCENT).arrived_from((FINISHING, )):
            chk = update_sm.get_state(RUNNING_SINGLE_REQUEST). \
                  final_msg[1]['URI']
            ui_.status("Patch CHK:\n%s\n" % chk)
            # ':', '|' not in freenet base64
            # DCI: why normalize???
            # (usk_hash, base_version, chk, length)
            ret = ':'.join(('W',
                            normalize(params['REQUEST_URI']),
                            version[:12],
                            chk,
                            str(len(raw_bytes))))

            ui_.status("\nNotification:\n%s\n" % ret + '\n')
            return ret, params['CLIENT_WIKI_GROUP']

        raise util.Abort("Submission CHK insert failed.")
    finally:
        # Cleans up temp file.
        cleanup(update_sm)

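# The 'W:...' notification string built above is consumed elsewhere. Below
# is a minimal sketch of the inverse operation, relying only on the field
# layout shown in the comment above; this helper is illustrative and not
# part of the original module.
def parse_wiki_submission_notification(msg):
    """ Unpack a 'W:<usk_hash>:<base_version>:<chk>:<length>' string as
        built by execute_wiki_submit(). Like the join above, this assumes
        ':' never appears inside the individual fields. """
    fields = msg.split(':')
    assert len(fields) == 5 and fields[0] == 'W'
    usk_hash, base_version, chk, length = fields[1:]
    return usk_hash, base_version, chk, int(length)
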
def execute_fmsnotify(ui_, repo, params, stored_cfg):
    """ Run fmsnotify command. """
    update_sm = None
    try:
        # Insert URI MUST be stored.
        update_sm = setup(ui_, repo, params, stored_cfg)
        request_uri, dummy = do_key_setup(ui_, update_sm, params, stored_cfg)
        if request_uri is None: # Just assert?
            ui_.warn("Only works for USK file URIs.\n")
            return

        check_fms_cfg(ui_, params, stored_cfg)

        usk_hash = get_usk_hash(request_uri)
        index = stored_cfg.get_index(usk_hash)
        if index is None and not (params.get('SUBMIT_BUNDLE', False) or
                                  params.get('SUBMIT_WIKI', False)):
            ui_.warn("Can't notify because there's no stored index " +
                     "for %s.\n" % usk_hash)
            return

        group = stored_cfg.defaults.get('FMSNOTIFY_GROUP', None)
        subject = 'Update:' + '/'.join(request_uri.split('/')[1:])
        if params['ANNOUNCE']:
            text = to_msg_string(None, (request_uri, ))
        elif params['SUBMIT_BUNDLE']:
            params['REQUEST_URI'] = request_uri # REDFLAG: Think through.
            text = execute_insert_patch(ui_, repo, params, stored_cfg)
            subject = 'Patch:' + '/'.join(request_uri.split('/')[1:])
        elif params['SUBMIT_WIKI']:
            params['REQUEST_URI'] = request_uri # REDFLAG: Think through.
            text, group = execute_wiki_submit(ui_, repo, params, stored_cfg)
            subject = 'Submit:' + '/'.join(request_uri.split('/')[1:])
        else:
            text = to_msg_string(((usk_hash, index), ))

        msg_tuple = (stored_cfg.defaults['FMS_ID'],
                     group,
                     subject,
                     text)

        show_fms_info(ui_, params, stored_cfg, False)

        ui_.status('Sender : %s\nGroup  : %s\nSubject: %s\n%s\n' %
                   (stored_cfg.defaults['FMS_ID'], group, subject, text))

        if params['VERBOSITY'] >= 5:
            ui_.status('--- Raw Message ---\n%s\n---\n' %
                       (MSG_TEMPLATE % (msg_tuple[0], msg_tuple[1],
                                        msg_tuple[2], msg_tuple[3])))

        if params['DRYRUN']:
            ui_.status('Exiting without sending because --dryrun was set.\n')
            return

        # REDFLAG: for testing!
        if 'MSG_SPOOL_DIR' in params:
            ui_.warn("DEBUG HACK!!! Writing fms msg to local spool:\n%s\n" %
                     params['MSG_SPOOL_DIR'])
            import fmsstub

            # LATER: fix config file to store full fmsid?
            # grrrr... hacks piled upon hacks.
            lut = {'djk': 'djk@isFiaD04zgAgnrEC5XJt1i4IE7AkNPqhBG5bONi6Yks'}
            fmsstub.FMSStub(params['MSG_SPOOL_DIR'], group, lut).send_msgs(
                get_connection(stored_cfg.defaults['FMS_HOST'],
                               stored_cfg.defaults['FMS_PORT'],
                               None),
                (msg_tuple, ), True)
        else:
            send_msgs(get_connection(stored_cfg.defaults['FMS_HOST'],
                                     stored_cfg.defaults['FMS_PORT'],
                                     None),
                      (msg_tuple, ), True)

        ui_.status('Notification message sent.\n'
                   'Be patient. It may take up to a day to show up.\n')
    finally:
        cleanup(update_sm)

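# Dry-run sketch (illustrative): with DRYRUN set, execute_fmsnotify()
# prints the sender, group, subject, and text that would be posted (plus
# the raw message at VERBOSITY >= 5) and then returns without connecting
# to the FMS host.
#
#     params.update({'ANNOUNCE': True, 'DRYRUN': True, 'VERBOSITY': 5})
#     execute_fmsnotify(ui_, repo, params, stored_cfg)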