def mixnet_url_process(mixnet_url):
    """Register the mixnet gateway described by *mixnet_url* in the config.

    Splits the URL into catalog URL and endpoint id, validates that the
    endpoint is an OPEN SPHINXMIX_GATEWAY, imports its peer and crypto
    settings, and stores everything via ``cfg.set_value``.

    Aborts (via the module-level ``abort``) on any validation failure.
    """
    catalog_url, endpoint_id = split_mixnet_url(mixnet_url)
    cfg.set_value("CATALOG_URL", catalog_url)
    client.register_catalog_url(catalog_url)
    # NOTE: removed a leftover debug `print endpoint_id` statement
    # (Python-2-only syntax, not part of the function's contract).
    endpoint = client.endpoint_info(endpoint_id)
    if endpoint["status"] != "OPEN":
        abort("Endpoint is not open.")
    if endpoint["endpoint_type"] != "SPHINXMIX_GATEWAY":
        abort("Not a SPHINXMIX_GATEWAY.")
    peer_id = endpoint["peer_id"]
    cfg.set_value("PEER_ID", peer_id)
    peer = client.peer_info(peer_id)
    # Validate instead of `assert`: asserts are stripped under -O, and the
    # rest of this function reports validation failures through abort().
    if peer["crypto_backend"] != TYPE:
        abort("Unexpected crypto backend.")
    backend = common.BACKENDS[TYPE]
    client.register_backend(backend)
    cfg.set_value("CRYPTO_BACKEND", backend)
    crypto_params = canonical.from_unicode_canonical(peer["crypto_params"])
    cfg.set_value("CRYPTO_PARAMS", crypto_params)
    description = {"gateway": endpoint, "mixnet_peer": peer}
    cfg.set_value("MIXNET_DESCRIPTION", description)
def split_mixnet_url(mixnet_url):
    """Split a mixnet URL into ``(catalog_url, endpoint_id)``.

    The URL is expected to end with ``/<prefix>/<resources>/<endpoint_id>``
    (a trailing slash is tolerated); the two middle path segments are
    discarded. Aborts when the URL does not have that shape.
    """
    trimmed = mixnet_url.rstrip('/')
    parts = trimmed.rsplit('/', 3)
    if len(parts) != 4:
        abort()
    catalog_url = parts[0]
    endpoint_id = parts[3]
    return catalog_url, endpoint_id
def check_compare_texts(orig_text, new_text):
    """Verify a second-round contribution text against the original.

    Checks that, apart from owners/key_data/peer_id, the contribution data
    is unchanged; that both the coordinator and this peer appear among the
    new owners; and that the combined key data and combined key id match
    what the crypto client derives from the owner list. Aborts on any
    mismatch; imports each owner as a peer as a side effect.
    """
    original = orig_text["body"]
    candidate = new_text["body"]
    me = cfg.get("PEER_ID")
    base = original["data"].copy()
    proposed = candidate["data"].copy()
    base_owners = base.pop("owners")
    proposed_owners = proposed.pop("owners")
    # These two fields are expected to change between rounds; drop them
    # from the originals before comparing the remaining data.
    base.pop("key_data")
    base.pop("peer_id")
    proposed_key_data = proposed.pop("key_data")
    proposed_peer_id = proposed.pop("peer_id")
    if base != proposed:
        abort("Contribution data has changed.")
    if base_owners[0] not in proposed_owners:
        abort("Coordinator missing from owners.")
    if mk_owner_d(me) not in proposed_owners:
        abort("Peer missing from owners.")
    owners = unpack_owners(proposed_owners)
    for owner in owners:
        client.peer_import(owner)
    combined_data = client.crypto_client.combine_keys(owners)
    combined_key = client.crypto_client.get_key_id_from_key_data(combined_data)
    if combined_data != proposed_key_data:
        abort("Wrong combined key data.")
    if combined_key != proposed_peer_id:
        abort("Wrong combined peer id.")
def process_own_endpoint(endpoint_id):
    """Process the inbox of an endpoint we own by self-consensus.

    Runs inbox processing (with upload), prepares the record-process
    parameters, and applies the endpoint action under self consensus.
    Aborts on a "wrongstatus" response. Returns the processed endpoint id.
    """
    own_peer = cfg.get("PEER_ID")
    messages, log = client.inbox_process(endpoint_id, own_peer, upload=True)
    params = client.record_process_prepare(endpoint_id, log)
    if params == "wrongstatus":
        abort("Wrong status")
    endpoint = client.with_self_consensus(client.endpoint_action, params)
    processed_id = endpoint["endpoint_id"]
    ui.inform("Processed endpoint %s" % processed_id)
    return processed_id
def close_own_endpoint(endpoint_id):
    """Close an endpoint we own once its minimum inbox size is reached.

    Aborts on a "wrongstatus" response and raises ``Block`` while the
    minimum has not been reached yet. Returns the closed endpoint id.
    """
    params = client.close_on_minimum_prepare(endpoint_id)
    if params == "wrongstatus":
        abort("Wrong status")
    if params == "nomin":
        raise Block("Waiting until minimum inbox size is reached.")
    endpoint = client.with_self_consensus(client.endpoint_action, params)
    closed_id = endpoint["endpoint_id"]
    ui.inform("Closed endpoint %s" % closed_id)
    return closed_id
def get_join_response():
    """Ask the user whether to send a join contribution.

    Returns ``"yes"`` when the user accepts (explicitly, by pressing enter
    for the default, or automatically when ``autodefault`` is set) and
    aborts on any other answer.
    """
    default = "yes"
    if autodefault:
        return default
    response = ui.ask_value(
        "response",
        "Send join contribution? (yes/no) (default: '%s')" % default)
    if not response:
        return default
    if response != "yes":
        abort()
    # Bug fix: an explicit "yes" previously fell off the end and returned
    # None, while the default paths returned "yes"; return consistently.
    return response
def create_ep_record_contribution(endpoint_id, negotiation_id, log):
    """Create a record-process contribution for an endpoint negotiation.

    Aborts on a "wrongstatus" response; otherwise expects a contribution
    result and returns its ``data`` payload.
    """
    next_neg = read_next_negotiation_id()
    result = client.record_process(
        endpoint_id, negotiation_id, log, next_negotiation_id=next_neg)
    if result == "wrongstatus":
        abort("Wrong status")
    is_contrib, contribution = result
    assert is_contrib
    return contribution["data"]
def create_ep_close_contribution(endpoint_id, negotiation_id):
    """Create a close-on-minimum contribution for an endpoint negotiation.

    Aborts on a "wrongstatus" response and raises ``Block`` while the
    minimum inbox size is not yet reached; otherwise expects a contribution
    result and returns its ``data`` payload.
    """
    next_neg = read_next_negotiation_id()
    result = client.close_on_minimum(
        endpoint_id, negotiation_id, next_negotiation_id=next_neg)
    if result == "wrongstatus":
        abort("Wrong status")
    if result == "nomin":
        raise Block("Waiting until minimum inbox size is reached.")
    is_contrib, contribution = result
    assert is_contrib
    return contribution["data"]
def join_crypto_params_set(contrib):
    """Ask the user to accept the crypto params proposed in *contrib*.

    Returns the (canonical-decoded) crypto params on acceptance and aborts
    when the user answers 'abort'. Any other answer returns None.
    """
    text = get_contribution_text(contrib)
    params = text["body"]["data"]["crypto_params"]
    params = canonical.from_unicode_canonical(params)
    default = "accept"
    prompt = ("Proposed crypto params: '%s'; "
              "'accept' or 'abort'? (default: '%s')" % (params, default))
    answer = ui.ask_value("response", prompt) or default
    if answer == "abort":
        abort()
    elif answer == "accept":
        return params
def join_backend_set(contrib):
    """Ask the user to accept the crypto backend proposed in *contrib*.

    Returns the backend looked up in ``BACKENDS`` on acceptance and aborts
    when the user answers 'abort'. Any other answer returns None.
    """
    text = get_contribution_text(contrib)
    backend_name = text["body"]["data"]["crypto_backend"]
    default = "accept"
    prompt = ("Proposed crypto backend: '%s'; "
              "'accept' or 'abort'? (default: '%s')" % (backend_name, default))
    answer = ui.ask_value("response", prompt) or default
    if answer == "abort":
        abort()
    elif answer == "accept":
        return BACKENDS[backend_name]
def _join_coord_second_contribution(negotiation_id, initial_contrib,
                                    for_ep=False):
    """Fetch and validate the coordinator's second-round contribution.

    Raises ``Block`` while the coordinator has not contributed again yet,
    aborts when the latest contribution is not accepted or fails the
    comparison against the initial text, and otherwise returns it.
    """
    coordinator = initial_contrib["signer_key_id"]
    contributions = get_contributions(negotiation_id)
    latest = get_latest_of_peer(contributions, coordinator)
    assert latest is not None
    if initial_contrib["id"] == latest["id"]:
        raise Block("Waiting for the negotiation's second round.")
    text = get_contribution_text(latest)
    if not text["meta"]["accept"]:
        abort("Not an accepted contribution.")
    initial_text = get_contribution_text(initial_contrib)
    # Endpoint negotiations use the stricter whole-body comparison.
    checker = check_ep_compare_texts if for_ep else check_compare_texts
    checker(initial_text, text)
    return latest
def join_ep_process_contribution(endpoint_id, negotiation_id,
                                 initial_contrib):
    """Second a process contribution after checking the message hashes.

    Recomputes the processed-message hashes from the endpoint's link input
    (dry run) and aborts unless they match the hashes suggested in the
    initial contribution; then submits an accepting contribution and
    returns it.
    """
    text = get_contribution_text(initial_contrib)
    body = text["body"]
    expected_hashes = body["data"]["message_hashes"]
    link_input = client.get_input_from_link(
        endpoint_id, PROCESSBOX, serialized=True, dry_run=True)
    if link_input is None:
        raise ValueError("input is missing")
    responses, actual_hashes = link_input
    if expected_hashes != actual_hashes:
        abort("Couldn't agree on message hashes when processing.")
    extra = hash_meta_next_negotiation(text["meta"])
    response = client.run_contribution(
        negotiation_id, body, accept=True, extra_meta=extra)
    contribution = response.json()["data"]
    ui.inform("Sent contribution %s" % contribution["id"])
    return contribution
def join_ep_close_contribution(endpoint_id, negotiation_id,
                               initial_contrib):
    """Second a close contribution after checking the message hashes.

    When a linked inbox is available, first verifies its hashes against the
    suggested ones (missing/unready links are tolerated); then recomputes
    the hashes from the endpoint itself and aborts on mismatch. Finally
    submits an accepting contribution and returns it.
    """
    text = get_contribution_text(initial_contrib)
    body = text["body"]
    expected_hashes = body["data"]["message_hashes"]
    endpoint = client.endpoint_info(endpoint_id)
    try:
        linked_hashes = get_endpoint_input(endpoint_id, INBOX, dry_run=True)
        if linked_hashes is not None and expected_hashes != linked_hashes:
            abort("Hash mismatch for linked inbox")
    except (InputNotReady, NoLinks):
        # Best effort: no usable link input means nothing to cross-check.
        pass
    actual_hashes = client.check_endpoint_on_minimum(endpoint)
    if expected_hashes != actual_hashes:
        abort("Couldn't agree on message hashes when closing.")
    extra = hash_meta_next_negotiation(text["meta"])
    response = client.run_contribution(
        negotiation_id, body, accept=True, extra_meta=extra)
    contribution = response.json()["data"]
    ui.inform("Sent contribution %s" % contribution["id"])
    return contribution
def clean_url(url):
    """Strip the scheme and any trailing slashes from an http(s) URL.

    Aborts when the URL does not start with ``http://`` or ``https://``.
    """
    scheme, _, remainder = url.partition("://")
    if scheme not in ("http", "https"):
        abort("Malformed URL")
    return remainder.rstrip('/')
def check_ep_compare_texts(orig_text, new_text):
    """Abort unless both contribution texts carry an identical body."""
    if orig_text["body"] != new_text["body"]:
        abort("Contribution body has changed.")