f.write("some_text") f.close() cache_inst2 = RedisNetInfCache(storage_root, logger) cache_inst2.set_redis_conn(redis_conn) if os.path.isfile(cache_inst2.temp_path+"temp"): print"Temporaries not cleared" #---------------------------------------------------------------------------# ni_url = "ni://mumble.org/sha-256-32;uzFqGA" ni_name = NIname(ni_url) ni_name.validate_ni_url(has_params=True) ni_name_uv = NIname(ni_url) ni_name_np = NIname("ni:///sha-256") ni_name_np.validate_ni_url(has_params=False) print(str(ni_name.get_url())) md = NetInfMetaData(ni_name.get_canonical_ni_url(), "now", loc1="http://www.example.com", extrameta={ "something" : "else" }) print(md) md1 = NetInfMetaData("ni://abr.org/sha_256_64;fjhaie8978", "now", loc2="https://zzz.mumble.org", extrameta={ "something" : "else" }) print md1 try: m, f, n, i = cache_inst.cache_put(ni_name_uv, md, None) print "Fault: cache_put accepted unvalidated ni_name" except Exception, e: print "Error correctly detected: %s" % str(e) try: m, f, n, i = cache_inst.cache_put(ni_name_np, md, None)
def insert_resp_metadata(self, response):
    """
    @brief Insert metadata from GET-RESP message into internal form.
    @param response Either JSON format dict or JSON format string in the
                    format of GET-RESP metadata.
    @retval None.

    The response is converted to a JSON dictionary and/or checked to
    verify that it is in good shape.

    Can be used either with an 'empty' NetInfMetaData instance identified
    by the 'ni' field being the empty string, or with a previously
    populated entry.

    For an empty instance, the header fields ('ni', 'ct' and 'size') are
    populated from the response where possible (there may be no 'ct' or
    'size' in the response).  The remainder of the information is
    converted into a new 'details' entry which is either used as the
    first entry or appended to the list if there are existing entries.

    @raises Exceptions if the response cannot be parsed
    """
    if type(response) == StringType:
        resp_dict = json.loads(response)
    elif type(response) == DictType:
        resp_dict = response
        # Check the response is really a JSON dictionary
        js = json.dumps(response)
    else:
        raise TypeError("Parameter 'response' is not a string or dictionary")

    curr_ni = self.get_ni()
    resp_ni_name = NIname(resp_dict["ni"])
    ret = resp_ni_name.validate_ni_url()
    if ret != ni_errs.niSUCCESS:
        raise InvalidNIname("Response ni field '%s' is not a valid ni URI: %s" %
                            (resp_dict["ni"], ni_errs_txt[ret]))

    if curr_ni == "":
        # Empty metadata case
        self.json_obj["ni"] = resp_ni_name.get_canonical_ni_url()
        if resp_dict.has_key("ct"):
            self.json_obj["ct"] = resp_dict["ct"]
        if resp_dict.has_key("size"):
            self.json_obj["size"] = resp_dict["size"]
        self.json_obj["details"] = []
    else:
        # The metadata is not empty
        # Create validated NIname for the current metadata
        ni_name = NIname(curr_ni)
        # If this fails the metadata database is corrupt
        assert(ni_name.validate_ni_url() == ni_errs.niSUCCESS)
        if ni_name.cmp(resp_ni_name) == 0:
            # Update with data about same ni name
            if resp_dict.has_key("ct") and (resp_dict["ct"] != ""):
                if self.json_obj["ct"] == "":
                    self.json_obj["ct"] = resp_dict["ct"]
                elif self.json_obj["ct"] != resp_dict["ct"]:
                    raise MetadataMismatch("Content Type fields are unmatched")
            if resp_dict.has_key("size") and (resp_dict["size"] >= 0):
                if self.json_obj["size"] == -1:
                    self.json_obj["size"] = resp_dict["size"]
                elif self.json_obj["size"] != resp_dict["size"]:
                    raise MetadataMismatch("Size fields are unmatched")
        else:
            raise MetadataMismatch("NI name fields are unmatched curr: %s, got: %s" %
                                   (curr_ni, resp_dict["ni"]))

    new_detail = {}
    new_detail["loc"] = []
    for loc_key in ("loc", "loclist"):
        if resp_dict.has_key(loc_key):
            if type(resp_dict[loc_key]) == ListType:
                new_detail["loc"] = resp_dict[loc_key]
            else:
                raise TypeError("Response '%s' value is not a list" % loc_key)
    auth = resp_ni_name.get_netloc()
    if auth is not None and (auth != ""):
        new_detail["loc"].append(auth)
    if resp_dict.has_key("metadata"):
        if type(resp_dict["metadata"]) == DictType:
            new_detail["metadata"] = resp_dict["metadata"]
        else:
            raise TypeError("Response metadata is not an object dictionary")
    if resp_dict.has_key("searches"):
        if not new_detail.has_key("metadata"):
            new_detail["metadata"] = {}
        new_detail["metadata"]["search"] = resp_dict["searches"]
    new_detail["ts"] = self.metadata_timestamp_for_now()
    self.json_obj["details"].append(new_detail)
    self.curr_detail = new_detail
    return
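# Illustrative usage sketch (not part of the original module).  It assumes a
# no-argument NetInfMetaData() constructor yields an 'empty' instance (ni
# field ""), as described in the docstring above; the response values are
# invented for the example and the ni URI is borrowed from the test code.
#
#   resp = { "ni":      "ni://mumble.org/sha-256-32;uzFqGA",
#            "ct":      "application/octet-stream",
#            "size":    42,
#            "loclist": ["http://cache.example.com"] }
#   md = NetInfMetaData()
#   md.insert_resp_metadata(resp)     # fills 'ni', 'ct', 'size' and adds the
#                                     # first timestamped 'details' entry
#
#   # A second response for the same name but with a conflicting content
#   # type is rejected rather than silently merged:
#   try:
#       md.insert_resp_metadata(dict(resp, ct="text/plain"))
#   except MetadataMismatch, e:
#       print "Rejected: %s" % str(e)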
# Extract JSON values from message
# Check the message is an application/json part
if json_msg.get("Content-type") != "application/json":
    self.loginfo("do_fwd: weird content type: %s" %
                 json_msg.get("Content-type"))
    continue

# Extract the JSON structure
try:
    json_report = json.loads(json_msg.get_payload())
except Exception, e:
    self.loginfo("do_fwd: can't decode json: %s" % str(e))
    continue

curi = NIname(uri)
curi.validate_ni_url()
metadata = NetInfMetaData(curi.get_canonical_ni_url())
self.loginfo("Metadata I got: %s" % str(json_report))
metadata.insert_resp_metadata(json_report)

# If I've got the role GET_RES and there are locators then
# follow those now
if ct_msg == None and self.check_role(GET_RES):
    self.loginfo("I'm a GET_RES type of node - going to try follow")
    self.loginfo("meta: %s" % str(json_report))
    # check for locators
    locators = metadata.get_loclist()
    self.loginfo("locs: %s" % str(locators))
    # try follow locators
    for loc in locators:
        self.loginfo("GET_RES following: %s" % loc)
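# Illustrative sketch (not part of the original source): the json_report fed
# to insert_resp_metadata() above is a GET-RESP style dictionary.  The values
# below are invented, and get_loclist() is assumed to return the locators
# accumulated in the metadata's 'details' entries (the 'loc'/'loclist' values
# plus the authority of the ni name, per NetInfMetaData.insert_resp_metadata()).
#
#   json_report = { "ni":      "ni://mumble.org/sha-256-32;uzFqGA",
#                   "loclist": ["http://cache-a.example.com",
#                               "http://cache-b.example.com"] }
#   curi = NIname(json_report["ni"])
#   curi.validate_ni_url()
#   metadata = NetInfMetaData(curi.get_canonical_ni_url())
#   metadata.insert_resp_metadata(json_report)
#   print metadata.get_loclist()
#   # e.g. ["http://cache-a.example.com", "http://cache-b.example.com",
#   #       "mumble.org"]  - these are the locators followed in the loop above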
def publish_with_dtn(ni_name, destination, authority, hash_alg, ext_json,
                     locs, scheme, full_put, file_name, rform, verbose):
    """
    @brief Action a NetInf publish request using the DTN convergence layer

    @param ni_name NIname object instance or None - ni URI to publish if
                   given on command line - otherwise will be constructed
    @param destination string netloc (FQDN or IP address with optional port)
                       indicating where to send publish request
    @param authority string netloc component to insert into ni name (may be "")
    @param hash_alg string name of hash algorithm used for ni URI
    @param ext_json dictionary additional information to send with request
                    if any in the form of a JSON dictionary or None
    @param locs list of strings with locators to publish - may be None
    @param scheme URI scheme used for ni URI
    @param full_put boolean True if the file_name with the content was given
    @param file_name string name of file to publish or None if only doing metadata
    @param rform string request format of response
    @param verbose boolean indicates how much error message output is produced
    @return 2-tuple:
            target  - string - the actual ni name published
            payload - string - the response received on publication
    """
    debug("Publishing via: %s" % destination)

    # Handle full_put = True cases - we have a file with the octets in it
    if full_put:
        if ni_name is None:
            # Make a ni_name template from specified components
            ni_name = NIname((scheme, authority, "/%s" % hash_alg))

            # Construct the digest from the file name and the template
            rv = NIproc.makenif(ni_name, file_name)
            if rv != ni_errs.niSUCCESS:
                raise PublishFailure("Unable to construct digest of file %s: %s" %
                                     (file_name, ni_errs_txt[rv]), -20)
        else:
            # Check the ni_name and the file match
            rv = NIproc.checknif(ni_name, file_name)
            if rv != ni_errs.niSUCCESS:
                raise PublishFailure("Digest of file %s does not match ni_name %s: %s" %
                                     (file_name, ni_name.get_url(),
                                      ni_errs_txt[rv]), -21)

        # Guess the mimetype of the file
        m = magic.Magic(mime=True)
        ctype = m.from_file(file_name)
        debug("Content-Type: %s" % ctype)
        if ctype is None:
            # Guessing didn't work - default
            ctype = "application/octet-stream"
    else:
        ctype = None

    target = ni_name.get_canonical_ni_url()
    debug("Using URI string: %s" % target)

    # Add extra items to ext_json to pass across as metadata
    ext_json["ni"] = target
    if ctype is not None:
        ext_json["ct"] = ctype
    if authority != "":
        ext_json["http_auth"] = authority
    # Send at most two locators as a list
    if (locs is not None):
        ext_json["loclist"] = locs[:2]
    ext_json["fullPut"] = full_put
    ext_json["rform"] = rform

    # Create a connection to the DTN daemon
    dtn_handle = dtnapi.dtn_open()
    if dtn_handle == -1:
        raise PublishFailure("Error: unable to open connection with DTN daemon",
                             -22)

    # Generate EID + service tag for service to be accessed via DTN
    if destination is None:
        remote_service_eid = \
            dtnapi.dtn_build_local_eid(dtn_handle,
                                       "netinfproto/service/publish")
        i = remote_service_eid.find("/netinfproto")
        destination = remote_service_eid[:i]
    else:
        remote_service_eid = destination + "/netinfproto/service/publish"

    # Add destination to locs if it isn't there already
    if locs is None:
        locs = []
    if destination not in locs:
        locs.append(destination)

    # Generate the EID and service tag for this service
    local_service_eid = dtnapi.dtn_build_local_eid(dtn_handle,
                                                   "netinfproto/app/response")
    debug("Local Service EID: %s" % local_service_eid)
    debug("Remote Service EID: %s" % remote_service_eid)

    # Check if service_eid registration exists and register if not
    # Otherwise bind to the existing registration
    regid = dtnapi.dtn_find_registration(dtn_handle,
                                         local_service_eid)
    if (regid == -1):
        # Need to register the EID.. make it permanent with 'DEFER'
        # characteristics so that bundles are saved if they arrive
        # while the handler is inactive
        # Expire the registration an hour in the future
        exp = 60 * 60
        # The registration is immediately active
        passive = False
        # We don't want to execute a script
        script = ""

        regid = dtnapi.dtn_register(dtn_handle, local_service_eid,
                                    dtnapi.DTN_REG_DEFER, exp, passive,
                                    script)
    else:
        dtnapi.dtn_bind(dtn_handle, regid)

    # Build the bundle to send
    # First a suitable BPQ block
    bpq = BPQ()
    bpq.set_bpq_kind(BPQ.BPQ_BLOCK_KIND_PUBLISH)
    bpq.set_matching_rule(BPQ.BPQ_MATCHING_RULE_EXACT)
    bpq.set_src_eid(local_service_eid)
    sent_msgid = str(random.randint(1, 32000))
    bpq.set_bpq_id(sent_msgid)
    bpq.set_bpq_val(target)
    bpq.clear_frag_desc()

    # Build an extension blocks structure to hold the BPQ block
    ext_blocks = dtnapi.dtn_extension_block_list(1)

    # Construct the extension block
    bpq_block = dtnapi.dtn_extension_block()
    bpq_block.type = QUERY_EXTENSION_BLOCK
    bpq_block.flags = 0
    bpq_block.data = bpq.build_for_net()
    ext_blocks.blocks.append(bpq_block)

    # Build a metadata blocks structure to hold the metadata block(s)
    meta_blocks = dtnapi.dtn_extension_block_list(2)

    # Build a metadata block for JSON data
    md = Metadata()
    md.set_ontology(Metadata.ONTOLOGY_JSON)
    md.set_ontology_data(json.dumps(ext_json))
    json_block = dtnapi.dtn_extension_block()
    json_block.type = METADATA_BLOCK
    json_block.flags = 0
    json_block.data = md.build_for_net()
    meta_blocks.blocks.append(json_block)

    # Set up payload and placeholder if needed
    if full_put:
        # No placeholder required (obviously!)
        pt = dtnapi.DTN_PAYLOAD_FILE
        pv = file_name
    else:
        # DTN bundle always has a payload - distinguish
        # zero length file from no content available
        # Payload is the empty string sent via memory
        pt = dtnapi.DTN_PAYLOAD_MEM
        pv = ""
        # Add a payload placeholder metadata block
        md = Metadata()
        md.set_ontology(Metadata.ONTOLOGY_PAYLOAD_PLACEHOLDER)
        md.set_ontology_data("No content supplied")
        pp_block = dtnapi.dtn_extension_block()
        pp_block.type = METADATA_BLOCK
        pp_block.flags = 0
        pp_block.data = md.build_for_net()
        meta_blocks.blocks.append(pp_block)

    # We want delivery reports and publication reports
    # (and maybe deletion reports?)
    dopts = dtnapi.DOPTS_DELIVERY_RCPT | dtnapi.DOPTS_PUBLICATION_RCPT
    # Send with normal priority.
    pri = dtnapi.COS_NORMAL
    # NetInf bundles should last a while..
    exp = (24 * 60 * 60)

    # Send the bundle
    bundle_id = dtnapi.dtn_send(dtn_handle, regid, local_service_eid,
                                remote_service_eid, local_service_eid,
                                pri, dopts, exp, pt, pv, ext_blocks,
                                meta_blocks, "", "")
    if bundle_id == None:
        raise PublishFailure("dtn_send failed - %s" %
                             dtnapi.dtn_strerror(dtnapi.dtn_errno(dtn_handle)),
                             -23)

    # Wait for a response - maybe also some reports
    while(True):
        # NOTE: BUG in dtnapi - timeout is in msecs
        recv_timeout = 2000 * 60
        bpq_bundle = dtnapi.dtn_recv(dtn_handle, dtnapi.DTN_PAYLOAD_FILE,
                                     recv_timeout)
        # If bpq_bundle is None then either the dtn_recv timed out or
        # there was some other error.
        if bpq_bundle != None:
            # Filter out report bundles
            if bpq_bundle.status_report != None:
                debug("Received status report")
                if bpq_bundle.status_report.flags == dtnapi.STATUS_DELIVERED:
                    if verbose:
                        print("Received delivery report re from %s sent %d seq %d" %
                              (bpq_bundle.status_report.bundle_id.source,
                               bpq_bundle.status_report.bundle_id.creation_secs,
                               bpq_bundle.status_report.bundle_id.creation_seqno))
                elif bpq_bundle.status_report.flags == dtnapi.STATUS_DELETED:
                    if verbose:
                        print("Received deletion report re from %s sent %d seq %d" %
                              (bpq_bundle.status_report.bundle_id.source,
                               bpq_bundle.status_report.bundle_id.creation_secs,
                               bpq_bundle.status_report.bundle_id.creation_seqno))
                elif bpq_bundle.status_report.flags == dtnapi.STATUS_PUBLISHED:
                    if verbose:
                        print("Received publication report re from %s sent %d seq %d" %
                              (bpq_bundle.status_report.bundle_id.source,
                               bpq_bundle.status_report.bundle_id.creation_secs,
                               bpq_bundle.status_report.bundle_id.creation_seqno))
                else:
                    if verbose:
                        print("Received unexpected report: Flags: %d" %
                              bpq_bundle.status_report.flags)

                # Wait for more status reports and incoming response
                continue

            # Check the payload really is in a file
            if not bpq_bundle.payload_file:
                raise PublishFailure("Received bundle payload not in file - "
                                     "ignoring bundle", -24)

            # Have to delete this file before an error exit or if empty
            pfn = bpq_bundle.payload
            l = len(pfn)
            if pfn[l-1] == "\x00":
                pfn = pfn[:-1]

            debug("Got incoming bundle with response in file %s" % pfn)

            # Does the bundle have a BPQ block
            bpq_data = None
            if bpq_bundle.extension_cnt == 0:
                os.remove(pfn)
                raise PublishFailure("Error: Received bundle with no "
                                     "extension block.", -25)

            for blk in bpq_bundle.extension_blks:
                if blk.type == QUERY_EXTENSION_BLOCK:
                    bpq_data = BPQ()
                    if not bpq_data.init_from_net(blk.data):
                        os.remove(pfn)
                        raise PublishFailure("Error: Bad BPQ block received",
                                             -26)

            if bpq_data is None:
                os.remove(pfn)
                raise PublishFailure("Error: Received bundle with no BPQ block "
                                     "in extension blocks", -27)

            debug(bpq_data)

            # OK.. got the response - finish with daemon
            break

        elif dtnapi.dtn_errno(dtn_handle) != dtnapi.DTN_ETIMEOUT:
            raise PublishFailure(dtnapi.dtn_strerror(dtnapi.dtn_errno(dtn_handle)),
                                 -28)
        else:
            raise PublishFailure("dtn_recv timed out without receiving "
                                 "response bundle", 1)

    dtnapi.dtn_close(dtn_handle)

    # Check the BPQ data is right
    bpq.set_bpq_kind(BPQ.BPQ_BLOCK_KIND_PUBLISH)
    bpq.set_matching_rule(BPQ.BPQ_MATCHING_RULE_EXACT)

    if bpq_data.bpq_kind != BPQ.BPQ_BLOCK_KIND_PUBLISH:
        raise PublishFailure("Returned BPQ block is not PUBLISH kind: %d" %
                             bpq_data.bpq_kind, -29)
    if bpq_data.matching_rule != BPQ.BPQ_MATCHING_RULE_NEVER:
        raise PublishFailure("Returned BPQ block does not have NEVER matching rule: %d" %
                             bpq_data.matching_rule, -30)
    if bpq_data.bpq_id != sent_msgid:
        raise PublishFailure("Returned BPQ block has unmatched msgid %s vs %s" %
                             (bpq_data.bpq_id, sent_msgid), -31)

    # Verify the format of the response (a bit)
    try:
        pfd = open(pfn, "rb")
        payload = pfd.read()
        pfd.close()
        os.remove(pfn)
    except Exception, e:
        raise PublishFailure("Failed to read response from payload file %s" %
                             pfn, -32)
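    # Illustrative sketch (not part of the original source): one way a
    # command-line front end might drive publish_with_dtn().  The destination
    # EID, file name, extra metadata and the "json" response format are
    # invented for the example, and PublishFailure is assumed to stringify to
    # its message text; the 2-tuple unpacking follows the @return description
    # in the docstring above.
    #
    #   ext_json = { "meta": { "publisher": "example" } }
    #   try:
    #       target, payload = publish_with_dtn(None, "dtn://node.example.dtn",
    #                                          "", "sha-256", ext_json, None,
    #                                          "ni", True, "photo.jpg",
    #                                          "json", True)
    #       print "Published %s" % target
    #   except PublishFailure, pf:
    #       print "Publish failed: %s" % str(pf)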