def check_cache_dirs(self):
    """
    @brief Check existence of object cache directories and create if necessary
    @retval string pathname for temporaries directory
    @throw IOError if file operations go wrong

    The storage_root directory has to be created and writeable before
    starting the server.

    For the rest of the tree, directories will be created if they do not
    exist.

    The temporaries directory will also be created if it does not exist
    and its pathname returned as the result of successful operations.

    Directories are checked to see they are readable, writeable and
    searchable if they exist.

    If a file IO operation fails, the appropriate exception is raised,
    logged and propagated.
    """
    if not os.path.isdir(self.storage_root):
        self.logerror("Storage root directory %s does not exist." %
                      self.storage_root)
        raise IOError("Storage root directory does not exist")
    # There is only one tree for the content files - metadata is in Redis
    for tree_name in (self.NDO_DIR,):
        tree_root = "%s%s" % (self.storage_root, tree_name)
        if not os.path.isdir(tree_root):
            self.loginfo("Creating object cache tree directory: %s" %
                         tree_root)
            try:
                os.mkdir(tree_root, 0755)
            except Exception, e:
                self.logerror("Unable to create tree directory %s : %s." %
                              (tree_root, str(e)))
                raise
        for auth_name in NIname.get_all_algs():
            dir_name = "%s%s" % (tree_root, auth_name)
            if not os.path.isdir(dir_name):
                self.loginfo("Creating object cache directory: %s" % dir_name)
                try:
                    os.mkdir(dir_name, 0755)
                except Exception, e:
                    self.logerror("Unable to create cache directory %s : %s." %
                                  (dir_name, str(e)))
                    raise
            elif not os.access(dir_name, os.R_OK | os.W_OK | os.X_OK):
                self.logerror("Existing cache directory %s does not have rwx "
                              "access permissions." % dir_name)
                raise OSError("Cannot access cache directory")
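    # Illustrative note (not from the source): with a hypothetical
    # storage_root of "/tmp/cache" and an NDO_DIR of "/ndo_dir/", a
    # successful run leaves a tree like:
    #
    #   /tmp/cache/ndo_dir/sha-256/
    #   /tmp/cache/ndo_dir/sha-256-128/
    #   ...
    #
    # with one subdirectory per algorithm returned by NIname.get_all_algs().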
def cache_list(self, alg_list = None):
    """
    @brief Construct a dictionary listing current cache contents for
           specified digest algorithms
    @param alg_list list of strings of digest algorithm names or None (= all)
    @return dictionary with listing or None

    Check if alg_list contains valid names - or get all from NIname.
    Return None if no valid names.

    Read the metadata directory entries for selected algorithm names.

    Build a dictionary with an entry for each selected algorithm name.
    Value for each is an array of objects with two entries:
    - "dgst": digest (ni format)
    - "ce":   boolean indicating if content file exists

    Return the dictionary constructed if alg_list contains known algorithms.
    Return None if anything goes wrong and log an informational message.

    Note that we don't use the lock here. At present cache entries are
    never explicitly deleted so the worst that can happen is that the
    listing is shy of a (very) few last-microsecond entries.
    """
    all_algs = NIname.get_all_algs()
    if alg_list is None:
        alg_list = all_algs
    else:
        for alg in alg_list:
            if not alg in all_algs:
                self.loginfo("cache_list: Unknown algorithm name requested %s" %
                             alg)
                return None
    rslt = {}
    for alg in alg_list:
        mfd = "%s%s%s" % (self.storage_root, self.META_DIR, alg)
        cfd = "%s%s%s" % (self.storage_root, self.NDO_DIR, alg)
        entries = []
        # All cache entries are required to have a metadata file,
        # and may have a content file
        try:
            for dgst in os.listdir(mfd):
                ce = os.path.isfile("%s/%s" % (cfd, dgst))
                entries.append({ "dgst": dgst, "ce": ce })
        except Exception, e:
            self.logerror("cache_list: error while listing for alg %s: %s" %
                          (alg, str(e)))
            return None
        rslt[alg] = entries
    return rslt
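    # A minimal usage sketch (assumes a configured cache instance such as
    # the RedisNetInfCache built in the test harness below): list sha-256
    # entries and report which digests have a content file cached.
    #
    #   listing = cache_inst.cache_list(["sha-256"])
    #   if listing is not None:
    #       for entry in listing["sha-256"]:
    #           print "%s content cached: %s" % (entry["dgst"], entry["ce"])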
    sys.exit(1)

import redis
redis_conn = redis.StrictRedis()

cache_inst = RedisNetInfCache(storage_root, logger)
cache_inst.set_redis_conn(redis_conn)

f = open(cache_inst.temp_path + "temp", "w")
f.write("some_text")
f.close()

cache_inst2 = RedisNetInfCache(storage_root, logger)
cache_inst2.set_redis_conn(redis_conn)
if os.path.isfile(cache_inst2.temp_path + "temp"):
    print "Temporaries not cleared"

#----------------------------------------------------------------------------#
ni_url = "ni://mumble.org/sha-256-32;uzFqGA"
ni_name = NIname(ni_url)
ni_name.validate_ni_url(has_params=True)
ni_name_uv = NIname(ni_url)
ni_name_np = NIname("ni:///sha-256")
ni_name_np.validate_ni_url(has_params=False)
print(str(ni_name.get_url()))

md = NetInfMetaData(ni_name.get_canonical_ni_url(), "now",
                    loc1="http://www.example.com",
                    extrameta={ "something" : "else" })
print(md)

md1 = NetInfMetaData("ni://abr.org/sha_256_64;fjhaie8978", "now",
                     loc2="https://zzz.mumble.org",
                     extrameta={ "something" : "else" })
print md1
try:
def py_niget():
    """
    @brief Command line program to perform a NetInf 'get' operation using http
    @brief convergence layer.

    Uses NIproc global instance of NI operations class

    Run:
    >  niget.py --help

    to see usage and options.

    Exit code is 0 for success, 1 if HTTP returned something other than 200,
    and negative for local errors.
    """

    # Options parsing and verification stuff
    usage = "%prog [-q] [-l] [-d] [-m|-v] [-f <pathname of content file>] <ni name>\n" + \
            "<ni name> must include location (netloc) from which to retrieve object."
    parser = OptionParser(usage)
    parser.add_option("-f", "--file", dest="file_name",
                      type="string",
                      help="File to hold retrieved content. Defaults to hash code in current directory if not present")
    parser.add_option("-q", "--quiet", default=False,
                      action="store_true", dest="quiet",
                      help="Suppress textual output")
    parser.add_option("-s", "--server", dest="server",
                      type="string",
                      help="hostname:port of server to send the NetInf GET to")
    parser.add_option("-l", "--lax", default=False,
                      action="store_true", dest="lax",
                      help="Store returned content even if digest doesn't validate")
    parser.add_option("-m", "--metadata", default=False,
                      action="store_true", dest="metadata",
                      help="Output returned metadata as JSON string")
    parser.add_option("-v", "--view", default=False,
                      action="store_true", dest="view",
                      help="Pretty print returned metadata.")
    parser.add_option("-d", "--dump", default=False,
                      action="store_true", dest="dump",
                      help="Dump raw HTTP response to stdout.")

    (options, args) = parser.parse_args()

    # Check command line options - -q, -f, -l, -m, -v and -d are optional,
    # <ni name> is mandatory
    if len(args) != 1:
        parser.error("URL <ni name> not specified.")
        sys.exit(-1)
    verbose = not options.quiet

    # Create NIname instance for supplied URL and validate it
    ni_url = NIname(args[0])

    # Must be a complete ni: URL with non-empty params field
    rv = ni_url.validate_ni_url(has_params = True)
    if (rv != ni_errs.niSUCCESS):
        if verbose:
            print("Error: %s is not a complete, valid ni scheme URL: %s" %
                  (ni_url.get_url(), ni_errs_txt[rv]))
        sys.exit(-2)

    # Generate file name for output if not specified
    if (options.file_name == None):
        options.file_name = ni_url.get_digest()

    # Generate NetInf form access URL
    if (options.server != None):
        server = options.server
    else:
        server = ni_url.get_netloc()
    http_url = "http://%s/netinfproto/get" % server
    """
    if (http_url == None):
        if verbose:
            print("Error: Unable to generate http: transformed URL for ni URL %s" %
                  ni_urlparse.get_url())
        sys.exit(-3)
    """

    # Set up HTTP form data for get request
    form_data = urllib.urlencode({ "URI":   ni_url.get_url(),
                                   "msgid": random.randint(1, 32000),
                                   "ext":   "" })

    # Send POST request to destination server
    try:
        http_object = urllib2.urlopen(http_url, form_data)
    except Exception, e:
        if verbose:
            print("Error: Unable to access http URL %s: %s" % (http_url, str(e)))
        sys.exit(-4)
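# Illustrative invocations (hosts and digests are placeholders, not real
# values):
#
#   niget.py ni://cache.example.com/sha-256;<digest>
#   niget.py -s cache.example.com:8080 -m 'ni:///sha-256;<digest>'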
def py_nicl():
    """
    @brief Command line program to generate and validate digests in ni: URLs.

    Uses NIproc global instance of NI operations class

    Run:
    >  nicl.py --help

    to see usage and options.
    """

    # Options parsing and verification stuff
    usage = "%prog [-g|-w|-v] -n <name> -f <pathname of content file> [-V]\n"
    usage = usage + "       %prog -m -n <name> [-V]\n"
    usage = usage + "       %prog -b -s <suite_number> -f <pathname of content file> [-V]\n"
    usage = usage + "       The name can be either an ni: or nih: scheme URI\n"
    usage = usage + "       Return code: success 0, failure non-zero (-V for more info)\n"
    usage = usage + "       Available hashalg (suite number) options:\n"
    usage = usage + "       %s" % NIname.list_algs()

    parser = OptionParser(usage)
    parser.add_option("-g", "--generate", default=False,
                      action="store_true", dest="generate",
                      help="Generate hash based on content file, " + \
                           "and output name with encoded hash after the hashalg string")
    parser.add_option("-w", "--well-known", default=False,
                      action="store_true", dest="well_known",
                      help="Generate hash based on content file, " + \
                           "and output name with encoded hash in the .well_known URL " + \
                           "after the hashalg string. Applies to ni: scheme only.")
    parser.add_option("-v", "--verify", default=False,
                      action="store_true", dest="verify",
                      help="Verify hash in name is correct for content file")
    parser.add_option("-m", "--map", default=False,
                      action="store_true", dest="map_wkn",
                      help="Maps from an ni: name to a .well-known URL")
    parser.add_option("-b", "--binary", default=False,
                      action="store_true", dest="bin",
                      help="Outputs the name in binary format for a given suite number")
    parser.add_option("-V", "--verbose", default=False,
                      action="store_true", dest="verbose",
                      help="Be more long winded.")
    parser.add_option("-n", "--ni-name", dest="ni_name",
                      type="string",
                      help="The ni name template for (-g) or ni name matching (-v) content file.")
    parser.add_option("-f", "--file", dest="file_name",
                      type="string",
                      help="File with content data named by ni name.")
    parser.add_option("-s", "--suite-no", dest="suite_no",
                      type="int",
                      help="Suite number for hash algorithm to use.")

    (opts, args) = parser.parse_args()

    if not (opts.generate or opts.well_known or opts.verify or
            opts.map_wkn or opts.bin):
        parser.error("Must specify one of -g/--generate, -w/--well-known, "
                     "-v/--verify, -m/--map or -b/--binary.")
    if opts.generate or opts.well_known or opts.verify:
        if (opts.ni_name == None) or (opts.file_name == None):
            parser.error("Must specify both name and content file name for -g, -w or -v.")
    if opts.map_wkn:
        if (opts.ni_name == None):
            parser.error("Must specify ni name for -m.")
    if opts.bin:
        if (opts.suite_no == None) or (opts.file_name == None):
            parser.error("Must specify both suite number and content file name for -b.")
    if len(args) != 0:
        parser.error("Too many or unrecognised arguments specified")

    # Execute requested action
    if opts.generate:
        n = NIname(opts.ni_name)
        ret = NIproc.makenif(n, opts.file_name)
        if ret == ni_errs.niSUCCESS:
            if opts.verbose:
                print("Name generated successfully.")
            print "%s" % n.get_url()
            sys.exit(0)
        if opts.verbose:
            print "Name could not be successfully generated."
    elif opts.well_known:
        n = NIname(opts.ni_name)
        if n.get_scheme() == "nih":
            if opts.verbose:
                print "Only applicable to ni: scheme names."
            sys.exit(1)
        ret = NIproc.makenif(n, opts.file_name)
        if ret == ni_errs.niSUCCESS:
            if opts.verbose:
                print("Name generated successfully.")
            print "%s" % n.get_wku_transform()
            sys.exit(0)
        if opts.verbose:
            print "Name could not be successfully generated."
    elif opts.verify:
        n = NIname(opts.ni_name)
        ret = NIproc.checknif(n, opts.file_name)
        if ret == ni_errs.niSUCCESS:
            if opts.verbose:
                print("Name matches content file.")
            print "%s" % n.get_url()
            sys.exit(0)
        if opts.verbose:
            print "Check of name against content failed."
    elif opts.map_wkn:
        n = NIname(opts.ni_name)
        ret = n.validate_ni_url(has_params = True)
        if ret == ni_errs.niSUCCESS:
            if n.get_scheme() == "nih":
                if opts.verbose:
                    print "Only applicable to ni: scheme names."
                sys.exit(1)
            if opts.verbose:
                print("Name validated successfully.")
            print "%s" % n.get_wku_transform()
            sys.exit(0)
        else:
            if opts.verbose:
                print "Name could not be successfully validated."
    elif opts.bin:
        (ret, bin_name) = NIproc.makebnf(opts.suite_no, opts.file_name)
        if ret == ni_errs.niSUCCESS:
            if opts.verbose:
                print("Name generated successfully.")
            print base64.b16encode(str(bin_name))
            sys.exit(0)
        else:
            if opts.verbose:
                print "Name could not be successfully generated."
    else:
        print "Should not have happened"
        sys.exit(2)

    # Print appropriate error message
    if opts.verbose:
        print "Error: %s" % ni_errs_txt[ret]
    sys.exit(1)

    sys.exit(0)
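# Illustrative invocations, following the usage string above (file names and
# digests are placeholders):
#
#   nicl.py -g -n 'ni://example.com/sha-256' -f myfile.txt          # generate
#   nicl.py -v -n 'ni://example.com/sha-256;<digest>' -f myfile.txt # verify
#   nicl.py -b -s 1 -f myfile.txt          # binary name for suite number 1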
def py_nigetalt():
    """
    @brief Command line program to perform a NetInf 'get' operation using http
    @brief convergence layer.

    Uses NIproc global instance of NI operations class

    Run:
    >  nigetalt.py --help

    to see usage and options.

    Exit code is 0 for success, 1 if HTTP returned something other than 200,
    and negative for local errors.
    """

    # Options parsing and verification stuff
    usage = "%prog [-q] [-l] [-d] [-m|-v] [-f <pathname of content file>] [-w <locator>] <ni name>\n" + \
            "Either <ni name> must include location (netloc) from which to retrieve object, or\n" + \
            "a locator must be given with the -w/--whence option.\n" + \
            "The locator may be prefixed with an HTTP ('http://') or DTN ('dtn://') URI scheme identifier.\n" + \
            "If no scheme identifier is given then HTTP is assumed. The DTN scheme does not accept ports."
    parser = OptionParser(usage)
    parser.add_option("-f", "--file", dest="file_name",
                      type="string",
                      help="File to hold retrieved content. Defaults to hash code in current directory if not present")
    parser.add_option("-w", "--whence", dest="loc",
                      type="string", default=None,
                      help="Locator to which to send NetInf GET request. May be prefixed with http:// or dtn://")
    parser.add_option("-q", "--quiet", default=False,
                      action="store_true", dest="quiet",
                      help="Suppress textual output")
    parser.add_option("-l", "--lax", default=False,
                      action="store_true", dest="lax",
                      help="Store returned content even if digest doesn't validate")
    parser.add_option("-m", "--metadata", default=False,
                      action="store_true", dest="metadata",
                      help="Output returned metadata as JSON string")
    parser.add_option("-v", "--view", default=False,
                      action="store_true", dest="view",
                      help="Pretty print returned metadata.")

    (options, args) = parser.parse_args()

    # Check command line options - -q, -f, -l, -m, and -v are optional,
    # <ni name> is mandatory
    # -w is optional if <ni name> contains a netloc
    if len(args) != 1:
        parser.error("URL <ni name> not specified.")
        sys.exit(-1)
    verbose = not options.quiet

    # Create NIname instance for supplied URL and validate it
    ni_url = NIname(args[0])

    # Must be a complete ni: URL with non-empty params field
    rv = ni_url.validate_ni_url(has_params = True)
    if (rv != ni_errs.niSUCCESS):
        if verbose:
            print("Error: %s is not a complete, valid ni scheme URL: %s" %
                  (ni_url.get_url(), ni_errs_txt[rv]))
        sys.exit(-2)

    # Generate file name for output if not specified
    if (options.file_name == None):
        options.file_name = ni_url.get_digest()

    # Decide Convergence Layer to use and locator to access
    netloc = ni_url.get_netloc()
    cl = HTTP_SCHEME
    if netloc == "":
        # Must have -w option
        if options.loc is None:
            if verbose:
                print("Error: Must provide a locator either in ni URI or via -w/--whence")
            sys.exit(-3)
        loc = options.loc.lower()
    elif options.loc is not None:
        if verbose:
            print("Warning: -w/--whence locator option overrides netloc in ni URI")
        loc = options.loc.lower()
    else:
        loc = netloc.lower()

    # See if URI scheme was specified
    if loc.startswith(HTTP_SCHEME):
        loc = loc[len(HTTP_SCHEME):]
    elif loc.startswith(DTN_SCHEME):
        loc = loc[len(DTN_SCHEME):]
        cl = DTN_SCHEME
    else:
        ssep = loc.find("://")
        if ssep != -1:
            if verbose:
                print("Error: Convergence Layer for scheme %s is not supported - use dtn or http" %
                      loc[:ssep])
            sys.exit(-4)
        # Default assume HTTP

    # Action the GET according to CL selected
    if cl == HTTP_SCHEME:
        json_report, got_content, faulty = get_via_http(ni_url, loc,
                                                        options.file_name,
                                                        verbose, options.lax)
    else:
        json_report, got_content, faulty = get_via_dtn(ni_url, loc,
                                                       options.file_name,
                                                       verbose, options.lax)

    if options.view:
        print("Returned metadata for %s:" % args[0])
        print json.dumps(json_report, indent = 4)
    elif options.metadata:
        print json.dumps(json_report, separators=(",", ":"))

    if not got_content:
        rv = 1
    elif faulty:
        rv = 2
    else:
        rv = 0

    if verbose and got_content:
        if not faulty:
            print("Content successfully retrieved and placed in file %s." %
                  options.file_name)
        else:
            print("Content retrieved and placed in file %s but digest didn't verify." %
                  options.file_name)
    elif verbose:
        print("Only metadata retrieved")

    sys.exit(rv)
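# Illustrative invocations (locators and digests are placeholders):
#
#   nigetalt.py ni://cache.example.com/sha-256;<digest>
#   nigetalt.py -w dtn://node.example.dtn 'ni:///sha-256;<digest>'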
def py_nipub():
    """
    @brief Command line program to perform a NetInf 'publish' operation using http
    @brief convergence layer.

    Uses NIproc global instance of NI operations class

    Run:
    >  nipub.py --help

    to see usage and options.

    Exit code is 0 for success, 1 if HTTP returned something other than 200,
    and negative for local errors.
    """

    # Options parsing and verification stuff
    usage = "%%prog %s\n       %%prog %s\n%s\n       %%prog %s\n       %%prog %s\n%s\n%s" % \
            ("[-q] [-e] [-j|-v|-w|-p] -f <pathname of content file> -d <digest alg> [-l <FQDN - locator>]{1,2}",
             "[-q] [-e] [-j|-v|-w|-p] [-f <pathname of content file>] -n <ni name> [-l <FQDN - locator>]{0,2}",
             "    -- publish file via NI URI over HTTP",
             "[-q] [-e] [-j|-v|-w|-p] -u <HTTP URI of content file> -d <digest alg> [-l <FQDN - locator>]{1,2}",
             "[-q] [-e] [-j|-v|-w|-p] [-u <HTTP URI of content file>] -n <ni name> [-l <FQDN - locator>]{0,2}",
             "    -- publish web content via NI URI over HTTP",
             "Send response as HTML document (-w), plain text (-p), or JSON (-v or -j)\n"
             "Unless -q is specified, the response is sent to standard output.\n"
             "For a JSON response, it can either be output as a 'raw' JSON string (-j) or pretty printed (-v).\n"
             "If none of -j, -v, -w or -p are specified, a raw JSON response will be requested.")
    parser = OptionParser(usage)
    parser.add_option("-f", "--file", dest="file_name",
                      type="string",
                      help="Pathname for local file to be published.")
    parser.add_option("-u", "--uri", dest="http_name",
                      type="string",
                      help="HTTP URL for content to be published.")
    parser.add_option("-d", "--digest", dest="hash_alg",
                      type="string",
                      help="Digest algorithm to be used to hash content "
                           "and create NI URI. Defaults to sha-256.")
    parser.add_option("-n", "--name", dest="ni_name",
                      type="string",
                      help="Complete ni name. If specified with a file or "
                           "HTTP URL, the digest generated from the content "
                           "will be checked against the digest in the name.")
    parser.add_option("-e", "--ext", dest="ext",
                      type="string",
                      help="A JSON encoded object to be sent as the 'ext' "
                           "parameter for the Publish message.")
    parser.add_option("-l", "--loc", dest="locs",
                      action="append", type="string",
                      help="An FQDN where NI might be retrieved. May be "
                           "zero to two if -n is present and has a non-empty netloc. "
                           "Otherwise must be one or two. HTTP is sent to first "
                           "loc if no authority in -n.")
    parser.add_option("-q", "--quiet", default=False,
                      action="store_true", dest="quiet",
                      help="Suppress textual output")
    parser.add_option("-j", "--json", default=False,
                      action="store_true", dest="json_raw",
                      help="Request response as JSON string and output raw JSON "
                           "string returned on stdout.")
    parser.add_option("-v", "--view", default=False,
                      action="store_true", dest="json_pretty",
                      help="Request response as JSON string and pretty print "
                           "JSON string returned on stdout.")
    parser.add_option("-w", "--web", default=False,
                      action="store_true", dest="html",
                      help="Request response as HTML document and output HTML "
                           "returned on stdout.")
    parser.add_option("-p", "--plain", default=False,
                      action="store_true", dest="plain",
                      help="Request response as plain text document and output text "
                           "returned on stdout.")

    (options, args) = parser.parse_args()

    # Check command line options:
    # Arguments -q, -e, -w, -p, -j and -v are optional; there must be one of
    # a -n with an authority in it or at least one -l.
    # Either -d or -n must be specified.
    # If -d is specified, there must be either a -f or a -u but not both at once.
    # If -n is specified, one of -f or -u may be specified.
    # No leftover arguments allowed.
    # Specifying more than one of -w, -p, -j and -v is inappropriate.
    if len(args) != 0:
        parser.error("Unrecognized arguments %s supplied." % str(args))
        sys.exit(-1)
    if ((options.locs is not None) and (len(options.locs) > 2)):
        parser.error("Initial version only supports two locators (-l/--loc).")
        sys.exit(-1)
    if ((options.ni_name == None) and (options.locs == None)):
        parser.error("Must specify a locator (-l/--loc) or a name (-n/--name) "
                     "with a netloc component to define where to send the request.")
        sys.exit(-1)
    if ((options.hash_alg != None) and (options.ni_name != None)):
        parser.error("Cannot specify both digest algorithm to be used (-d) "
                     "and complete ni name with algorithm and digest (-n).")
        sys.exit(-1)
    if ((options.hash_alg == None) and (options.ni_name == None)):
        parser.error("Must specify either digest algorithm to be used (-d) "
                     "or complete ni name with algorithm and digest (-n).")
        sys.exit(-1)
    if ((((options.ni_name == None) and (options.file_name == None) and
          (options.http_name == None))) or
        ((options.file_name != None) and (options.http_name != None))):
        parser.error("Exactly one of -f/--file and -u/--uri must be specified "
                     "with -d and optionally with -n.")
        sys.exit(-1)

    fc = 0
    for flag in [options.json_raw, options.json_pretty, options.html,
                 options.plain]:
        if flag:
            fc += 1
    if fc > 1:
        parser.error("Should specify at most one response type argument "
                     "out of -j, -v, -w and -p.")
        sys.exit(-1)

    file_name = None

    # **** -u is not implemented yet
    if options.http_name != None:
        target = options.http_name
        print "Web name as source (-u/--uri option) not yet implemented. Exiting."
        sys.exit(-2)

    if options.file_name != None:
        target = options.file_name
        file_name = options.file_name
        full_put = True
    else:
        target = None
        full_put = False
    debug("full_put: %s" % full_put)

    verbose = not options.quiet

    # If we have a full ni name (-n option) given..
    if options.ni_name is not None:
        # Check the validity of the ni name
        try:
            ni_name = NIname(options.ni_name)
        except Exception, e:
            if verbose:
                print("Error: value of -n/--name option '%s' is not a valid ni name" %
                      options.ni_name)
            sys.exit(-3)
        rv = ni_name.validate_ni_url()
        if rv != ni_errs.niSUCCESS:
            if verbose:
                print("Error: value of -n/--name option '%s' is not a valid ni name" %
                      options.ni_name)
            sys.exit(-3)

        # Extract the scheme and hash algorithm from the name
        scheme = ni_name.get_scheme()
        hash_alg = ni_name.get_alg_name()

        # If the ni name has a netloc in it then that is where to send;
        # if not, must have a loc
        nl = ni_name.get_netloc()
        if ((nl == "") and (options.locs == None)):
            print("Error: name (-n/--name) must have a netloc if no locator options are given.")
            sys.exit(-4)
        if nl != "":
            destination = nl
            authority = nl
        else:
            destination = options.locs[0]
            authority = ""
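    # Illustrative invocations of nipub.py (hosts and digests are
    # placeholders):
    #
    #   nipub.py -f myfile.txt -d sha-256 -l cache.example.com
    #   nipub.py -n 'ni://cache.example.com/sha-256;<digest>' -f myfile.txt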
                ct_msg = None

            # Extract JSON values from message
            # Check the message is an application/json
            if json_msg.get("Content-type") != "application/json":
                self.loginfo("do_fwd: weird content type: %s" %
                             json_msg.get("Content-type"))
                continue

            # Extract the JSON structure
            try:
                json_report = json.loads(json_msg.get_payload())
            except Exception, e:
                self.loginfo("do_fwd: can't decode json: %s" % str(e))
                continue

            curi = NIname(uri)
            curi.validate_ni_url()
            metadata = NetInfMetaData(curi.get_canonical_ni_url())
            self.loginfo("Metadata I got: %s" % str(json_report))
            metadata.insert_resp_metadata(json_report)

            # If I have the GET_RES role and there are locators then
            # follow those now
            if ct_msg == None and self.check_role(GET_RES):
                self.loginfo("I'm a GET_RES type of node - going to try follow")
                self.loginfo("meta: %s" % str(json_report))
                # check for locators
                locators = metadata.get_loclist()
                self.loginfo("locs: %s" % str(locators))
                # try follow locators
                for loc in locators:
def insert_resp_metadata(self, response):
    """
    @brief Insert metadata from GET-RESP message into internal form.
    @param response Either JSON format dict or JSON format string in
                    format of GET-RESP metadata.
    @retval None.

    The response is converted to a JSON dictionary and/or checked to
    verify that it is in good shape.

    Can be used either with an 'empty' NetInfMetaData instance identified
    by the 'ni' field being the empty string or with a previously
    populated entry.

    For an empty instance, the header fields ('ni', 'ct' and 'size') are
    populated from the response where possible (there may be no 'ct' or
    'size' in the response).

    The remainder of the information is converted into a new 'details'
    entry which is either used as the first entry or appended to the
    list if there are existing entries.

    @raises Exceptions if cannot parse response
    """
    if type(response) == StringType:
        resp_dict = json.loads(response)
    elif type(response) == DictType:
        resp_dict = response
        # Check the response is really a JSON dictionary
        js = json.dumps(response)
    else:
        raise TypeError("Parameter 'response' is not a string or dictionary")

    curr_ni = self.get_ni()
    resp_ni_name = NIname(resp_dict["ni"])
    ret = resp_ni_name.validate_ni_url()
    if ret != ni_errs.niSUCCESS:
        raise InvalidNIname("Response ni field '%s' is not a valid ni URI: %s" %
                            (resp_dict["ni"], ni_errs_txt[ret]))
    if curr_ni == "":
        # Empty metadata case
        self.json_obj["ni"] = resp_ni_name.get_canonical_ni_url()
        if resp_dict.has_key("ct"):
            self.json_obj["ct"] = resp_dict["ct"]
        if resp_dict.has_key("size"):
            self.json_obj["size"] = resp_dict["size"]
        self.json_obj["details"] = []
    else:
        # The metadata is not empty
        # Create validated NIname for the current metadata
        ni_name = NIname(curr_ni)
        # If this fails the metadata database is corrupt
        assert(ni_name.validate_ni_url() == ni_errs.niSUCCESS)
        if ni_name.cmp(resp_ni_name) == 0:
            # Update with data about same ni name
            if resp_dict.has_key("ct") and (resp_dict["ct"] != ""):
                if self.json_obj["ct"] == "":
                    self.json_obj["ct"] = resp_dict["ct"]
                elif self.json_obj["ct"] != resp_dict["ct"]:
                    raise MetadataMismatch("Content Type fields are unmatched")
            if resp_dict.has_key("size") and (resp_dict["size"] >= 0):
                if self.json_obj["size"] == -1:
                    self.json_obj["size"] = resp_dict["size"]
                elif self.json_obj["size"] != resp_dict["size"]:
                    raise MetadataMismatch("Size fields are unmatched")
        else:
            raise MetadataMismatch("NI name fields are unmatched curr: %s, got: %s" %
                                   (curr_ni, resp_dict["ni"]))

    new_detail = {}
    new_detail["loc"] = []
    for loc_key in ("loc", "loclist"):
        if resp_dict.has_key(loc_key):
            if type(resp_dict[loc_key]) == ListType:
                new_detail["loc"] = resp_dict[loc_key]
            else:
                raise TypeError("Response '%s' value is not a list" % loc_key)
    auth = resp_ni_name.get_netloc()
    if auth is not None and (auth != ""):
        new_detail["loc"].append(auth)
    if resp_dict.has_key("metadata"):
        if type(resp_dict["metadata"]) == DictType:
            new_detail["metadata"] = resp_dict["metadata"]
        else:
            raise TypeError("Response metadata is not an object dictionary")
    if resp_dict.has_key("searches"):
        if not new_detail.has_key("metadata"):
            new_detail["metadata"] = {}
        new_detail["metadata"]["search"] = resp_dict["searches"]
    new_detail["ts"] = self.metadata_timestamp_for_now()
    self.json_obj["details"].append(new_detail)
    self.curr_detail = new_detail
    return
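# A hedged usage sketch (field values are illustrative; assumes constructing
# a NetInfMetaData with no arguments yields an 'empty' instance whose 'ni'
# field is ""):
#
#   md = NetInfMetaData()
#   md.insert_resp_metadata({ "ni":      "ni://example.com/sha-256;<digest>",
#                             "ct":      "text/plain",
#                             "size":    11,
#                             "loclist": ["http://cache.example.com"] })
#   # md now carries 'ni', 'ct' and 'size' headers plus one 'details' entry
#   # whose "loc" list holds the loclist entries and the name's netloc.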
def py_nipubalt():
    """
    @brief Command line program to perform a NetInf 'publish' operation using http
    @brief convergence layer.

    Uses NIproc global instance of NI operations class

    Run:
    >  nipubalt.py --help

    to see usage and options.

    Exit code is 0 for success, 1 if HTTP returned something other than 200,
    and negative for local errors.
    """

    # Options parsing and verification stuff
    usage = "%%prog %s\n       %%prog %s\n%s\n%s" % \
            ("[-q] [-e] [-j|-v|-w|-p] -f <pathname of content file> -d <digest alg> [-l <FQDN - locator>]{1,2}",
             "[-q] [-e] [-j|-v|-w|-p] [-f <pathname of content file>] -n <ni name> [-l <FQDN - locator>]{0,2}",
             "    -- publish file via NI URI over HTTP and/or DTN",
             "At least one locator must be given either as part of the -n option or via a -l option.\n"
             "Locators given with -l options can optionally be prefixed with the HTTP scheme (http://) or\n"
             "the DTN scheme (dtn://). If a -l option is given, this is used to determine the initial\n"
             "publication destination and the convergence layer used will be HTTP unless the -l option\n"
             "explicitly gives the DTN scheme prefix. If there are no -l options but the -n option has\n"
             "a netloc component (FQDN or IP address with optional port) then this will be used with the\n"
             "HTTP convergence layer.\n"
             "The response will be sent as HTML document (-w), plain text (-p), or JSON (-v or -j)\n"
             "Unless -q is specified, the response is sent to standard output.\n"
             "For a JSON response, it can either be output as a 'raw' JSON string (-j) or pretty printed (-v).\n"
             "If none of -j, -v, -w or -p are specified, a raw JSON response will be requested.")
    parser = OptionParser(usage)
    parser.add_option("-f", "--file", dest="file_name",
                      type="string",
                      help="Pathname for local file to be published.")
    parser.add_option("-d", "--digest", dest="hash_alg",
                      type="string",
                      help="Digest algorithm to be used to hash content "
                           "and create NI URI. Defaults to sha-256.")
    parser.add_option("-n", "--name", dest="ni_name",
                      type="string",
                      help="Complete ni name. If specified with a file or "
                           "HTTP URL, the digest generated from the content "
                           "will be checked against the digest in the name.")
    parser.add_option("-e", "--ext", dest="ext",
                      type="string",
                      help="A JSON encoded object to be sent as the 'ext' "
                           "parameter for the Publish message.")
    parser.add_option("-l", "--loc", dest="locs",
                      action="append", type="string",
                      help="A locator where NI might be retrieved. May be "
                           "zero to two if -n is present and has a non-empty netloc. "
                           "Otherwise must be one or two. HTTP or DTN is sent to first "
                           "loc if present. Otherwise sent to netloc (authority) in -n. "
                           "NOTE: this precedence differs from earlier versions of nipub.")
    parser.add_option("-q", "--quiet", default=False,
                      action="store_true", dest="quiet",
                      help="Suppress textual output")
    parser.add_option("-j", "--json", default=False,
                      action="store_true", dest="json_raw",
                      help="Request response as JSON string and output raw JSON "
                           "string returned on stdout.")
    parser.add_option("-v", "--view", default=False,
                      action="store_true", dest="json_pretty",
                      help="Request response as JSON string and pretty print "
                           "JSON string returned on stdout.")
    parser.add_option("-w", "--web", default=False,
                      action="store_true", dest="html",
                      help="Request response as HTML document and output HTML "
                           "returned on stdout.")
    parser.add_option("-p", "--plain", default=False,
                      action="store_true", dest="plain",
                      help="Request response as plain text document and output text "
                           "returned on stdout.")

    (options, args) = parser.parse_args()

    # Check command line options:
    # Arguments -q, -e, -w, -p, -j and -v are optional; there must be one of
    # a -n with an authority in it or at least one -l.
    # If -n option is specified then there must not be a -d.
    # If -d is specified, there must be a -f.
    # If -n is specified, -f may be specified - otherwise only metadata
    # is published.
    # No leftover arguments allowed.
    # Specifying more than one of -w, -p, -j and -v is inappropriate.
    if len(args) != 0:
        parser.error("Unrecognized arguments %s supplied." % str(args))
        sys.exit(-1)
    if ((options.locs is not None) and (len(options.locs) > 2)):
        parser.error("Initial version only supports two locators (-l/--loc).")
        sys.exit(-1)
    if ((options.ni_name == None) and (options.locs == None)):
        parser.error("Must specify a locator (-l/--loc) or a name (-n/--name) "
                     "with a netloc component to define where to send the request.")
        sys.exit(-1)
    if ((options.hash_alg != None) and (options.ni_name != None)):
        parser.error("Cannot specify both digest algorithm to be used (-d) "
                     "and complete ni name with algorithm and digest (-n).")
        sys.exit(-1)

    fc = 0
    for flag in [options.json_raw, options.json_pretty, options.html,
                 options.plain]:
        if flag:
            fc += 1
    if fc > 1:
        parser.error("Should specify at most one response type argument "
                     "out of -j, -v, -w and -p.")
        sys.exit(-1)

    verbose = not options.quiet

    file_name = None
    if options.file_name != None:
        file_name = os.path.abspath(options.file_name)
        # Check the file is readable
        if not os.access(file_name, os.R_OK):
            if verbose:
                print("File to be published %s is not readable" % file_name)
            sys.exit(1)
        full_put = True
    else:
        full_put = False
    debug("full_put: %s" % full_put)

    if ((options.locs is not None) and (len(options.locs) > 2)):
        if verbose:
            print "Warning: only first two -l/--loc locators will be published"

    # If we have a full ni name (-n option) given..
    if options.ni_name is not None:
        # Check the validity of the ni name
        try:
            ni_name = NIname(options.ni_name)
        except Exception, e:
            if verbose:
                print("Error: value of -n/--name option '%s' is not a valid ni name" %
                      options.ni_name)
            sys.exit(-3)
        rv = ni_name.validate_ni_url()
        if rv != ni_errs.niSUCCESS:
            if verbose:
                print("Error: value of -n/--name option '%s' is not a valid ni name" %
                      options.ni_name)
            sys.exit(-3)

        # Extract the scheme and hash algorithm from the name
        scheme = ni_name.get_scheme()
        hash_alg = ni_name.get_alg_name()

        # If there is a -l option, that is where the request is sent.
        nl = ni_name.get_netloc()
        if ((options.locs is None) and (nl == "")):
            print("Error: name (-n/--name) must have a netloc if no locator options are given.")
            sys.exit(-4)

        # NOTE: The following logic is reversed from earlier versions so
        # that one can force use of the DTN convergence layer with a -l
        # option.
        if nl == "":
            # Already checked this exists
            destination = options.locs[0]
        else:
            destination = nl
        authority = nl
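        # Illustrative example (placeholder values): with
        # -n 'ni:///sha-256;<digest>' (empty netloc) and
        # -l 'dtn://node.example.dtn', destination becomes the -l locator,
        # authority stays "", and the dtn:// prefix later selects the DTN
        # convergence layer as described in the usage text above.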
def publish_with_dtn(ni_name, destination, authority, hash_alg, ext_json,
                     locs, scheme, full_put, file_name, rform, verbose):
    """
    @brief Action a NetInf publish request using the DTN convergence layer
    @param ni_name NIname object instance or None - ni URI to publish if
                   given on command line - otherwise will be constructed
    @param destination string netloc (FQDN or IP address with optional port)
                       indicating where to send publish request
    @param authority string netloc component to insert into ni name (may be "")
    @param hash_alg string name of hash algorithm used for ni URI
    @param ext_json dictionary additional information to send with request
                    if any in the form of a JSON dictionary or None
    @param locs list of strings with locators to publish - may be None
    @param scheme URI scheme used for ni URI
    @param full_put boolean True if the file_name with the content was given
    @param file_name string name of file to publish or None if only doing metadata
    @param rform string request format of response
    @param verbose boolean indicates how much error message output is produced
    @return 2-tuple
            - target  - string the actual ni name published
            - payload - string - the response received on publication
    """
    debug("Publishing via: %s" % destination)

    # Handle full_put = True cases - we have a file with the octets in it
    if full_put:
        if ni_name is None:
            # Make a ni_name template from specified components
            ni_name = NIname((scheme, authority, "/%s" % hash_alg))

            # Construct the digest from the file name and the template
            rv = NIproc.makenif(ni_name, file_name)
            if rv != ni_errs.niSUCCESS:
                raise PublishFailure("Unable to construct digest of file %s: %s" %
                                     (file_name, ni_errs_txt[rv]), -20)
        else:
            # Check the ni_name and the file match
            rv = NIproc.checknif(ni_name, file_name)
            if rv != ni_errs.niSUCCESS:
                raise PublishFailure("Digest of file %s does not match ni_name %s: %s" %
                                     (file_name, ni_name.get_url(),
                                      ni_errs_txt[rv]), -21)

        # Guess the mimetype of the file
        m = magic.Magic(mime=True)
        ctype = m.from_file(file_name)
        debug("Content-Type: %s" % ctype)
        if ctype is None:
            # Guessing didn't work - default
            ctype = "application/octet-stream"
    else:
        ctype = None

    target = ni_name.get_canonical_ni_url()
    debug("Using URI string: %s" % target)

    # Add extra items to ext_json to pass across as metadata
    ext_json["ni"] = target
    if ctype is not None:
        ext_json["ct"] = ctype
    if authority != "":
        ext_json["http_auth"] = authority
    # Send at most two locators as a list
    if (locs is not None):
        ext_json["loclist"] = locs[:2]
    ext_json["fullPut"] = full_put
    ext_json["rform"] = rform

    # Create a connection to the DTN daemon
    dtn_handle = dtnapi.dtn_open()
    if dtn_handle == -1:
        raise PublishFailure("Error: unable to open connection with DTN daemon",
                             -22)

    # Generate EID + service tag for service to be accessed via DTN
    if destination is None:
        remote_service_eid = \
            dtnapi.dtn_build_local_eid(dtn_handle,
                                       "netinfproto/service/publish")
        i = remote_service_eid.find("/netinfproto")
        destination = remote_service_eid[:i]
    else:
        remote_service_eid = destination + "/netinfproto/service/publish"

    # Add destination to locs if it isn't there already
    if locs is None:
        locs = []
    if destination not in locs:
        locs.append(destination)

    # Generate the EID and service tag for this service
    local_service_eid = dtnapi.dtn_build_local_eid(dtn_handle,
                                                   "netinfproto/app/response")
    debug("Local Service EID: %s" % local_service_eid)
    debug("Remote Service EID: %s" % remote_service_eid)

    # Check if service_eid registration exists and register if not
    # Otherwise bind to the existing registration
    regid = dtnapi.dtn_find_registration(dtn_handle, local_service_eid)
    if (regid == -1):
        # Need to register the EID.. make it permanent with 'DEFER'
        # characteristics so that bundles are saved if they arrive
        # while the handler is inactive
        # Expire the registration an hour in the future
        exp = 60 * 60
        # The registration is immediately active
        passive = False
        # We don't want to execute a script
        script = ""

        regid = dtnapi.dtn_register(dtn_handle, local_service_eid,
                                    dtnapi.DTN_REG_DEFER, exp, passive,
                                    script)
    else:
        dtnapi.dtn_bind(dtn_handle, regid)

    # Build the bundle to send
    # First a suitable BPQ block
    bpq = BPQ()
    bpq.set_bpq_kind(BPQ.BPQ_BLOCK_KIND_PUBLISH)
    bpq.set_matching_rule(BPQ.BPQ_MATCHING_RULE_EXACT)
    bpq.set_src_eid(local_service_eid)
    sent_msgid = str(random.randint(1, 32000))
    bpq.set_bpq_id(sent_msgid)
    bpq.set_bpq_val(target)
    bpq.clear_frag_desc()

    # Build an extension blocks structure to hold the block
    ext_blocks = dtnapi.dtn_extension_block_list(1)

    # Construct the extension block
    bpq_block = dtnapi.dtn_extension_block()
    bpq_block.type = QUERY_EXTENSION_BLOCK
    bpq_block.flags = 0
    bpq_block.data = bpq.build_for_net()
    ext_blocks.blocks.append(bpq_block)

    # Build an extension blocks structure to hold the block
    meta_blocks = dtnapi.dtn_extension_block_list(2)

    # Build a metadata block for JSON data
    md = Metadata()
    md.set_ontology(Metadata.ONTOLOGY_JSON)
    md.set_ontology_data(json.dumps(ext_json))
    json_block = dtnapi.dtn_extension_block()
    json_block.type = METADATA_BLOCK
    json_block.flags = 0
    json_block.data = md.build_for_net()
    meta_blocks.blocks.append(json_block)

    # Set up payload and placeholder if needed
    if full_put:
        # No placeholder required (obviously!)
        pt = dtnapi.DTN_PAYLOAD_FILE
        pv = file_name
    else:
        # DTN bundle always has a payload - distinguish
        # zero length file from no content available
        # Payload is the empty string sent via memory
        pt = dtnapi.DTN_PAYLOAD_MEM
        pv = ""
        # Add a payload placeholder metablock
        md = Metadata()
        md.set_ontology(Metadata.ONTOLOGY_PAYLOAD_PLACEHOLDER)
        md.set_ontology_data("No content supplied")
        pp_block = dtnapi.dtn_extension_block()
        pp_block.type = METADATA_BLOCK
        pp_block.flags = 0
        pp_block.data = md.build_for_net()
        meta_blocks.blocks.append(pp_block)

    # We want delivery reports and publication reports
    # (and maybe deletion reports?)
    dopts = dtnapi.DOPTS_DELIVERY_RCPT | dtnapi.DOPTS_PUBLICATION_RCPT

    # - Send with normal priority.
    pri = dtnapi.COS_NORMAL

    # NetInf bundles should last a while..
    exp = (24 * 60 * 60)

    # Send the bundle
    bundle_id = dtnapi.dtn_send(dtn_handle, regid, local_service_eid,
                                remote_service_eid, local_service_eid,
                                pri, dopts, exp, pt, pv, ext_blocks,
                                meta_blocks, "", "")
    if bundle_id == None:
        raise PublishFailure("dtn_send failed - %s" %
                             dtnapi.dtn_strerror(dtnapi.dtn_errno(dtn_handle)),
                             -23)

    # Wait for a response - maybe also some reports
    while(True):
        # NOTE: BUG in dtnapi - timeout is in msecs
        recv_timeout = 2000 * 60
        bpq_bundle = dtnapi.dtn_recv(dtn_handle, dtnapi.DTN_PAYLOAD_FILE,
                                     recv_timeout)
        # If bpq_bundle is None then either the dtn_recv timed out or
        # there was some other error.
        if bpq_bundle != None:
            # Filter out report bundles
            if bpq_bundle.status_report != None:
                debug("Received status report")
                if bpq_bundle.status_report.flags == dtnapi.STATUS_DELIVERED:
                    if verbose:
                        print("Received delivery report re from %s sent %d seq %d" %
                              (bpq_bundle.status_report.bundle_id.source,
                               bpq_bundle.status_report.bundle_id.creation_secs,
                               bpq_bundle.status_report.bundle_id.creation_seqno))
                elif bpq_bundle.status_report.flags == dtnapi.STATUS_DELETED:
                    if verbose:
                        print("Received deletion report re from %s sent %d seq %d" %
                              (bpq_bundle.status_report.bundle_id.source,
                               bpq_bundle.status_report.bundle_id.creation_secs,
                               bpq_bundle.status_report.bundle_id.creation_seqno))
                elif bpq_bundle.status_report.flags == dtnapi.STATUS_PUBLISHED:
                    if verbose:
                        print("Received publication report re from %s sent %d seq %d" %
                              (bpq_bundle.status_report.bundle_id.source,
                               bpq_bundle.status_report.bundle_id.creation_secs,
                               bpq_bundle.status_report.bundle_id.creation_seqno))
                else:
                    if verbose:
                        print("Received unexpected report: Flags: %d" %
                              bpq_bundle.status_report.flags)
                # Wait for more status reports and incoming response
                continue

            # Check the payload really is in a file
            if not bpq_bundle.payload_file:
                raise PublishFailure("Received bundle payload not in file - "
                                     "ignoring bundle", -24)

            # Have to delete this file before an error exit or if empty
            pfn = bpq_bundle.payload
            l = len(pfn)
            if pfn[l-1] == "\x00":
                pfn = pfn[:-1]
            debug("Got incoming bundle with response in file %s" % pfn)

            # Does the bundle have a BPQ block
            bpq_data = None
            if bpq_bundle.extension_cnt == 0:
                os.remove(pfn)
                raise PublishFailure("Error: Received bundle with no "
                                     "extension block.", -25)

            for blk in bpq_bundle.extension_blks:
                if blk.type == QUERY_EXTENSION_BLOCK:
                    bpq_data = BPQ()
                    if not bpq_data.init_from_net(blk.data):
                        os.remove(pfn)
                        raise PublishFailure("Error: Bad BPQ block received",
                                             -26)

            if bpq_data is None:
                os.remove(pfn)
                raise PublishFailure("Error: Received bundle with no BPQ block "
                                     "in extension blocks", -27)
            debug(bpq_data)

            # OK.. got the response - finish with daemon
            break
        elif dtnapi.dtn_errno(dtn_handle) != dtnapi.DTN_ETIMEOUT:
            raise PublishFailure(dtnapi.dtn_strerror(dtnapi.dtn_errno(dtn_handle)),
                                 -28)
        else:
            raise PublishFailure("dtn_recv timed out without receiving "
                                 "response bundle", 1)
    dtnapi.dtn_close(dtn_handle)

    # Check the BPQ data is right
    bpq.set_bpq_kind(BPQ.BPQ_BLOCK_KIND_PUBLISH)
    bpq.set_matching_rule(BPQ.BPQ_MATCHING_RULE_EXACT)
    if bpq_data.bpq_kind != BPQ.BPQ_BLOCK_KIND_PUBLISH:
        raise PublishFailure("Returned BPQ block is not PUBLISH kind: %d" %
                             bpq_data.bpq_kind, -29)
    if bpq_data.matching_rule != BPQ.BPQ_MATCHING_RULE_NEVER:
        raise PublishFailure("Returned BPQ block does not have NEVER matching rule: %d" %
                             bpq_data.matching_rule, -30)
    if bpq_data.bpq_id != sent_msgid:
        raise PublishFailure("Returned BPQ block has mismatched msgid %s vs %s" %
                             (bpq_data.bpq_id, sent_msgid), -31)

    # Verify the format of the response (a bit)
    try:
        pfd = open(pfn, "rb")
        payload = pfd.read()
        pfd.close()
        os.remove(pfn)
    except Exception, e:
        raise PublishFailure("Failed to read response from payload file %s" %
                             pfn, -32)
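    # A hedged sketch (not from the source) of how a caller might consume
    # the (target, payload) tuple this function returns per its docstring,
    # assuming the response payload carries JSON as requested via rform:
    #
    #   try:
    #       target, payload = publish_with_dtn(ni_name, destination,
    #                                          authority, hash_alg, ext_json,
    #                                          locs, scheme, full_put,
    #                                          file_name, "json", verbose)
    #       resp = json.loads(payload)
    #   except PublishFailure, pf:
    #       # report the failure and exit with its embedded error code
    #       pass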