Example 1
 f = open(cache_inst.temp_path+"temp", "w")
 f.write("some_text")
 f.close()
 cache_inst2 = RedisNetInfCache(storage_root, logger)
 cache_inst2.set_redis_conn(redis_conn)
 if os.path.isfile(cache_inst2.temp_path+"temp"):
     print"Temporaries not cleared"
     
 #---------------------------------------------------------------------------#
 ni_url = "ni://mumble.org/sha-256-32;uzFqGA"
 ni_name = NIname(ni_url)
 ni_name.validate_ni_url(has_params=True)
 ni_name_uv = NIname(ni_url)
 ni_name_np = NIname("ni:///sha-256")
 ni_name_np.validate_ni_url(has_params=False)
 print(str(ni_name.get_url()))
 md = NetInfMetaData(ni_name.get_canonical_ni_url(), "now",
                     loc1="http://www.example.com",
                     extrameta={ "something" : "else" })
 print(md)
 md1 = NetInfMetaData("ni://abr.org/sha_256_64;fjhaie8978", "now",
                      loc2="https://zzz.mumble.org",
                      extrameta={ "something" : "else" })
 print md1
 
 try:
     m, f, n, i = cache_inst.cache_put(ni_name_uv, md, None)
     print "Fault: cache_put accepted unvalidated ni_name"
 except Exception, e:
     print "Error correctly detected: %s" % str(e)
 try:
Example 2
def py_niget():
    """
    @brief Command line program to perform a NetInf 'get' operation using the
    HTTP convergence layer.
    
    Uses NIproc global instance of NI operations class

    Run:
    
    >  niget.py --help

    to see usage and options.

    Exit code is 0 for success, 1 if HTTP returned something other than 200,
    and negative for local errors.
    """
    
    # Options parsing and verification stuff
    usage = "%prog [-q] [-l] [-d] [-m|-v] [-f <pathname of content file>] <ni name>\n" + \
            "<ni name> must include location (netloc) from which to retrieve object."
    parser = OptionParser(usage)
    
    parser.add_option("-f", "--file", dest="file_name",
                      type="string",
                      help="File to hold retrieved content. Defaults to hash code in current directory if not present")
    parser.add_option("-q", "--quiet", default=False,
                      action="store_true", dest="quiet",
                      help="Suppress textual output")
    parser.add_option("-s", "--server", dest="server",
                      type="string",
                      help="hostname:port of server to send the NetInf GET to")
    parser.add_option("-l", "--lax", default=False,
                      action="store_true", dest="lax",
                      help="Store returned content even if digest doesn't validate")
    parser.add_option("-m", "--metadata", default=False,
                      action="store_true", dest="metadata",
                      help="Output returned metadata as JSON string")
    parser.add_option("-v", "--view", default=False,
                      action="store_true", dest="view",
                      help="Pretty print returned metadata.")
    parser.add_option("-d", "--dump", default=False,
                      action="store_true", dest="dump",
                      help="Dump raw HTTP response to stdout.")

    (options, args) = parser.parse_args()

    # Check command line options - -q, -f, -l, -m, -v and -d are optional, <ni name> is mandatory
    if len(args) != 1:
        parser.error("URL <ni name> not specified.")
        sys.exit(-1)
    verbose = not options.quiet

    # Create NIname instance for supplied URL and validate it
    ni_url = NIname(args[0])

    # Must be a complete ni: URL with non-empty params field
    rv = ni_url.validate_ni_url(has_params = True)
    if (rv != ni_errs.niSUCCESS):
        if verbose:
            print("Error: %s is not a complete, valid ni scheme URL: %s" % (ni_url.get_url(), ni_errs_txt[rv]))
        sys.exit(-2)

    # Generate file name for output if not specified
    if (options.file_name == None):
        options.file_name = ni_url.get_digest()
        
    # Generate NetInf form access URL
    if (options.server != None):
        server = options.server
    else:
        server = ni_url.get_netloc()

    http_url = "http://%s/netinfproto/get" % server
    """
    if (http_url == None):
        if verbose:
            print("Error: Unable to generate http: transformed URL for ni URL %s" % ni_urlparse.get_url())
        sys.exit(-3)
    """
    
    # Set up HTTP form data for get request
    form_data = urllib.urlencode({ "URI":   ni_url.get_url(),
                                   "msgid": random.randint(1, 32000),
                                   "ext":   "" })

    # Send POST request to destination server
    try:
        http_object = urllib2.urlopen(http_url, form_data)
    except Exception, e:
        if verbose:
            print("Error: Unable to access http URL %s: %s" % (http_url, str(e)))
        sys.exit(-4)
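
The request assembled above can be reproduced in isolation. The sketch below, in the same Python 2 idiom as the example, posts the NetInf GET form to the /netinfproto/get endpoint; the server name is a placeholder, the ni URI is borrowed from Example 1, and handling of the response body is omitted because the example is truncated before that point.

# Minimal sketch of the NetInf GET request built by py_niget() above.
# "example.com" is a placeholder; the ni URI is the sample name from Example 1.
import random
import urllib
import urllib2

server = "example.com"                          # normally from -s or the ni netloc
ni_uri = "ni://mumble.org/sha-256-32;uzFqGA"    # sample ni name

http_url = "http://%s/netinfproto/get" % server
form_data = urllib.urlencode({ "URI":   ni_uri,
                               "msgid": random.randint(1, 32000),
                               "ext":   "" })
try:
    response = urllib2.urlopen(http_url, form_data)
    print "HTTP status: %d" % response.getcode()    # 200 expected on success
except Exception, e:
    print "GET request failed: %s" % str(e)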
Example 3
def py_nigetalt():
    """
    @brief Command line program to perform a NetInf 'get' operation using the
    HTTP convergence layer.
    
    Uses NIproc global instance of NI operations class

    Run:
    
    >  nigetalt.py --help

    to see usage and options.

    Exit code is 0 for success, 1 if HTTP returned something other than 200,
    2 if content was retrieved but its digest did not verify, and negative
    for local errors.
    """
    
    # Options parsing and verification stuff
    usage = "%prog [-q] [-l] [-d] [-m|-v] [-f <pathname of content file>] [-w <locator>] <ni name>\n" + \
            "Either <ni name> must include location (netloc) from which to retrieve object, or\n" + \
            "a locator must be given with the -w/--whence option.\n" + \
            "The locator may be prefixed with an HTTP ('http://')or DTN ('dtn://') URI scheme identifier.\n" + \
            "If no scheme identifier is given then HTTP is assumed.  The DTN scheme does not accept ports."
    parser = OptionParser(usage)
    
    parser.add_option("-f", "--file", dest="file_name",
                      type="string",
                      help="File to hold retrieved content. Defaults to hash code in current directory if not present")
    parser.add_option("-w", "--whence", dest="loc",
                      type="string", default=None,
                      help="Locator to which to send NetInf GET request.  May be prefixed with http:// or dtn://")
    parser.add_option("-q", "--quiet", default=False,
                      action="store_true", dest="quiet",
                      help="Suppress textual output")
    parser.add_option("-l", "--lax", default=False,
                      action="store_true", dest="lax",
                      help="Store returned content even if digest doesn't validate")
    parser.add_option("-m", "--metadata", default=False,
                      action="store_true", dest="metadata",
                      help="Output returned metadata as JSON string")
    parser.add_option("-v", "--view", default=False,
                      action="store_true", dest="view",
                      help="Pretty print returned metadata.")

    (options, args) = parser.parse_args()

    # Check command line options - -q, -f, -l, -m, and -v are optional,
    # <ni name> is mandatory
    # -w is optional if <ni name> contains a netloc
    if len(args) != 1:
        parser.error("URL <ni name> not specified.")
        sys.exit(-1)
    verbose = not options.quiet

    # Create NIname instance for supplied URL and validate it
    ni_url = NIname(args[0])

    # Must be a complete ni: URL with non-empty params field
    rv = ni_url.validate_ni_url(has_params = True)
    if (rv != ni_errs.niSUCCESS):
        if verbose:
            print("Error: %s is not a complete, valid ni scheme URL: %s" % (ni_url.get_url(), ni_errs_txt[rv]))
        sys.exit(-2)

    # Generate file name for output if not specified
    if (options.file_name == None):
        options.file_name = ni_url.get_digest()

    # Decide Convergence Layer to use and locator to access
    netloc = ni_url.get_netloc()
    cl = HTTP_SCHEME
    if netloc == "":
        # Must have -w option
        if options.loc is None:
            if verbose:
                print("Error: Must provide a locator either in ni URI or via -w/--whence")
            sys.exit(-3)
        loc = options.loc.lower()
    elif options.loc is not None:
        if verbose:
            print("Warning: -w/--whence locator option overrides netloc in ni URI")
        loc = options.loc.lower()
    else:
        loc = netloc.lower()

    # See if URI scheme was specified
    if loc.startswith(HTTP_SCHEME):
        loc = loc[len(HTTP_SCHEME):]
    elif loc.startswith(DTN_SCHEME):
        loc = loc[len(DTN_SCHEME):]
        cl = DTN_SCHEME
    else:
        ssep = loc.find("://")
        if ssep != -1:
            if verbose:
                print("Error: Convergence Layer for scheme %s is not supported - use dtn or http" %
                      loc[:ssep])
            sys.exit(-4)
        # Default assume HTTP

    # Action the GET according to CL selected
    if cl == HTTP_SCHEME:
        json_report, got_content, faulty = get_via_http(ni_url, loc,
                                                        options.file_name,
                                                        verbose, options.lax)
    else:
        json_report, got_content, faulty = get_via_dtn(ni_url, loc,
                                                       options.file_name,
                                                       verbose, options.lax)

    if options.view:
        print("Returned metadata for %s:" % args[0])
        print json.dumps(json_report, indent = 4)
    elif options.metadata:
        print json.dumps(json_report, separators=(",", ":"))

    if not got_content:
        rv = 1
    elif faulty:
        rv = 2
    else:
        rv = 0

    if verbose and got_content:
        if not faulty:
            print("Content successfully retrieved and placed in file %s." %
                  options.file_name)
        else:
            print("Content retrieved and placed in file %s but digest didn't verify." %
                  options.file_name)
    elif verbose:
        print("Only metadata retrieved")

    sys.exit(rv)
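
The convergence-layer selection that py_nigetalt() performs inline on the locator can be read more easily as a small helper. The sketch below restates that logic under the assumption that HTTP_SCHEME and DTN_SCHEME are the "http://" and "dtn://" prefix strings referred to in the usage text.

# Sketch of the scheme-prefix handling done inline in py_nigetalt() above.
# HTTP_SCHEME/DTN_SCHEME are assumed to be the literal prefixes from the usage text.
HTTP_SCHEME = "http://"
DTN_SCHEME = "dtn://"

def select_convergence_layer(loc):
    """Return (cl, bare_loc): the convergence layer scheme and the locator without its prefix."""
    loc = loc.lower()
    if loc.startswith(HTTP_SCHEME):
        return (HTTP_SCHEME, loc[len(HTTP_SCHEME):])
    if loc.startswith(DTN_SCHEME):
        return (DTN_SCHEME, loc[len(DTN_SCHEME):])
    if loc.find("://") != -1:
        raise ValueError("Convergence Layer for scheme %s is not supported - use dtn or http" %
                         loc[:loc.find("://")])
    return (HTTP_SCHEME, loc)    # no scheme identifier given - HTTP is assumed

# e.g. select_convergence_layer("dtn://nihost.example") -> ("dtn://", "nihost.example")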
Example 4
def py_nicl():
    """
    @brief Command line program to generate and validate digests in ni: URLs.
    
    Uses NIproc global instance of NI operations class

    Run:
    
    >  nicl.py --help

    to see usage and options.
    """
    
    # Options parsing and verification stuff
    usage = "%prog [-g|-w|-v] -n <name> -f <pathname of content file> [-V]\n"
    usage = usage + "       %prog -m -n <name> [-V]\n"
    usage = usage + "       %prog -b -s <suite_number> -f <pathname of content file> [-V]\n"
    usage = usage + "       The name can be either an ni: or nih: scheme URI\n"
    usage = usage + "       Return code: success 0, failure non-zero (-V for more info)\n"
    usage = usage + "       Available hashalg (suite number) options:\n"
    usage = usage + "       %s" % NIname.list_algs()
    parser = OptionParser(usage)
    
    parser.add_option("-g", "--generate", default=False,
                      action="store_true", dest="generate",
                      help="Generate hash based on content file, " + \
                           "and output name with encoded hash after the hashalg string")
    parser.add_option("-w", "--well-known", default=False,
                      action="store_true", dest="well_known",
                      help="Generate hash based on content file, " + \
                           "and output name with encoded hash in the .well_known URL " + \
                           "after the hashalg string. Applies to ni: scheme only.")
    parser.add_option("-v", "--verify", default=False,
                      action="store_true", dest="verify",
                      help="Verify hash in name is correct for content file")
    parser.add_option("-m", "--map", default=False,
                      action="store_true", dest="map_wkn",
                      help="Maps from an ni: name to a .well-known URL")
    parser.add_option("-b", "--binary", default=False,
                      action="store_true", dest="bin",
                      help="Outputs the name in binary format for a given suite number")
    parser.add_option("-V", "--verbose", default=False,
                      action="store_true", dest="verbose",
                      help="Be more long winded.")
    parser.add_option("-n", "--ni-name", dest="ni_name",
                      type="string",
                      help="The ni name template for (-g) or ni name matching (-v) content file.")
    parser.add_option("-f", "--file", dest="file_name",
                      type="string",
                      help="File with content data named by ni name.")
    parser.add_option("-s", "--suite-no", dest="suite_no",
                      type="int",
                      help="Suite number for hash algorithm to use.")

    (opts, args) = parser.parse_args()

    if not (opts.generate or opts.well_known or opts.verify or
            opts.map_wkn or opts.bin ):
        parser.error( "Must specify one of -g/--generate, -w/--well-known, -v/--verify, -m/--map or -b/--binary.")
    if opts.generate or opts.well_known or opts.verify:
        if (opts.ni_name == None) or (opts.file_name == None):
            parser.error("Must specify both name and content file name for -g, -w or -v.")
    if opts.map_wkn:
        if (opts.ni_name == None):
            parser.error("Must specify ni name for -m.")
    if opts.bin:
        if (opts.suite_no == None) or (opts.file_name == None):
            parser.error("Must specify both suite number and content file name for -b.")
    if len(args) != 0:
        parser.error("Too many or unrecognised arguments specified")

    # Execute requested action
    if opts.generate:
        n = NIname(opts.ni_name)
        ret = NIproc.makenif(n, opts.file_name)
        if ret == ni_errs.niSUCCESS:
            if opts.verbose:
                print("Name generated successfully.")
            print "%s" % n.get_url()
            sys.exit(0)
        if opts.verbose:
            print "Name could not be successfully generated."
    elif opts.well_known:
        n = NIname(opts.ni_name)
        if n.get_scheme() == "nih":
            if opts.verbose:
                print "Only applicable to ni: scheme names."
            sys.exit(1)
        ret = NIproc.makenif(n, opts.file_name)
        if ret == ni_errs.niSUCCESS:
            if opts.verbose:
                print("Name generated successfully.")
            print "%s" % n.get_wku_transform()
            sys.exit(0)
        if opts.verbose:
            print "Name could not be successfully generated"
    elif opts.verify:
        n = NIname(opts.ni_name)
        ret = NIproc.checknif(n, opts.file_name)
        if ret == ni_errs.niSUCCESS:
            if opts.verbose:
                print("Name matches content file.")
                print "%s" % n.get_url()
            sys.exit(0)
        if opts.verbose:
            print "Check of name against content failed."
    elif opts.map_wkn:
        n = NIname(opts.ni_name)
        ret = n.validate_ni_url(has_params = True)
        if ret == ni_errs.niSUCCESS:
            if n.get_scheme() == "nih":
                if opts.verbose:
                    print "Only applicable to ni: scheme names."
                sys.exit(1)
            if opts.verbose:
                print("Name validated successfully.")
            print "%s" % n.get_wku_transform()
            sys.exit(0)
        else:
            if opts.verbose:
                print "Name could not be successfully validated."
    elif opts.bin:
        (ret, bin_name) = NIproc.makebnf(opts.suite_no, opts.file_name)
        if ret == ni_errs.niSUCCESS:
            if opts.verbose:
                print("Name generated successfully.")
            print base64.b16encode(str(bin_name))
            sys.exit(0)
        else:
            if opts.verbose:
                print "Name could not be successfully generated."
    else:
        print"Should not have happened"
        sys.exit(2)

    # Print appropriate error message
    if opts.verbose:
        print "Error: %s" % ni_errs_txt[ret]
    sys.exit(1)
        
    sys.exit(0)
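
The command-line actions in py_nicl() reduce to a few calls on NIname and NIproc. The sketch below shows the programmatic equivalent of "nicl.py -g -n <template> -f <file>", following the same calls as the example; the import line is an assumption about where these classes live, and the template URL and file path are placeholders.

# Programmatic equivalent of "nicl.py -g", based on the calls made in py_nicl() above.
# The import path is assumed - adjust it to the actual module layout.
from ni import NIname, NIproc, ni_errs, ni_errs_txt

def generate_ni_name(template_url, file_name):
    """Fill in the digest of file_name on an ni name template and return the full URL."""
    n = NIname(template_url)
    ret = NIproc.makenif(n, file_name)
    if ret != ni_errs.niSUCCESS:
        raise ValueError("Name could not be generated: %s" % ni_errs_txt[ret])
    return n.get_url()

# e.g. generate_ni_name("ni://example.com/sha-256", "/tmp/content.bin")  # placeholder values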
Example 5
def publish_with_dtn(ni_name, destination, authority, hash_alg, ext_json, locs, 
                     scheme, full_put, file_name, rform, verbose):
    """
    @brief Action a NetInf publish request using the DTN convergence layer
    @param ni_name NIname object instance or None - ni URI to publish if
                          given on command line - otherwise will be constructed
    @param destination string DTN EID indicating where to send publish request,
                              or None to use the local DTN daemon's EID
    @param authority string netloc component to insert into ni name (may be "")
    @param hash_alg string name of hash algorithm used for ni URI
    @param ext_json dictionary additional information to send with request if any
                               in the form of a JSON dictionary or None
    @param locs list of strings with locators to publish - may be None
    @param scheme URI scheme used for ni URI
    @param full_put boolean True if the file_name with the content was given
    @param file_name string name of file to publish or None if only doing metadata
    @param rform string request format of response
    @param verbose boolean indicates how much error message output is produced
    @return 2-tuple - target - string the actual ni name published
                      payload - string - the response received on publication
    """
    
    debug("Publishing via: %s" % destination)

    # Handle full_put = True cases - we have a file with the octets in it
    if full_put:
        if ni_name is None:
            # Make a ni_name template from specified components
            ni_name = NIname((scheme, authority, "/%s" % hash_alg))

            # Construct the digest from the file name and the template
            rv = NIproc.makenif(ni_name, file_name)
            if rv != ni_errs.niSUCCESS:
                raise PublishFailure("Unable to construct digest of file %s: %s" %
                                     (file_name, ni_errs_txt[rv]), -20)
        else:
            # Check the ni_name and the file match
            rv = NIproc.checknif(ni_name, file_name)
            if rv != ni_errs.niSUCCESS:
                raise PublishFailure("Digest of file %s does not match ni_name %s: %s" %
                                     (file_name,
                                      ni_name.get_url(),
                                      ni_errs_txt[rv]), -21)

        # Guess the mimetype of the file
        m = magic.Magic(mime=True)
        ctype = m.from_file(file_name)
        debug("Content-Type: %s" % ctype)
        if ctype is None:
            # Guessing didn't work - default
            ctype = "application/octet-stream"

    else:
        ctype = None

    target = ni_name.get_canonical_ni_url()
    debug("Using URI string: %s" % target)

    # Add extra items to ext_json to pass across as metadata
    ext_json["ni"] = target
    if ctype is not None:
        ext_json["ct"] = ctype
    if authority != "":
        ext_json["http_auth"] = authority
    # Send at most two locators as a list
    if (locs is not None):
        ext_json["loclist"] = locs[:2]
    ext_json["fullPut"] = full_put
    ext_json["rform"] = rform
    
    # Create a connection to the DTN daemon
    dtn_handle = dtnapi.dtn_open()
    if dtn_handle == -1:
        raise PublishFailure("Error: unable to open connection with DTN daemon",
                             -22)

    # Generate EID + service tag for service to be accessed via DTN
    if destination is None:
        remote_service_eid = \
                    dtnapi.dtn_build_local_eid(dtn_handle,
                                               "netinfproto/service/publish")
        i = remote_service_eid.find("/netinfproto")
        destination = remote_service_eid[:i]
    else:                           
        remote_service_eid = destination + "/netinfproto/service/publish"

    # Add destination to locs if it isn't there already
    if locs is None:
        locs = []
    if destination not in locs:
        locs.append(destination)
    
    # Generate the EID and service tag for this service
    local_service_eid = dtnapi.dtn_build_local_eid(dtn_handle,
                                                   "netinfproto/app/response")
    debug("Local Service EID: %s" % local_service_eid)
    debug("Remote Service EID: %s" % remote_service_eid)

    # Check if service_eid registration exists and register if not
    # Otherwise bind to the existing registration
    regid = dtnapi.dtn_find_registration(dtn_handle, local_service_eid)
    if (regid == -1):
        # Need to register the EID.. make it permanent with 'DEFER'
        # characteristics so that bundles are saved if they arrive
        # while the handler is inactive
        # Expire the registration an hour in the future
        exp = 60 * 60
        # The registration is immediately active
        passive = False
        # We don't want to execute a script
        script = ""
        
        regid = dtnapi.dtn_register(dtn_handle, local_service_eid,
                                    dtnapi.DTN_REG_DEFER,
                                    exp, passive, script)
    else:
        dtnapi.dtn_bind(dtn_handle, regid)

    # Build the bundle to send
    # First a suitable BPQ block
    bpq = BPQ()
    bpq.set_bpq_kind(BPQ.BPQ_BLOCK_KIND_PUBLISH)
    bpq.set_matching_rule(BPQ.BPQ_MATCHING_RULE_EXACT)
    bpq.set_src_eid(local_service_eid)
    sent_msgid = str(random.randint(1, 32000))
    bpq.set_bpq_id(sent_msgid)
    bpq.set_bpq_val(target)
    bpq.clear_frag_desc()

    # Build an extension blocks structure to hold the block
    ext_blocks =  dtnapi.dtn_extension_block_list(1)

    # Construct the extension block
    bpq_block = dtnapi.dtn_extension_block()
    bpq_block.type = QUERY_EXTENSION_BLOCK
    bpq_block.flags = 0
    bpq_block.data = bpq.build_for_net()
    ext_blocks.blocks.append(bpq_block)

    # Build an extension blocks structure to hold the block
    meta_blocks =  dtnapi.dtn_extension_block_list(2)
            
    # Build a metadata block for JSON data
    md = Metadata()
    md.set_ontology(Metadata.ONTOLOGY_JSON)
    md.set_ontology_data(json.dumps(ext_json))
    json_block = dtnapi.dtn_extension_block()
    json_block.type = METADATA_BLOCK
    json_block.flags = 0
    json_block.data = md.build_for_net()
    meta_blocks.blocks.append(json_block)

    # Set up payload and placeholder if needed
    if full_put:
        # No placeholder required (obviously!)        
        pt = dtnapi.DTN_PAYLOAD_FILE
        pv = file_name
    else:
        # DTN bundle always has a payload - distinguish a
        # zero length file from no content available
        # Payload is the empty string sent via memory
        pt = dtnapi.DTN_PAYLOAD_MEM
        pv = ""
        # Add a payload placeholder metablock
        md = Metadata()
        md.set_ontology(Metadata.ONTOLOGY_PAYLOAD_PLACEHOLDER)
        md.set_ontology_data("No content supplied")
        pp_block = dtnapi.dtn_extension_block()
        pp_block.type = METADATA_BLOCK
        pp_block.flags = 0
        pp_block.data = md.build_for_net()
        meta_blocks.blocks.append(pp_block)

    # We want delivery reports and publication reports
    # (and maybe deletion reports?)
    dopts = dtnapi.DOPTS_DELIVERY_RCPT | dtnapi.DOPTS_PUBLICATION_RCPT
    # - Send with normal priority.
    pri = dtnapi.COS_NORMAL
    # NetInf bundles should last a while..
    exp = (24 *60 * 60)

    # Send the bundle
    bundle_id = dtnapi.dtn_send(dtn_handle, regid, local_service_eid,
                                remote_service_eid, local_service_eid,
                                pri, dopts, exp, pt, pv, 
                                ext_blocks, meta_blocks, "", "")
    if bundle_id == None:
        raise PublishFailure("dtn_send failed - %s" %
                             dtnapi.dtn_strerror(dtnapi.dtn_errno(dtn_handle)),
                             -23)

    # Wait for a response - maybe also some reports
    while(True):
        # NOTE: BUG in dtnapi - timeout is in msecs
        recv_timeout = 2000 * 60
        bpq_bundle = dtnapi.dtn_recv(dtn_handle, dtnapi.DTN_PAYLOAD_FILE,
                                     recv_timeout)
        # If bpq_bundle is None then either the dtn_recv timed out or
        # there was some other error.
        if bpq_bundle != None:
            # Filter out report bundles
            if bpq_bundle.status_report != None:
                debug("Received status report")
                if bpq_bundle.status_report.flags == dtnapi.STATUS_DELIVERED:
                    if verbose:
                        print("Received delivery report re from %s sent %d seq %d" %
                              (bpq_bundle.status_report.bundle_id.source,
                               bpq_bundle.status_report.bundle_id.creation_secs,
                               bpq_bundle.status_report.bundle_id.creation_seqno))

                elif bpq_bundle.status_report.flags == dtnapi.STATUS_DELETED:
                    if verbose:
                        print("Received deletion report re from %s sent %d seq %d" %
                              (bpq_bundle.status_report.bundle_id.source,
                               bpq_bundle.status_report.bundle_id.creation_secs,
                               bpq_bundle.status_report.bundle_id.creation_seqno))

                elif bpq_bundle.status_report.flags == dtnapi.STATUS_PUBLISHED:
                    if verbose:
                        print("Received publication report re from %s sent %d seq %d" %
                              (bpq_bundle.status_report.bundle_id.source,
                               bpq_bundle.status_report.bundle_id.creation_secs,
                               bpq_bundle.status_report.bundle_id.creation_seqno))

                else:
                    if verbose:
                        print("Received unexpected report: Flags: %d" %
                              bpq_bundle.status_report.flags)
                        
                # Wait for more status reports and incoming response
                continue

            # Check the payload really is in a file
            if not bpq_bundle.payload_file:
                raise PublishFailure("Received bundle payload not in file - "
                                     "ignoring bundle", -24)
            
            # Have to delete this file before an error exit or if empty
            pfn = bpq_bundle.payload
            l = len(pfn)
            if pfn[l-1] == "\x00":
                pfn = pfn[:-1]
            debug("Got incoming bundle with response in file %s" % pfn)

            # Does the bundle have a BPQ block
            bpq_data = None
            if bpq_bundle.extension_cnt == 0:
                os.remove(pfn)
                raise PublishFailure("Error: Received bundle with no "
                               "extension block.", -25)
                          
            for blk in bpq_bundle.extension_blks:
                if blk.type == QUERY_EXTENSION_BLOCK:
                    bpq_data = BPQ()
                    if not bpq_data.init_from_net(blk.data):
                        os.remove(pfn)
                        raise PublishFailure("Error: Bad BPQ block received",
                                             -26)
    
            if bpq_data is None:
                os.remove(pfn)
                raise PublishFailure("Error: Received bundle with no BPQ block "
                                     "in extension blocks", -27)

            debug(bpq_data)
            # OK.. got the response - finish with daemon
            break
                
        elif dtnapi.dtn_errno(dtn_handle) != dtnapi.DTN_ETIMEOUT:
            raise PublishFailure(dtnapi.dtn_strerror(dtnapi.dtn_errno(dtn_handle)),
                                 -28)
        else:
            raise PublishFailure("dtn_recv timed out without receiving "
                                 "response bundle", 1)
                           
    dtnapi.dtn_close(dtn_handle)

    # Check the BPQ data is right
    bpq.set_bpq_kind(BPQ.BPQ_BLOCK_KIND_PUBLISH)
    bpq.set_matching_rule(BPQ.BPQ_MATCHING_RULE_EXACT)
    if bpq_data.bpq_kind != BPQ.BPQ_BLOCK_KIND_PUBLISH:
        raise PublishFailure("Returned BPQ block is not PUBLISH kind: %d" %
                             bpq_data.bpq_kind, -29)
    if bpq_data.matching_rule != BPQ.BPQ_MATCHING_RULE_NEVER:
        raise PublishFailure("Returned BPQ block does not have NEVER matching rule: %d" %
                             bpq_data.matching_rule, -30)
    if bpq_data.bpq_id != sent_msgid:
        raise PublishFailure("Returned BPQ block has unmatched msgis %s vs %s" %
                             (bpq_data.bpq_id, sent_msgid), -31)

    # Verify the format of the response (a bit)
    try:
        pfd = open(pfn, "rb")
        payload = pfd.read()
        pfd.close()
        os.remove(pfn)
    except Exception, e:
        raise PublishFailure("Failed to read response from payload file %s" %
                             pfn, -32)
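
For context, the sketch below shows one way publish_with_dtn() might be invoked, with the parameters filled in as the docstring describes. Every value is a placeholder: the DTN EID, file path and metadata are assumptions, and PublishFailure is the exception the function raises on errors.

# Hypothetical call to publish_with_dtn() above - all values are placeholders.
try:
    target, payload = publish_with_dtn(ni_name=None,          # let the function build the name
                                       destination="dtn://nihost.example",
                                       authority="",
                                       hash_alg="sha-256",
                                       ext_json={},            # extra metadata, if any
                                       locs=None,
                                       scheme="ni",
                                       full_put=True,
                                       file_name="/tmp/content.bin",
                                       rform="json",
                                       verbose=True)
    print "Published %s" % target
    print payload
except PublishFailure, pf:
    print "Publish failed: %s" % str(pf)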