Example #1
def get_via_dtn(ni_url, dtn_eid, file_name, verbose, lax):
    """
    @brief Perform a NetInf 'get' via DTN from the node identified by dtn_eid for the ni_url.
    @param ni_url object instance of NIname with ni name to be retrieved
    @param dtn_eid string DTN node name of the node to be accessed
                            (used to build the dtn:// EID, so omit the prefix)
    @param file_name string path to save content if returned
    @param verbose boolean if True print error messages, otherwise be quiet
    @param lax boolean if True return content file even if it doesn't verify
    @return 3-tuple with:
                dictionary containing returned JSON metadata decoded
                boolean indicating if content was obtained (and is in file_name)
                boolean indicating if content failed to verify when lax was True

    Assumes that ni_url contains a valid ni URI
    """
    # Record if content failed to verify (for lax case)
    faulty = False

    # Record if content was retrieved at all
    got_content = False

    # Must be a complete ni: URL with non-empty params field
    rv = ni_url.validate_ni_url(has_params = True)
    if (rv != ni_errs.niSUCCESS):
        if verbose:
            print("Error: %s is not a complete, valid ni scheme URL: %s" %
                  (ni_url.get_url(), ni_errs_txt[rv]))
        sys.exit(-10)

    # Generate canonical form (no netloc, ni scheme) URI for ni name
    # This goes in the BPQ block - The canonical form ensures that the
    # BPQ EXACT_MATCH will find the NDO if it is cached along the DTN path.
    # If there is a netloc in the ni name it is sent as a Metadata item.
    ni_url_str = ni_url.get_canonical_ni_url()

    # Generate EID + service tag for service to be accessed via DTN
    remote_service_eid = "dtn://" + dtn_eid + "/netinfproto/service/get"

    # Create a connection to the DTN daemon
    dtn_handle = dtnapi.dtn_open()
    if dtn_handle == -1:
        if verbose:
            print("Error: unable to open connection with DTN daemon")
        sys.exit(-20)

    # Generate the EID and service tag for this service
    local_service_eid = dtnapi.dtn_build_local_eid(dtn_handle,
                                                   "netinfproto/app/response")
    debug("Service EID: %s" % local_service_eid)

    # Check if service_eid registration exists and register if not
    # Otherwise bind to the existing registration
    regid = dtnapi.dtn_find_registration(dtn_handle, local_service_eid)
    if (regid == -1):
        # Need to register the EID.. make it permanent with 'DEFER'
        # characteristics so that bundles are saved if they arrive
        # while the handler is inactive
        # Expire the registration an hour in the future
        exp = 60 * 60
        # The registration is immediately active
        passive = False
        # We don't want to execute a script
        script = ""
        
        regid = dtnapi.dtn_register(dtn_handle, local_service_eid,
                                    dtnapi.DTN_REG_DEFER,
                                    exp, passive, script)
    else:
        dtnapi.dtn_bind(dtn_handle, regid)

    # Build the bundle to send
    # First a suitable BPQ block
    bpq = BPQ()
    bpq.set_bpq_kind(BPQ.BPQ_BLOCK_KIND_QUERY)
    bpq.set_matching_rule(BPQ.BPQ_MATCHING_RULE_EXACT)
    bpq.set_src_eid(local_service_eid)
    sent_msgid = str(random.randint(1, 32000))
    debug("Sending msgid: %s" % sent_msgid)
    bpq.set_bpq_id(sent_msgid)
    bpq.set_bpq_val(ni_url_str)
    bpq.clear_frag_desc()

    # Only need to send metadata if there is a non-empty netloc in
    # the ni_url.
    netloc = ni_url.get_netloc()
    meta_blocks = None
    if netloc != "":
        # Create a JSON dictionary with netloc in it
        json_dict = { "http_auth" : netloc }
        
        # Build an extension blocks structure to hold the metadata block
        meta_blocks =  dtnapi.dtn_extension_block_list(1)
                
        # Build a metadata block for JSON data
        md = Metadata()
        md.set_ontology(Metadata.ONTOLOGY_JSON)
        md.set_ontology_data(json.dumps(json_dict))
        json_block = dtnapi.dtn_extension_block()
        json_block.type = METADATA_BLOCK
        json_block.flags = 0
        json_block.data = md.build_for_net()
        meta_blocks.blocks.append(json_block)

    # Don't need to send any payload placeholder

    # Payload is the empty string sent via memory
    pt = dtnapi.DTN_PAYLOAD_MEM
    pv = ""

    # - We want delivery reports (and maybe deletion reports?)
    dopts = dtnapi.DOPTS_DELIVERY_RCPT
    # - Send with normal priority.
    pri = dtnapi.COS_NORMAL
    # NetInf bundles should last a while..
    exp = (24 * 60 * 60)

    # Build an extension blocks structure to hold the BPQ block
    ext_blocks =  dtnapi.dtn_extension_block_list(1)

    # Construct the extension block
    bpq_block = dtnapi.dtn_extension_block()
    bpq_block.type = QUERY_EXTENSION_BLOCK
    bpq_block.flags = 0
    bpq_block.data = bpq.build_for_net()
    ext_blocks.blocks.append(bpq_block)

    # Send the bundle
    bundle_id = dtnapi.dtn_send(dtn_handle, regid, local_service_eid,
                                remote_service_eid, local_service_eid,
                                pri, dopts, exp, pt, pv, 
                                ext_blocks, meta_blocks, "", "")

    # Wait for a response - maybe also some reports
    while(True):
        # NOTE: BUG in dtnapi - timeout is in msecs
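        # i.e. 2000 * 60 = 120000 ms, so wait up to two minutes for a response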
        recv_timeout = 2000 * 60
        bpq_bundle = dtnapi.dtn_recv(dtn_handle, dtnapi.DTN_PAYLOAD_FILE,
                                     recv_timeout)
        # If bpq_bundle is None then either the dtn_recv timed out or
        # there was some other error.
        if bpq_bundle != None:
            # Filter out report bundles
            if bpq_bundle.status_report != None:
                debug("Received status report")
                if bpq_bundle.status_report.flags == dtnapi.STATUS_DELIVERED:
                    if verbose:
                        print("Received delivery report re from %s sent %d seq %d" %
                              (bpq_bundle.status_report.bundle_id.source,
                               bpq_bundle.status_report.bundle_id.creation_secs,
                               bpq_bundle.status_report.bundle_id.creation_seqno))

                elif bpq_bundle.status_report.flags == dtnapi.STATUS_DELETED:
                    if verbose:
                        print("Received deletion report re from %s sent %d seq %d" %
                              (bpq_bundle.status_report.bundle_id.source,
                               bpq_bundle.status_report.bundle_id.creation_secs,
                               bpq_bundle.status_report.bundle_id.creation_seqno))

                else:
                    if verbose:
                        print("Received unexpected report: Flags: %d" %
                              bpq_bundle.status_report.flags)
                        
                # Wait for more status reports and incoming response
                continue

            # Check the payload really is in a file
            if not bpq_bundle.payload_file:
                if verbose:
                    print("Received bundle payload not in file - ignoring bundle")
                sys.exit(-21)
            
            # Have to delete this file before an error exit or if empty
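            # The payload file name may come back NUL-terminated from the
            # underlying C API, so strip a trailing NUL byte if present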
            pfn = bpq_bundle.payload
            l = len(pfn)
            if pfn[l-1] == "\x00":
                pfn = pfn[:-1]
            debug("Got incoming bundle in file %s" % pfn)

            # Does the bundle have a BPQ block
            bpq_data = None
            if bpq_bundle.extension_cnt == 0:
                if verbose:
                    print("Error: Received bundle with no extension block.")
                os.remove(pfn)
                sys.exit(-22)
                          
            for blk in bpq_bundle.extension_blks:
                if blk.type == QUERY_EXTENSION_BLOCK:
                    bpq_data = BPQ()
                    if not bpq_data.init_from_net(blk.data):
                        if verbose:
                          print("Error: Bad BPQ block received")
                        os.remove(pfn)
                        sys.exit(-23)

            if bpq_data is None:
                if verbose:
                    print("Error: Received bundle with no BPQ block in extension blocks")
                os.remove(pfn)
                sys.exit(-23)

            debug(bpq_data)

            # Does the bundle have a Metadata block of type JSON and optionally
            # a payload placeholder
            json_data = None
            got_content = True
            if bpq_bundle.metadata_cnt > 0:
                debug("Metadata count for bundle is %d" %
                      bpq_bundle.metadata_cnt)
                for blk in bpq_bundle.metadata_blks:
                    if blk.type == METADATA_BLOCK:
                        md = Metadata()
                        if not md.init_from_net(blk.data):
                            if verbose:
                                print("Error: Bad Metadata block received")
                            os.remove(pfn)
                            sys.exit(-24)
                        if md.ontology == Metadata.ONTOLOGY_JSON:
                            json_data = md
                        elif md.ontology == Metadata.ONTOLOGY_PAYLOAD_PLACEHOLDER:
                            got_content = False
                            debug("Have placeholder: %s" % md.ontology_data)
                        else:
                            if verbose:
                                print("Warning: Metadata (type %d) block not processed" %
                                      md.ontology)

            if json_data is not None:
                debug("JSON data: %s" % json_data)
                od = json_data.ontology_data
                if od[-1:] == '\x00':
                    od = od[:-1]
                json_dict = json.loads(od)
            else:
                json_dict = None

            # Check if bundle has a (non-empty) payload even if it has a placeholder
            if (bpq_bundle.payload_len > 0) and not got_content:
                if verbose:
                    print("Error: Bundle has payload placeholder and non-empty payload")
                os.remove(pfn)
                sys.exit(-25)
            

            # Validate the digest if there is content
            faulty = False
            if got_content:
                # Unfortunately there is no easy way to do this without reading the file again.
                # But that lets us copy it into the final destination at the same time.
                # Digest output
                bin_dgst = None

                h = ni_url.get_hash_function()()

                # Open the bundle payload file
                try:
                    fr = open(pfn, "rb")
                except Exception, e :
                    if verbose:
                        print("Error: Cannot open payload file %s: Reason: %s" %
                              (pfn, str(e)))
                    os.remove(pfn)
                    sys.exit(-26)

                # Open the destination file
                try:
                    fw = open(file_name, "wb")
                except Exception, e :
                    if verbose:
                        print("Error: Cannot open destination file %s: Reason: %s" %
                              (file_name, str(e)))
                    fr.close()
                    os.remove(pfn)
                    sys.exit(-26)

                while True:

                    try:
                        l = fr.read(1024)
                    except Exception, e :
                        if verbose:
                            print("Error: Cannot read payload file %s: Reason: %s" %
                                  (pfn, str(e)))
                        fr.close()
                        os.remove(pfn)
                        fw.close()
                        os.remove(file_name)
                        sys.exit(-27)

                    if len(l) == 0:
                        fr.close()
                        fw.close()
                        break
                     
                    h.update(l)
                    try:
                        fw.write(l)
                    except Exception, e :
                        if verbose:
                            print("Error: Cannot write destination file %s: Reason: %s" %
                                  (file_name, str(e)))
                        fr.close()
                        os.remove(pfn)
                        fw.close()
                        os.remove(file_name)
                        sys.exit(-28)
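
# Hedged usage sketch (not part of the truncated example above): assuming
# get_via_dtn is importable alongside nilib's NIname class, a caller might
# retrieve an NDO roughly as follows. The ni URI is the RFC 6920 "Hello
# World!" example digest and the DTN node name is a placeholder.
from ni import NIname  # assumed nilib import path

ni_name = NIname("ni:///sha-256;f4OxZX_x_FO5LcGBSKHWXfwtSx-j1ncoSt3SABJtkGk")
metadata, got_content, faulty = get_via_dtn(ni_name,
                                            "nihost.example.dtn",  # placeholder DTN node
                                            "/tmp/ndo_content",    # where to save content
                                            True,                  # verbose
                                            False)                 # lax
if got_content and not faulty:
    print("Retrieved and verified NDO; metadata: %s" % metadata)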
Example #2
def publish_with_dtn(ni_name, destination, authority, hash_alg, ext_json, locs, 
                     scheme, full_put, file_name, rform, verbose):
    """
    @brief Action a NetInf publish request using the DTN convergence layer
    @param ni_name NIname object instance or None - ni URI to publish if
                          given on command line - otherwise will be constructed
    @param destination string DTN EID (e.g. dtn://<node name>) of the node to
                              send the publish request to, or None to use the
                              local node
    @param authority string netloc component to insert into ni name (may be "")
    @param hash_alg string name of hash algorithm used for ni URI
    @param ext_json dictionary additional information to send with request if any
                               in the form of a JSON dictionary or None
    @param locs list of strings with locators to publish - may be None
    @param scheme URI scheme used for ni URI
    @param full_put boolean True if the file_name with the content was given
    @param file_name string name of file to publish or None if only doing metadata
    @param rform string request format of response
    @param verbose boolean indicates how much error message output is produced
    @return 2-tuple - target - string the actual ni name published
                      payload - string - the response received on publication
    """
    
    debug("Publishing via: %s" % destination)

    # Handle full_put = True cases - we have a file with the octets in it
    if full_put:
        if ni_name is None:
            # Make a ni_name template from specified components
            ni_name = NIname((scheme, authority, "/%s" % hash_alg))

            # Construct the digest from the file name and the template
            rv = NIproc.makenif(ni_name, file_name)
            if rv != ni_errs.niSUCCESS:
                raise PublishFailure("Unable to construct digest of file %s: %s" %
                                     (file_name, ni_errs_txt[rv]), -20)
        else:
            # Check the ni_name and the file match
            rv = NIproc.checknif(ni_name, file_name)
            if rv != ni_errs.niSUCCESS:
                raise PublishFailure("Digest of file %s does not match ni_name %s: %s" %
                                     (file_name,
                                      ni_name.get_url(),
                                      ni_errs_txt[rv]), -21)

        # Guess the mimetype of the file
        m = magic.Magic(mime=True)
        ctype = m.from_file(file_name)
        debug("Content-Type: %s" % ctype)
        if ctype is None:
            # Guessing didn't work - default
            ctype = "application/octet-stream"

    else:
        ctype = None

    target = ni_name.get_canonical_ni_url()
    debug("Using URI string: %s" % target)

    # Add extra items to ext_json to pass across as metadata
    ext_json["ni"] = target
    if ctype is not None:
        ext_json["ct"] = ctype
    if authority != "":
        ext_json["http_auth"] = authority
    # Send at most two locators as a list
    if (locs is not None):
        ext_json["loclist"] = locs[:2]
    ext_json["fullPut"] = full_put
    ext_json["rform"] = rform
    
    # Create a connection to the DTN daemon
    dtn_handle = dtnapi.dtn_open()
    if dtn_handle == -1:
        raise PublishFailure("Error: unable to open connection with DTN daemon",
                             -22)

    # Generate EID + service tag for service to be accessed via DTN
    if destination is None:
        remote_service_eid = \
                    dtnapi.dtn_build_local_eid(dtn_handle,
                                               "netinfproto/service/publish")
        i = remote_service_eid.find("/netinfproto")
        destination = remote_service_eid[:i]
    else:                           
        remote_service_eid = destination + "/netinfproto/service/publish"

    # Add destination to locs if it isn't there already
    if locs is None:
        locs = []
    if destination not in locs:
        locs.append(destination)
    
    # Generate the EID and service tag for this service
    local_service_eid = dtnapi.dtn_build_local_eid(dtn_handle,
                                                   "netinfproto/app/response")
    debug("Local Service EID: %s" % local_service_eid)
    debug("Remote Service EID: %s" % remote_service_eid)

    # Check if service_eid registration exists and register if not
    # Otherwise bind to the existing registration
    regid = dtnapi.dtn_find_registration(dtn_handle, local_service_eid)
    if (regid == -1):
        # Need to register the EID.. make it permanent with 'DEFER'
        # characteristics so that bundles are saved if they arrive
        # while the handler is inactive
        # Expire the registration an hour in the future
        exp = 60 * 60
        # The registration is immediately active
        passive = False
        # We don't want to execute a script
        script = ""
        
        regid = dtnapi.dtn_register(dtn_handle, local_service_eid,
                                    dtnapi.DTN_REG_DEFER,
                                    exp, passive, script)
    else:
        dtnapi.dtn_bind(dtn_handle, regid)

    # Build the bundle to send
    # First a suitable BPQ block
    bpq = BPQ()
    bpq.set_bpq_kind(BPQ.BPQ_BLOCK_KIND_PUBLISH)
    bpq.set_matching_rule(BPQ.BPQ_MATCHING_RULE_EXACT)
    bpq.set_src_eid(local_service_eid)
    sent_msgid = str(random.randint(1, 32000))
    bpq.set_bpq_id(sent_msgid)
    bpq.set_bpq_val(target)
    bpq.clear_frag_desc()

    # Build an extension blocks structure to hold the block
    ext_blocks =  dtnapi.dtn_extension_block_list(1)

    # Construct the extension block
    bpq_block = dtnapi.dtn_extension_block()
    bpq_block.type = QUERY_EXTENSION_BLOCK
    bpq_block.flags = 0
    bpq_block.data = bpq.build_for_net()
    ext_blocks.blocks.append(bpq_block)

    # Build an extension blocks structure to hold the block
    meta_blocks =  dtnapi.dtn_extension_block_list(2)
            
    # Build a metadata block for JSON data
    md = Metadata()
    md.set_ontology(Metadata.ONTOLOGY_JSON)
    md.set_ontology_data(json.dumps(ext_json))
    json_block = dtnapi.dtn_extension_block()
    json_block.type = METADATA_BLOCK
    json_block.flags = 0
    json_block.data = md.build_for_net()
    meta_blocks.blocks.append(json_block)

    # Set up payload and placeholder if needed
    if full_put:
        # No placeholder required (obviously!)        
        pt = dtnapi.DTN_PAYLOAD_FILE
        pv = file_name
    else:
        # DTN bundle always has a payload - distinguish a
        # zero length file from 'no content available'
        # Payload is the empty string sent via memory
        pt = dtnapi.DTN_PAYLOAD_MEM
        pv = ""
        # Add a payload placeholder metablock
        md = Metadata()
        md.set_ontology(Metadata.ONTOLOGY_PAYLOAD_PLACEHOLDER)
        md.set_ontology_data("No content supplied")
        pp_block = dtnapi.dtn_extension_block()
        pp_block.type = METADATA_BLOCK
        pp_block.flags = 0
        pp_block.data = md.build_for_net()
        meta_blocks.blocks.append(pp_block)

    # We want delivery reports and publication reports
    # (and maybe deletion reports?)
    dopts = dtnapi.DOPTS_DELIVERY_RCPT | dtnapi.DOPTS_PUBLICATION_RCPT
    # - Send with normal priority.
    pri = dtnapi.COS_NORMAL
    # NetInf bundles should last a while..
    exp = (24 * 60 * 60)

    # Send the bundle
    bundle_id = dtnapi.dtn_send(dtn_handle, regid, local_service_eid,
                                remote_service_eid, local_service_eid,
                                pri, dopts, exp, pt, pv, 
                                ext_blocks, meta_blocks, "", "")
    if bundle_id == None:
        raise PublishFailure("dtn_send failed - %s" %
                             dtnapi.dtn_strerror(dtnapi.dtn_errno(dtn_handle)),
                             -23)

    # Wait for a response - maybe also some reports
    while(True):
        # NOTE: BUG in dtnapi - timeout is in msecs
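        # i.e. 2000 * 60 = 120000 ms, so wait up to two minutes for a response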
        recv_timeout = 2000 * 60
        bpq_bundle = dtnapi.dtn_recv(dtn_handle, dtnapi.DTN_PAYLOAD_FILE,
                                     recv_timeout)
        # If bpq_bundle is None then either the dtn_recv timed out or
        # there was some other error.
        if bpq_bundle != None:
            # Filter out report bundles
            if bpq_bundle.status_report != None:
                debug("Received status report")
                if bpq_bundle.status_report.flags == dtnapi.STATUS_DELIVERED:
                    if verbose:
                        print("Received delivery report re from %s sent %d seq %d" %
                              (bpq_bundle.status_report.bundle_id.source,
                               bpq_bundle.status_report.bundle_id.creation_secs,
                               bpq_bundle.status_report.bundle_id.creation_seqno))

                elif bpq_bundle.status_report.flags == dtnapi.STATUS_DELETED:
                    if verbose:
                        print("Received deletion report re from %s sent %d seq %d" %
                              (bpq_bundle.status_report.bundle_id.source,
                               bpq_bundle.status_report.bundle_id.creation_secs,
                               bpq_bundle.status_report.bundle_id.creation_seqno))

                elif bpq_bundle.status_report.flags == dtnapi.STATUS_PUBLISHED:
                    if verbose:
                        print("Received publication report re from %s sent %d seq %d" %
                              (bpq_bundle.status_report.bundle_id.source,
                               bpq_bundle.status_report.bundle_id.creation_secs,
                               bpq_bundle.status_report.bundle_id.creation_seqno))

                else:
                    if verbose:
                        print("Received unexpected report: Flags: %d" %
                              bpq_bundle.status_report.flags)
                        
                # Wait for more status reports and incoming response
                continue

            # Check the payload really is in a file
            if not bpq_bundle.payload_file:
                raise PublishFailure("Received bundle payload not in file - "
                                     "ignoring bundle", -24)
            
            # Have to delete this file before an error exit or if empty
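            # The payload file name may come back NUL-terminated from the
            # underlying C API, so strip a trailing NUL byte if present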
            pfn = bpq_bundle.payload
            l = len(pfn)
            if pfn[l-1] == "\x00":
                pfn = pfn[:-1]
            debug("Got incoming bundle with response in file %s" % pfn)

            # Does the bundle have a BPQ block
            bpq_data = None
            if bpq_bundle.extension_cnt == 0:
                os.remove(pfn)
                raise PublishFailure("Error: Received bundle with no "
                               "extension block.", -25)
                          
            for blk in bpq_bundle.extension_blks:
                if blk.type == QUERY_EXTENSION_BLOCK:
                    bpq_data = BPQ()
                    if not bpq_data.init_from_net(blk.data):
                        os.remove(pfn)
                        raise PublishFailure("Error: Bad BPQ block received",
                                             -26)
    
            if bpq_data is None:
                os.remove(pfn)
                raise PublishFailure("Error: Received bundle with no BPQ block "
                                     "in extension blocks", -27)

            debug(bpq_data)
            # OK.. got the response - finish with daemon
            break
                
        elif dtnapi.dtn_errno(dtn_handle) != dtnapi.DTN_ETIMEOUT:
            raise PublishFailure(dtnapi.dtn_strerror(dtnapi.dtn_errno(dtn_handle)),
                                 -28)
        else:
            raise PublishFailure("dtn_recv timed out without receiving "
                                 "response bundle", 1)
                           
    dtnapi.dtn_close(dtn_handle)

    # Check the BPQ data is right
    if bpq_data.bpq_kind != BPQ.BPQ_BLOCK_KIND_PUBLISH:
        raise PublishFailure("Returned BPQ block is not PUBLISH kind: %d" %
                             bpq_data.bpq_kind, -29)
    if bpq_data.matching_rule != BPQ.BPQ_MATCHING_RULE_NEVER:
        raise PublishFailure("Returned BPQ block does not have NEVER matching rule: %d" %
                             bpq_data.matching_rule, -30)
    if bpq_data.bpq_id != sent_msgid:
        raise PublishFailure("Returned BPQ block has unmatched msgis %s vs %s" %
                             (bpq_data.bpq_id, sent_msgid), -31)

    # Verify the format of the response (a bit)
    try:
        pfd = open(pfn, "rb")
        payload = pfd.read()
        pfd.close()
        os.remove(pfn)
    except Exception, e:
        raise PublishFailure("Failed to read response from payload file %s" %
                             pfn, -32)
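
# Hedged usage sketch (not part of the truncated example above): assuming
# publish_with_dtn and PublishFailure are importable from this module, a
# full put of a local file might look roughly like this. The destination
# EID and file name are placeholders.
try:
    target, payload = publish_with_dtn(None,                        # derive ni name from the file
                                       "dtn://nihost.example.dtn",  # placeholder destination EID
                                       "",                          # no authority in the ni name
                                       "sha-256",                   # hash algorithm
                                       {},                          # no extra JSON metadata
                                       None,                        # no extra locators
                                       "ni",                        # URI scheme
                                       True,                        # full_put - send the octets
                                       "/tmp/content_to_publish",   # placeholder file
                                       "json",                      # response format
                                       True)                        # verbose
    print("Published %s; response: %s" % (target, payload))
except PublishFailure, e:
    print("Publish failed: %s" % str(e))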