Example 1
def main(name_str):

    # Create a GDP_NAME object from a python string provided as argument.
    print "Name: " + name_str
    gcl_name = gdp.GDP_NAME(name_str)
    gcl_handle_writer = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_AO)

    gcl_handle_subscriber = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)

    # this is the actual subscribe call
    gcl_handle_subscriber.subscribe(0, 0, None)

    count = 0
    while count < 10:
        count += 1
        line = str(count)
        # Create a minimalist datum dictionary
        datum = {"data": line}
        gcl_handle_writer.append(datum)  # Write this datum to the GCL

        timeout = {'tv_sec': 1, 'tv_nsec': 0, 'tv_accuracy': 0.0}

        print "About to call get_next_event()"

        event = gcl_handle_subscriber.get_next_event(timeout)
        datum = event["datum"]
        handle = event["gcl_handle"]
        print datum
Example 2
def create_append_subscribe():    
    # http://stackoverflow.com/questions/2257441/random-string-generation-with-upper-case-letters-and-digits-in-python
    name_str = 'python.test.writer_subscriber2_test.' + ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(10))
    print "Name: " + name_str;
    gcl_name = gdp.GDP_NAME(name_str);

    print "Trying to create using " + platform.node();
    logd_name = gdp.GDP_NAME(platform.node());

    print "About to create " + name_str 
    try:
        gdp.GDP_GCL.create(gcl_name, logd_name, '');
    except :
        # If run with "python writer_subscriber2_test.py foo"
        # and the router and logd daemons are not running, then we end up here.
        # FIXME: Probably don't want to hardcode in the log name
        logd_name = gdp.GDP_NAME('edu.berkeley.eecs.gdp-01.gdplogd');
        gdp.GDP_GCL.create(gcl_name, logd_name, '');
    print "Created " + name_str

    print "Get the writer"
    gcl_handle_writer = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_AO)

    print "Get the subscriber"
    gcl_handle_subscriber = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)

    print "Make the subscribe call"
    # This is the actual subscribe call.
    gcl_handle_subscriber.subscribe(0, 0, None)

    print "About to loop"
    count = 0
    outputList = []
    while count < 10:
        count += 1
        line = str(count)
        # Create a minimalist datum dictionary
        datum = {"data": line}

        print "About to append data"
        gcl_handle_writer.append(datum)           # Write this datum to the GCL
        print "Done appending data"

        timeout = {'tv_sec':1, 'tv_nsec':0, 'tv_accuracy':0.0}

        print "About to call get_next_event()"

        event = gcl_handle_subscriber.get_next_event(timeout)
        datum = event["datum"]
        handle = event["gcl_handle"]
        print datum
        outputList.append(datum['data'])

    expectedList = ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10']
    if outputList != expectedList:
        raise ValueError(', '.join(map(str, outputList)) + " was not equal to " + ', '.join(map(str, expectedList)))
    print "OK"
Example 3
def open_gcl_write(gcl_complete_name_str):
    print "opening for writing: %s" % gcl_complete_name_str
    skey = gdp.EP_CRYPTO_KEY(filename=gcl_complete_name_str + ".pem",
                             keyform=gdp.EP_CRYPTO_KEYFORM_PEM,
                             flags=gdp.EP_CRYPTO_F_SECRET)
    gcl_name = gdp.GDP_NAME(gcl_complete_name_str)
    return gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_AO, open_info={'skey': skey})
Example 4
def store_reported_location(fileName, logNames):
  """Example implementation of the listener for reported locations. The function listens for reported locations 
     and stores them in a file.

     logNames - List of log names where the locations are reported (implements the report location primitive(s) of the SLI). 
     fileName - Name of the file where the received locations are stored.
  """

  obj_name_mapping = {}

  for name_str in logNames:

    gcl_name = gdp.GDP_NAME(name_str)
    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)
    obj_name_mapping[gcl_handle] = name_str
    gcl_handle.subscribe(0, 0, None)

  while True:
    event = gdp.GDP_GCL.get_next_event(None)
    timestamp_end = time.time()
    datum = event["datum"]
    gcl_name = obj_name_mapping[event["gcl_handle"]]
    data = datum["data"]
    timestamp_start = float(json.loads(data)['timestamp_start'])
    print gcl_name + str(': ') + 'New location information received.'
    print 'Latency: ' + str(timestamp_end - timestamp_start)
    string = gcl_name + ',' + str(timestamp_end) + ',' + data + '\n'
    with open(fileName, 'a') as the_file:
      the_file.write(string)
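
A minimal usage sketch for the listener above. The log names and output file below are placeholders, and the records are assumed to carry JSON with a 'timestamp_start' field, as the function expects.

import gdp

# Hypothetical invocation; the report-location log names must already exist.
gdp.gdp_init()
store_reported_location("reported_locations.csv",
                        ["lemic.localization.esi.report_location_1",
                         "lemic.localization.esi.report_location_2"])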
Example 5
def request_location(parameters, logName):
  """
  Implementation of the request location primitive of the SLI. 

  parameters:
    location_type - type of location information: global/local/semantic
    dimensionality - for global/local location information type, is it 2D/3D?
    accuracy - desired accuracy of the requested location information
    period - desired period of location information provisioning
    on_event - when is location information provided - periodically with the period, on change of step from preceding location
    step - size of change of location information
    duration - duration of location information provisioning 
    movement - do you want historical information, .e.g speed, orientation
  logName - name of the log for requesting location information
  """

  location_type = parameters['location_type']
  dimensionality = parameters['dimensionality'] 
  accuracy = parameters['accuracy']
  period = parameters['period'] 
  provisioning_type = parameters['provisioning_type'] 
  step = parameters['step']
  duration = parameters['duration'] 
  movement = parameters['movement']
  # Evaluation purpose only, can be removed later
  timestamp_start = parameters['timestamp_start']

  gcl_name = gdp.GDP_NAME(logName)
  gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA)

  # First parameter for evaluation purposes only, can be removed later.
  data = json.dumps({'timestamp_start': timestamp_start, 'location_type': location_type, 'dimensionality': dimensionality, 'accuracy': accuracy, 'period': period, 'provisioning_type': provisioning_type, 'step':  step, 'duration': duration, 'movement': movement})
  gcl_handle.append({'data': data})

  return 'New request written in the Request location log...'
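
A hedged usage sketch for request_location; the parameter values are placeholders chosen only to show the dictionary shape the function reads, and the log name follows the naming pattern used elsewhere in these examples.

import time
import gdp

gdp.gdp_init()
# Hypothetical parameters; every value is illustrative only.
parameters = {
    'location_type': 'global',
    'dimensionality': '2D',
    'accuracy': 1.0,
    'period': 5,
    'provisioning_type': 'periodic',
    'step': 0.5,
    'duration': 60,
    'movement': False,
    'timestamp_start': time.time(),  # evaluation purposes only
}
print request_location(parameters, 'lemic.localization.esi.request_location_1')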
Example 6
def main(*args):

    obj_name_mapping = {}

    for name_str in args:

        # create a python object
        gcl_name = gdp.GDP_NAME(name_str)

        # Assume that the GCL already exists
        gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)
        obj_name_mapping[gcl_handle] = name_str

        # this is the actual subscribe call
        gcl_handle.subscribe(0, 0, None)

    while True:

        # This blocks, until there is a new event
        event = gdp.GDP_GCL.get_next_event(None)
        datum = event["datum"]
        gcl_name = obj_name_mapping[event["gcl_handle"]]
        readable_time = time.ctime(datum["ts"]["tv_sec"] +
                                   (datum["ts"]["tv_nsec"] * 1.0 / 10**9))
        print_str = ">>> gcl_name: %s\n>>> recno: %d, ts: %s\n%s" % (
            gcl_name, datum["recno"], readable_time, datum["data"])
        print print_str
Example 7
def gcl_subscription_init():
    gdp.gdp_init()
    gcl_input = "ph.edu.upd.pcari.jasper.data"
    print "gcl: [%r]" % gcl_input
    gcl_name = gdp.GDP_NAME(gcl_input)
    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)
    return gcl_handle
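
The handle returned above is typically consumed with subscribe() and get_next_event(), as in the other examples in this collection; a minimal, hypothetical read loop:

gcl_handle = gcl_subscription_init()
gcl_handle.subscribe(0, 0, None)   # subscribe starting from the next record

while True:
    # Blocks until a new record arrives on the subscription.
    event = gcl_handle.get_next_event(None)
    print event["datum"]["data"]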
Example 8
def gcl_subscription_init(config_file):
    gdp.gdp_init()
    gcl_input = write_config("inputs.txt")[0]
    print "gcl: [%r]" % gcl_input
    gcl_name = gdp.GDP_NAME(gcl_input)
    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)
    return gcl_handle
Example 9
def gcl_append_init(gcl_input, pem_input):
    gdp.gdp_init()
    gcl_name = gdp.GDP_NAME(gcl_input)
    skey = gdp.EP_CRYPTO_KEY(filename=pem_input,
                             keyform=gdp.EP_CRYPTO_KEYFORM_PEM,
                             flags=gdp.EP_CRYPTO_F_SECRET)

    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA, {"skey": skey})
    return gcl_handle
Example 10
    def __init__(self,
                 srcSink="",
                 nameStr="",
                 paramName="",
                 lagVal=0,
                 normMeth='none',
                 key="",
                 password=""):
        if nameStr == "":
            raise ValueError("GCL name must be provided.")
        if paramName == "":
            raise ValueError("JSON parameter name must be provided.")
        # Log name in GDP
        self.gclName = gdp.GDP_NAME(nameStr)
        if srcSink == "":
            raise ValueError("Source/Sink must be provided.")
        elif srcSink == "GDP_I":
            self.IO = 'in'
            # Assume that GCL already exists and create the GCL handle
            self.gclHandle = gdp.GDP_GCL(self.gclName, gdp.GDP_MODE_RO)
        elif srcSink == "GDP_O":
            self.IO = 'out'
            if key == "" or password == "":
                raise ValueError("Key path and password must be provided.")
            else:
                skey = gdp.EP_CRYPTO_KEY(filename=key,
                                         keyform=gdp.EP_CRYPTO_KEYFORM_PEM,
                                         flags=gdp.EP_CRYPTO_F_SECRET)
                open_info = {'skey': skey}
                # TODO Bypass password prompt
                # Assume that GCL already exists and create the GCL handle
                self.gclHandle = gdp.GDP_GCL(self.gclName, gdp.GDP_MODE_RA,
                                             open_info)
        # JSON parameter name to be used in each log record
        self.param = paramName
        # Lag from the current record. Can be used to implement time series functions.
        self.lag = lagVal
        # Normalization method for data:
        # 'none': no normalization
        # 'lin': linear normalization: mean-zeroed and divided by std
        self.norm = normMeth
        # Normalization parameters (i.e., avg, std etc.)
        self.normParam = {}
Example 11
def gcl_append_init():
    gdp.gdp_init()
    gcl_input = 'ph.edu.upd.pcari.jasper.data'
    pem_input = '_data.pem'

    gcl_name = gdp.GDP_NAME(gcl_input)
    skey = gdp.EP_CRYPTO_KEY(filename=pem_input,
                             keyform=gdp.EP_CRYPTO_KEYFORM_PEM,
                             flags=gdp.EP_CRYPTO_F_SECRET)

    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA, {"skey": skey})
    return gcl_handle
Example 12
def gdp_thread():
    gdp.gdp_init()
    print("GDP: connected.")
    gcl_name = gdp.GDP_NAME(args.gdp_log)
    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA)
    print("GDP: got log handle")
    while True:
        im_fname = gdp_image_queue.get()
        im = Image.open(im_fname)
        data = {"data": im.tostring()}
        gcl_handle.append(data)
        print("GDP: posted " + im_fname)
Example 13
def gdp_source():

    rospy.init_node("gdp_source")
    gdp.gdp_init()
    lock = Lock()
    args = _parse_args()

    topic_dict = {}

    gcl_name = gdp.GDP_NAME(args.logname)
    loghandle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)
    loghandle.subscribe(0, 0, None)

    try:
        buf = ""
        while not rospy.is_shutdown():

            event = loghandle.get_next_event(None)
            data = event["datum"]["data"]
            d = pickle.loads(data)
            if args.preserve:
                topic = d["topic"]
            else:
                topic = "/gdp" + rospy.names.resolve_name(d["topic"])
            topic_type = d["topic_type"]

            try:
                assert len(buf) == d["offset"]
            except AssertionError:
                ## This is when we start at the wrong time, and some
                ## chunks of a message already have been missed.
                continue

            buf = buf + d["data"]

            if len(buf) == d["msg_size"]:
                with lock:  ## get publisher, create if doesn't exist
                    pub = topic_dict.get(topic, None)
                    if pub is None:
                        msg_class = roslib.message.get_message_class(
                            topic_type)
                        pub = rospy.Publisher(topic, msg_class, queue_size=10)
                        topic_dict[topic] = pub

                print "Publishing message"
                pub.publish(pickle.loads(zlib.decompress(buf)))
                buf = ""

    except rospy.ROSInterruptException:
        pass

    del loghandle
Example 14
def gcl_append_init(config_file):
    gdp.gdp_init()
    gcl_input, pem_input = write_config(config_file)
    print "gcl: [%r]" % gcl_input
    print "pem: [%r]" % pem_input

    gcl_name = gdp.GDP_NAME(gcl_input)
    skey = gdp.EP_CRYPTO_KEY(filename=pem_input,
                             keyform=gdp.EP_CRYPTO_KEYFORM_PEM,
                             flags=gdp.EP_CRYPTO_F_SECRET)

    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA, {"skey": skey})
    return gcl_handle
Example 15
def main(provisioning_service_id):

    print 'Creating the Register service log...'
    string = 'lemic.localization.esi.register_service'
    print subprocess.call([
        './../gdp/apps/gcl-create', '-k', 'none', '-G', 'localhost',
        'test.localization', string
    ])
    print 'The Register service log most probably already exists, but no worries, ignore the previous error!'

    # Check if a service with that ID already exists!
    gdp.gdp_init('localhost')
    gcl_name = gdp.GDP_NAME(string)
    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)

    # Read the whole register service log
    recno = 1
    services = []
    while True:
        try:
            datum = gcl_handle.read(recno)
            services.append(json.loads(datum['data'])['service_id'])
            recno += 1
        except:
            break

    if provisioning_service_id not in services:
        # Write an entry in the log
        logName = 'lemic.localization.esi.register_service'
        gcl_name = gdp.GDP_NAME(logName)
        gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA)

        data = json.dumps({'service_id': provisioning_service_id})
        gcl_handle.append({'data': data})
        print 'Provisioning service ' + str(
            provisioning_service_id
        ) + ' successfully registered for provisioning.'
    else:
        print 'Log ID has to be unique! Be creative!'
Example 16
    def read(self, name_str):
        """
        Reads an info_log and returns the capabilities, permissions, public key(s)
        (if present) and certificate (if present) described in the info_log.

        Parameters:
        name_str - name of the info_log from which to read
        """
        gcl_name = gdp.GDP_NAME(name_str)
        gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)

        recno = 1
        capabilities = []
        permissions = []
        pkeys = []
        certificate = None
        try:
            datum = gcl_handle.read(recno)
            num_capabilities = int(datum['data'])
            recno += 1
            datum = gcl_handle.read(recno)
            num_permissions = int(datum['data'])
            recno += 1
            datum = gcl_handle.read(recno)
            num_pkeys = int(datum['data'])
            recno += 1
            while recno <= 3 + num_capabilities:
                datum = gcl_handle.read(recno)
                capabilities.append(datum['data'])
                recno += 1
            while recno <= 3 + num_capabilities + num_permissions:
                datum = gcl_handle.read(recno)
                permissions.append(datum['data'])
                recno += 1
            while recno <= 3 + num_capabilities + num_permissions + num_pkeys:
                datum = gcl_handle.read(recno)
                pkeys.append(datum['data'])
                recno += 1  
            datum = gcl_handle.read(recno)
            certificate = datum['data']
        except:
            pass # Error could be because there is no certificate

        return capabilities, permissions, pkeys, certificate

        # capabilities = ["capability1", "capability2", "capability3"]
        # permissions = ["permission1", "permission2", "permission3"]
        # pkeys = ["pkey1", "pkey2", "pkey3", "pkey4"]
        # certificate = "test_certificate"
        # return capabilities, permissions, pkeys, certificate
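
A hedged example of how read() might be called, assuming `reader` is an instance of the (unshown) class this method belongs to and that the placeholder log follows the layout described in the docstring (three counts, then capabilities, permissions, public keys, and an optional certificate).

# Hypothetical usage; the info_log name is a placeholder.
capabilities, permissions, pkeys, certificate = reader.read("edu.example.device.info_log")
print "capabilities: %r" % capabilities
print "permissions:  %r" % permissions
print "public keys:  %d" % len(pkeys)
print "certificate present: %s" % (certificate is not None)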
Example 17
def gdp_sink():

    rospy.init_node("gdp_sink")
    gdp.gdp_init()
    lock = Lock()
    args = _parse_args()

    gcl_name = gdp.GDP_NAME(args.logname)
    lh = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA)

    for t in args.topic:
        rospy.Subscriber(t, AnyMsg, callback, callback_args=(t, lh, lock))

    rospy.spin()
Example 18
def push_rss_to_log():
  """
    Storing the collected RSS scans into a GDP log.
  """

  logName = 'lemic.localization.resources'
  scan = get_rss_scan()

  gcl_name = gdp.GDP_NAME(logName)
  gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA)

  gcl_handle.append({'data': scan})

  return 'New scan pushed to the resource log...'
def get_resource():

    logName = 'lemic.localization.resources'
    gcl_name = gdp.GDP_NAME(logName)
    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)
    data = {}
    datum1 = gcl_handle.read(-1)
    data[0] = json.loads(datum1['data'])['data']
    datum2 = gcl_handle.read(-2)
    data[1] = json.loads(datum2['data'])['data']
    datum3 = gcl_handle.read(-3)
    data[2] = json.loads(datum3['data'])['data']
    datum4 = gcl_handle.read(-4)
    data[3] = json.loads(datum4['data'])['data']
    return data
Example 20
def open_gcl_read(gcl_complete_name_str, timeout=0):
    print "opening for reading: %s" % gcl_complete_name_str
    start = time.time()
    handler = []
    while time.time() - start <= timeout + 1:
        try:
            gcl_name = gdp.GDP_NAME(gcl_complete_name_str)
            handler = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)
            break
        except:
            pass
        if timeout == 0:
            break
    del gcl_name
    return handler
    def initialize_gcl_handles(self):
        print('GDPDataProcessor: Initializing GCL handles')
        for addr_gdp in self.list_addr_dict:
            print('GDPDataProcessor.initialize_gcl_handles: Initializing %s' %
                  (addr_gdp['addr']))
            try:
                gcl_name = gdp.GDP_NAME(addr_gdp['addr'])
                addr_gdp['gcl_name'] = gcl_name
                addr_gdp['gcl_handle'] = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)
                print('GDPDataProcessor.initialize_gcl_handles:   Success!')
            except:
                addr_gdp['gcl_name'] = None
                addr_gdp['gcl_handle'] = None
                print('GDPDataProcessor.initialize_gcl_handles:   FAIL')
        print('GDPDataProcessor:   Finished!')
def request_discovery(ils_id, services):
    """
    Discovery of provisioning features form the available provisioning services.

    ils_id - ID of this integrated location service, needed for shared provisioning services
    services - services whose provisioning features are to be requested
  """

    # Request features discovery
    for service_id in services:

        # Writing an entry to a 'service discovery' log of each requested provisioning service
        logName = 'lemic.localization.esi.service_discovery_' + str(service_id)
        gcl_name = gdp.GDP_NAME(logName)
        gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA)
        data = json.dumps({'ils_id': ils_id})
        gcl_handle.append({'data': data})

    return
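
A short, hypothetical call of request_discovery; the IDs are placeholders, and the per-service 'service_discovery_<id>' logs written inside the function must already exist.

import gdp

gdp.gdp_init('localhost')
# Ask provisioning services 1 and 2 to publish their provisioning features.
request_discovery('ils_0', [1, 2])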
def query_for_location(requests, provisioning_features, ils_id, mapper_q):
    """
    Requesting location information provisioning from the selected provisioning services.

    requests - requests for location information by the applications
    provisioning_features - provisioning features of the available provisioning services
    ils_id - ID of this integrated location service, needed for shared provisioning services
    mapper_q - contains mapping between the requirements from the applications and the locations provided by the provisioning services
  """

    # This variable is used as a buffer for matching the requirements from the applications with the selected provisioning services.
    memo = {}
    timestamp = float("{0:.2f}".format(time.time()))
    memo[timestamp] = {}
    # Select provisioning services to be invoked
    #selected_services = SA.select_provisioning_services_prsa(requests, provisioning_features)
    selected_services = SA.select_provisioning_services_ptsa(
        requests, provisioning_features)
    for sv in selected_services:
        memo[timestamp][sv] = {}
        memo[timestamp][sv]['elements'] = selected_services[sv]['elements']
        memo[timestamp][sv]['accuracy'] = selected_services[sv]['accuracy']
        # Evaluation purposes only, can be removed later
        memo[timestamp][sv]['timestamp_start'] = requests[sv][
            'timestamp_start']
    selected_ids = [
        selected_services[i]['elements'] for i in selected_services
    ]
    selected_ids_final = list(
        set([val for sublist in selected_ids for val in sublist]))
    for id in selected_ids_final:
        logName = 'lemic.localization.esi.request_location_' + str(id[-1])
        gcl_name = gdp.GDP_NAME(logName)
        gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA)
        data = json.dumps({'ils_id': ils_id, 'timestamp': timestamp})
        gcl_handle.append({'data': data})

    mapper_q.put(memo)
    return
Example 24
def main(name_str, keyfile):

    skey = gdp.EP_CRYPTO_KEY(filename=keyfile,
                             keyform=gdp.EP_CRYPTO_KEYFORM_PEM,
                             flags=gdp.EP_CRYPTO_F_SECRET)

    # Create a GDP_NAME object from a python string provided as argument
    gcl_name = gdp.GDP_NAME(name_str)

    # There's a GCL with the given name, so let's open it
    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_AO,
                                open_info={'skey':skey})

    while True:

        d = getInfo()
        # convert that to a nice string
        string_to_write = json.dumps(d)

        # Create a minimalist datum dictionary
        datum = {"data": string_to_write}
        gcl_handle.append(datum)           # Write this datum to the GCL
        time.sleep(SLEEP_INTERVAL)
Example 25
def setup_client(info_log,
                 signing_key_file,
                 capabilities,
                 permissions,
                 insecure=False,
                 router=None,
                 num_clients=1,
                 ca_key=None,
                 ca_crt=None):
    """
    Performs the following:
    - Creates num_clients number of private keys for clients and writes them
      to the current directory
    - Writes capabilities and permissions to the info_log
    - Writes a public key for each client to the info_log
    - Writes a certificate signed by a CA to the info_log if a CA key and CA
      certificate are provided

    Parameters:
    info_log - 43 byte name of the empty log to which the client(s) information
               will be written
    signing_key_file - signing key file in pem format for the info_log
    capabilities - list of capabilities of the client(s)
    permissions - list of the permissions of the client(s)
    insecure - if False, key pairs for each client will be produced and gdp discovery
               services will perform authentication with the client(s). Otherwise
               the client(s) will not be authenticated.
    router - address and port of gdp router in "<address>:<port>" format
    num_clients - number of key pairs to be produced
    ca_key and ca_crt - used to sign the info_log's certificate. If these are not
                        provided, the info_log will not be certified.
    """
    # Initialize GDP
    if router:
        router = router.split(":")
        host = router[0]
        port = int(router[1])
        gdp.gdp_init(host, port)
    else:
        gdp.gdp_init()

    # Open the info_log
    info_log_name = info_log
    skey = gdp.EP_CRYPTO_KEY(filename=signing_key_file,
                             keyform=gdp.EP_CRYPTO_KEYFORM_PEM,
                             flags=gdp.EP_CRYPTO_F_SECRET)
    gcl_name = gdp.GDP_NAME(info_log_name)
    gcl_handle = gdp.GDP_GCL(gcl_name,
                             gdp.GDP_MODE_AO,
                             open_info={'skey': skey})

    # Write capabilities and permissions to info_log

    datum = {"data": str(len(capabilities))}
    gcl_handle.append(datum)  # Write number of capabilities
    datum = {"data": str(len(permissions))}
    gcl_handle.append(datum)  # Write number of permissions
    datum = {"data": str(num_clients)}
    gcl_handle.append(datum)  # Write number of public keys
    for capability in capabilities:  # Write capabilities
        datum = {"data": capability}
        gcl_handle.append(datum)
    for permission in permissions:  # Write permissions
        datum = {"data": permission}
        gcl_handle.append(datum)

    # Create key(s)
    if not insecure:
        random_generator = Random.new()
        for i in range(num_clients):
            key = RSA.generate(1024, random_generator.read)
            # Write the private key to a file and append the public key to the info_log
            with open("key" + str(i) + ".pem", 'w') as keyfile:
                keyfile.write(key.exportKey(format='PEM'))
            datum = {"data": key.publickey().exportKey(format='PEM')}
            gcl_handle.append(datum)

    # Create a certificate and write it to the info log
    if ca_crt and ca_key:
        # load CA cert and key
        ca_cert = crypto.load_certificate(crypto.FILETYPE_PEM,
                                          open(ca_crt).read())
        ca_key = crypto.load_privatekey(crypto.FILETYPE_PEM,
                                        open(ca_key).read())

        # create info_log key
        info_log_key = crypto.PKey()
        info_log_key.generate_key(crypto.TYPE_RSA, 2048)

        # create CSR
        req = crypto.X509Req()
        req.get_subject().CN = info_log_name
        req.set_pubkey(info_log_key)
        req.sign(info_log_key, 'sha256')

        # create unique serial number using info_log name
        md5_hash = hashlib.md5()
        md5_hash.update(info_log_name)
        serial = int(md5_hash.hexdigest(), 36)

        # create certificate
        cert = crypto.X509()
        cert.set_serial_number(serial)
        cert.gmtime_adj_notBefore(0)
        cert.gmtime_adj_notAfter(365 * 24 * 60 * 60 *
                                 1000)  # make certificate last 1000 yrs
        cert.set_issuer(ca_cert.get_subject())
        cert.set_subject(req.get_subject())
        cert.set_pubkey(req.get_pubkey())
        cert.sign(ca_key, 'sha256')

        # write certificate to info_log
        datum = {"data": crypto.dump_certificate(crypto.FILETYPE_PEM, cert)}
        gcl_handle.append(datum)
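
A hedged invocation sketch for setup_client; every argument value below (log name, key file, capability and permission strings, router address, CA files) is a placeholder.

# Hypothetical call; all values are illustrative only.
setup_client(info_log="edu.example.device.info_log",
             signing_key_file="info_log_signing_key.pem",
             capabilities=["read_temperature", "read_humidity"],
             permissions=["owner_only"],
             insecure=False,
             router="127.0.0.1:8007",
             num_clients=2,
             ca_key="ca.key.pem",
             ca_crt="ca.crt.pem")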
Example 26
import gdp
from random import *


def write_config(filename):
    r = open(filename, 'r')
    gcl_input = r.readline().rstrip('\n')
    pem_input = r.readline().rstrip('\n')
    r.close()
    return gcl_input, pem_input


gdp.gdp_init()
random_int = randint(1, 100)
gcl_input = write_config("inputs.txt")[0]
print "gcl: [%r]" % gcl_input

gcl_name = gdp.GDP_NAME(gcl_input)
gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)

#GDP Reading
gcl_handle.multiread(-5, 4)

while True:
    event = gcl_handle.get_next_event(None)
    if event['type'] == gdp.GDP_EVENT_EOS:
        break
    print event['datum']['data']

exit()
Example 27
# -*- coding: utf-8 -*-
import gdp
from random import *


def write_config(filename):
    r = open(filename, 'r')
    gcl_input = r.readline().rstrip('\n')
    pem_input = r.readline().rstrip('\n')
    r.close()
    return gcl_input, pem_input


gdp.gdp_init()
random_int = randint(1, 100)
gcl_input, pem_input = write_config("inputs.txt")
print "gcl: [%r]" % gcl_input
print "pem: [%r]" % pem_input

gcl_name = gdp.GDP_NAME(gcl_input)
skey = gdp.EP_CRYPTO_KEY(filename=pem_input,
                         keyform=gdp.EP_CRYPTO_KEYFORM_PEM,
                         flags=gdp.EP_CRYPTO_F_SECRET)

gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA, {"skey": skey})

#GDP Writing
gcl_handle.append({"data": str(random_int)})
print "sent:", random_int
exit()
    def run(self):

        gdp.gdp_init('localhost')
        logs = [
            'lemic.localization.esi.service_discovery_' +
            str(self.provisioning_service_id),
            'lemic.localization.esi.request_service_' +
            str(self.provisioning_service_id),
            'lemic.localization.esi.request_location_' +
            str(self.provisioning_service_id)
        ]

        obj_name_mapping = {}
        for name_str in logs:

            gcl_name = gdp.GDP_NAME(name_str)
            gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)
            obj_name_mapping[gcl_handle] = name_str
            gcl_handle.subscribe(0, 0, None)

        while True:

            event = gdp.GDP_GCL.get_next_event(None)
            datum = event["datum"]
            gcl_name = obj_name_mapping[event["gcl_handle"]]

            # React on discover service -> offer service
            if gcl_name == logs[0]:

                ils_id = json.loads(datum['data'])['ils_id']
                print 'Got request'
                logName = 'lemic.localization.esi.service_offering_' + str(
                    self.provisioning_service_id)
                gcl_name = gdp.GDP_NAME(logName)
                gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA)

                accuracy, latency, power_consumption, provisioning = \
                    generate_offering_static()
                data = json.dumps({
                    'ils_id': ils_id,
                    'accuracy': accuracy,
                    'latency': latency,
                    'power_consumption': power_consumption,
                    'elements': self.provisioning_service_id
                })
                gcl_handle.append({'data': data})

            # React on request service -> report granted duration
            if gcl_name == logs[1]:

                logName = 'lemic.localization.esi.report_service_' + str(
                    self.provisioning_service_id)
                gcl_name = gdp.GDP_NAME(logName)
                gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA)

                duration = provisioning_duration_policy()

                data = json.dumps({'duration': duration})
                gcl_handle.append({'data': data})

            # React on request location -> read last resource -> generate location -> report location
            if gcl_name == logs[2]:

                ils_id = json.loads(datum['data'])['ils_id']
                timestamp = json.loads(datum['data'])['timestamp']
                print 'Got location request..'
                # Read the last WiFi scan
                wifi_scan = get_resource()

                try:
                    location = fa.getPositionEstimateEuclidean(
                        wifi_scan[0], self.training_path)
                except:
                    location = None

                logName = 'lemic.localization.esi.report_location_' + str(
                    self.provisioning_service_id)
                gcl_name = gdp.GDP_NAME(logName)
                gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA)

                data = json.dumps({
                    'location': location,
                    'ils_id': ils_id,
                    'timestamp': timestamp
                })
                gcl_handle.append({'data': data})
Example 29
    def run(self):
        while True:
            #read one byte at a time in a loop
            by = self.sp.read(1)

            if(len(by) == 0):
                continue

            #if the byte is one of the escape characters read it in
            byte = None
            try:
                byte = by.decode("utf-8")
                #sys.stdout.write(byte)
            except UnicodeDecodeError:
                #it won't be valid if this fails
                continue

            buf = None
            if(byte == "#"):
                #see if the next character is a reset
                byte = self.sp.read(1);
                if(byte == "r"):
                    print("Fake-Radio Reset. Ready to receive radio Commands!")
                elif(byte == "w"):
                    #waiting on you to return data
                    #is this the state you should be in right now??
                    print("waiting on response");
                    pass
                elif(byte == "p"):
                    #waiting on you to return data
                    #is this the state you should be in right now??
                    print("Kernel Panic - dumping buffer");
                    #use a bug number just cause
                    buf = self.sp.read(16000);
                    print(buf.decode("utf-8"))
                    pass
                else:
                    sys.stdout.write("#" + byte.decode('utf-8'))

                continue
            elif(byte == "$"):
                #this is an actual message
                #read two more bytes to get the length
                num_bytes = struct.unpack('<H', self.sp.read(2))[0]

                #read in length number of bytes
                buf = self.sp.read(num_bytes)

                #did we get the number of bytes or timeout?
                if(len(buf) < num_bytes):
                    #we have a long enough timeout this shouldn't happen
                    #disregard this message
                    print("Received buffer shorted than expected. Discarding")
                    continue
            else:
                sys.stdout.write(byte)
                continue


            #we have a valid buffer, we should parse it
            url_len_struct = struct.unpack('<H', buf[0:2])
            url_len = url_len_struct[0]
            buf = buf[2:]
            url = buf[0:url_len].decode("utf-8")
            buf = buf[url_len:]
            num_headers = struct.unpack('<B', buf[0:1])[0]
            buf = buf[1:]
            headers = {}
            for i in range(0,num_headers):
                header_len = struct.unpack('<B',buf[0:1])[0]
                buf = buf[1:]
                header = buf[0:header_len].decode("utf-8")
                buf = buf[header_len:]
                value_len = struct.unpack('<B',buf[0:1])[0]
                buf = buf[1:]
                value = buf[0:value_len].decode("utf-8")
                buf = buf[value_len:]
                headers[header] = value


            body_len = struct.unpack('<H', buf[0:2])[0]
            buf = buf[2:]
            body = bytearray()
            body.extend(buf[:body_len])

            #now that we have parsed the buffer, post
            #split url into the first and second parts
            s_index = url.find("/")
            base = url[:s_index]
            end = url[s_index:]

            # is the base the gdp address?
            if(base == "gdp.lab11.eecs.umich.edu"):
                    stat = 0
                    reason = ""
                    print("")
                    print("#######################################################")
                    print("Trying to post to GDP")
                    index1 = 1+end[1:].find("/")
                    index2 = index1 + 1 + end[index1+1:].find("/")
                    index3 = index2 + 1 + end[index2+1:].find("/")
                    #version
                    try:
                        version = end[index1+1:index2]
                        log_name = end[index2+1:index3]
                        function = end[index3+1:]
                    except:
                        print("There was an error, aborting")
                        print("Do you have GDP installed?")
                        print("#######################################################")
                        print("")
                        continue

                    if(function == "append" or function == "Append"):
                            print("Attempting to append to log name {}".format(log_name))
                            #try to create the log. Don't know how to do this in python
                            #so instead call the shell
                            ret = os.system("gcl-create -C [email protected] -k none " + log_name)
                            if((ret >> 8) == 0):
                                print("Successfully created log")
                                stat = 201
                                reason = "OK - Log Created"
                            elif((ret >> 8) == 73):
                                print("Log already exists")
                                stat = 200
                                reason = "OK"
                            else:
                                print("An unkown gdp error(code {}) occurred).".format(str((ret >> 8))))
                                stat = 500
                                reason = "Server Error"

                            try:
                                gcl_name = gdp.GDP_NAME(log_name)
                                gcl_handle = gdp.GDP_GCL(gcl_name,gdp.GDP_MODE_AO)
                                gcl_handle.append({"signpost-data": body})
                                print("Append success")
                            except:
                                print("There was an error, aborting")
                                stat = 500
                                reason = "Server Error"
                    else:
                        print("Does not support that function")
                        stat = 503
                        reason = "Service Unkown"

                    #form the response here based on some of the stats above
                    send_buf = bytearray()
                    send_buf.extend(struct.pack('<H',stat))
                    send_buf.extend(struct.pack('<H',len(reason)))
                    send_buf.extend(reason)
                    send_buf.extend(struct.pack('<B',2))

                    send_buf.extend(struct.pack('<B',len("content-type")))
                    send_buf.extend("content-type")
                    send_buf.extend(struct.pack('<B',len("application/octet-stream")))
                    send_buf.extend("application/octet-stream")

                    send_buf.extend(struct.pack('<B',len("content-length")))
                    send_buf.extend("content-length")
                    send_buf.extend(struct.pack('<B',len("1")))
                    send_buf.extend("1")
                    send_buf.extend(struct.pack('<H',1))
                    send_buf.extend(struct.pack('<B',0x00))
                    self.sp.write(send_buf)
                    print("#######################################################")
                    print("")

            else:
                #this is a real http post. let's do it
                print("")
                print("#######################################################")
                print("Trying to post to {}".format(url))
                print("Post headers: {}".format(headers))
                is_ascii = False
                try:
                    if re.match('^[\x0a-\x7F]+$', body.decode('utf-8')):
                        is_ascii = True
                except UnicodeDecodeError:
                    pass
                if is_ascii:
                    # all bytes in body are printable characters
                    print("Post body: {}".format(body.decode('utf-8')))
                else:
                    print("Post body: <binary data, length {}>".format(len(body)))
                    print('  ' + ' '.join(map(lambda x: str.format('{:02x}', x), body)))
                print("")
                try:
                    conn = httplib.HTTPConnection(base)
                    conn.request("POST",end,body,headers)
                    response = conn.getresponse()
                except:
                    print("Post failed, please check your destination URL")
                    print("#######################################################")
                    print("")
                    continue


                #we should send this back, but for now that's good
                print("Post Succeeded! See response below.")
                print("Status: {}, Reason: {}".format(response.status,response.reason))
                body = response.read()
                print("Body: {}".format(body.decode('utf-8')))
                print("")
                #now format the response and send it back to the radio
                send_buf = bytearray()
                send_buf.extend(struct.pack('<H',response.status))
                send_buf.extend(struct.pack('<H',len(response.reason)))
                send_buf.extend(response.reason.encode('utf-8'))
                send_buf.extend(struct.pack('<B',len(response.getheaders())))
                for header in response.getheaders():
                    h0 = header[0].encode('utf-8')
                    h1 = header[1].encode('utf-8')
                    send_buf.extend(struct.pack('<B',len(h0)))
                    send_buf.extend(h0)
                    send_buf.extend(struct.pack('<B',len(h1)))
                    send_buf.extend(h1)
                send_buf.extend(struct.pack('<H',len(body)))
                send_buf.extend(body)
                self.sp.write(send_buf)
                print("Sending response back to radio")
                print("#######################################################")
                print("")
Example 30
    def __init__(self,
                 root,
                 mode=MODE_RO,
                 keyfile=None,
                 freq=100,
                 cache_size=1000,
                 size_factor=2,
                 X_factor=0.8):
        """
        Initialize the instance with the root log. By default, we open 
             log in read only mode.
          Parameters name: description
        - keyfile        : A private signing key for the log (PEM format)
        - mode           : Read-only or Read-Write mode
        - freq           : checkpoint frequency
        - cache_size     : max records to hold in in-memory cache
        - size_factor    : Change the checkpoint level if the size
                             of the new checkpoint differs by this factor
        - X_factor       : Change the checkpoint level if there is a 
                             certain amount of overlap in keys of old 
                             checkpoint and new checkpoint
        """

        self.__iomode = mode
        self.__freq = freq
        self.__cp_size_factor = size_factor
        self.__cp_X_factor = X_factor
        self.__cache_size = cache_size
        assert self.__cache_size > 0

        gdp_iomode = gdp.GDP_MODE_RO if mode == self.MODE_RO else gdp.GDP_MODE_RA

        gdp.gdp_init()  ## XXX: Not sure if this is the best idea
        # Setup the key

        open_info = {}
        if keyfile is not None:
            skey = gdp.EP_CRYPTO_KEY(filename=keyfile,
                                     keyform=gdp.EP_CRYPTO_KEYFORM_PEM,
                                     flags=gdp.EP_CRYPTO_F_SECRET)
            open_info['skey'] = skey

        self.__root = gdp.GDP_NAME(root)
        self.__root_handle = gdp.GDP_GCL(self.__root, gdp_iomode, open_info)

        # a cache for records. recno => datum
        # datum may or may not contain a timestamp and recno
        self.__cache = OrderedDict()

        # find the number of records by querying the most recent record
        try:
            datum = self.__root_handle.read(-1)
            self.__num_records = datum["recno"]
            self.__cache[self.__num_records] = datum
        except (gdp.MISC.EP_STAT_SEV_ERROR, gdp.MISC.EP_STAT_SEV_WARN) as e:
            if "Berkeley:Swarm-GDP:404" in e.msg:
                self.__num_records = 0
            else:
                raise e

        # set up lock for adding new data to the log
        # >> we want the __setitems__ to be atomic, because that also
        #    includes the checkpointing logic
        self.log_lock = threading.Lock()  # unused in MODE_RO

        # for a read-only KVstore, make sure we have a subscription
        #   in a separate thread to keep things most up to date
        if self.__iomode == self.MODE_RO:
            t = threading.Thread(target=self.__subscription_thread)
            t.daemon = True
            t.start()
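
A hedged instantiation sketch; 'KVStore' and the log name are assumptions, since the enclosing class name and its mode constants are not shown in this excerpt.

# Hypothetical usage: open the root log read-only with the defaults described above.
kv = KVStore("edu.example.kvstore.root",
             mode=KVStore.MODE_RO,   # a background subscription keeps the cache current
             freq=100,               # checkpoint frequency
             cache_size=1000)        # max records held in the in-memory cache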