Example #1
def main(logname, inputDevicePrefix=None):

    gdp.gdp_init()
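    # Open the log in read-append (RA) mode so the parameter record can be read
    # back, or appended below if it does not exist yet.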
    lh = gdp.GDP_GCL(gdp.GDP_NAME(logname), gdp.GDP_MODE_RA)

    # Okay, get the parameters
    try:
        # try reading from the first record
        firstRecord = lh.read(1)
        audioParams = json.loads(firstRecord['data'])
        mic_id, max_params = getDeviceParams(inputDevicePrefix)
        assert audioParams['samplingRate'] <= max_params['samplingRate']
        assert audioParams['channels'] <= max_params['channels']

    except gdp.MISC.EP_STAT_SEV_ERROR as e:
        # in case first record does not exist, let's write it
        if e.msg.startswith("ERROR: 404 ") or \
                        e.msg.startswith('ERROR: 4.04 '):
            mic_id, audioParams = getDeviceParams(inputDevicePrefix)
            lh.append( {"data": json.dumps(audioParams)} )
        else:
            # this is some other error, let's just raise it as it is
            raise e

    # start recording
    recordToLog(lh, sampleWidth=audioParams['sampleWidth'],
                    channels=audioParams['channels'],
                    samplingRate=audioParams['samplingRate'],
                    samplesPerRecord=audioParams['samplesPerRecord'],
                    device_id=mic_id)
Example #2
def gcl_subscription_init(config_file):
    gdp.gdp_init()
    gcl_input = write_config(config_file)[0]
    print "gcl: [%r]" % gcl_input
    gcl_name = gdp.GDP_NAME(gcl_input)
    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)
    return gcl_handle
Example #3
def gcl_subscription_init():
    gdp.gdp_init()
    gcl_input = "ph.edu.upd.pcari.jasper.data"
    print "gcl: [%r]" % gcl_input
    gcl_name = gdp.GDP_NAME(gcl_input)
    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)
    return gcl_handle
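# A minimal usage sketch (not part of the original example): consume the handle
# returned above with a subscription, following the pattern used by the
# subscriber examples further down (subscribe(0, 0, None) + get_next_event).
def print_new_records():
    gcl_handle = gcl_subscription_init()
    gcl_handle.subscribe(0, 0, None)
    while True:
        # blocks until the next record arrives on the subscription
        event = gcl_handle.get_next_event(None)
        print event["datum"]["data"]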
Example #4
def gcl_append_init(gcl_input, pem_input):
    gdp.gdp_init()
    gcl_name = gdp.GDP_NAME(gcl_input)
    skey = gdp.EP_CRYPTO_KEY(filename=pem_input,
                             keyform=gdp.EP_CRYPTO_KEYFORM_PEM,
                             flags=gdp.EP_CRYPTO_F_SECRET)

    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA, {"skey": skey})
    return gcl_handle
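# A minimal usage sketch (not part of the original example; the log name and key
# path are hypothetical): append a single JSON record through the signed handle,
# using the same datum format as the other examples.
def append_one_record():
    import json  # assumed available, as in the examples above
    gcl_handle = gcl_append_init('edu.example.demo.log', 'demo_key.pem')
    gcl_handle.append({"data": json.dumps({"msg": "hello"})})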
Example #5
def gcl_append_init():
    gdp.gdp_init()
    gcl_input = 'ph.edu.upd.pcari.jasper.data'
    pem_input = '_data.pem'

    gcl_name = gdp.GDP_NAME(gcl_input)
    skey = gdp.EP_CRYPTO_KEY(filename=pem_input,
           keyform=gdp.EP_CRYPTO_KEYFORM_PEM, flags=gdp.EP_CRYPTO_F_SECRET)

    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA, {"skey":skey})
    return gcl_handle
Example #6
def gdp_thread():
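    # Worker thread: take image file names from gdp_image_queue (assumed to be a
    # module-level Queue), load each image with PIL, and append its raw bytes to
    # the log named by args.gdp_log.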
    gdp.gdp_init()
    print("GDP: connected.")
    gcl_name = gdp.GDP_NAME(args.gdp_log)
    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA)
    print("GDP: got log handle")
    while True:
        im_fname = gdp_image_queue.get()
        im = Image.open(im_fname)
        data = {"data": im.tostring()}
        gcl_handle.append(data)
        print("GDP: posted " + im_fname)
Example #7
def gdp_source():
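    # Subscribe to the GDP log, reassemble the chunked, zlib-compressed and
    # pickled ROS messages (using the offset/msg_size bookkeeping in each datum),
    # and republish each completed message on its ROS topic.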

    rospy.init_node("gdp_source")
    gdp.gdp_init()
    lock = Lock()
    args = _parse_args()

    topic_dict = {}

    gcl_name = gdp.GDP_NAME(args.logname)
    loghandle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)
    loghandle.subscribe(0, 0, None)

    try:
        buf = ""
        while not rospy.is_shutdown():

            event = loghandle.get_next_event(None)
            data = event["datum"]["data"]
            d = pickle.loads(data)
            if args.preserve:
                topic = d["topic"]
            else:
                topic = "/gdp" + rospy.names.resolve_name(d["topic"])
            topic_type = d["topic_type"]

            try:
                assert len(buf) == d["offset"]
            except AssertionError:
                ## This is when we start at the wrong time, and some
                ## chunks of a message already have been missed.
                continue

            buf = buf + d["data"]

            if len(buf) == d["msg_size"]:
                with lock:  ## get publisher, create if doesn't exist
                    pub = topic_dict.get(topic, None)
                    if pub is None:
                        msg_class = roslib.message.get_message_class(
                            topic_type)
                        pub = rospy.Publisher(topic, msg_class, queue_size=10)
                        topic_dict[topic] = pub

                print "Publishing message"
                pub.publish(pickle.loads(zlib.decompress(buf)))
                buf = ""

    except rospy.ROSInterruptException:
        pass

    del loghandle
Example #8
def gcl_append_init(config_file):
    gdp.gdp_init()
    gcl_input, pem_input = write_config(config_file)
    print "gcl: [%r]" % gcl_input
    print "pem: [%r]" % pem_input

    gcl_name = gdp.GDP_NAME(gcl_input)
    skey = gdp.EP_CRYPTO_KEY(filename=pem_input,
                             keyform=gdp.EP_CRYPTO_KEYFORM_PEM,
                             flags=gdp.EP_CRYPTO_F_SECRET)

    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA, {"skey": skey})
    return gcl_handle
Example #9
def gdp_sink():
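    # Subscribe to the requested ROS topics and forward every received message to
    # the GDP log through callback() (not shown here), serializing access to the
    # log handle with a shared lock.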

    rospy.init_node("gdp_sink")
    gdp.gdp_init()
    lock = Lock()
    args = _parse_args()

    gcl_name = gdp.GDP_NAME(args.logname)
    lh = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA)

    for t in args.topic:
        rospy.Subscriber(t, AnyMsg, callback, callback_args=(t, lh, lock))

    rospy.spin()
Example #10
def main(logname):
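    # Record 1 of the log holds the audio parameters as JSON (the same record the
    # recorder in Example #1 writes); the log handle is then passed to playAudio()
    # for playback.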

    gdp.gdp_init()
    lh = gdp.GDP_GCL(gdp.GDP_NAME(logname), gdp.GDP_MODE_RO)

    firstRecord = lh.read(1)
    audioParams = json.loads(firstRecord['data'])
    global sampleWidth
    global samplingRate
    global channels
    sampleWidth = audioParams['sampleWidth']
    samplingRate = audioParams['samplingRate']
    channels = audioParams['channels']

    playAudio(lh)
Example #11
  def run(self):
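    # Benchmark driver: start a thread that stores the reported locations to a
    # file, then issue two periodic location requests via request_location() and
    # keep the process alive.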

    gdp.gdp_init('localhost')
    t1 = Thread(target=store_reported_location,
                args=('/home/tkn/Desktop/standardized_location_service/performance_benchmark.txt',
                      ['lemic.localization.sli.report_location_1',
                       'lemic.localization.sli.report_location_2'],))

    t1.start()


    time.sleep(0.1)
    logName = 'lemic.localization.sli.request_location_1'
    parameters = {}
    parameters['location_type'] = 'local'
    parameters['dimensionality'] = '2D'
    parameters['accuracy'] = 1.0
    parameters['period'] = 2.0
    parameters['provisioning_type'] = 'periodic'
    parameters['step'] = 0.0
    parameters['duration'] = 150.0
    parameters['movement'] = 'no'
    # Evaluation purpose only, can be removed later
    parameters['timestamp_start'] = time.time()
    print logName + ': ' + request_location(parameters, logName)
    time.sleep(0.1)


    logName = 'lemic.localization.sli.request_location_2'
    parameters = {}
    parameters['location_type'] = 'semantic'
    parameters['dimensionality'] = '2D'
    parameters['accuracy'] = 0.8
    parameters['period'] = 1.5
    parameters['provisioning_type'] = 'periodic'
    parameters['step'] = 0.0
    parameters['duration'] = 150.0
    parameters['movement'] = 'no'
    # Evaluation purpose only, can be removed later
    parameters['timestamp_start'] = time.time()
    print logName + ': ' + request_location(parameters, logName)
    time.sleep(0.01)

    while True:
      pass
Example #12
    def __init__(self,
                 list_addr_dict,
                 addr_gdprouter='gdp-01.eecs.berkeley.edu:8007',
                 b_print=False):
        '''Manages subscription to select Signpost logs within the GDP.
        
           list_addr_dict: list of dictionaries of the following form
                   {'id_signpost':, 'id_sensor':, 'addr':}'''

        print('GDPDataProcessor: Initializing GDP Data Processor')
        super(GDPDataProcessor, self).__init__()
        gdp.gdp_init(addr_gdprouter)
        self.list_addr_dict = list_addr_dict
        self.initialize_gcl_handles()
        self.initialize_signposts()
        self._stop_event = threading.Event()
        self._stop_event.clear()  # Unnecessary
        self.b_print = b_print
        self.start()
Example #13
    def __init__(self, logname, limit=10000):
        """
        Initialize with just the log name and, optionally, a cache size.

        limit is the number of records to keep in the cache. This is a soft
        limit, which means that we will go over the limit on various
        occasions, but we will try to be within a certain factor of the
        specified limit (by default, 2). This enables us to minimize the
        cleanup overhead.
        """

        gdp.gdp_init()  # No side-effects of calling this multiple times
        # gdp.dbg_set("*=20")
        self.logname = logname
        self.lh = gdp.GDP_GIN(gdp.GDP_NAME(logname), gdp.GDP_MODE_RO)
        self.limit = limit
        self.cache = {}  # recno => record cache   (limited size)
        self.atime = {}  # recno => time of access (same size as cache)

        ## populate the limits
        self.leastRecent()
        self.mostRecent()
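
    # A sketch (not part of the original class; the helper name is hypothetical)
    # of the soft-limit cleanup the docstring describes: once the cache grows past
    # limit * factor, evict the least recently accessed records (per self.atime)
    # until only `limit` remain.
    def _cleanup_sketch(self, factor=2):
        if len(self.cache) <= self.limit * factor:
            return
        excess = len(self.cache) - self.limit
        for recno in sorted(self.atime, key=self.atime.get)[:excess]:
            del self.cache[recno]
            del self.atime[recno]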
Example #14
def main(provisioning_service_id):

    print 'Creating the Register service log...'
    string = 'lemic.localization.esi.register_service'
    print subprocess.call([
        './../gdp/apps/gcl-create', '-k', 'none', '-G', 'localhost',
        'test.localization', string
    ])
    print 'The Register service log most probably already exists, but no worries, ignore the previous error!'

    # Check if a service with that ID already exists!
    gdp.gdp_init('localhost')
    gcl_name = gdp.GDP_NAME(string)
    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)

    # Read the whole register service log
    recno = 1
    services = []
    while True:
        try:
            datum = gcl_handle.read(recno)
            services.append(json.loads(datum['data'])['service_id'])
            recno += 1
        except:
            break

    if provisioning_service_id not in services:
        # Write an entry in the log
        logName = 'lemic.localization.esi.register_service'
        gcl_name = gdp.GDP_NAME(logName)
        gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA)

        data = json.dumps({'service_id': provisioning_service_id})
        gcl_handle.append({'data': data})
        print 'Provisioning service ' + str(
            provisioning_service_id
        ) + ' successfully registered for provisioning.'
    else:
        print 'Log ID has to be unique! Be creative!'
Example #15
    def run(self):
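        # Integrated location service: discover the registered provisioning
        # services, subscribe to their offering/report logs and to the incoming
        # request logs, then repeatedly dispatch worker threads that match
        # requests to services and merge the reported locations.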

        request_q = Queue()
        memorized_requests_q = Queue()
        offering_q = Queue()
        mapper_q = Queue()
        location_q = Queue()
        cashed_locations_q = Queue()
        requests = {}
        cashed_locations = []
        provisioning_features = {}
        memory = []
        location = []
        ils_id = '1'
        gdp.gdp_init('localhost')

        # Get a list of registered provisioning services
        logName = 'lemic.localization.esi.register_service'
        gcl_name = gdp.GDP_NAME(logName)
        gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)

        # Read the whole register service log
        recno = 1
        services = []
        while True:
            try:
                datum = gcl_handle.read(recno)
                services.append(json.loads(datum['data'])['service_id'])
                recno += 1
            except:
                break

        # Subscribe for capturing requests
        logs = [
            'lemic.localization.sli.request_location_1',
            'lemic.localization.sli.request_location_2'
        ]
        obj_name_mapping = {}

        for name_str in logs:

            gcl_name = gdp.GDP_NAME(name_str)
            gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)
            obj_name_mapping[gcl_handle] = name_str
            gcl_handle.subscribe(0, 0, None)

        # Discover provisioning features
        # Subscribe to all service offering logs.
        for service_id in services:  # [services]
            logName = 'lemic.localization.esi.service_offering_' + str(
                service_id)
            gcl_name = gdp.GDP_NAME(logName)
            gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)
            obj_name_mapping[gcl_handle] = logName
            gcl_handle.subscribe(0, 0, None)

        # Subscribe to all service offering logs.
        for service_id in services:  # [services]
            logName = 'lemic.localization.esi.report_location_' + str(
                service_id)
            gcl_name = gdp.GDP_NAME(logName)
            gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)
            obj_name_mapping[gcl_handle] = logName
            gcl_handle.subscribe(0, 0, None)

        while True:
            """
      Implementation of the integrated location service.
      """

            t1 = Thread(target=read_logs,
                        args=(obj_name_mapping, ils_id, cashed_locations,
                              request_q, offering_q, location_q,
                              memorized_requests_q))
            t2 = Thread(target=request_discovery, args=(
                ils_id,
                services,
            ))
            t1.start()
            t2.start()

            if len(provisioning_features) > 0 and len(requests) > 0:
                # Make a selection decision
                t3 = Thread(target=query_for_location,
                            args=(
                                requests,
                                provisioning_features,
                                ils_id,
                                mapper_q,
                            ))
                t3.start()

            if len(location) > 0:
                t4 = Thread(target=merge_and_report_locations,
                            args=(
                                location,
                                memory,
                                cashed_locations,
                                cashed_locations_q,
                            ))
                t4.start()

            newtime = time.time()
            requests = request_q.get()
            # for i in requests:
            #  requests[i]['request_time'] = newtime - requests[i]['request_time']
            provisioning_features = offering_q.get()

            try:
                tmp_mem = mapper_q.get_nowait()
                if tmp_mem:
                    memory.append(tmp_mem)
            except:
                pass

            try:
                location = location_q.get_nowait()
            except:
                pass

            try:
                cashed_locations = cashed_locations_q.get_nowait()
            except:
                pass
Example #16
 def __init__(self, router_host, router_port):
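     # Connect to the given GDP router when a host is provided; otherwise fall
     # back to the library's default router configuration.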
     if router_host:
         gdp.gdp_init(router_host, router_port)
     else:
         gdp.gdp_init()
Example #17
def setup_client(info_log,
                 signing_key_file,
                 capabilities,
                 permissions,
                 insecure=False,
                 router=None,
                 num_clients=1,
                 ca_key=None,
                 ca_crt=None):
    """
    Performs the following:
    - Creates num_clients number of private keys for clients and writes them
      to the current directory
    - Writes capabilities and permissions to the info_log
    - Writes a public key for each client to the info_log
    - Writes a certificate signed by a CA to the info_log if a CA key and CA
      certificate is provided

    Parameters:
    info_log - 43 byte name of the empty log to which the client(s) information
               will be written
    signing_key_file - signing key file in pem format for the info_log
    capabilities - list of capabilities of the client(s)
    permissions - list of the permissions of the client(s)
    insecure - if False, key pairs for each client will be produced and gdp discovery
               services will perform authentication with the client(s). Otherwise
               no key pairs are produced and the client(s) are not authenticated.
    router - address and port of gdp router in "<address>:<port>" format
    num_clients - number of key pairs to be produced
    ca_key and ca_crt - used to sign the info_log's certificate. If these are not
                        provided, the info_log will not be certified.
    """
    # Initialize GDP
    if router:
        router = router.split(":")
        host = router[0]
        port = int(router[1])
        gdp.gdp_init(host, port)
    else:
        gdp.gdp_init()

    # Open the info_log
    info_log_name = info_log
    skey = gdp.EP_CRYPTO_KEY(filename=signing_key_file,
                             keyform=gdp.EP_CRYPTO_KEYFORM_PEM,
                             flags=gdp.EP_CRYPTO_F_SECRET)
    gcl_name = gdp.GDP_NAME(info_log_name)
    gcl_handle = gdp.GDP_GCL(gcl_name,
                             gdp.GDP_MODE_AO,
                             open_info={'skey': skey})

    # Write capabilities and permissions to info_log

    datum = {"data": str(len(capabilities))}
    gcl_handle.append(datum)  # Write number of capabilities
    datum = {"data": str(len(permissions))}
    gcl_handle.append(datum)  # Write number of permissions
    datum = {"data": str(num_clients)}
    gcl_handle.append(datum)  # Write number of public keys
    for capability in capabilities:  # Write capabilities
        datum = {"data": capability}
        gcl_handle.append(datum)
    for permission in permissions:  # Write permissions
        datum = {"data": permission}
        gcl_handle.append(datum)

    # Create key(s)
    if not insecure:
        random_generator = Random.new()
        for i in range(num_clients):
            key = RSA.generate(1024, random_generator.read)
            # Write key to file
            with open("key" + str(i) + ".pem", 'w') as keyfile:
                keyfile.write(key.exportKey(format='PEM'))
            datum = {"data": key.publickey().exportKey(format='PEM')}
            gcl_handle.append(datum)

    # Create a certificate and write it to the info log
    if ca_crt and ca_key:
        # load CA cert and key
        ca_cert = crypto.load_certificate(crypto.FILETYPE_PEM,
                                          open(ca_crt).read())
        ca_key = crypto.load_privatekey(crypto.FILETYPE_PEM,
                                        open(ca_key).read())

        # create info_log key
        info_log_key = crypto.PKey()
        info_log_key.generate_key(crypto.TYPE_RSA, 2048)

        # create CSR
        req = crypto.X509Req()
        req.get_subject().CN = info_log_name
        req.set_pubkey(info_log_key)
        req.sign(info_log_key, 'sha256')

        # create unique serial number using info_log name
        md5_hash = hashlib.md5()
        md5_hash.update(info_log_name)
        serial = int(md5_hash.hexdigest(), 36)

        # create certificate
        cert = crypto.X509()
        cert.set_serial_number(serial)
        cert.gmtime_adj_notBefore(0)
        cert.gmtime_adj_notAfter(365 * 24 * 60 * 60 *
                                 1000)  # make the certificate last 1000 yrs
        cert.set_issuer(ca_cert.get_subject())
        cert.set_subject(req.get_subject())
        cert.set_pubkey(req.get_pubkey())
        cert.sign(ca_key, 'sha256')

        # write certificate to info_log
        datum = {"data": crypto.dump_certificate(crypto.FILETYPE_PEM, cert)}
        gcl_handle.append(datum)
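# A companion sketch (not part of the original example; the function name is
# hypothetical): read back the info_log layout written above. Record 1 holds the
# number of capabilities, record 2 the number of permissions, record 3 the number
# of public keys, followed by the entries themselves in that order.
def read_client_info(info_log, router=None):
    if router:
        host, port = router.split(":")
        gdp.gdp_init(host, int(port))
    else:
        gdp.gdp_init()
    gcl_handle = gdp.GDP_GCL(gdp.GDP_NAME(info_log), gdp.GDP_MODE_RO)
    num_caps = int(gcl_handle.read(1)["data"])
    num_perms = int(gcl_handle.read(2)["data"])
    num_keys = int(gcl_handle.read(3)["data"])
    recno = 4
    capabilities = [gcl_handle.read(recno + i)["data"] for i in range(num_caps)]
    recno += num_caps
    permissions = [gcl_handle.read(recno + i)["data"] for i in range(num_perms)]
    return capabilities, permissions, num_keys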
Example #18
def main(name_str, start, stop):

    # create a python object
    _name = gdp.GDP_NAME(name_str)
    print _name.printable_name()

    # Assume that the GCL already exists
    gin_handle = gdp.GDP_GIN(_name, gdp.GDP_MODE_RO)

    # initialize this to the first record number
    recno = start
    while recno <= stop:
        try:
            datum = gin_handle.read_by_recno(recno)
            print datum["buf"].peek()
            recno += 1
        except Exception as e:
            # Typically end of log; re-raise so the caller sees the error.
            raise e


if __name__ == "__main__":

    if len(sys.argv) < 4:
        print "Usage: %s <gcl-name> <start-rec> <stop-rec>" % sys.argv[0]
        sys.exit(1)

    # Change this to point to a gdp_router
    gdp.gdp_init()
    main(sys.argv[1], int(sys.argv[2]), int(sys.argv[3]))
Example #19
def main(name_str, keyfile):

    skey = gdp.EP_CRYPTO_KEY(filename=keyfile,
                                keyform=gdp.EP_CRYPTO_KEYFORM_PEM,
                                flags=gdp.EP_CRYPTO_F_SECRET)

    # Create a GDP_NAME object from a python string provided as argument
    gcl_name = gdp.GDP_NAME(name_str)

    # There's a GCL with the given name, so let's open it
    gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_AO,
                                open_info={'skey':skey})

    while True:

        line = sys.stdin.readline().strip()  # read from stdin
        # Create a minimalist datum dictionary
        datum = {"data": line}
        gcl_handle.append(datum)           # Write this datum to the GCL


if __name__ == "__main__":

    if len(sys.argv) < 3:
        print "Usage: %s <gcl_name> <signing-key-file>" % sys.argv[0]
        sys.exit(1)

    # Change this to point to a gdp_router
    gdp.gdp_init()
    main(sys.argv[1], sys.argv[2])
Example #20
    def run(self):
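        # Provisioning-service event loop: subscribe to this service's discovery,
        # service-request and location-request logs, and answer each incoming
        # event by appending an offering, a granted duration, or a location
        # estimate to the corresponding report log.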

        gdp.gdp_init('localhost')
        logs = [
            'lemic.localization.esi.service_discovery_' +
            str(self.provisioning_service_id),
            'lemic.localization.esi.request_service_' +
            str(self.provisioning_service_id),
            'lemic.localization.esi.request_location_' +
            str(self.provisioning_service_id)
        ]

        obj_name_mapping = {}
        for name_str in logs:

            gcl_name = gdp.GDP_NAME(name_str)
            gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RO)
            obj_name_mapping[gcl_handle] = name_str
            gcl_handle.subscribe(0, 0, None)

        while True:

            event = gdp.GDP_GCL.get_next_event(None)
            datum = event["datum"]
            gcl_name = obj_name_mapping[event["gcl_handle"]]

            # React on discover service -> offer service
            if gcl_name == logs[0]:

                ils_id = json.loads(datum['data'])['ils_id']
                print 'Got request'
                logName = 'lemic.localization.esi.service_offering_' + str(
                    self.provisioning_service_id)
                gcl_name = gdp.GDP_NAME(logName)
                gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA)

                accuracy, latency, power_consumption, provisioning = generate_offering_static(
                )
                data = json.dumps({
                    'ils_id': ils_id,
                    'accuracy': accuracy,
                    'latency': latency,
                    'power_consumption': power_consumption,
                    'elements': self.provisioning_service_id
                })
                gcl_handle.append({'data': data})

            # React on request service -> report granted duration
            if gcl_name == logs[1]:

                logName = 'lemic.localization.esi.report_service_' + str(
                    self.provisioning_service_id)
                gcl_name = gdp.GDP_NAME(logName)
                gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA)

                duration = provisioning_duration_policy()

                data = json.dumps({'duration': duration})
                gcl_handle.append({'data': data})

            # React on request location -> read last resource -> generate location -> report location
            if gcl_name == logs[2]:

                ils_id = json.loads(datum['data'])['ils_id']
                timestamp = json.loads(datum['data'])['timestamp']
                print 'Got location request..'
                # Read the last WiFi scan
                wifi_scan = get_resource()

                try:
                    location = fa.getPositionEstimateEuclidean(
                        wifi_scan[0], self.training_path)
                except:
                    location = None

                logName = 'lemic.localization.esi.report_location_' + str(
                    self.provisioning_service_id)
                gcl_name = gdp.GDP_NAME(logName)
                gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA)

                data = json.dumps({
                    'location': location,
                    'ils_id': ils_id,
                    'timestamp': timestamp
                })
                gcl_handle.append({'data': data})
Example #21
    def __init__(self,
                 root,
                 mode=MODE_RO,
                 keyfile=None,
                 freq=100,
                 cache_size=1000,
                 size_factor=2,
                 X_factor=0.8):
        """
        Initialize the instance with the root log. By default, we open 
             log in read only mode.
          Parameters name: description
        - keyfile        : A private signing key for the log (PEM format)
        - mode           : Read-only or Read-Write mode
        - freq           : checkpoint frequency
        - cache_size     : max records to hold in in-memory cache
        - size_factor    : Change the checkpoint level if the size
                             of the new checkpoint differs by this factor
        - X_factor       : Change the checkpoint level if there is a 
                             certain amount of overlap in keys of old 
                             checkpoint and new checkpoint
        """

        self.__iomode = mode
        self.__freq = freq
        self.__cp_size_factor = size_factor
        self.__cp_X_factor = X_factor
        self.__cache_size = cache_size
        assert self.__cache_size > 0

        gdp_iomode = gdp.GDP_MODE_RO if mode == self.MODE_RO else gdp.GDP_MODE_RA

        gdp.gdp_init()  ## XXX: Not sure if this is the best idea
        # Setup the key

        open_info = {}
        if keyfile is not None:
            skey = gdp.EP_CRYPTO_KEY(filename=keyfile,
                                     keyform=gdp.EP_CRYPTO_KEYFORM_PEM,
                                     flags=gdp.EP_CRYPTO_F_SECRET)
            open_info['skey'] = skey

        self.__root = gdp.GDP_NAME(root)
        self.__root_handle = gdp.GDP_GCL(self.__root, gdp_iomode, open_info)

        # a cache for records. recno => datum
        # datum may or may not contain a timestamp and recno
        self.__cache = OrderedDict()

        # find the number of records by querying the most recent record
        try:
            datum = self.__root_handle.read(-1)
            self.__num_records = datum["recno"]
            self.__cache[self.__num_records] = datum
        except (gdp.MISC.EP_STAT_SEV_ERROR, gdp.MISC.EP_STAT_SEV_WARN) as e:
            if "Berkeley:Swarm-GDP:404" in e.msg:
                self.__num_records = 0
            else:
                raise e

        # set up lock for adding new data to the log
        # >> we want the __setitems__ to be atomic, because that also
        #    includes the checkpointing logic
        self.log_lock = threading.Lock()  # unused in MODE_RO

        # for a read-only KVstore, make sure we have a subscription
        #   in a separate thread to keep things most up to date
        if self.__iomode == self.MODE_RO:
            t = threading.Thread(target=self.__subscription_thread)
            t.daemon = True
            t.start()
Example #22
  def run(self):

    gdp.gdp_init('localhost')
    while True:
      print push_rss_to_log()
      time.sleep(0.01)
Example #23
    def __init__(self, d): 
                
        #if nameStr == "":
        #    raise ValueError ("Name must be provided.")
        #if paramName == "":
        #    raise ValueError ("JSON parameter name must be provided.") 
        #if srcSink == "":
        #    raise ValueError ("Source/Sink must be provided.")
        
        self.IO = d['dir']
        if d['dir'] == 'in':
            if d['source']  == "GDP_I":
                self.IOtype = 'GDP'
                # Assume that GCL already exists and create the GCL handle
                # Log name in GDP
                gdp.gdp_init()
                #gdp.dbg_set("*=50")
                self.gclName = gdp.GDP_NAME(d['name'])
                self.gclHandle = gdp.GDP_GCL(self.gclName, gdp.GDP_MODE_RO)

                # JSON parameter name to be used in each log record
                self.param1 = d['param_lev1']
                self.param2 = d['param_lev2']
            elif d['source']  == "Database":
                self.IOtype = 'Database'
                # Assume that GCL already exists and create the GCL handle
                # Log name in GDP
                self.gclName = d['name']
                self.gclHandle = "dont care" #gdp.GDP_GCL(self.gclName, gdp.GDP_MODE_RO)

                # JSON parameter name to be used in each log record
                self.param1 = d['param_lev1']
                self.param2 = d['param_lev2']
            elif d['source'] == "BLE_I":
                print "BLE input init."
                self.IOtype = 'BLE'
                self.UART_service_UUID = d['service_uuid']
                self.tx_char_UUID = d['tx_char_uuid']
                self.rx_char_UUID = d['rx_char_uuid']
                self.ble = Adafruit_BluefruitLE.get_provider()
                self.ble.initialize()
                self.buff = ''
            elif d['source'] == "SER_I":
                print "Serial input init"
                self.IOtype = 'SER'
                self.port = d['name']
                self.serialHandle = serial.Serial(self.port, timeout = 2)
                
                # JSON parameter name to be used in each log record
                self.param1 = d['param_lev1']
                self.param2 = d['param_lev2']
            else:
                raise ValueError("Undefined source for input: "+d['source'])
        if d['dir'] == 'out':
            if d['sink']  == "GDP_O":
                print d['name']
                self.IOtype = 'GDP'  
                # Log name in GDP
                self.gclName = gdp.GDP_NAME(d['name'])
                #self.gclHandle = gdp.GDP_GCL(self.gclName, gdp.GDP_MODE_RO)
                key = d['key']
                password = d['password']
                if key == "" or password == "":
                    raise ValueError ("Key path and password must be provided.")
                else:
                    skey = gdp.EP_CRYPTO_KEY(filename=key,
                                keyform=gdp.EP_CRYPTO_KEYFORM_PEM,
                                flags=gdp.EP_CRYPTO_F_SECRET)
                    open_info = {'skey': skey}
                    
                    # TODO Bypass password prompt
                    # Assume that GCL already exists and create the GCL handle
                    self.gclHandle = gdp.GDP_GCL(self.gclName, gdp.GDP_MODE_RA, open_info)
            elif d['sink'] == 'CONSOLE':
                self.IOtype = 'CONSOLE'
            elif d['sink'] == 'DRONE':
                self.IOtype = 'DRONE'
                self.timeout = d['timeout']
                self.url = d['name']
                print 'Drone launch trigger defined: ', self.url
                
        # Lag from the current record. Can be used to implement time series functions.
        self.lag = d['lag']

        # Normalization method for data:
        # 'none': no normalization
        # 'lin': linear normalization: mean-zeroed and divided by std
        self.norm = d['norm']
        # Normalization parameters (i.e., avg, std etc.)
        self.normParam = {}