def dump(self):
    """
    @brief Prototype dumping a taxonomy as yaml for instance for an instrument agent to store locally.
    """
    #@todo - need to serialize sets to yaml???
    serialized = ion_serializer.serialize(self._t)
    return yaml.dump(serialized)
def process_stream(self, packet, dset_config):
    """
    Accepts a stream. Also accepts instruction (a dset_config). According to the received
    dset_config it processes the stream such as store in hfd_storage, couch_storage.

    @param packet       The incoming data stream of type stream.
    @param dset_config  The dset_config telling this method what to do with the incoming
                        data stream.
    @retval dict of ingestion attributes, or None when no dataset config is supplied.
    """
    ingestion_attributes = {
        'variables': [],
        'number_of_records': -1,
        'updated_metadata': False,
        'updated_data': False,
    }

    # Without a dataset config there is nothing to do with this stream.
    if dset_config is None:
        log.info('No dataset config for this stream!')
        return

    # Get back to the serialized form - the process receives only the IonObject
    # after the interceptor stack has decoded it...
    simple_dict = ion_serializer.serialize(packet)  # packet is an ion_object
    byte_string = msgpack.packb(simple_dict, default=encode_ion)

    encoding_type = 'ion_msgpack'

    # Persisted sha1 is crafted from the byte string msgpack creates
    calculated_sha1 = hashlib.sha1(byte_string).hexdigest().upper()

    dataset_granule = {
        'stream_id': dset_config.stream_id,
        'dataset_id': dset_config.dataset_id,
        'persisted_sha1': calculated_sha1,
        'encoding_type': encoding_type,
        'ts_create': get_ion_ts(),
    }

    self.persist_immutable(dataset_granule)

    # Cache the raw msgpack bytes on the local filesystem, keyed by sha1.
    filename = FileSystem.get_hierarchical_url(FS.CACHE, calculated_sha1, ".%s" % encoding_type)

    # Fix: the redundant f.close() was removed — the `with` statement already
    # closes the file when the block exits.
    with open(filename, mode='wb') as f:
        f.write(byte_string)

    return ingestion_attributes