def test_cancel_replay(self):
    # mocks
    self.mock_rr_find_assoc.return_value = [1, 2, 3]

    replay = Replay()
    replay.process_id = '1'
    self.mock_rr_read.return_value = replay

    # execution
    self.data_retriever_service.cancel_replay('replay_id')

    # assertions
    self.assertEquals(self.mock_rr_delete_assoc.call_count, 3)
    self.mock_rr_delete.assert_called_with('replay_id')
    self.mock_pd_cancel.assert_called_with('1')
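The mocks referenced above imply a fixture along these lines. This is a minimal sketch, assuming the service exposes its dependencies through a clients attribute; the import path, fixture class, and method names on the mocked clients are illustrative and not confirmed by the original suite.

# Sketch of the setUp wiring assumed by test_cancel_replay; names are illustrative.
from unittest import TestCase
from mock import Mock
# Import path is an assumption about where the service lives.
from ion.services.dm.inventory.data_retriever_service import DataRetrieverService

class DataRetrieverServiceTestFixture(TestCase):
    def setUp(self):
        self.data_retriever_service = DataRetrieverService()
        self.data_retriever_service.clients = Mock()

        # resource registry mocks referenced by test_cancel_replay
        self.mock_rr_find_assoc = self.data_retriever_service.clients.resource_registry.find_associations
        self.mock_rr_read = self.data_retriever_service.clients.resource_registry.read
        self.mock_rr_delete_assoc = self.data_retriever_service.clients.resource_registry.delete_association
        self.mock_rr_delete = self.data_retriever_service.clients.resource_registry.delete

        # process dispatcher mock referenced by test_cancel_replay
        self.mock_pd_cancel = self.data_retriever_service.clients.process_dispatcher.cancel_process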
def replay_data_process(self, dataset_id, query, delivery_format, replay_stream_id):
    dataset = self.clients.dataset_management.read_dataset(dataset_id=dataset_id)
    datastore_name = dataset.datastore_name
    delivery_format = delivery_format or {}
    view_name = dataset.view_name
    key_id = dataset.primary_view_key

    # Make a new definition container
    replay = Replay()
    replay.delivery_format = delivery_format
    replay.process_id = 'null'

    replay_id, rev = self.clients.resource_registry.create(replay)
    replay._id = replay_id
    replay._rev = rev

    config = {
        'process': {
            'query': query,
            'datastore_name': datastore_name,
            'dataset_id': dataset_id,
            'view_name': view_name,
            'key_id': key_id,
            'delivery_format': delivery_format,
            'publish_streams': {'output': replay_stream_id}
        }
    }
    replay.config = config
    return replay
def replay_data_process(self, dataset_id, query, delivery_format, replay_stream_id):
    dataset = self.clients.dataset_management.read_dataset(dataset_id=dataset_id)
    delivery_format = delivery_format or {}

    replay = Replay()
    replay.delivery_format = delivery_format
    replay.process_id = 'null'

    replay_id, rev = self.clients.resource_registry.create(replay)
    replay._id = replay_id
    replay._rev = rev

    config = {
        'process': {
            'query': query,
            'dataset_id': dataset_id,
            'delivery_format': delivery_format,
            'publish_streams': {'output': replay_stream_id}
        }
    }
    replay.config = config
    return replay
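A usage sketch for the replay_data_process helpers above. The returned Replay carries the spawn configuration in its config attribute, which a caller can hand to the process dispatcher the same way define_replay does below. The method name, example query bounds, and the stream argument are illustrative and not part of the original module.

# Illustrative wiring only: mirrors the schedule_process call in define_replay.
def start_replay_sketch(self, dataset_id, stream_id):
    query = {'start_time': 0, 'end_time': 1000, 'parameters': []}  # example bounds
    replay = self.replay_data_process(dataset_id, query, delivery_format={}, replay_stream_id=stream_id)
    pid = self.clients.process_dispatcher.schedule_process(
        process_definition_id=self.process_definition_id,
        configuration=replay.config)
    replay.process_id = pid
    self.clients.resource_registry.update(replay)
    return replay._id, stream_id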
def define_replay(self, dataset_id='', query=None, delivery_format=None):
    '''
    Define the stream that will contain the data from the datastore by streaming to an exchange name.
    query:
        start_time: 0   The beginning timestamp
        end_time:   N   The ending timestamp
        parameters: []  The list of parameters which match the coverage's parameters
    '''
    if not dataset_id:
        raise BadRequest('(Data Retriever Service %s): No dataset provided.' % self.name)

    if self.process_definition_id is None:
        res, _ = self.clients.resource_registry.find_resources(restype=RT.ProcessDefinition, name='data_replay_process', id_only=True)
        if not len(res):
            raise BadRequest('No replay process defined.')
        self.process_definition_id = res[0]

    dataset = self.clients.dataset_management.read_dataset(dataset_id=dataset_id)
    datastore_name = dataset.datastore_name
    delivery_format = delivery_format or {}
    view_name = dataset.view_name
    key_id = dataset.primary_view_key

    # Make a new definition container
    replay_stream_id = self.clients.pubsub_management.create_stream()

    replay = Replay()
    replay.delivery_format = delivery_format
    replay.process_id = 'null'

    replay_id, rev = self.clients.resource_registry.create(replay)
    replay._id = replay_id
    replay._rev = rev

    config = {
        'process': {
            'query': query,
            'datastore_name': datastore_name,
            'dataset_id': dataset_id,
            'view_name': view_name,
            'key_id': key_id,
            'delivery_format': delivery_format,
            'publish_streams': {'output': replay_stream_id}
        }
    }

    pid = self.clients.process_dispatcher.schedule_process(
        process_definition_id=self.process_definition_id,
        configuration=config
    )
    replay.process_id = pid
    self.clients.resource_registry.update(replay)

    self.clients.resource_registry.create_association(replay_id, PRED.hasStream, replay_stream_id)
    return replay_id, replay_stream_id
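From the client side, define_replay and the cancel_replay call exercised in the test above pair as follows. This is a hedged usage sketch; the client handle name and the example query values are assumptions.

# Illustrative client calls; data_retriever stands in for a data retriever service client.
replay_id, replay_stream_id = data_retriever.define_replay(
    dataset_id=dataset_id,
    query={'start_time': 0, 'end_time': 1000, 'parameters': []},
    delivery_format={})
# ... subscribe to replay_stream_id and consume the replayed data ...
data_retriever.cancel_replay(replay_id)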
def define_replay(self, dataset_id='', query=None, delivery_format=None):
    '''
    Define the stream that will contain the data from the datastore by streaming to an exchange name.
    '''
    # Get the datastore name from the dataset object, use dm_datastore by default.
    if not dataset_id:
        raise BadRequest('(Data Retriever Service %s): No dataset provided.' % self.name)

    dataset = self.clients.dataset_management.read_dataset(dataset_id=dataset_id)
    datastore_name = dataset.datastore_name
    delivery_format = delivery_format or {}
    view_name = dataset.view_name
    key_id = dataset.primary_view_key

    # Make a new definition container
    definition_container = ctd_stream_definition()

    # Tell pubsub about the definition we want to use and set up the association
    # so clients can figure out what belongs on the stream
    definition_id = self.clients.pubsub_management.create_stream_definition(container=definition_container)

    # Make a stream
    replay_stream_id = self.clients.pubsub_management.create_stream(stream_definition_id=definition_id)

    replay = Replay()
    replay.delivery_format = delivery_format

    #-----------------------------
    # @todo: Add in CEI integration
    #-----------------------------
    replay.process_id = 0

    replay_id, rev = self.clients.resource_registry.create(replay)
    replay._id = replay_id
    replay._rev = rev

    config = {
        'process': {
            'query': query,
            'datastore_name': datastore_name,
            'view_name': view_name,
            'key_id': key_id,
            'delivery_format': dict({'container': definition_container}, **delivery_format),
            'publish_streams': {'output': replay_stream_id}
        }
    }

    pid = self.clients.process_dispatcher.schedule_process(
        process_definition_id=self.process_definition_id,
        configuration=config
    )
    replay.process_id = pid
    self.clients.resource_registry.update(replay)

    self.clients.resource_registry.create_association(replay_id, PRED.hasStream, replay_stream_id)
    return (replay_id, replay_stream_id)
def define_replay(self, dataset_id='', query=None, delivery_format=None):
    '''
    Define the stream that will contain the data from the datastore by streaming to an exchange name.

    delivery_format - fields
    '''
    # Get the datastore name from the dataset object, use dm_datastore by default.
    if not dataset_id:
        raise BadRequest('(Data Retriever Service %s): No dataset provided.' % self.name)

    if self.process_definition_id is None:
        self.process_definition = ProcessDefinition(name='data_replay_process', description='Process for the replay of datasets')
        self.process_definition.executable['module'] = 'ion.processes.data.replay_process'
        self.process_definition.executable['class'] = 'ReplayProcess'
        self.process_definition_id = self.clients.process_dispatcher.create_process_definition(process_definition=self.process_definition)

    dataset = self.clients.dataset_management.read_dataset(dataset_id=dataset_id)
    datastore_name = dataset.datastore_name
    datastore = self.container.datastore_manager.get_datastore(datastore_name)
    delivery_format = delivery_format or {}
    view_name = dataset.view_name
    key_id = dataset.primary_view_key

    # Make a new definition container
    # Make a definition
    try:
        definition = datastore.query_view('datasets/dataset_by_id', opts={'key': [dataset.primary_view_key, 0], 'include_docs': True})[0]['doc']
    except IndexError:
        raise NotFound('The requested document was not located.')
    definition_container = definition

    # Tell pubsub about the definition we want to use and set up the association
    # so clients can figure out what belongs on the stream
    definition_id = self.clients.pubsub_management.create_stream_definition(container=definition_container)

    # Make a stream
    replay_stream_id = self.clients.pubsub_management.create_stream(stream_definition_id=definition_id)

    replay = Replay()
    replay.delivery_format = delivery_format
    definition_container.stream_resource_id = replay_stream_id
    replay.process_id = 0

    replay_id, rev = self.clients.resource_registry.create(replay)
    replay._id = replay_id
    replay._rev = rev

    config = {
        'process': {
            'query': query,
            'datastore_name': datastore_name,
            'view_name': view_name,
            'key_id': key_id,
            'delivery_format': dict({'definition_id': definition_id}, **delivery_format),
            'publish_streams': {'output': replay_stream_id}
        }
    }

    pid = self.clients.process_dispatcher.schedule_process(
        process_definition_id=self.process_definition_id,
        configuration=config
    )
    replay.process_id = pid
    self.clients.resource_registry.update(replay)

    self.clients.resource_registry.create_association(replay_id, PRED.hasStream, replay_stream_id)
    return (replay_id, replay_stream_id)
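The config dict assembled in each define_replay variant is delivered to the scheduled process as its spawn configuration. The sketch below shows how that configuration could be unpacked on the receiving side; it is not the original ReplayProcess, and the class name and constructor-based delivery (rather than the process container's CFG mechanism) are assumptions made to keep the example self-contained.

# Sketch of unpacking the 'process' spawn configuration built above; illustrative only.
class ReplayConfigSketch(object):
    def __init__(self, spawn_config):
        process_cfg = spawn_config.get('process', {})
        self.query = process_cfg.get('query')
        self.datastore_name = process_cfg.get('datastore_name')
        self.view_name = process_cfg.get('view_name')
        self.key_id = process_cfg.get('key_id')
        self.delivery_format = process_cfg.get('delivery_format', {})
        # 'publish_streams' names the 'output' publisher that is bound to the
        # replay stream created in define_replay.
        self.output_stream = process_cfg.get('publish_streams', {}).get('output')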