def __init__(self, **kwargs):
    """Authenticate via Auth(**kwargs) and attach every GBDX service client
    and image class to this interface instance."""
    auth = Auth(**kwargs)
    self.gbdx_connection = auth.gbdx_connection
    self.root_url = auth.root_url
    self.logger = auth.logger

    # Service clients, instantiated once and reused for the session.
    self.s3 = S3()                 # GBDX S3 client
    self.ordering = Ordering()     # GBDX Ordering client
    self.catalog = Catalog()       # GBDX Catalog client
    self.workflow = Workflow()     # GBDX Workflow client
    self.idaho = Idaho()           # Idaho client
    self.vectors = Vectors()

    # Image classes are stored unconstructed so callers instantiate on demand.
    self.catalog_image = CatalogImage
    self.idaho_image = IdahoImage
    self.landsat_image = LandsatImage
    self.sentinel2 = Sentinel2
    self.tms_image = TmsImage
    self.dem_image = DemImage
    self.wv03_vnir = WV03_VNIR
    self.wv02 = WV02
    self.ge01 = GE01
    self.s3_image = S3Image

    self.task_registry = TaskRegistry()
def __init__(self, **kwargs):
    """Authenticate via Auth(**kwargs) and attach the core GBDX service
    clients plus the catalog/idaho image classes."""
    auth = Auth(**kwargs)
    self.gbdx_connection = auth.gbdx_connection
    self.root_url = auth.root_url
    self.logger = auth.logger

    # Service clients, instantiated once and reused for the session.
    self.s3 = S3()                 # GBDX S3 client
    self.ordering = Ordering()     # GBDX Ordering client
    self.catalog = Catalog()       # GBDX Catalog client
    self.workflow = Workflow()     # GBDX Workflow client
    self.idaho = Idaho()           # Idaho client
    self.vectors = Vectors()

    # Image classes are stored unconstructed so callers instantiate on demand.
    self.catalog_image = CatalogImage
    self.idaho_image = IdahoImage

    self.task_registry = TaskRegistry()
def test_batch_workflows(self):
    """
    tests all 3 endpoints for batch workflows, create, fetch, and cancel
    :return:
    """
    wf = Workflow()

    # Load the batch-workflow definition fixture from the test data dir.
    with open(os.path.join(self.data_path, "batch_workflow.json")) as json_file:
        self.batch_workflow_json = json.loads(json_file.read())

    # Create.
    batch_workflow_id = wf.launch_batch_workflow(self.batch_workflow_json)

    # Fetch status; the id we launched should round-trip.
    batch_workflow_status = wf.batch_workflow_status(batch_workflow_id)
    self.assertEqual(batch_workflow_id,
                     batch_workflow_status.get("batch_workflow_id"))

    # Cancel; every child workflow should be canceling or already canceled.
    batch_workflow_status = wf.batch_workflow_cancel(batch_workflow_id)
    for workflow in batch_workflow_status.get('workflows'):
        self.assertIn(workflow.get('state'), ["canceling", "canceled"])
def test_describe_tasks():
    """The first registered task can be described and carries a description."""
    wf = Workflow(gbdx)
    taskinfo = wf.list_tasks()
    assert len(taskinfo) > 0
    first_task = taskinfo['tasks'][0]
    desc = wf.describe_task(first_task)
    assert isinstance(desc, dict)
    assert len(desc['description']) > 0
def test_describe_tasks(self):
    """The first registered task can be described and carries a description."""
    wf = Workflow(self.gbdx)
    taskinfo = wf.list_tasks()
    self.assertGreater(len(taskinfo), 0)
    first_task = taskinfo['tasks'][0]
    desc = wf.describe_task(first_task)
    self.assertIsInstance(desc, dict)
    self.assertGreater(len(desc['description']), 0)
def test_workflow_search(self):
    """
    test gbdx.workflow.search(lookback_h=<hours>, state=<state>, owner=<owner>)
    """
    wf = Workflow()
    output = wf.search(lookback_h=12, state='all')
    # BUG FIX: the original `self.assertTrue(len(output), 0)` passed 0 as the
    # assertion *message*, so no comparison against 0 ever happened. The
    # intent (matching the sibling tests) is that the search is non-empty.
    self.assertTrue(len(output) > 0)
def test_task_get_stderr(self):
    """
    test gbdx.workflows.get_stderr(<workflow_id>,<task_id>)
    """
    wf = Workflow()
    stderr_output = wf.get_stderr('4488969848362445219', '4488969848354891944')
    # This known task produced no stderr, which the API reports as '<empty>'.
    self.assertEqual('<empty>', stderr_output)
def test_task_get_stdout(self):
    """
    test gbdx.workflows.get_stdout(<workflow_id>,<task_id>)
    """
    wf = Workflow()
    stdout_output = wf.get_stdout('4488969848362445219', '4488969848354891944')
    # This known task wrote something to stdout, so the body is non-empty.
    self.assertGreater(len(stdout_output), 0)
def test_workflow_get(self):
    """
    test gbdx.workflows.get(<workflow_id>)
    """
    wf = Workflow()
    output = wf.get('4488969848362445219')
    # A fetched workflow record exposes all of these top-level fields.
    for field in ('id', 'owner', 'submitted_time', 'state', 'callback', 'tasks'):
        self.assertIn(field, output.keys())
def test_workflow_events(self):
    """Events for a known workflow carry the expected keys, and their state
    and event fields are drawn from the documented value sets."""
    wf = Workflow(self.gbdx)
    events = wf.events('4347109104758907277')

    assert isinstance(events, list)
    assert len(events) > 0

    expected_keys = ('task', 'state', 'event', 'timestamp', 'when', 'note')
    valid_states = ['pending', 'running', 'complete']
    valid_events = ['submitted', 'scheduled', 'rescheduling', 'started',
                    'succeeded', 'failed', 'timedout']
    for event in events:
        for key in expected_keys:
            assert key in event.keys()
        assert event['state'] in valid_states
        assert event['event'] in valid_events
def __init__(self, **kwargs):
    """Establish a GBDX session (from explicit credentials, an injected
    connection, or the user's config file), set up a console logger, and
    attach the service clients."""
    # Fall back to the production host when none (or a falsy value) is given.
    host = kwargs.get('host') or 'geobigdata.io'
    self.root_url = 'https://%s' % host

    credential_keys = ('username', 'password', 'client_id', 'client_secret')
    if all(kwargs.get(key) for key in credential_keys):
        # Full credential set supplied directly.
        self.gbdx_connection = gbdx_auth.session_from_kwargs(**kwargs)
    elif kwargs.get('gbdx_connection'):
        # Pass in a custom gbdx connection object, for testing purposes
        self.gbdx_connection = kwargs.get('gbdx_connection')
    else:
        # This will throw an exception if your .ini file is not set properly
        self.gbdx_connection = gbdx_auth.get_session(kwargs.get('config_file'))

    # create a logger
    # for now, just log to the console. We'll replace all the 'print' statements
    # with at least logger.info or logger.debug statements
    # later, we can log to a service, file, or some other aggregator
    self.logger = logging.getLogger('gbdxtools')
    self.logger.setLevel(logging.ERROR)
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.ERROR)
    console_handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    self.logger.addHandler(console_handler)
    self.logger.info('Logger initialized')

    # Service clients, each sharing this interface's authenticated session.
    self.s3 = S3(self)             # GBDX S3 client
    self.ordering = Ordering(self) # GBDX Ordering client
    self.catalog = Catalog(self)   # GBDX Catalog client
    self.workflow = Workflow(self) # GBDX Workflow client
    self.idaho = Idaho(self)       # Idaho client
    self.vectors = Vectors(self)
    self.task_registry = TaskRegistry(self)
def test_list_tasks():
    """The task registry listing is present and includes HelloGBDX."""
    wf = Workflow(gbdx)
    tasks = wf.list_tasks()
    assert tasks is not None
    assert 'HelloGBDX' in tasks['tasks']
def test_init():
    """A freshly built Workflow client has its s3 and connection attached."""
    wf = Workflow(gbdx)
    assert isinstance(wf, Workflow)
    assert wf.s3 is not None
    assert wf.gbdx_connection is not None
def test_list_tasks(self):
    """The task registry listing is present and includes HelloGBDX."""
    wf = Workflow(self.gbdx)
    tasks = wf.list_tasks()
    self.assertIsNotNone(tasks)
    self.assertIn('HelloGBDX', tasks['tasks'])
def test_init(self):
    """A freshly built Workflow client has its s3 and connection attached."""
    wf = Workflow(self.gbdx)
    self.assertIsInstance(wf, Workflow)
    self.assertIsNotNone(wf.s3)
    self.assertIsNotNone(wf.gbdx_connection)