def __init__(self, **kwargs):
    """Configure console logging and establish an authenticated GBDX session.

    Keyword arguments recognized: ``host`` (overrides ``root_url``),
    ``username``/``password``/``client_id``/``client_secret`` (direct
    credential auth), ``gbdx_connection`` (pre-built session), and
    ``config_file`` (path to a .gbdx-config INI file).
    """
    self.logger = logging.getLogger('gbdxtools')
    self.logger.setLevel(logging.ERROR)
    self.console_handler = logging.StreamHandler()
    self.console_handler.setLevel(logging.ERROR)
    self.formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    self.console_handler.setFormatter(self.formatter)
    self.logger.addHandler(self.console_handler)
    # NOTE: the logger level is ERROR, so this info record is normally suppressed.
    self.logger.info('Logger initialized')
    if 'host' in kwargs:
        self.root_url = 'https://%s' % kwargs.get('host')
    try:
        if (kwargs.get('username') and kwargs.get('password') and
                kwargs.get('client_id') and kwargs.get('client_secret')):
            self.gbdx_connection = gbdx_auth.session_from_kwargs(**kwargs)
        elif kwargs.get('gbdx_connection'):
            self.gbdx_connection = kwargs.get('gbdx_connection')
        elif self.gbdx_connection is None:
            # assumes a class-level ``gbdx_connection`` attribute exists —
            # otherwise this raises AttributeError (caught below). TODO confirm.
            # This will throw an exception if your .ini file is not set properly
            self.gbdx_connection = gbdx_auth.get_session(
                kwargs.get('config_file'))
    except Exception as err:
        # FIX: report through the logger configured above instead of a bare
        # print(); still non-fatal, matching the original best-effort intent.
        self.logger.error('Failed to establish a GBDX session: %s', err)
def __init__(self, **kwargs):
    # Purpose: configure console logging, establish an authenticated GBDX
    # session, mount a retrying adapter for the virtual IPE endpoint, and
    # build a futures-based session for concurrent requests.
    self.logger = logging.getLogger('gbdxtools')
    self.logger.setLevel(logging.ERROR)
    self.console_handler = logging.StreamHandler()
    self.console_handler.setLevel(logging.ERROR)
    self.formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    self.console_handler.setFormatter(self.formatter)
    self.logger.addHandler(self.console_handler)
    # NOTE: logger level is ERROR, so this info record is normally suppressed.
    self.logger.info('Logger initialized')
    if 'host' in kwargs:
        self.root_url = 'https://%s' % kwargs.get('host')
    try:
        if (kwargs.get('username') and kwargs.get('password') and
                kwargs.get('client_id') and kwargs.get('client_secret')):
            self.gbdx_connection = gbdx_auth.session_from_kwargs(**kwargs)
        elif kwargs.get('gbdx_connection'):
            self.gbdx_connection = kwargs.get('gbdx_connection')
        elif self.gbdx_connection is None:
            # assumes a class-level ``gbdx_connection`` attribute exists —
            # AttributeError otherwise (caught below). TODO confirm.
            # This will throw an exception if your .ini file is not set properly
            self.gbdx_connection = gbdx_auth.get_session(
                kwargs.get('config_file'))
    except Exception as err:
        # NOTE(review): auth errors are swallowed here and only printed;
        # self.gbdx_connection may be left unset afterwards.
        print(err)
    if self.gbdx_connection is not None:
        # Retry transient failures (up to 5 times) against the IPE endpoint.
        self.gbdx_connection.mount(
            VIRTUAL_IPE_URL, HTTPAdapter(
                max_retries=5))  # status_forcelist=[500, 502, 504]))
        self.gbdx_futures_session = FuturesSession(
            session=self.gbdx_connection, max_workers=64)
def expire_token(r, *args, **kw):
    """Response hook: on HTTP 401, refresh the GBDX token and retry once.

    Requests a new token if 401, retries request, mainly for auth v2
    migration.

    :param r: the requests Response that triggered the hook
    :param args: unused positional hook arguments
    :param kw: unused keyword hook arguments
    :return: the retried response on 401, otherwise None
    """
    # NOTE(review): this is a closure — ``self`` and ``kwargs`` come from the
    # enclosing scope (presumably an __init__); confirm against the caller.
    if r.status_code == 401:
        try:
            # remove hooks so it doesn't get into infinite loop
            r.request.hooks = None
            # expire the token
            gbdx_auth.expire_token(
                token_to_expire=self.gbdx_connection.token,
                config_file=kwargs.get('config_file'))
            # re-init the session
            self.gbdx_connection = gbdx_auth.get_session(
                kwargs.get('config_file'))
            # make original request, triggers new token request first.
            # Only the method and URL are re-sent; the original request's
            # body/headers are not forwarded.
            return self.gbdx_connection.request(
                method=r.request.method, url=r.request.url)
        except Exception as e:
            r.request.hooks = None
            print(
                "Error expiring token from session, Reason {}".format(
                    e))
def test_session_from_existing_ini_file_user_pass_without_client_creds(
        self):
    """An INI file with user/pass but no client creds still yields a token."""
    # Disable token persistence so the fixture config file is never rewritten.
    gbdx_auth.SAVE_TOKEN = False
    config_path = 'tests/unit/data/config_ini_with_user_pass_without_client_creds.txt'
    session = gbdx_auth.get_session(config_file=config_path)
    self.assertEqual('dumdumdum', session.token['access_token'])
    gbdx_auth.SAVE_TOKEN = True
def expire_token(r, *args, **kw):
    """Response hook: on HTTP 401, refresh the GBDX token and retry once.

    Requests a new token if 401, retries request, mainly for auth v2
    migration.

    :param r: the requests Response that triggered the hook
    :param args: unused positional hook arguments
    :param kw: unused keyword hook arguments
    :return: the retried response on 401, otherwise None
    """
    # NOTE(review): closure — relies on ``self``, ``kwargs`` and the
    # module-level ``HOST``/``localhost`` from the enclosing scope.
    if r.status_code == 401:
        try:
            # remove hooks so it doesn't get into infinite loop
            r.request.hooks = None
            # expire the token
            gbdx_auth.expire_token(
                token_to_expire=self.gbdx_connection.token,
                config_file=kwargs.get('config_file'))
            # re-init the session
            self.gbdx_connection = gbdx_auth.get_session(
                kwargs.get('config_file'))
            if HOST == 'http://host.docker.internal:3002':
                # local-dev mode: rewrap the refreshed session for localhost
                self.gbdx_connection = localhost(self.gbdx_connection)
            # make original request, triggers new token request first.
            # Only the method and URL are re-sent; body/headers are dropped.
            res = self.gbdx_connection.request(method=r.request.method,
                                               url=r.request.url)
            # re-add the hook to refresh in the future
            self.gbdx_connection.hooks['response'].append(expire_token)
            return res
        except Exception as e:
            r.request.hooks = None
            print(
                "Error expiring token from session, Reason {}".format(
                    e))
def set(awscli, awscli_profile, s3cmd, s3cmd_config, environ, environ_export, print_token, duration):
    """Writes temporary GBDX S3 credentials to one or more of the following targets:

    awscli -- The Amazon Web Services Command Line Interface (https://aws.amazon.com/cli/) credentials file

    s3cmd -- The Command Line S3 Client (http://s3tools.org/s3cmd) configuration file

    environ -- Bash environment variables (only prints to the screen)

    By design, the GBDX credentials have a duration of at most 36000 seconds

    Warning: No backups of the original files are made!
    """
    # Bail out early when no output target was requested.
    if not (awscli or s3cmd or environ):
        raise click.ClickException(
            "Must specify at least one of --awscli, --s3cmd or --environ.")
    session = gbdx_auth.get_session()
    if print_token:
        _s3creds.print_gbdx_token_info(session)
    _s3creds.set_temp_creds(session, awscli, awscli_profile, s3cmd,
                            s3cmd_config, environ, environ_export, duration)
def test_session_from_runtime(self):
    """A runtime JSON file containing a valid JWT is picked up automatically."""
    # Point the module at a test fixture instead of the container default path.
    gbdx_auth.GBDX_RUNTIME_FILE = 'tests/unit/data/runtime2.json'
    session = gbdx_auth.get_session()
    gbdx_auth.GBDX_RUNTIME_FILE = '/mnt/work/gbdx_runtime.json'
    self.assertEqual(
        'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJPbmxpbmUgSldUIEJ1aWxkZXIiLCJpYXQiOjE1MjIzNDUzMDMsImV4cCI6MTU1Mzg4MTMwMywiYXVkIjoid3d3LmV4YW1wbGUuY29tIiwic3ViIjoianJvY2tldEBleGFtcGxlLmNvbSIsIkdpdmVuTmFtZSI6IkpvaG5ueSIsIlN1cm5hbWUiOiJSb2NrZXQiLCJFbWFpbCI6Impyb2NrZXRAZXhhbXBsZS5jb20iLCJSb2xlIjpbIk1hbmFnZXIiLCJQcm9qZWN0IEFkbWluaXN0cmF0b3IiXX0.iTYhRVtOLExk3q1ScRs_98lH-QBpLzgdFkhGepQOvtg',
        session.token['access_token'])
def __init__(self, **kwargs):
    """Configure logging, establish an authenticated GBDX session, register a
    401 token-refresh hook, mount a retrying adapter for the RDA endpoint,
    and build a futures-based session for concurrent requests.
    """
    self.logger = logging.getLogger('gbdxtools')
    self.logger.setLevel(logging.ERROR)
    self.console_handler = logging.StreamHandler()
    self.console_handler.setLevel(logging.ERROR)
    self.formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    self.console_handler.setFormatter(self.formatter)
    self.logger.addHandler(self.console_handler)
    self.logger.info('Logger initialized')
    if 'host' in kwargs:
        self.root_url = 'https://%s' % kwargs.get('host')
    if (kwargs.get('username') and kwargs.get('password')):
        self.gbdx_connection = gbdx_auth.session_from_kwargs(**kwargs)
    elif kwargs.get('gbdx_connection'):
        self.gbdx_connection = kwargs.get('gbdx_connection')
    elif self.gbdx_connection is None:
        # This will throw an exception if your .ini file is not set properly
        self.gbdx_connection = gbdx_auth.get_session(kwargs.get('config_file'))

    def expire_token(r, *args, **kw):
        """Response hook: on HTTP 401, refresh the token and retry the request once.

        :param r: the requests Response that triggered the hook
        :param args: unused positional hook arguments
        :param kw: unused keyword hook arguments
        :return: the retried response on 401, otherwise None
        """
        if r.status_code == 401:
            try:
                # remove hooks so it doesn't get into infinite loop
                r.request.hooks = None
                # expire the token
                gbdx_auth.expire_token(token_to_expire=self.gbdx_connection.token,
                                       config_file=kwargs.get('config_file'))
                # re-init the session
                self.gbdx_connection = gbdx_auth.get_session(kwargs.get('config_file'))
                # make original request, triggers new token request first
                return self.gbdx_connection.request(method=r.request.method,
                                                    url=r.request.url)
            except Exception as e:
                # BUG FIX: Python 3 exceptions have no ``.message`` attribute;
                # formatting ``e.message`` raised AttributeError inside this
                # handler. Format the exception object itself instead.
                r.request.hooks = None
                print("Error expiring token from session, Reason {}".format(e))

    if self.gbdx_connection is not None:
        self.gbdx_connection.hooks['response'].append(expire_token)
        # status_forcelist=[500, 502, 504]))
        self.gbdx_connection.mount(VIRTUAL_RDA_URL, HTTPAdapter(max_retries=5))
        self.gbdx_futures_session = FuturesSession(session=self.gbdx_connection, max_workers=64)
    if 'GBDX_USER' in os.environ:
        # Tag outgoing requests with the GBDX username for server-side tracing.
        header = {'User-Agent': os.environ['GBDX_USER']}
        self.gbdx_futures_session.headers.update(header)
        self.gbdx_connection.headers.update(header)
def __init__(self, **kwargs):
    # Purpose: configure console logging, establish an authenticated GBDX
    # session, register a 401 token-refresh hook, mount a retrying adapter
    # for the RDA endpoint, and build a futures session.
    self.logger = logging.getLogger('gbdxtools')
    self.logger.setLevel(logging.ERROR)
    self.console_handler = logging.StreamHandler()
    self.console_handler.setLevel(logging.ERROR)
    self.formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    self.console_handler.setFormatter(self.formatter)
    self.logger.addHandler(self.console_handler)
    # NOTE: logger level is ERROR, so this info record is normally suppressed.
    self.logger.info('Logger initialized')
    if 'host' in kwargs:
        self.root_url = 'https://%s' % kwargs.get('host')
    if (kwargs.get('username') and kwargs.get('password')):
        self.gbdx_connection = gbdx_auth.session_from_kwargs(**kwargs)
    elif kwargs.get('gbdx_connection'):
        self.gbdx_connection = kwargs.get('gbdx_connection')
    elif self.gbdx_connection is None:
        # assumes a class-level ``gbdx_connection`` attribute exists — TODO confirm.
        # This will throw an exception if your .ini file is not set properly
        self.gbdx_connection = gbdx_auth.get_session(kwargs.get('config_file'))

    def expire_token(r, *args, **kw):
        """Response hook: on HTTP 401, refresh the token and retry once.

        Requests a new token if 401, retries request, mainly for auth v2
        migration.

        :param r: the requests Response that triggered the hook
        :param args: unused positional hook arguments
        :param kw: unused keyword hook arguments
        :return: the retried response on 401, otherwise None
        """
        if r.status_code == 401:
            try:
                # remove hooks so it doesn't get into infinite loop
                r.request.hooks = None
                # expire the token
                gbdx_auth.expire_token(token_to_expire=self.gbdx_connection.token,
                                       config_file=kwargs.get('config_file'))
                # re-init the session
                self.gbdx_connection = gbdx_auth.get_session(kwargs.get('config_file'))
                # make original request, triggers new token request first.
                # Only method and URL are re-sent; body/headers are dropped.
                return self.gbdx_connection.request(method=r.request.method,
                                                    url=r.request.url)
            except Exception as e:
                r.request.hooks = None
                print("Error expiring token from session, Reason {}".format(e))

    if self.gbdx_connection is not None:
        self.gbdx_connection.hooks['response'].append(expire_token)
        # status_forcelist=[500, 502, 504]))
        self.gbdx_connection.mount(VIRTUAL_RDA_URL, HTTPAdapter(max_retries=5))
        self.gbdx_futures_session = FuturesSession(session=self.gbdx_connection, max_workers=64)
    if 'GBDX_USER' in os.environ:
        # Tag outgoing requests with the GBDX username for server-side tracing.
        header = {'User-Agent': os.environ['GBDX_USER']}
        self.gbdx_futures_session.headers.update(header)
        self.gbdx_connection.headers.update(header)
def test_session_from_existing_env_var_without_client_creds(self):
    """Username/password env vars alone are enough to obtain a token."""
    os.environ['GBDX_USERNAME'] = '******'
    os.environ['GBDX_PASSWORD'] = '******'
    session = gbdx_auth.get_session()
    os.environ.pop('GBDX_USERNAME')
    os.environ.pop('GBDX_PASSWORD')
    self.assertEqual('dumdumdum', session.token['access_token'])
def test_session_from_existing_env_var_token(
        self, mocked_session_from_existing_token):
    """Access/refresh tokens in the env route through the token-session path."""
    os.environ['GBDX_ACCESS_TOKEN'] = 'dummy-access-token-not-jwt'
    os.environ['GBDX_REFRESH_TOKEN'] = 'dummy-refresh-token'
    gbdx_auth.get_session()
    self.assertTrue(mocked_session_from_existing_token.called)
    os.environ.pop('GBDX_ACCESS_TOKEN')
    os.environ.pop('GBDX_REFRESH_TOKEN')
def test_session_from_existing_env_var_token2(self):
    """A decodable JWT in the env is used directly as the access token."""
    # this is a dummy jwt that decodes successfully
    jwt_token = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJPbmxpbmUgSldUIEJ1aWxkZXIiLCJpYXQiOjE1MjIzNDUzMDMsImV4cCI6MTU1Mzg4MTMwMywiYXVkIjoid3d3LmV4YW1wbGUuY29tIiwic3ViIjoianJvY2tldEBleGFtcGxlLmNvbSIsIkdpdmVuTmFtZSI6IkpvaG5ueSIsIlN1cm5hbWUiOiJSb2NrZXQiLCJFbWFpbCI6Impyb2NrZXRAZXhhbXBsZS5jb20iLCJSb2xlIjpbIk1hbmFnZXIiLCJQcm9qZWN0IEFkbWluaXN0cmF0b3IiXX0.iTYhRVtOLExk3q1ScRs_98lH-QBpLzgdFkhGepQOvtg'
    os.environ['GBDX_ACCESS_TOKEN'] = jwt_token
    os.environ['GBDX_REFRESH_TOKEN'] = 'dummy-refresh-token'
    session = gbdx_auth.get_session()
    os.environ.pop('GBDX_ACCESS_TOKEN')
    os.environ.pop('GBDX_REFRESH_TOKEN')
    self.assertEqual(jwt_token, session.token['access_token'])
def test_session_from_runtime_invalid_jwt(self):
    """An invalid JWT in the runtime file raises a descriptive error."""
    # override the default location of a possible runtime file:
    gbdx_auth.GBDX_RUNTIME_FILE = 'tests/unit/data/runtime1.json'
    with self.assertRaises(Exception) as ctx:
        gbdx_auth.get_session()
    gbdx_auth.GBDX_RUNTIME_FILE = '/mnt/work/gbdx_runtime.json'
    self.assertEqual(
        str(ctx.exception),
        'Supplied GBDX access token is not a valid JWT. Check GBDX_ACCESS_TOKEN env var or runtime.json'
    )
def get_temp_s3creds(gbdx_conn=None, duration=36000):
    """Request temporary GBDX S3 credentials from the s3creds service.

    :param gbdx_conn: optional authenticated session object exposing ``get``;
        a new GBDX session is created when omitted
    :param duration: credential lifetime in seconds (service max is 36000)
    :return: dict of credential fields as returned by the service
    :raises Exception: when the request fails or no access key is returned
    """
    url = 'https://geobigdata.io/s3creds/v1/prefix?duration={}'.format(duration)
    if gbdx_conn is None:
        gbdx_conn = gbdx_auth.get_session()
    # SECURITY: verify=False disables TLS certificate verification; kept for
    # backward compatibility but should be revisited.
    results = gbdx_conn.get(url, verify=False)
    # Use .get() so a success response missing the key raises our descriptive
    # error below instead of an opaque KeyError.
    if not results.ok or not results.json().get('S3_access_key'):
        raise Exception("Failed to find {0}. Error {1}".format(url, results.reason))
    return results.json()
def test_session_from_existing_env_var_token_invalid_jwt(self):
    """A malformed JWT in the env raises a descriptive error."""
    os.environ['GBDX_ACCESS_TOKEN'] = 'dummy-access-token-not-jwt'
    os.environ['GBDX_REFRESH_TOKEN'] = 'dummy-refresh-token'
    with self.assertRaises(Exception) as ctx:
        gbdx_auth.get_session()
    self.assertEqual(
        str(ctx.exception),
        'Supplied GBDX access token is not a valid JWT. Check GBDX_ACCESS_TOKEN env var or runtime.json'
    )
    os.environ.pop('GBDX_ACCESS_TOKEN')
    os.environ.pop('GBDX_REFRESH_TOKEN')
def test_session_from_existing_env_var_user_pass_id_secret3(self):
    """A full credential set in env vars produces a working token."""
    credentials = {
        'GBDX_USERNAME': '******',
        'GBDX_PASSWORD': '******',
        'GBDX_CLIENT_ID': 'dummy-client_id',
        'GBDX_CLIENT_SECRET': 'dummy-secret',
    }
    os.environ.update(credentials)
    session = gbdx_auth.get_session()
    for key in credentials:
        os.environ.pop(key)
    self.assertEqual('dumdumdum', session.token['access_token'])
def test_session_from_existing_env_var_user_pass_id_secret(
        self, mocked_session_from_config, mocked_session_from_envvars):
    """With a full credential set in the env, the envvar session path is used."""
    os.environ['GBDX_USERNAME'] = '******'
    os.environ['GBDX_PASSWORD'] = '******'
    os.environ['GBDX_CLIENT_ID'] = 'dummy-client_id'
    os.environ['GBDX_CLIENT_SECRET'] = 'dummy-secret'
    gbdx_auth.get_session()
    self.assertTrue(mocked_session_from_envvars.called)
    for key in ('GBDX_USERNAME', 'GBDX_PASSWORD',
                'GBDX_CLIENT_ID', 'GBDX_CLIENT_SECRET'):
        os.environ.pop(key)
def get_temp_s3creds(gbdx_conn=None, duration=36000):
    """Fetch temporary GBDX S3 credentials from the s3creds service.

    :param gbdx_conn: optional authenticated session; created on demand
    :param duration: credential lifetime in seconds
    :return: dict of credential fields from the service response
    :raises Exception: when the request fails or no access key is present
    """
    creds_url = 'https://geobigdata.io/s3creds/v1/prefix?duration={}'.format(duration)
    connection = gbdx_conn if gbdx_conn is not None else gbdx_auth.get_session()
    response = connection.get(creds_url, verify=False)
    if not (response.ok and response.json()['S3_access_key']):
        raise Exception("Failed to find {0}. Error {1}".format(
            creds_url, response.reason))
    return response.json()
def __init__(self, **kwargs):
    # Purpose: establish the GBDX connection, set up console logging, and
    # construct a client object for each GBDX service (S3, Ordering, Catalog,
    # Workflow, Idaho, Vectors, TaskRegistry).
    host = kwargs.get('host') if kwargs.get('host') else 'geobigdata.io'
    self.root_url = 'https://%s' % host
    if (kwargs.get('username') and kwargs.get('password') and
            kwargs.get('client_id') and kwargs.get('client_secret')):
        self.gbdx_connection = gbdx_auth.session_from_kwargs(**kwargs)
    elif kwargs.get('gbdx_connection'):
        # Pass in a custom gbdx connection object, for testing purposes
        self.gbdx_connection = kwargs.get('gbdx_connection')
    else:
        # This will throw an exception if your .ini file is not set properly
        self.gbdx_connection = gbdx_auth.get_session(kwargs.get('config_file'))
    # create a logger
    # for now, just log to the console. We'll replace all the 'print' statements
    # with at least logger.info or logger.debug statements
    # later, we can log to a service, file, or some other aggregator
    self.logger = logging.getLogger('gbdxtools')
    self.logger.setLevel(logging.ERROR)
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.ERROR)
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    console_handler.setFormatter(formatter)
    self.logger.addHandler(console_handler)
    self.logger.info('Logger initialized')
    # create and store an instance of the GBDX s3 client
    self.s3 = S3(self)
    # create and store an instance of the GBDX Ordering Client
    self.ordering = Ordering(self)
    # create and store an instance of the GBDX Catalog Client
    self.catalog = Catalog(self)
    # create and store an instance of the GBDX Workflow Client
    self.workflow = Workflow(self)
    # create and store an instance of the Idaho Client
    self.idaho = Idaho(self)
    self.vectors = Vectors(self)
    self.task_registry = TaskRegistry(self)
    # Flag for running the workflow locally
    self.run_local = kwargs.get('run_local', False)
def test_session_from_existing_env_var_user_pass_id_secret2(self):
    """Invalid credentials in the env raise a descriptive exception."""
    os.environ['GBDX_USERNAME'] = '******'
    os.environ['GBDX_PASSWORD'] = '******'
    os.environ['GBDX_CLIENT_ID'] = 'dummy-client_id'
    os.environ['GBDX_CLIENT_SECRET'] = 'dummy-secret'
    with self.assertRaises(Exception) as ctx:
        gbdx_auth.get_session()
    for key in ('GBDX_USERNAME', 'GBDX_PASSWORD',
                'GBDX_CLIENT_ID', 'GBDX_CLIENT_SECRET'):
        os.environ.pop(key)
    self.assertEqual(
        str(ctx.exception),
        'Invalid GBDX credentials given in environment variables.')
def __init__(self, **kwargs):
    # Purpose: establish the GBDX connection, set up console logging, and
    # construct a client object for each GBDX service.
    host = kwargs.get('host') if kwargs.get('host') else 'geobigdata.io'
    self.root_url = 'https://%s' % host
    if (kwargs.get('username') and kwargs.get('password') and
            kwargs.get('client_id') and kwargs.get('client_secret')):
        self.gbdx_connection = gbdx_auth.session_from_kwargs(**kwargs)
    elif kwargs.get('gbdx_connection'):
        # Pass in a custom gbdx connection object, for testing purposes
        self.gbdx_connection = kwargs.get('gbdx_connection')
    else:
        # This will throw an exception if your .ini file is not set properly
        self.gbdx_connection = gbdx_auth.get_session(
            kwargs.get('config_file'))
    # create a logger
    # for now, just log to the console. We'll replace all the 'print' statements
    # with at least logger.info or logger.debug statements
    # later, we can log to a service, file, or some other aggregator
    self.logger = logging.getLogger('gbdxtools')
    self.logger.setLevel(logging.ERROR)
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.ERROR)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    console_handler.setFormatter(formatter)
    self.logger.addHandler(console_handler)
    self.logger.info('Logger initialized')
    # create and store an instance of the GBDX s3 client
    self.s3 = S3(self)
    # create and store an instance of the GBDX Ordering Client
    self.ordering = Ordering(self)
    # create and store an instance of the GBDX Catalog Client
    self.catalog = Catalog(self)
    # create and store an instance of the GBDX Workflow Client
    self.workflow = Workflow(self)
    # create and store an instance of the Idaho Client
    self.idaho = Idaho(self)
    self.vectors = Vectors(self)
    self.task_registry = TaskRegistry(self)
def __init__(self, is_test=False):
    """
    Constructor for AccountStorageService class

    :param is_test: when True, skip the GBDX credential fetch and build an
        S3 client against placeholder bucket/prefix values (tests supply the
        backend via the mock_s3 decorator)
    """
    endpoint = 'https://geobigdata.io/s3creds/v1/prefix'
    if not is_test:
        self.gbdx = gbdx_auth.get_session()
        # Ask the s3creds service for one-hour temporary AWS credentials.
        response = self.gbdx.get('%s?duration=3600' % endpoint)
        if response.status_code != 200:
            raise Exception('%s: %s' % (response.status_code, response.content))
        secret_key = response.json()['S3_secret_key']
        access_key = response.json()['S3_access_key']
        session_token = response.json()['S3_session_token']
        session = boto3.session.Session(
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
            aws_session_token=session_token
        )
        self.bucket = response.json()['bucket']
        self.prefix = response.json()['prefix']
        self.client = session.client('s3')
    else:
        # session is not None, meaning it is being overridden.
        # Testing purposes.
        self.bucket = "not_provided"
        self.prefix = "not_provided"
        # Session creds don't matter because of the mock_s3 decorator
        session = boto3.session.Session(
            region_name='us-east-1'
        )
        self.client = session.client('s3')
        self.client.create_bucket(Bucket=self.bucket)
    # Transfer helper shared by both branches for uploads/downloads.
    self.s3 = S3Transfer(self.client)
def set(awscli, awscli_profile, s3cmd, s3cmd_config, environ, environ_export, print_token, duration):
    """Writes temporary GBDX S3 credentials to one or more of the following targets:

    awscli -- The Amazon Web Services Command Line Interface (https://aws.amazon.com/cli/) credentials file

    s3cmd -- The Command Line S3 Client (http://s3tools.org/s3cmd) configuration file

    environ -- Bash environment variables (only prints to the screen)

    By design, the GBDX credentials have a duration of at most 36000 seconds

    Warning: No backups of the original files are made!
    """
    # Require at least one output target before doing any network work.
    if not any((awscli, s3cmd, environ)):
        raise click.ClickException(
            "Must specify at least one of --awscli, --s3cmd or --environ.")
    gbdx_conn = gbdx_auth.get_session()
    if print_token:
        _s3creds.print_gbdx_token_info(gbdx_conn)
    # Delegate credential retrieval and file/environment writing.
    _s3creds.set_temp_creds(gbdx_conn, awscli, awscli_profile, s3cmd,
                            s3cmd_config, environ, environ_export, duration)
def __init__(self, **kwargs):
    # Purpose: establish the GBDX connection and construct the per-service
    # client objects.
    if (kwargs.get('username') and kwargs.get('password') and
            kwargs.get('client_id') and kwargs.get('client_secret')):
        self.gbdx_connection = gbdx_auth.session_from_kwargs(**kwargs)
    elif kwargs.get('gbdx_connection'):
        # Pass in a custom gbdx connection object, for testing purposes
        self.gbdx_connection = kwargs.get('gbdx_connection')
    else:
        # This will throw an exception if your .ini file is not set properly
        self.gbdx_connection = gbdx_auth.get_session()
    # create a logger
    # for now, just log to the console. We'll replace all the 'print' statements
    # with at least logger.info or logger.debug statements
    # later, we can log to a service, file, or some other aggregator
    self.logger = logging.getLogger('gbdxtools')
    # NOTE: this variant logs at DEBUG level (other variants use ERROR).
    self.logger.setLevel(logging.DEBUG)
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    console_handler.setFormatter(formatter)
    self.logger.addHandler(console_handler)
    self.logger.info('Logger initialized')
    # create and store an instance of the GBDX s3 client
    self.s3 = S3(self)
    # create and store an instance of the GBDX Ordering Client
    self.ordering = Ordering(self)
    # create and store an instance of the GBDX Catalog Client
    self.catalog = Catalog(self)
    # create and store an instance of the GBDX Workflow Client
    self.workflow = Workflow(self)
    # create and store an instance of the Idaho Client
    self.idaho = Idaho(self)
def expire_token(r, *args, **kw):
    """Response hook: on HTTP 401, refresh the GBDX token and retry once.

    Requests a new token if 401, retries request, mainly for auth v2
    migration.

    :param r: the requests Response that triggered the hook
    :param args: unused positional hook arguments
    :param kw: unused keyword hook arguments
    :return: the retried response on 401, otherwise None
    """
    # NOTE(review): closure — ``self`` and ``kwargs`` come from the enclosing
    # scope (presumably an __init__); confirm against the caller.
    if r.status_code == 401:
        try:
            # remove hooks so it doesn't get into infinite loop
            r.request.hooks = None
            # expire the token
            gbdx_auth.expire_token(token_to_expire=self.gbdx_connection.token,
                                   config_file=kwargs.get('config_file'))
            # re-init the session
            self.gbdx_connection = gbdx_auth.get_session(kwargs.get('config_file'))
            # make original request, triggers new token request first.
            # Only method and URL are re-sent; body/headers are dropped.
            return self.gbdx_connection.request(method=r.request.method,
                                                url=r.request.url)
        except Exception as e:
            r.request.hooks = None
            print("Error expiring token from session, Reason {}".format(e))
def test_session_from_existing_ini_file_user_pass2(
        self, mocked_session_from_config):
    """An INI file with user/pass goes through the config-session path."""
    config_path = 'tests/unit/data/config_ini_with_user_pass.txt'
    gbdx_auth.get_session(config_file=config_path)
    self.assertTrue(mocked_session_from_config.called)
def test_session_from_existing_ini_file_token2(self):
    """A token stored in the INI file is loaded into the session."""
    config_path = 'tests/unit/data/config_ini_with_token.txt'
    session = gbdx_auth.get_session(config_file=config_path)
    self.assertEqual('super-dummy-access-token', session.token['access_token'])
from gbdx_auth import gbdx_auth

# Get the config (you can pass one in below if its not located at ~/.gbdx-config)
# gbdx is an oauth2 enabled Session object like you find in the requests package
# (http://docs.python-requests.org/en/latest/user/advanced/).
gbdx = gbdx_auth.get_session()

# GET the set of workflow tasks:
r = gbdx.get("https://geobigdata.io/workflows/v1/tasks")
task_list = r.json()
# FIX: use the print() function — the Python-2-only ``print task_list``
# statement is a SyntaxError on Python 3; this form works on both.
print(task_list)
import numpy as np import pandas as pd from pprint import pprint import pyproj import urllib.request from functools import partial import requests import shapely as sp from shapely import wkb from shapely.geometry import mapping, shape from shapely.geometry.polygon import LinearRing, Polygon from shapely.ops import transform, unary_union import time #Open a session using the Authentication files (~/.gbdx-config) gbdx_auth.get_session() gbdx = gbdxtools.Interface() def wkb_to_wkt(poly): """ Open data using shapely and take it from binary to a valid Python geometry. """ poly_sp = wkb.loads(poly, hex=True) unary_poly = unary_union(poly_sp) return unary_poly def url_geojson_to_wkt(url): """ Create a list of wkt geometries from a geojson stored in an specific URL.
def __init__(self, auth=None):
    """Store an authenticated session and the workflow tasks endpoint URL.

    :param auth: optional pre-built OAuth2 session; when None a new session
        is created from the local GBDX configuration
    """
    if auth is None:
        auth = gbdx_auth.get_session()
    self.session = auth
    self.task_url = 'https://geobigdata.io/workflows/v1/tasks'
from gbdx_auth import gbdx_auth
import vcr


def force(r1, r2):
    # vcr matcher that treats every request as a match, so any recorded
    # cassette response is replayed regardless of the actual request.
    return True


my_vcr = vcr.VCR()
my_vcr.register_matcher('force', force)
my_vcr.match_on = ['force']


def get_mock_gbdx_session(token='dummytoken'):
    # Build an OAuth2Session with dummy client credentials and a pre-set
    # token so no network authentication is performed.
    s = OAuth2Session(client=LegacyApplicationClient('asdf'),
                      auto_refresh_url='fdsa',
                      auto_refresh_kwargs={
                          'client_id': 'asdf',
                          'client_secret': 'fdsa'
                      })
    s.token = token
    s.access_token = token
    return s


if 'GBDX_MOCK' not in os.environ:
    # NOTE(review): the mock session is used when GBDX_MOCK is ABSENT and the
    # real session only when it is set — confirm this inversion is intentional.
    conn = get_mock_gbdx_session(token='dummytoken')
else:
    conn = gbdx_auth.get_session()
def __init__(self, **kwargs):
    # Purpose: configure console logging, establish the GBDX connection
    # (optionally rewrapped for localhost development), and register a
    # 401 token-refresh hook on the session.
    self.logger = logging.getLogger('pyveda')
    self.logger.setLevel(logging.ERROR)
    self.console_handler = logging.StreamHandler()
    self.console_handler.setLevel(logging.ERROR)
    self.formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    self.console_handler.setFormatter(self.formatter)
    self.logger.addHandler(self.console_handler)
    # NOTE: logger level is ERROR, so this info record is normally suppressed.
    self.logger.info('Logger initialized')
    if 'host' in kwargs:
        self.root_url = 'https://%s' % kwargs.get('host')
    if (kwargs.get('username') and kwargs.get('password')):
        self.gbdx_connection = gbdx_auth.session_from_kwargs(**kwargs)
    elif kwargs.get('gbdx_connection'):
        self.gbdx_connection = kwargs.get('gbdx_connection')
    elif self.gbdx_connection is None:
        # assumes a class-level ``gbdx_connection`` attribute exists — TODO confirm.
        # This will throw an exception if your .ini file is not set properly
        self.gbdx_connection = gbdx_auth.get_session(
            kwargs.get('config_file'))
    # for local dev, cant use oauth2
    if not kwargs.get('oauth', True):
        self.gbdx_connection = localhost(self.gbdx_connection)

    def expire_token(r, *args, **kw):
        """Response hook: on HTTP 401, refresh the token and retry once.

        Requests a new token if 401, retries request, mainly for auth v2
        migration.

        :param r: the requests Response that triggered the hook
        :param args: unused positional hook arguments
        :param kw: unused keyword hook arguments
        :return: the retried response on 401, otherwise None
        """
        if r.status_code == 401:
            try:
                # remove hooks so it doesn't get into infinite loop
                r.request.hooks = None
                # expire the token
                gbdx_auth.expire_token(
                    token_to_expire=self.gbdx_connection.token,
                    config_file=kwargs.get('config_file'))
                # re-init the session
                self.gbdx_connection = gbdx_auth.get_session(
                    kwargs.get('config_file'))
                if HOST == 'http://host.docker.internal:3002':
                    # local-dev: rewrap the refreshed session for localhost
                    self.gbdx_connection = localhost(self.gbdx_connection)
                # make original request, triggers new token request first.
                # Only method and URL are re-sent; body/headers are dropped.
                res = self.gbdx_connection.request(method=r.request.method,
                                                   url=r.request.url)
                # re-add the hook to refresh in the future
                self.gbdx_connection.hooks['response'].append(expire_token)
                return res
            except Exception as e:
                r.request.hooks = None
                print(
                    "Error expiring token from session, Reason {}".format(
                        e))

    if self.gbdx_connection is not None:
        self.gbdx_connection.hooks['response'].append(expire_token)