class OptimizerApi(object):
  """Thin client for the Navigator Optimizer ("navopt") service.

  Endpoint and credentials default to the OPTIMIZER configuration section;
  any constructor argument overrides its configured counterpart.
  """

  def __init__(self, api_url=None, product_name=None, product_secret=None, ssl_cert_ca_verify=OPTIMIZER.SSL_CERT_CA_VERIFY.get(), product_auth_secret=None):
    self._api_url = (api_url or get_optimizer_url()).strip('/')
    self._email = OPTIMIZER.EMAIL.get()
    self._email_password = OPTIMIZER.EMAIL_PASSWORD.get()
    self._product_secret = product_secret if product_secret else OPTIMIZER.PRODUCT_SECRET.get()
    # Configured secret stores newlines escaped ('\\n'); restore real newlines.
    self._product_auth_secret = product_auth_secret if product_auth_secret else (OPTIMIZER.PRODUCT_AUTH_SECRET.get() and OPTIMIZER.PRODUCT_AUTH_SECRET.get().replace('\\n', '\n'))
    self._api = ApiLib("navopt", urlparse(self._api_url).hostname, self._product_secret, self._product_auth_secret)
    self._product_name = product_name if product_name else (OPTIMIZER.PRODUCT_NAME.get() or self.get_tenant()['tenant']) # Aka "workload"
    # Fix: _authenticate() reads self._token before any assignment existed;
    # initialize the cache so the first call does not raise AttributeError.
    self._token = None

  def _authenticate(self, force=False):
    """Return a cached auth token, fetching (or refreshing, if `force`) it."""
    if self._token is None or force:
      self._token = self.authenticate()['token']
    return self._token

  def get_tenant(self, email=None):
    """Look up the tenant for `email` (defaults to the configured email)."""
    return self._api.call_api("getTenant", {"email": email or self._email}).json()

  def create_tenant(self, group):
    """Create a tenant owned by user group `group`."""
    return self._api.call_api('createTenant', {'userGroup': group}).json()

  def authenticate(self):
    """POST product credentials to /api/authenticate and return the response.

    Raises:
      PopupException: wraps any RestException from the service.
    """
    try:
      data = {
        'productName': self._product_name,
        'productSecret': self._product_secret,
      }
      # NOTE(review): self._root is never assigned in this class — confirm it
      # is injected elsewhere or whether this call path is dead code.
      return self._root.post('/api/authenticate', data=json.dumps(data), contenttype=_JSON_CONTENT_TYPE)
    except RestException as e:  # Fix: was Python-2-only `except RestException, e`
      raise PopupException(e, title=_('Error while accessing Optimizer'))
def _exec(service, command, parameters=None):
  """Invoke an Altus API `command` for `service` and return the decoded JSON.

  Raises:
    PopupException: when the Altus credentials are not configured, or
      (wrapped) when the underlying API call fails.
  """
  if parameters is None:
    parameters = {}

  # Pick the configured endpoint for the requested service; anything
  # unrecognized falls back to the generic Altus hostname.
  hostname_conf = {
    'dataware': ALTUS.HOSTNAME_ANALYTICDB,
    'dataeng': ALTUS.HOSTNAME_DATAENG,
    'wa': ALTUS.HOSTNAME_WA,
  }.get(service, ALTUS.HOSTNAME)
  hostname = hostname_conf.get()

  # Fail fast — deliberately outside the try so this is not re-wrapped below.
  if not ALTUS.AUTH_KEY_ID.get() or not ALTUS.AUTH_KEY_SECRET.get():
    raise PopupException('Altus API is not configured.')

  try:
    # Configured secret stores newlines escaped ('\\n'); restore real newlines.
    client = ApiLib(service, hostname, ALTUS.AUTH_KEY_ID.get(), ALTUS.AUTH_KEY_SECRET.get().replace('\\n', '\n'))
    LOG.debug('%s : %s' % (command, parameters))
    response = client.call_api(command, parameters)
    LOG.info(response)
    decoded = response.json()
    LOG.debug(decoded)
    return decoded
  except Exception as e:
    raise PopupException(e, title=_('Error accessing'))
def _exec(service, command, parameters=None):
  """Invoke an Altus API `command` for `service` and return the decoded JSON.

  Args:
    service: Altus service key ('analyticdb', 'dataeng', 'wa', or other —
      anything else uses the generic configured hostname).
    command: API command name passed through to ApiLib.call_api.
    parameters: optional dict of command parameters (defaults to empty).

  Raises:
    PopupException: wraps any exception raised by the API call.
  """
  if parameters is None:
    parameters = {}

  # Select the configured hostname for the service.
  if service == 'analyticdb':
    hostname = ALTUS.HOSTNAME_ANALYTICDB.get()
  elif service == 'dataeng':
    hostname = ALTUS.HOSTNAME_DATAENG.get()
  elif service == 'wa':
    hostname = ALTUS.HOSTNAME_WA.get()
  else:
    hostname = ALTUS.HOSTNAME.get()

  try:
    # Configured secret stores newlines escaped ('\\n'); restore real newlines.
    api = ApiLib(service, hostname, ALTUS.AUTH_KEY_ID.get(), ALTUS.AUTH_KEY_SECRET.get().replace('\\n', '\n'))
    resp = api.call_api(command, parameters)
    LOG.info(resp)
    return resp.json()
  except Exception as e:  # Fix: was Python-2-only `except Exception, e` syntax
    raise PopupException(e, title=_('Error accessing'))
class OptimizerApi(object):
  """Client for the Navigator Optimizer ("navopt") service, bound to a Hue user.

  Credentials and endpoint default to the OPTIMIZER configuration; any
  constructor argument overrides its configured counterpart.
  """

  def __init__(self, user, api_url=None, product_name=None, product_secret=None, ssl_cert_ca_verify=OPTIMIZER.SSL_CERT_CA_VERIFY.get(), product_auth_secret=None):
    self.user = user
    self._api_url = (api_url or get_optimizer_url()).strip('/')
    self._email = OPTIMIZER.EMAIL.get()
    self._email_password = OPTIMIZER.EMAIL_PASSWORD.get()
    self._product_secret = product_secret if product_secret else OPTIMIZER.PRODUCT_SECRET.get( )
    # Configured secret stores newlines escaped ('\\n'); restore real newlines.
    self._product_auth_secret = product_auth_secret if product_auth_secret else ( OPTIMIZER.PRODUCT_AUTH_SECRET.get() and OPTIMIZER.PRODUCT_AUTH_SECRET.get().replace('\\n', '\n'))
    self._api = ApiLib("navopt", urlparse(self._api_url).hostname, self._product_secret, self._product_auth_secret)
    self._product_name = product_name if product_name else ( OPTIMIZER.PRODUCT_NAME.get() or self.get_tenant()['tenant'] ) # Aka "workload"

  def _authenticate(self, force=False):
    # NOTE(review): self._token is read here but never initialized in
    # __init__ — first call would raise AttributeError; confirm.
    if self._token is None or force:
      self._token = self.authenticate()['token']
    return self._token

  def _call(self, *kwargs):
    # NOTE: despite the name, *kwargs collects POSITIONAL arguments and is
    # forwarded positionally to call_api.
    resp = self._api.call_api(*kwargs)
    data = resp.json()
    # Log the Altus request id (when present) for traceability.
    if resp.headers.get('x-altus-request-id'):
      LOG.info('%s %s: %s' % (self.user, resp.headers['x-altus-request-id'], kwargs))
    # Surface service-side errors as NavOptException.
    if data.get('code') == 'UNKNOWN':
      raise NavOptException(data.get('message'))
    elif data.get('errorMsg'):
      raise NavOptException(data.get('errorMsg'))
    else:
      return data

  def get_tenant(self, email=None):
    """Look up the tenant for `email` (defaults to the configured email)."""
    return self._call("getTenant", {"email": email or self._email})

  def create_tenant(self, group):
    """Create a tenant owned by user group `group`."""
    return self._call('createTenant', {'userGroup': group})

  def upload(self, data, data_type='queries', source_platform='generic', workload_id=None):
    """Serialize `data` to a temp file and upload it to the Optimizer.

    Table/column stats are written as JSON; queries are written as CSV with
    a fixed header-field description.
    """
    if data_type in ('table_stats', 'cols_stats'):
      data_suffix = '.json'
      if data_type == 'table_stats':
        extra_parameters = {'fileType': 'TABLE_STATS'}
      else:
        extra_parameters = {'fileType': 'COLUMN_STATS'}
    else:
      data_suffix = '.csv'
      extra_parameters = {
        'colDelim': ',',
        'rowDelim': '\n',
        "headerFields": [{
          "count": 0, "name": "SQL_ID", "coltype": "SQL_ID", "use": True, "tag": ""
        }, {
          "count": 0, "name": "ELAPSED_TIME", "coltype": "NONE", "use": True, "tag": ""
        }, {
          "count": 0, "name": "SQL_FULLTEXT", "coltype": "SQL_QUERY", "use": True, "tag": ""
        }, {
          "count": 0, "name": "DATABASE", "coltype": "NONE", "use": True, "tag": "DATABASE"
        }],
      }

    # Only the generated *name* of the temp file is wanted; it is reopened
    # as a regular file below.
    f_queries_path = NamedTemporaryFile(suffix=data_suffix)
    f_queries_path.close( ) # Reopened as real file below to work well with the command

    try:
      f_queries = open(f_queries_path.name, 'w+')
      try:
        # Queries
        if data_suffix == '.csv':
          content_generator = OptimizerQueryDataAdapter(data)
          queries_csv = export_csvxls.create_generator( content_generator, 'csv')
          for row in queries_csv:
            f_queries.write(row)
            LOG.debug(row[:1000])
        else:
          # Table, column stats
          f_queries.write(json.dumps(data))
          LOG.debug(json.dumps(data[:10]))
      finally:
        f_queries.close()
      parameters = {
        'tenant': self._product_name,
        'fileLocation': f_queries.name,
        'sourcePlatform': source_platform,
      }
      parameters.update(extra_parameters)
      response = self._api.call_api('upload', parameters)
      status = json.loads(response)
      status['count'] = len(data)
      return status
    except RestException, e:
      raise PopupException(e, title=_('Error while accessing Optimizer'))
    finally:
      # NOTE(review): source is truncated at this point — the body of this
      # finally clause (presumably temp-file cleanup) is missing from view.
class OptimizerApi(object):
  """Client for the Navigator Optimizer ("navopt") service using auth-key
  credentials and a tenant id, bound to a Hue user.
  """

  def __init__(self, user, api_url=None, auth_key=None, auth_key_secret=None, tenant_id=None):
    self.user = user
    self._api_url = (api_url or get_optimizer_url()).strip('/')
    self._auth_key = auth_key if auth_key else OPTIMIZER.AUTH_KEY_ID.get()
    # Configured secret stores newlines escaped ('\\n'); restore real newlines.
    self._auth_key_secret = auth_key_secret if auth_key_secret else (OPTIMIZER.AUTH_KEY_SECRET.get() and OPTIMIZER.AUTH_KEY_SECRET.get().replace('\\n', '\n'))
    self._api = ApiLib("navopt", self._api_url, self._auth_key, self._auth_key_secret)
    self._tenant_id = tenant_id if tenant_id else _get_tenant_id(self) # Aka "workload"

  def _call(self, *kwargs):
    # NOTE: despite the name, *kwargs collects POSITIONAL arguments and is
    # forwarded positionally to call_api.
    start_time = time.time()
    resp = self._api.call_api(*kwargs)
    data = resp.json()
    # Log the Altus request id and round-trip time (when present).
    if resp.headers.get('x-altus-request-id'):
      LOG.info('%s %s in %dms: %s' % (self.user, resp.headers['x-altus-request-id'], (time.time() - start_time) * 1000, kwargs))
    # Surface service-side errors as NavOptException.
    if data.get('code') == 'UNKNOWN':
      raise NavOptException(data.get('message'))
    elif data.get('errorMsg'):
      raise NavOptException(data.get('errorMsg'))
    else:
      return data

  def get_tenant(self, cluster_id='default'):
    """Look up the tenant for `cluster_id`."""
    return self._call('getTenant', {'clusterId' : cluster_id})

  def upload(self, data, data_type='queries', source_platform='generic', workload_id=None):
    """Serialize `data` to a temp file and upload it to the Optimizer.

    Table/column stats are written as JSON; queries are written as CSV with
    a fixed header-field description.
    """
    if data_type in ('table_stats', 'cols_stats'):
      data_suffix = '.json'
      if data_type == 'table_stats':
        extra_parameters = {'fileType': 'TABLE_STATS'}
      else:
        extra_parameters = {'fileType': 'COLUMN_STATS'}
    else:
      data_suffix = '.csv'
      extra_parameters = {
        'colDelim': ',',
        'rowDelim': '\n',
        "headerFields": [
          {"count": 0, "name": "SQL_ID", "coltype": "SQL_ID", "use": True, "tag": ""},
          {"count": 0, "name": "ELAPSED_TIME", "coltype": "NONE", "use": True, "tag": ""},
          {"count": 0, "name": "SQL_FULLTEXT", "coltype": "SQL_QUERY", "use": True, "tag": ""},
          {"count": 0, "name": "DATABASE", "coltype": "NONE", "use": True, "tag": "DATABASE"}
        ],
      }

    # Only the generated *name* of the temp file is wanted; it is reopened
    # as a regular file below.
    f_queries_path = NamedTemporaryFile(suffix=data_suffix)
    f_queries_path.close() # Reopened as real file below to work well with the command

    try:
      f_queries = open(f_queries_path.name, 'w+')
      try:
        # Queries
        if data_suffix == '.csv':
          content_generator = OptimizerQueryDataAdapter(data)
          queries_csv = export_csvxls.create_generator(content_generator, 'csv')
          for row in queries_csv:
            f_queries.write(row)
            LOG.debug(row[:1000])
        else:
          # Table, column stats
          f_queries.write(json.dumps(data))
          LOG.debug(json.dumps(data[:10]))
      finally:
        f_queries.close()
      parameters = {
        'tenant' : self._tenant_id,
        'fileLocation': f_queries.name,
        'sourcePlatform': source_platform,
      }
      parameters.update(extra_parameters)
      response = self._api.call_api('upload', parameters)
      status = json.loads(response)
      status['count'] = len(data)
      return status
    except RestException, e:
      raise PopupException(e, title=_('Error while accessing Optimizer'))
    finally:
      # NOTE(review): source is truncated at this point — the body of this
      # finally clause (presumably temp-file cleanup) is missing from view.
class OptimizerClient(object):
  """Client for the Navigator Optimizer ("navopt") service.

  Wraps ApiLib calls with error translation (NavOptException) and, when
  OPTIMIZER.APPLY_SENTRY_PERMISSIONS is enabled, filters results through
  Sentry authorization helpers.
  """

  def __init__(self, user, api_url=None, auth_key=None, auth_key_secret=None, tenant_id=None):
    self.user = user
    self._api_url = (api_url or get_optimizer_url()).strip('/')
    self._auth_key = auth_key if auth_key else OPTIMIZER.AUTH_KEY_ID.get()
    # Configured secret stores newlines escaped ('\\n'); restore real newlines.
    self._auth_key_secret = auth_key_secret if auth_key_secret else (OPTIMIZER.AUTH_KEY_SECRET.get() and OPTIMIZER.AUTH_KEY_SECRET.get().replace('\\n', '\n'))
    self._api = ApiLib("navopt", self._api_url, self._auth_key, self._auth_key_secret)
    self._tenant_id = tenant_id if tenant_id else _get_tenant_id(self) # Aka "workload"

  def _call(self, *kwargs):
    # NOTE: despite the name, *kwargs collects POSITIONAL arguments and is
    # forwarded positionally to call_api.
    start_time = time.time()
    resp = self._api.call_api(*kwargs)
    data = resp.json()
    # Log the Altus request id and round-trip time (when present).
    if resp.headers.get('x-altus-request-id'):
      LOG.info('%s %s in %dms: %s' % (self.user, resp.headers['x-altus-request-id'], (time.time() - start_time) * 1000, kwargs))
    # Surface service-side errors as NavOptException.
    if data.get('code') == 'UNKNOWN':
      raise NavOptException(data.get('message'))
    elif data.get('errorMsg'):
      raise NavOptException(data.get('errorMsg'))
    else:
      return data

  def get_tenant(self, cluster_id='default'):
    """Look up the tenant for `cluster_id`."""
    return self._call('getTenant', {'clusterId' : cluster_id})

  def upload(self, data, data_type='queries', source_platform='generic', workload_id=None):
    """Serialize `data` to a temp file and upload it to the Optimizer.

    Table/column stats are written as JSON; queries are written as CSV with
    a fixed header-field description. The temp file is removed on exit.
    """
    if data_type in ('table_stats', 'cols_stats'):
      data_suffix = '.json'
      if data_type == 'table_stats':
        extra_parameters = {'fileType': 'TABLE_STATS'}
      else:
        extra_parameters = {'fileType': 'COLUMN_STATS'}
    else:
      data_suffix = '.csv'
      extra_parameters = {
        'fileType': 'QUERY',
        'colDelim': ',',
        'rowDelim': '\n',
        "headerFields": [
          {"count": 0, "name": "SQL_ID", "coltype": "SQL_ID", "use": True, "tag": ""},
          {"count": 0, "name": "ELAPSED_TIME", "coltype": "NONE", "use": True, "tag": ""},
          {"count": 0, "name": "SQL_FULLTEXT", "coltype": "SQL_QUERY", "use": True, "tag": ""},
          {"count": 0, "name": "DATABASE", "coltype": "NONE", "use": True, "tag": "DATABASE"}
        ],
      }

    # Only the generated *name* of the temp file is wanted; it is reopened
    # as a regular file below.
    f_queries_path = NamedTemporaryFile(suffix=data_suffix)
    f_queries_path.close() # Reopened as real file below to work well with the command

    try:
      f_queries = open(f_queries_path.name, 'w+')
      try:
        # Queries
        if data_suffix == '.csv':
          content_generator = OptimizerQueryDataAdapter(data)
          queries_csv = export_csvxls.create_generator(content_generator, 'csv')
          for row in queries_csv:
            f_queries.write(row)
            LOG.debug(row[:1000])
        else:
          # Table, column stats
          f_queries.write(json.dumps(data))
          LOG.debug(json.dumps(data[:10]))
      finally:
        f_queries.close()
      parameters = {
        'tenant' : self._tenant_id,
        'fileLocation': f_queries.name,
        'sourcePlatform': source_platform,
      }
      parameters.update(extra_parameters)
      # NOTE(review): other call sites use self._call/resp.json(); confirm
      # that call_api('upload', ...) really returns a JSON *string* here,
      # as json.loads(response) assumes.
      response = self._api.call_api('upload', parameters)
      status = json.loads(response)
      status['count'] = len(data)
      return status
    except RestException as e:
      raise PopupException(e, title=_('Error while accessing Optimizer'))
    finally:
      os.remove(f_queries_path.name)

  def upload_status(self, workload_id):
    """Return the status of a previous upload identified by `workload_id`."""
    return self._call('uploadStatus', {'tenant' : self._tenant_id, 'workloadId': workload_id})

  # Sentry permissions work bottom to top.
  # @check_privileges
  def top_tables(self, workfloadId=None, database_name='default', page_size=1000, startingToken=None):
    # NOTE(review): `workfloadId` (typo of workloadId?) is accepted but unused;
    # renaming would break keyword callers, so it is left as-is.
    return self._call('getTopTables', {'tenant' : self._tenant_id, 'dbName': database_name.lower(), 'pageSize': page_size, 'startingToken': startingToken})

  @check_privileges
  def table_details(self, database_name, table_name, page_size=100, startingToken=None):
    """Return Optimizer details for one table (names lower-cased)."""
    return self._call('getTablesDetail', {'tenant' : self._tenant_id, 'dbName': database_name.lower(), 'tableName': table_name.lower(), 'pageSize': page_size, 'startingToken': startingToken})

  def query_compatibility(self, source_platform, target_platform, query, page_size=100, startingToken=None):
    """Check `query` compatibility between source and target platforms."""
    return self._call('getQueryCompatible', {'tenant' : self._tenant_id, 'query': query, 'sourcePlatform': source_platform, 'targetPlatform': target_platform, 'startingToken': startingToken})

  def query_risk(self, query, source_platform, db_name, page_size=100, startingToken=None):
    """Return risk hints plus missing-stats/missing-DDL info for `query`."""
    response = self._call('getQueryRisk', {
      'tenant' : self._tenant_id,
      'query': _clean_query(query),
      'dbName': db_name,
      'sourcePlatform': source_platform,
      'pageSize': page_size,
      'startingToken': startingToken
    })
    hints = response.get(source_platform + 'Risk', {})
    # Collapse the service's "no risk found" placeholder into an empty list.
    if hints and hints == [{"riskTables": [], "riskAnalysis": "", "riskId": 0, "risk": "low", "riskRecommendation": ""}]:
      hints = []
    return {
      'hints': hints,
      'noStats': response.get('noStats', []),
      'noDDL': response.get('noDDL', []),
    }

  def similar_queries(self, source_platform, query, page_size=100, startingToken=None):
    """Admin-only: find queries similar to `query`."""
    if is_admin(self.user):
      return self._call('getSimilarQueries', {'tenant' : self._tenant_id, 'sourcePlatform': source_platform, 'query': query, 'pageSize': page_size, 'startingToken': startingToken})
    else:
      raise PopupException(_('Call not supported'))

  @check_privileges
  def top_filters(self, db_tables=None, page_size=100, startingToken=None):
    """Return top filter predicates, optionally restricted to `db_tables`."""
    args = {
      'tenant' : self._tenant_id,
      'pageSize': page_size,
      'startingToken': startingToken
    }
    if db_tables:
      args['dbTableList'] = [db_table.lower() for db_table in db_tables]
    return self._call('getTopFilters', args)

  @check_privileges
  def top_aggs(self, db_tables=None, page_size=100, startingToken=None):
    """Return top aggregations, Sentry-filtered when the setting is enabled."""
    args = {
      'tenant' : self._tenant_id,
      'pageSize': page_size,
      'startingToken': startingToken
    }
    if db_tables:
      args['dbTableList'] = [db_table.lower() for db_table in db_tables]
    results = self._call('getTopAggs', args)
    if OPTIMIZER.APPLY_SENTRY_PERMISSIONS.get():
      checker = get_checker(user=self.user)
      action = 'SELECT'
      # Key function: first aggregateInfo entry, augmented with the Sentry server.
      def getkey(table):
        names = table['aggregateInfo'][0]
        names['server'] = get_hive_sentry_provider()
        return names
      results['results'] = list(checker.filter_objects(results['results'], action, key=getkey))
    return results

  @check_privileges
  def top_columns(self, db_tables=None, page_size=100, startingToken=None):
    """Return top columns per clause, Sentry-filtered when enabled."""
    args = {
      'tenant' : self._tenant_id,
      'pageSize': page_size,
      'startingToken': startingToken
    }
    if db_tables:
      args['dbTableList'] = [db_table.lower() for db_table in db_tables]
    results = self._call('getTopColumns', args)
    if OPTIMIZER.APPLY_SENTRY_PERMISSIONS.get():
      for section in ['orderbyColumns', 'selectColumns', 'filterColumns', 'joinColumns', 'groupbyColumns']:
        results[section] = list(_secure_results(results[section], self.user))
    return results

  @check_privileges
  def top_joins(self, db_tables=None, page_size=100, startingToken=None):
    """Return top joins; when Sentry filtering is on, keep only joins whose
    every referenced table is readable by the user."""
    args = {
      'tenant' : self._tenant_id,
      'pageSize': page_size,
      'startingToken': startingToken
    }
    if db_tables:
      args['dbTableList'] = [db_table.lower() for db_table in db_tables]
    results = self._call('getTopJoins', args)
    if OPTIMIZER.APPLY_SENTRY_PERMISSIONS.get():
      filtered_joins = []
      for result in results['results']:
        cols = [_get_table_name(col) for col in result["joinCols"][0]["columns"]]
        # Keep the join only if no column's table was filtered out.
        if len(cols) == len(list(_secure_results(cols, self.user))):
          filtered_joins.append(result)
      results['results'] = filtered_joins
    return results

  def top_databases(self, page_size=100, startingToken=None):
    """Return top databases, Sentry-filtered when the setting is enabled."""
    args = {
      'tenant' : self._tenant_id,
      'pageSize': page_size,
      'startingToken': startingToken
    }
    data = self._call('getTopDatabases', args)
    if OPTIMIZER.APPLY_SENTRY_PERMISSIONS.get():
      data['results'] = list(_secure_results(data['results'], self.user))
    return data