def hue(request):
  """Render the main Hue SPA shell page (``hue.mako``).

  Gathers the per-user app list, filesystem/auth feature flags, leaflet map
  configuration, user preferences and the cluster configuration for the
  template context.
  """
  apps = appmanager.get_apps_dict(request.user)
  current_app, other_apps, apps_list = _get_apps(request.user, '')
  default_cluster_index, default_cluster_interface = Cluster(request.user).get_list_interface_indexes()
  # Fix: materialize the dict view — on Python 3 ``dict_values`` supports
  # neither indexing (``clusters[0]`` below) nor JSON serialization
  # (``json.dumps(clusters)``).
  clusters = list(get_clusters().values())

  return render('hue.mako', request, {
    'apps': apps,
    'other_apps': other_apps,
    'is_s3_enabled': is_s3_enabled() and has_s3_access(request.user),
    'is_adls_enabled': is_adls_enabled() and has_adls_access(request.user),
    'is_ldap_setup': 'desktop.auth.backend.LdapBackend' in desktop.conf.AUTH.BACKEND.get(),
    'leaflet': {
      'layer': desktop.conf.LEAFLET_TILE_LAYER.get(),
      'attribution': desktop.conf.LEAFLET_TILE_LAYER_ATTRIBUTION.get(),
      'map_options': json.dumps(desktop.conf.LEAFLET_MAP_OPTIONS.get()),
      'layer_options': json.dumps(desktop.conf.LEAFLET_TILE_LAYER_OPTIONS.get()),
    },
    'is_demo': desktop.conf.DEMO_ENABLED.get(),
    'banner_message': get_banner_message(request),
    'user_preferences': dict((x.key, x.value) for x in UserPreferences.objects.filter(user=request.user)),
    'cluster': clusters[0]['type'] if clusters else None,
    'clusters_config_json': json.dumps(clusters),
    'default_cluster_index': default_cluster_index,
    'default_cluster_interface': default_cluster_interface
  })
def get_context_clusters(request, interface):
  """Return the Altus cluster entries visible for *interface* as JSON."""
  response = {}
  clusters = []

  for config in list(get_clusters(request.user).values()):
    entry = {
      'id': config.get('id'),
      'name': config.get('name'),
      'status': 'CREATED',
      'environmentType': config.get('type'),
      'serviceType': config.get('interface'),
      'namespace': '',
      'type': config.get('type')
    }

    cluster_type = config.get('type')
    if cluster_type == 'altus':
      entry['name'] = 'Altus DE'
      entry['type'] = 'altus-de'
      clusters.append(entry)
      # NOTE(review): this DW copy is built but never appended — apparent dead
      # code or a missing ``clusters.append``; confirm against the sibling
      # implementation that lists both DE and DW entries before changing.
      entry = entry.copy()
      entry['name'] = 'Altus Data Warehouse'
      entry['type'] = 'altus-dw'
    elif cluster_type == 'altusv2':
      entry['name'] = 'Data Warehouse'
      entry['type'] = 'altus-dw2'
      clusters.append(entry)

  response[interface] = clusters
  response['status'] = 0

  return JsonResponse(response)
def hue(request):
  """Render the Hue SPA shell, enforcing the SAML group check first."""
  current_app, other_apps, apps_list = _get_apps(request.user, '')
  clusters = list(get_clusters(request.user).values())

  # Deny access unless the session was already flagged as permitted or the
  # SAML group check passes now.
  user_permitted = request.session.get('samlgroup_permitted_flag')
  if not (user_permitted or samlgroup_check(request)):
    return render('403.mako', request, {'is_embeddable': True})

  context = {
    'apps': apps_list,
    'other_apps': other_apps,
    'is_s3_enabled': fsmanager.is_enabled('s3a') and fsmanager.has_access('s3a', request.user),
    'is_adls_enabled': fsmanager.is_enabled('adl') and fsmanager.has_access('adl', request.user),
    'is_ldap_setup': 'desktop.auth.backend.LdapBackend' in desktop.conf.AUTH.BACKEND.get(),
    'leaflet': {
      'layer': desktop.conf.LEAFLET_TILE_LAYER.get(),
      'attribution': desktop.conf.LEAFLET_TILE_LAYER_ATTRIBUTION.get(),
      'map_options': json.dumps(desktop.conf.LEAFLET_MAP_OPTIONS.get()),
      'layer_options': json.dumps(desktop.conf.LEAFLET_TILE_LAYER_OPTIONS.get()),
    },
    'is_demo': desktop.conf.DEMO_ENABLED.get(),
    'banner_message': get_banner_message(request),
    'user_preferences': {x.key: x.value for x in UserPreferences.objects.filter(user=request.user)},
    'cluster': clusters[0]['type'] if clusters else None
  }

  return render('hue.mako', request, context)
def get_context_clusters(request, interface):
  """List cluster entries for *interface* and return them as a JSON payload."""
  entries = []

  for config in get_clusters(request.user).values():
    entry = {
      'id': config.get('id'),
      'name': config.get('name'),
      'status': 'CREATED',
      'environmentType': config.get('type'),
      'serviceType': config.get('interface'),
      'namespace': '',
      'type': config.get('type')
    }

    if entry.get('type') == 'altus':
      entry.update(name='Altus DE', type='altus-de')
      entries.append(entry)
      # NOTE(review): the DW copy below is created but never appended —
      # apparent dead code or a missing append; confirm intent before changing.
      entry = dict(entry, name='Altus Data Warehouse', type='altus-dw')
    elif entry.get('type') == 'altusv2':
      entry.update(name='Data Warehouse', type='altus-dw2')
      entries.append(entry)

  response = {interface: entries, 'status': 0}
  return JsonResponse(response)
def get_context_namespaces(request, interface):
  """Return the namespaces (and their computes) available to *interface*.

  For ``hive``/``impala``/``report`` this lists the locally configured
  ``direct`` clusters and, when Altus clusters are configured, the namespaces
  reported by Altus SDX augmented with "fake" namespaces for Analytic DB
  clusters that do not have one.
  """
  response = {'dynamicClusters': False}
  namespaces = []
  clusters = get_clusters(request.user).values()

  if interface == 'hive' or interface == 'impala' or interface == 'report':
    namespaces.extend([
      {
        'id': cluster['id'],
        'name': cluster['name'],
        'status': 'CREATED',
        'computes': [cluster]
      }
      for cluster in clusters if cluster.get('type') == 'direct'  # and interface == 'hive'
    ])

    # From Altus SDX
    if [cluster for cluster in clusters if cluster['type'] == 'altus']:
      # Note: attaching computes to namespaces might be done via the frontend in the future
      if interface == 'impala':
        adb_clusters = AnalyticDbApi(request.user).list_clusters()['clusters']
        for _cluster in adb_clusters:  # Add "fake" namespace if needed
          if not _cluster.get('namespaceCrn'):
            _cluster['namespaceCrn'] = _cluster['crn']
            _cluster['id'] = _cluster['crn']
            _cluster['namespaceName'] = _cluster['clusterName']
            _cluster['name'] = _cluster['clusterName']
      else:
        adb_clusters = []

      namespaces.extend([
        {
          'id': namespace.get('crn', 'None'),
          'name': namespace.get('namespaceName'),
          'status': namespace.get('status'),
          'computes': [
            _cluster for _cluster in adb_clusters if _cluster.get('namespaceCrn') == namespace.get('crn')
          ]
        }
        for namespace in SdxApi(request.user).list_namespaces() +
          # Adding "fake" namespace for cluster without one.
          # Fix: filter on the comprehension variable ``_cluster`` — the
          # original referenced an undefined name ``cluster`` here, which
          # raised NameError when this branch executed.
          [
            _cluster for _cluster in adb_clusters
            if not _cluster.get('namespaceCrn') and _cluster.get('status') == 'CREATED'
          ]
      ])
      response['dynamicClusters'] = True

  response[interface] = namespaces
  response['status'] = 0

  return JsonResponse(response)
def get_config(request):
  """Return the user's cluster configuration, cluster list and the distinct
  document types they own, as a JSON payload with ``status`` 0."""
  payload = get_cluster_config(request.user)
  payload['clusters'] = list(get_clusters(request.user).values())

  owned_types = Document2.objects.documents(user=request.user).order_by().values_list('type', flat=True).distinct()
  payload['documents'] = {'types': list(owned_types)}
  payload['status'] = 0

  return JsonResponse(payload)
def get_context_namespaces(request, interface):
  '''
  Namespaces are node cluster contexts (e.g. Hive + Ranger) that can be queried by computes.
  '''
  response = {}
  namespaces = []
  clusters = list(get_clusters(request.user).values())

  # Currently broken if not sent
  namespaces.extend([{
      'id': cluster['id'],
      'name': cluster['name'],
      'status': 'CREATED',
      'computes': [cluster]
    } for cluster in clusters if cluster.get('type') == 'direct'
  ])

  if interface == 'hive' or interface == 'impala' or interface == 'report':
    if get_cluster_config(request.user)['has_computes']:
      # Note: attaching computes to namespaces might be done via the frontend in the future
      if interface == 'impala':
        if IS_K8S_ONLY.get():
          adb_clusters = DataWarehouse2Api(request.user).list_clusters()['clusters']
        else:
          adb_clusters = AnalyticDbApi(request.user).list_clusters()['clusters']
        for _cluster in adb_clusters:  # Add "fake" namespace if needed
          if not _cluster.get('namespaceCrn'):
            _cluster['namespaceCrn'] = _cluster['crn']
            _cluster['id'] = _cluster['crn']
            _cluster['namespaceName'] = _cluster['clusterName']
            _cluster['name'] = _cluster['clusterName']
            # Fix: the original statement ended with a stray trailing comma,
            # which made ``compute_end_point`` a 1-tuple instead of a string.
            _cluster['compute_end_point'] = '%(publicHost)s' % _cluster['coordinatorEndpoint'] if IS_K8S_ONLY.get() else ''
      else:
        adb_clusters = []

      if IS_K8S_ONLY.get():
        sdx_namespaces = []
      else:
        sdx_namespaces = SdxApi(request.user).list_namespaces()

      # Adding "fake" namespace for cluster without one
      sdx_namespaces.extend([
        _cluster for _cluster in adb_clusters
        if not _cluster.get('namespaceCrn') or (IS_K8S_ONLY.get() and 'TERMINAT' not in _cluster['status'])
      ])

      namespaces.extend([{
          'id': namespace.get('crn', 'None'),
          'name': namespace.get('namespaceName'),
          'status': namespace.get('status'),
          'computes': [_cluster for _cluster in adb_clusters if _cluster.get('namespaceCrn') == namespace.get('crn')]
        } for namespace in sdx_namespaces if namespace.get('status') == 'CREATED' or IS_K8S_ONLY.get()
      ])

  response[interface] = namespaces
  response['status'] = 0

  return JsonResponse(response)
def get_context_namespaces(request, interface):
  """Return namespaces (with attached computes) usable by *interface*.

  ``direct`` clusters matching the interface are always listed; Altus SDX
  namespaces and "fake" namespaces for Analytic DB / DW clusters are added
  when Altus clusters are configured.
  """
  response = {}
  namespaces = []
  clusters = get_clusters(request.user).values()

  namespaces.extend([{
      'id': cluster['id'],
      'name': cluster['name'],
      'status': 'CREATED',
      'computes': [cluster]
    } for cluster in clusters if cluster.get('type') == 'direct' and cluster['interface'] in (interface, 'all')
  ])

  if interface == 'hive' or interface == 'impala' or interface == 'report':
    # From Altus SDX
    if [cluster for cluster in clusters if 'altus' in cluster['type']]:
      # Note: attaching computes to namespaces might be done via the frontend in the future
      if interface == 'impala':
        if IS_K8S_ONLY.get():
          adb_clusters = DataWarehouse2Api(request.user).list_clusters()['clusters']
        else:
          adb_clusters = AnalyticDbApi(request.user).list_clusters()['clusters']
        for _cluster in adb_clusters:  # Add "fake" namespace if needed
          if not _cluster.get('namespaceCrn'):
            _cluster['namespaceCrn'] = _cluster['crn']
            _cluster['id'] = _cluster['crn']
            _cluster['namespaceName'] = _cluster['clusterName']
            _cluster['name'] = _cluster['clusterName']
            # Fix: the original line ended with a stray trailing comma, turning
            # ``compute_end_point`` into a 1-tuple instead of a string.
            _cluster['compute_end_point'] = '%(publicHost)s' % _cluster['coordinatorEndpoint'] if IS_K8S_ONLY.get() else ''
      else:
        adb_clusters = []

      if IS_K8S_ONLY.get():
        sdx_namespaces = []
      else:
        sdx_namespaces = SdxApi(request.user).list_namespaces()

      # Adding "fake" namespace for cluster without one
      sdx_namespaces.extend([
        _cluster for _cluster in adb_clusters
        if not _cluster.get('namespaceCrn') or (IS_K8S_ONLY.get() and 'TERMINAT' not in _cluster['status'])
      ])

      namespaces.extend([{
          'id': namespace.get('crn', 'None'),
          'name': namespace.get('namespaceName'),
          'status': namespace.get('status'),
          'computes': [_cluster for _cluster in adb_clusters if _cluster.get('namespaceCrn') == namespace.get('crn')]
        } for namespace in sdx_namespaces if namespace.get('status') == 'CREATED' or IS_K8S_ONLY.get()
      ])

  response[interface] = namespaces
  response['status'] = 0

  return JsonResponse(response)
def get_context_computes(request, interface):
  """Return the computes usable by *interface* as a JSON payload."""
  response = {}
  computes = []
  clusters = get_clusters(request.user).values()
  # Whether any Altus cluster is configured at all.
  has_altus = any(cluster['type'] == 'altus' for cluster in clusters)

  if interface in ('hive', 'impala', 'oozie', 'jobs', 'report'):
    for cluster in clusters:
      if cluster.get('type') == 'direct':
        computes.append({
          'id': cluster['id'],
          'name': cluster['name'],
          'namespace': cluster['id'],
          'interface': interface,
          'type': cluster['type']
        })

  if interface in ('impala', 'jobs', 'report') and has_altus:
    adb_clusters = AnalyticDbApi(request.user).list_clusters()['clusters']
    computes.extend([{
        'id': cluster.get('crn'),
        'name': cluster.get('clusterName'),
        'status': cluster.get('status'),
        'namespace': cluster.get('namespaceCrn', cluster.get('crn')),
        'type': 'altus-adb'
      } for cluster in adb_clusters if cluster.get('status') == 'CREATED'
    ])

  if interface in ('oozie', 'jobs', 'spark2') and has_altus:
    computes.extend([{
        'id': cluster.get('crn'),
        'name': cluster.get('clusterName'),
        'status': cluster.get('status'),
        'environmentType': cluster.get('environmentType'),
        'serviceType': cluster.get('serviceType'),
        'namespace': cluster.get('namespaceCrn'),
        'type': 'altus-de'
      } for cluster in DataEngApi(request.user).list_clusters()['clusters']
    ])
    # TODO if interface == 'spark2' keep only SPARK type

  response[interface] = computes
  response['status'] = 0

  return JsonResponse(response)
def get_context_computes(request, interface):
  """Return computes for *interface*: locally configured ``direct`` clusters,
  plus Altus DW and DE clusters when Altus clusters are configured.
  """
  response = {}
  computes = []

  clusters = get_clusters(request.user).values()
  has_altus_clusters = [cluster for cluster in clusters if 'altus' in cluster['type']]

  computes.extend([{
      'id': cluster['id'],
      'name': cluster['name'],
      'namespace': cluster['id'],
      'interface': interface,
      'type': cluster['type']
    } for cluster in clusters if cluster.get('type') == 'direct' and cluster['interface'] in (interface, 'all')
  ])

  if has_altus_clusters:
    if interface == 'impala' or interface == 'report':
      if IS_K8S_ONLY.get():
        dw_clusters = DataWarehouse2Api(request.user).list_clusters()['clusters']
      else:
        dw_clusters = AnalyticDbApi(request.user).list_clusters()['clusters']

      computes.extend([{
          'id': cluster.get('crn'),
          'name': cluster.get('clusterName'),
          'status': cluster.get('status'),
          'namespace': cluster.get('namespaceCrn', cluster.get('crn')),
          'compute_end_point': IS_K8S_ONLY.get() and '%(publicHost)s' % cluster['coordinatorEndpoint'] or '',
          'type': 'altus-dw'
        } for cluster in dw_clusters
          # Fix: default a missing ``cdhVersion`` to '' — comparing None with a
          # str (``None >= 'CDH515'``) raises TypeError on Python 3.
          if (cluster.get('status') == 'CREATED' and (cluster.get('cdhVersion') or '') >= 'CDH515') or
             (IS_K8S_ONLY.get() and 'TERMINAT' not in cluster['status'])
      ])

    if interface == 'oozie' or interface == 'spark2':
      computes.extend([{
          'id': cluster.get('crn'),
          'name': cluster.get('clusterName'),
          'status': cluster.get('status'),
          'environmentType': cluster.get('environmentType'),
          'serviceType': cluster.get('serviceType'),
          'namespace': cluster.get('namespaceCrn'),
          'type': 'altus-de'
        } for cluster in DataEngApi(request.user).list_clusters()['clusters']
      ])
      # TODO if interface == 'spark2' keep only SPARK type

  response[interface] = computes
  response['status'] = 0

  return JsonResponse(response)
def get_context_computes(request, interface):
  '''
  Some clusters like Snowball can have multiple computes for a certain languages (Hive, Impala...).
  '''
  response = {}
  computes = []
  clusters = list(get_clusters(request.user).values())

  if get_cluster_config(request.user)['has_computes']:
    # TODO: only based on interface selected?
    interpreter = get_interpreter(connector_type=interface, user=request.user)
    if interpreter['dialect'] == 'impala':
      # Hard-coded sample DW clusters; the real API call is kept for reference.
      # dw_clusters = DataWarehouse2Api(request.user).list_clusters()['clusters']
      dw_clusters = [
        {'crn': 'c1', 'clusterName': 'c1', 'status': 'created', 'options': {'server_host': 'c1.gethue.com', 'server_port': 10000}},
        {'crn': 'c2', 'clusterName': 'c2', 'status': 'created', 'options': {'server_host': 'c2.gethue.com', 'server_port': 10000}},
      ]
      for dw in dw_clusters:
        computes.append({
          'id': dw.get('crn'),
          'name': dw.get('clusterName'),
          'status': dw.get('status'),
          'namespace': dw.get('namespaceCrn', dw.get('crn')),
          'type': interpreter['dialect'],
          'options': dw['options'],
        })
  else:
    # Currently broken if not sent
    for cluster in clusters:
      if cluster.get('type') == 'direct':
        computes.append({
          'id': cluster['id'],
          'name': cluster['name'],
          'namespace': cluster['id'],
          'interface': interface,
          'type': cluster['type'],
          'options': {}
        })

  response[interface] = computes
  response['status'] = 0

  return JsonResponse(response)
def get_context_computes(request, interface):
  '''
  Some clusters like Snowball can have multiple computes for a certain languages (Hive, Impala...).
  '''
  response = {}
  computes = []

  clusters = get_clusters(request.user).values()
  has_altus_clusters = [
    cluster for cluster in clusters if 'altus' in cluster['type'] or 'snowball' in cluster['type']
  ]

  if has_altus_clusters:
    if interface == 'impala' or interface == 'report':
      # Fix: ``dw_clusters`` was only bound inside the IS_K8S_ONLY branch, so
      # the comprehension below raised UnboundLocalError whenever K8S mode was
      # off. Default to an empty list in that case.
      # NOTE(review): sibling implementations fall back to
      # AnalyticDbApi(request.user).list_clusters()['clusters'] here — confirm
      # whether that branch was dropped intentionally.
      dw_clusters = []
      if IS_K8S_ONLY.get():
        dw_clusters = DataWarehouse2Api(request.user).list_clusters()['clusters']

      computes.extend([{
          'id': cluster.get('crn'),
          'name': cluster.get('clusterName'),
          'status': cluster.get('status'),
          'namespace': cluster.get('namespaceCrn', cluster.get('crn')),
          'compute_end_point': IS_K8S_ONLY.get() and '%(publicHost)s' % cluster['coordinatorEndpoint'] or '',
          'type': 'altus-dw'
        } for cluster in dw_clusters
      ])

  response[interface] = computes
  response['status'] = 0

  return JsonResponse(response)
def get_config(request):
  """Serialize the cluster configuration for the requesting user."""
  payload = get_cluster_config(request.user)
  payload.update(
    clusters=list(get_clusters(request.user).values()),
    status=0,
  )
  return JsonResponse(payload)
def get_context_computes(request, interface):
  """Return the computes available for *interface*.

  Direct clusters are always listed for query/scheduler interfaces; Altus
  Analytic DB and Data Engineering clusters are added when configured, and
  the ``jobs`` interface gets one synthetic entry per Altus service.
  """
  response = {}
  computes = []
  clusters = get_clusters(request.user).values()

  if interface == 'hive' or interface == 'impala' or interface == 'oozie' or interface == 'report':
    computes.extend([{
        'id': cluster['id'],
        'name': cluster['name'],
        'namespace': cluster['id'],
        'interface': interface,
        'type': cluster['type']
      } for cluster in clusters if cluster.get('type') == 'direct'
    ])

  if interface == 'impala' or interface == 'report':
    if [cluster for cluster in clusters if cluster['type'] == 'altus']:
      computes.extend([{
          'id': cluster.get('crn'),
          'name': cluster.get('clusterName'),
          'status': cluster.get('status'),
          'namespace': cluster.get('namespaceCrn', cluster.get('crn')),
          'type': 'altus-dw'
        } for cluster in AnalyticDbApi(request.user).list_clusters()['clusters']
          # Fix: default a missing ``cdhVersion`` to '' — ``None >= 'CDH515'``
          # raises TypeError on Python 3.
          if cluster.get('status') == 'CREATED' and (cluster.get('cdhVersion') or '') >= 'CDH515'
      ])

  if interface == 'oozie' or interface == 'spark2':
    if [cluster for cluster in clusters if cluster['type'] == 'altus']:
      computes.extend([{
          'id': cluster.get('crn'),
          'name': cluster.get('clusterName'),
          'status': cluster.get('status'),
          'environmentType': cluster.get('environmentType'),
          'serviceType': cluster.get('serviceType'),
          'namespace': cluster.get('namespaceCrn'),
          'type': 'altus-de'
        } for cluster in DataEngApi(request.user).list_clusters()['clusters']
      ])
      # TODO if interface == 'spark2' keep only SPARK type

  if interface == 'jobs':
    for cluster in clusters:
      cluster = {
        'id': cluster.get('id'),
        'name': cluster.get('name'),
        'status': 'CREATED',
        'environmentType': cluster.get('type'),
        'serviceType': cluster.get('interface'),
        'namespace': '',
        'type': cluster.get('type')
      }
      if cluster.get('type') == 'altus':
        # One synthetic entry per Altus service (DE and DW).
        cluster['name'] = 'Altus DE'
        cluster['type'] = 'altus-de'
        computes.append(cluster)
        cluster = cluster.copy()
        cluster['name'] = 'Altus Data Warehouse'
        cluster['type'] = 'altus-dw'
        computes.append(cluster)

  response[interface] = computes
  response['status'] = 0

  return JsonResponse(response)