Example #1
def get_context_namespaces(request, interface):
  '''
  Namespaces are node cluster contexts (e.g. Hive + Ranger) that can be queried by computes.
  '''
  response = {}
  namespaces = []

  clusters = list(get_clusters(request.user).values())

  # The UI currently breaks if no namespaces are sent, so direct clusters are always included
  namespaces.extend([{
      'id': cluster['id'],
      'name': cluster['name'],
      'status': 'CREATED',
      'computes': [cluster]
    } for cluster in clusters if cluster.get('type') == 'direct'
  ])

  if interface in ('hive', 'impala', 'report'):
    if get_cluster_config(request.user)['has_computes']:
      # Note: attaching computes to namespaces might be done via the frontend in the future
      if interface == 'impala':
        if IS_K8S_ONLY.get():
          adb_clusters = DataWarehouse2Api(request.user).list_clusters()['clusters']
        else:
          adb_clusters = AnalyticDbApi(request.user).list_clusters()['clusters']
        for _cluster in adb_clusters: # Add "fake" namespace if needed
          if not _cluster.get('namespaceCrn'):
            _cluster['namespaceCrn'] = _cluster['crn']
            _cluster['id'] = _cluster['crn']
            _cluster['namespaceName'] = _cluster['clusterName']
            _cluster['name'] = _cluster['clusterName']
            _cluster['compute_end_point'] = '%(publicHost)s' % _cluster['coordinatorEndpoint'] if IS_K8S_ONLY.get() else ''
      else:
        adb_clusters = []

      if IS_K8S_ONLY.get():
        sdx_namespaces = []
      else:
        sdx_namespaces = SdxApi(request.user).list_namespaces()

      # Add a "fake" namespace for clusters without one
      sdx_namespaces.extend([
        _cluster for _cluster in adb_clusters
        if not _cluster.get('namespaceCrn') or (IS_K8S_ONLY.get() and 'TERMINAT' not in _cluster['status'])
      ])

      namespaces.extend([{
          'id': namespace.get('crn', 'None'),
          'name': namespace.get('namespaceName'),
          'status': namespace.get('status'),
          'computes': [_cluster for _cluster in adb_clusters if _cluster.get('namespaceCrn') == namespace.get('crn')]
        } for namespace in sdx_namespaces if namespace.get('status') == 'CREATED' or IS_K8S_ONLY.get()
      ])

  response[interface] = namespaces
  response['status'] = 0

  return JsonResponse(response)
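
For reference, a call with interface='hive' and a single 'direct' cluster yields JSON shaped roughly as below. This is a minimal sketch: the top-level key always mirrors the interface argument, and the field values here are illustrative placeholders, not output from a real cluster.

# Minimal sketch of the payload this view produces; all values are illustrative assumptions.
example_payload = {
  'hive': [{
    'id': 'default',
    'name': 'default',
    'status': 'CREATED',
    'computes': [{'id': 'default', 'name': 'default', 'type': 'direct'}]
  }],
  'status': 0
}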
Example #2
def get_context_namespaces(request, interface):
  response = {}
  namespaces = []

  clusters = get_clusters(request.user).values()

  namespaces.extend([{
      'id': cluster['id'],
      'name': cluster['name'],
      'status': 'CREATED',
      'computes': [cluster]
    } for cluster in clusters if cluster.get('type') == 'direct' and cluster['interface'] in (interface, 'all')
  ])

  if interface in ('hive', 'impala', 'report'):
    # From Altus SDX
    if [cluster for cluster in clusters if 'altus' in cluster['type']]:
      # Note: attaching computes to namespaces might be done via the frontend in the future
      if interface == 'impala':
        if IS_K8S_ONLY.get():
          adb_clusters = DataWarehouse2Api(request.user).list_clusters()['clusters']
        else:
          adb_clusters = AnalyticDbApi(request.user).list_clusters()['clusters']
        for _cluster in adb_clusters: # Add "fake" namespace if needed
          if not _cluster.get('namespaceCrn'):
            _cluster['namespaceCrn'] = _cluster['crn']
            _cluster['id'] = _cluster['crn']
            _cluster['namespaceName'] = _cluster['clusterName']
            _cluster['name'] = _cluster['clusterName']
            _cluster['compute_end_point'] = '%(publicHost)s' % _cluster['coordinatorEndpoint'] if IS_K8S_ONLY.get() else ''
      else:
        adb_clusters = []

      if IS_K8S_ONLY.get():
        sdx_namespaces = []
      else:
        sdx_namespaces = SdxApi(request.user).list_namespaces()

      # Add a "fake" namespace for clusters without one
      sdx_namespaces.extend([
        _cluster for _cluster in adb_clusters
        if not _cluster.get('namespaceCrn') or (IS_K8S_ONLY.get() and 'TERMINAT' not in _cluster['status'])
      ])

      namespaces.extend([{
          'id': namespace.get('crn', 'None'),
          'name': namespace.get('namespaceName'),
          'status': namespace.get('status'),
          'computes': [_cluster for _cluster in adb_clusters if _cluster.get('namespaceCrn') == namespace.get('crn')]
        } for namespace in sdx_namespaces if namespace.get('status') == 'CREATED' or IS_K8S_ONLY.get()
      ])

  response[interface] = namespaces
  response['status'] = 0

  return JsonResponse(response)
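
The only functional difference from Example #1 is the extra cluster['interface'] in (interface, 'all') filter on direct clusters. A minimal, self-contained sketch of that filter with hypothetical cluster dicts:

clusters = [
  {'id': 'c1', 'name': 'Local Hive', 'type': 'direct', 'interface': 'hive'},
  {'id': 'c2', 'name': 'Any interface', 'type': 'direct', 'interface': 'all'},
  {'id': 'c3', 'name': 'Altus DW', 'type': 'altus-dw', 'interface': 'impala'},
]
interface = 'hive'
direct = [c for c in clusters if c.get('type') == 'direct' and c['interface'] in (interface, 'all')]
assert [c['id'] for c in direct] == ['c1', 'c2']  # c3 is neither 'direct' nor matching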
Example #3
def get_context_computes(request, interface):
  response = {}
  computes = []

  clusters = get_clusters(request.user).values()
  has_altus_clusters = [cluster for cluster in clusters if 'altus' in cluster['type']]

  computes.extend([{
      'id': cluster['id'],
      'name': cluster['name'],
      'namespace': cluster['id'],
      'interface': interface,
      'type': cluster['type']
    } for cluster in clusters if cluster.get('type') == 'direct' and cluster['interface'] in (interface, 'all')
  ])

  if has_altus_clusters:
    if interface in ('impala', 'report'):
      if IS_K8S_ONLY.get():
        dw_clusters = DataWarehouse2Api(request.user).list_clusters()['clusters']
      else:
        dw_clusters = AnalyticDbApi(request.user).list_clusters()['clusters']

      computes.extend([{
          'id': cluster.get('crn'),
          'name': cluster.get('clusterName'),
          'status': cluster.get('status'),
          'namespace': cluster.get('namespaceCrn', cluster.get('crn')),
          'compute_end_point': '%(publicHost)s' % cluster['coordinatorEndpoint'] if IS_K8S_ONLY.get() else '',
          'type': 'altus-dw'
        } for cluster in dw_clusters
          if (cluster.get('status') == 'CREATED' and cluster.get('cdhVersion', '') >= 'CDH515')
          or (IS_K8S_ONLY.get() and 'TERMINAT' not in cluster['status'])
      ])

    if interface in ('oozie', 'spark2'):
      computes.extend([{
          'id': cluster.get('crn'),
          'name': cluster.get('clusterName'),
          'status': cluster.get('status'),
          'environmentType': cluster.get('environmentType'),
          'serviceType': cluster.get('serviceType'),
          'namespace': cluster.get('namespaceCrn'),
          'type': 'altus-de'
        } for cluster in DataEngApi(request.user).list_clusters()['clusters']]
      )
      # TODO if interface == 'spark2' keep only SPARK type

  response[interface] = computes
  response['status'] = 0

  return JsonResponse(response)
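
Note that the cdhVersion gate above compares version strings lexicographically, which only orders correctly while the strings have uniform width. A quick illustration of the quirk:

# Lexicographic comparison of version strings can misorder releases:
assert 'CDH515' >= 'CDH515'  # as intended
assert 'CDH61' >= 'CDH515'   # as intended ('6' > '5')
assert 'CDH52' >= 'CDH515'   # quirk: 5.2 sorts above 5.15 character by character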
Example #4
def get_context_computes(request, interface):
    '''
    Some clusters, like Snowball, can have multiple computes for certain languages (Hive, Impala, ...).
    '''
    response = {}
    computes = []

    clusters = get_clusters(request.user).values()
    has_altus_clusters = [
        cluster for cluster in clusters
        if 'altus' in cluster['type'] or 'snowball' in cluster['type']
    ]

    if has_altus_clusters:
        if interface in ('impala', 'report'):
            if IS_K8S_ONLY.get():
                dw_clusters = DataWarehouse2Api(request.user).list_clusters()['clusters']
                computes.extend([{
                    'id': cluster.get('crn'),
                    'name': cluster.get('clusterName'),
                    'status': cluster.get('status'),
                    'namespace': cluster.get('namespaceCrn', cluster.get('crn')),
                    'compute_end_point': '%(publicHost)s' % cluster['coordinatorEndpoint'] if IS_K8S_ONLY.get() else '',
                    'type': 'altus-dw'
                } for cluster in dw_clusters])

    response[interface] = computes
    response['status'] = 0

    return JsonResponse(response)
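
The compute_end_point value relies on Python's mapping-key % formatting against the coordinatorEndpoint dict. A minimal sketch with a hypothetical endpoint; the publicHost key is the one the code above assumes the API returns:

endpoint = {'publicHost': 'coordinator.example.com', 'port': 443}  # hypothetical values
assert '%(publicHost)s' % endpoint == 'coordinator.example.com'    # unused keys are ignored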
Example #5
def create_session(request):
    response = {'status': -1}

    notebook = json.loads(request.POST.get('notebook', '{}'))
    session = json.loads(request.POST.get('session', '{}'))

    # TODO: session creation should happen asynchronously on cluster selection when opening a notebook
    if IS_K8S_ONLY.get():
        return JsonResponse({
            "status": 0,
            "session": {
                "reuse_session": True,
                "type": session['type'],
                "properties": [{
                    "nice_name": "Settings",
                    "multiple": True,
                    "key": "settings",
                    "help_text": "Hive and Hadoop configuration properties.",
                    "defaultValue": [],
                    "type": "settings",
                    "options": [
                        "hive.map.aggr", "hive.exec.compress.output", "hive.exec.parallel",
                        "hive.execution.engine", "mapreduce.job.queuename"
                    ],
                    "value": []
                }]
            }
        })

    properties = session.get('properties', [])

    response['session'] = get_api(request, session).create_session(
        lang=session['type'], properties=properties)
    response['status'] = 0

    return JsonResponse(response)
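
For context, the view reads two JSON-encoded form fields from the POST body. A hypothetical request payload (the field names come from the code above; the contents are illustrative assumptions):

import json

post_data = {
    'notebook': json.dumps({'type': 'hive', 'snippets': []}),   # assumed notebook shape
    'session': json.dumps({'type': 'hive', 'properties': []}),  # 'type' and 'properties' are read above
}
session = json.loads(post_data['session'])
assert session['type'] == 'hive' and session.get('properties') == []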