Example #1
0
def create_cluster(request):
    """Create a new analytic cluster from POST parameters.

    Uses DataWarehouse2Api when 'is_k8' is 'true', otherwise AnalyticDbApi.
    Returns a JsonResponse with status 0 and the API payload on success, or
    status -1 and an error message on failure.
    """
    response = {'status': -1}

    is_k8 = request.POST.get('is_k8') == 'true'
    cluster_name = request.POST.get('cluster_name') or 'Analytic Cluster'
    cdh_version = request.POST.get('cdh_version')
    public_key = request.POST.get('public_key')
    # Bug fix: the original default was the two adjacent literals
    # "workers_group_size" '' which Python concatenates into the string
    # "workers_group_size"; an empty-string default was clearly intended.
    instance_type = request.POST.get('instance_type', '')
    environment_name = request.POST.get('environment_name')
    workers_group_size = int(request.POST.get('workers_group_size', '3'))
    namespace_name = request.POST.get('namespace_name', 'null')

    api = DataWarehouse2Api(request.user) if is_k8 else AnalyticDbApi(
        request.user)
    data = api.create_cluster(cloud_provider='aws',
                              cluster_name=cluster_name,
                              cdh_version=cdh_version,
                              public_key=public_key,
                              instance_type=instance_type,
                              environment_name=environment_name,
                              namespace_name=namespace_name,
                              workers_group_size=workers_group_size)

    if data:
        response['status'] = 0
        response['data'] = data
    else:
        # Bug fix: `data` is falsy in this branch, so data['details'] would
        # raise (TypeError on None, KeyError on an empty dict). Report
        # whatever detail is available instead of crashing.
        details = data.get('details') if isinstance(data, dict) else data
        response['message'] = 'Data Warehouse API: %s' % details

    return JsonResponse(response)
Example #2
0
def get_context_namespaces(request, interface):
  '''
  Namespaces are node cluster contexts (e.g. Hive + Ranger) that can be queried by computes.
  '''
  response = {}
  namespaces = []

  clusters = list(get_clusters(request.user).values())

  # Currently broken if not sent
  namespaces.extend([{
      'id': cluster['id'],
      'name': cluster['name'],
      'status': 'CREATED',
      'computes': [cluster]
    } for cluster in clusters if cluster.get('type') == 'direct'
  ])

  if interface == 'hive' or interface == 'impala' or interface == 'report':
    if get_cluster_config(request.user)['has_computes']:
      # Note: attaching computes to namespaces might be done via the frontend in the future
      if interface == 'impala':
        if IS_K8S_ONLY.get():
          adb_clusters = DataWarehouse2Api(request.user).list_clusters()['clusters']
        else:
          adb_clusters = AnalyticDbApi(request.user).list_clusters()['clusters']
        for _cluster in adb_clusters: # Add "fake" namespace if needed
          if not _cluster.get('namespaceCrn'):
            _cluster['namespaceCrn'] = _cluster['crn']
            _cluster['id'] = _cluster['crn']
            _cluster['namespaceName'] = _cluster['clusterName']
            _cluster['name'] = _cluster['clusterName']
            # Bug fix: the original line ended with a stray trailing comma,
            # which made compute_end_point a 1-tuple instead of a string.
            _cluster['compute_end_point'] = '%(publicHost)s' % _cluster['coordinatorEndpoint'] if IS_K8S_ONLY.get() else ''
      else:
        adb_clusters = []

      if IS_K8S_ONLY.get():
        sdx_namespaces = []
      else:
        sdx_namespaces = SdxApi(request.user).list_namespaces()

      # Adding "fake" namespace for cluster without one
      sdx_namespaces.extend([_cluster for _cluster in adb_clusters if not _cluster.get('namespaceCrn') or (IS_K8S_ONLY.get() and 'TERMINAT' not in _cluster['status'])])

      namespaces.extend([{
          'id': namespace.get('crn', 'None'),
          'name': namespace.get('namespaceName'),
          'status': namespace.get('status'),
          'computes': [_cluster for _cluster in adb_clusters if _cluster.get('namespaceCrn') == namespace.get('crn')]
        } for namespace in sdx_namespaces if namespace.get('status') == 'CREATED' or IS_K8S_ONLY.get()
      ])

  response[interface] = namespaces
  response['status'] = 0

  return JsonResponse(response)
Example #3
0
def get_context_namespaces(request, interface):
  '''
  Build the list of namespaces (cluster contexts) available to `interface`
  for the requesting user, and return it as a JsonResponse.
  '''
  response = {}
  namespaces = []

  clusters = get_clusters(request.user).values()

  namespaces.extend([{
      'id': cluster['id'],
      'name': cluster['name'],
      'status': 'CREATED',
      'computes': [cluster]
    } for cluster in clusters if cluster.get('type') == 'direct' and cluster['interface'] in (interface, 'all')
  ])

  if interface == 'hive' or interface == 'impala' or interface == 'report':
    # From Altus SDX
    if [cluster for cluster in clusters if 'altus' in cluster['type']]:
      # Note: attaching computes to namespaces might be done via the frontend in the future
      if interface == 'impala':
        if IS_K8S_ONLY.get():
          adb_clusters = DataWarehouse2Api(request.user).list_clusters()['clusters']
        else:
          adb_clusters = AnalyticDbApi(request.user).list_clusters()['clusters']
        for _cluster in adb_clusters: # Add "fake" namespace if needed
          if not _cluster.get('namespaceCrn'):
            _cluster['namespaceCrn'] = _cluster['crn']
            _cluster['id'] = _cluster['crn']
            _cluster['namespaceName'] = _cluster['clusterName']
            _cluster['name'] = _cluster['clusterName']
            # Bug fix: the original line ended with a stray trailing comma,
            # which made compute_end_point a 1-tuple instead of a string.
            _cluster['compute_end_point'] = '%(publicHost)s' % _cluster['coordinatorEndpoint'] if IS_K8S_ONLY.get() else ''
      else:
        adb_clusters = []

      if IS_K8S_ONLY.get():
        sdx_namespaces = []
      else:
        sdx_namespaces = SdxApi(request.user).list_namespaces()

      # Adding "fake" namespace for cluster without one
      sdx_namespaces.extend([_cluster for _cluster in adb_clusters if not _cluster.get('namespaceCrn') or (IS_K8S_ONLY.get() and 'TERMINAT' not in _cluster['status'])])

      namespaces.extend([{
          'id': namespace.get('crn', 'None'),
          'name': namespace.get('namespaceName'),
          'status': namespace.get('status'),
          'computes': [_cluster for _cluster in adb_clusters if _cluster.get('namespaceCrn') == namespace.get('crn')]
        } for namespace in sdx_namespaces if namespace.get('status') == 'CREATED' or IS_K8S_ONLY.get()
      ])

  response[interface] = namespaces
  response['status'] = 0

  return JsonResponse(response)
Example #4
0
def get_context_computes(request, interface):
  '''
  Build the list of computes available to `interface` for the requesting
  user (direct clusters plus Altus DW/DE clusters) as a JsonResponse.
  '''
  response = {}
  computes = []

  clusters = get_clusters(request.user).values()
  has_altus_clusters = [cluster for cluster in clusters if 'altus' in cluster['type']]

  computes.extend([{
      'id': cluster['id'],
      'name': cluster['name'],
      'namespace': cluster['id'],
      'interface': interface,
      'type': cluster['type']
    } for cluster in clusters if cluster.get('type') == 'direct' and cluster['interface'] in (interface, 'all')
  ])

  if has_altus_clusters:
    if interface == 'impala' or interface == 'report':
      if IS_K8S_ONLY.get():
        dw_clusters = DataWarehouse2Api(request.user).list_clusters()['clusters']
      else:
        dw_clusters = AnalyticDbApi(request.user).list_clusters()['clusters']

      # Bug fix: cluster.get('cdhVersion') returns None when the key is
      # missing, and None >= 'CDH515' raises TypeError on Python 3; default
      # to '' so missing versions simply fail the comparison.
      computes.extend([{
          'id': cluster.get('crn'),
          'name': cluster.get('clusterName'),
          'status': cluster.get('status'),
          'namespace': cluster.get('namespaceCrn', cluster.get('crn')),
          'compute_end_point': IS_K8S_ONLY.get() and '%(publicHost)s' % cluster['coordinatorEndpoint'] or '',
          'type': 'altus-dw'
        } for cluster in dw_clusters if (cluster.get('status') == 'CREATED' and cluster.get('cdhVersion', '') >= 'CDH515') or (IS_K8S_ONLY.get() and 'TERMINAT' not in cluster['status'])]
      )

    if interface == 'oozie' or interface == 'spark2':
      computes.extend([{
          'id': cluster.get('crn'),
          'name': cluster.get('clusterName'),
          'status': cluster.get('status'),
          'environmentType': cluster.get('environmentType'),
          'serviceType': cluster.get('serviceType'),
          'namespace': cluster.get('namespaceCrn'),
          'type': 'altus-de'
        } for cluster in DataEngApi(request.user).list_clusters()['clusters']]
      )
      # TODO if interface == 'spark2' keep only SPARK type

  response[interface] = computes
  response['status'] = 0

  return JsonResponse(response)
Example #5
0
def update_cluster(request):
    """Resize an existing Data Warehouse cluster from POST parameters.

    Returns a JsonResponse with status 0 and the API payload on success, or
    status -1 and an error message on failure.
    """
    response = {'status': -1}

    cluster_name = request.POST.get('cluster_name') or 'Analytic Cluster'
    workers_group_size = int(request.POST.get('workers_group_size', '3'))

    api = DataWarehouse2Api(request.user)
    data = api.update_cluster(cluster_name=cluster_name,
                              workers_group_size=workers_group_size)

    if data:
        response['status'] = 0
        response['data'] = data
    else:
        # Bug fix: `data` is falsy in this branch, so data['details'] would
        # raise (TypeError on None, KeyError on an empty dict). Report
        # whatever detail is available instead of crashing.
        details = data.get('details') if isinstance(data, dict) else data
        response['message'] = 'Data Warehouse API: %s' % details

    return JsonResponse(response)
Example #6
0
def get_context_computes(request, interface):
    """Some clusters like Snowball can have multiple computes for a certain
    languages (Hive, Impala...).
    """
    response = {}
    computes = []

    clusters = get_clusters(request.user).values()
    # Any Altus- or Snowball-typed cluster enables the DW compute lookup.
    has_altus_clusters = [
        cluster for cluster in clusters
        if 'altus' in cluster['type'] or 'snowball' in cluster['type']
    ]

    # Only the K8s-only deployment exposes Data Warehouse v2 computes here.
    if (has_altus_clusters
            and interface in ('impala', 'report')
            and IS_K8S_ONLY.get()):
        dw_clusters = DataWarehouse2Api(
            request.user).list_clusters()['clusters']
        for dw_cluster in dw_clusters:
            end_point = (IS_K8S_ONLY.get()
                         and '%(publicHost)s' % dw_cluster['coordinatorEndpoint']
                         or '')
            computes.append({
                'id': dw_cluster.get('crn'),
                'name': dw_cluster.get('clusterName'),
                'status': dw_cluster.get('status'),
                'namespace': dw_cluster.get('namespaceCrn',
                                            dw_cluster.get('crn')),
                'compute_end_point': end_point,
                'type': 'altus-dw',
            })

    response[interface] = computes
    response['status'] = 0

    return JsonResponse(response)
Example #7
0
def update_cluster(request):
    """Update a Data Warehouse cluster's sizing or auto-resize settings.

    Reads POST parameters: 'cluster_name', 'auto_resize_changed', and either
    the auto-resize bounds ('auto_resize_enabled', 'auto_resize_max',
    'auto_resize_min', 'auto_resize_cpu') or a fixed 'workers_group_size'.
    Returns a JsonResponse with status 0 and the API payload on success, or
    status -1 and an error message on failure.
    """
    response = {'status': -1}

    cluster_name = request.POST.get('cluster_name') or 'Analytic Cluster'
    auto_resize_changed = request.POST.get('auto_resize_changed') == 'true'

    params = {
        'clusterName': cluster_name,
        'updateClusterAutoResizeChanged': auto_resize_changed
    }

    if auto_resize_changed:
        updateClusterAutoResize = request.POST.get(
            'auto_resize_enabled') == 'true'
        params['updateClusterAutoResize'] = updateClusterAutoResize
        if updateClusterAutoResize:
            # 'auto_resize_max' is required when auto-resize is enabled;
            # min and cpu are optional.
            params['updateClusterAutoResizeMax'] = int(
                request.POST.get('auto_resize_max'))
            if request.POST.get('auto_resize_min'):
                params['updateClusterAutoResizeMin'] = int(
                    request.POST.get('auto_resize_min'))
            if request.POST.get('auto_resize_cpu'):
                params['updateClusterAutoResizeCpu'] = int(
                    request.POST.get('auto_resize_cpu'))
    else:
        params['workerReplicas'] = int(
            request.POST.get('workers_group_size', '3'))

    api = DataWarehouse2Api(request.user)
    data = api.update_cluster(**params)

    if data:
        response['status'] = 0
        response['data'] = data
    else:
        # Bug fix: `data` is falsy in this branch, so data['details'] would
        # raise (TypeError on None, KeyError on an empty dict). Report
        # whatever detail is available instead of crashing.
        details = data.get('details') if isinstance(data, dict) else data
        response['message'] = 'Data Warehouse API: %s' % details

    return JsonResponse(response)
Example #8
0
File: clusters.py Project: zgfh/hue
  def __init__(self, user, version=1):
    """Bind this API wrapper to *user*.

    The requested *version* is recorded, and all cluster operations are
    delegated to a DataWarehouse2Api instance for the same user.
    """
    super(ClusterApi, self).__init__(user)

    # Always delegate to the v2 Data Warehouse API, whatever the version.
    self.api = DataWarehouse2Api(self.user)
    self.version = version
Example #9
0
    def __init__(self, user, version=1):
        """Bind this API wrapper to *user*, selecting the backend by version.

        Version 2 delegates to DataWarehouse2Api; any other version falls
        back to AnalyticDbApi.
        """
        super(DataWarehouseClusterApi, self).__init__(user)

        self.version = version
        if version == 2:
            self.api = DataWarehouse2Api(self.user)
        else:
            self.api = AnalyticDbApi(self.user)