def create_cluster(request):
  """Create an Altus Data Engineering cluster on AWS from POST parameters.

  Expects POST fields: cluster_name, cdh_version, public_key, instance_type,
  environment_name, and optionally workers_group_size (default '3') and
  namespace_name (default 'null').

  Returns a JsonResponse with status 0 and the API payload on success, or
  status -1 and an error message on failure.
  """
  response = {'status': -1}

  cluster_name = request.POST.get('cluster_name')
  cdh_version = request.POST.get('cdh_version')
  public_key = request.POST.get('public_key')
  # Bug fix: the default was the accidental literal "workers_group_size"
  # (a garbled paste); a missing field should simply come through as None.
  instance_type = request.POST.get('instance_type')
  environment_name = request.POST.get('environment_name')
  workers_group_size = request.POST.get('workers_group_size', '3')
  namespace_name = request.POST.get('namespace_name', 'null')

  api = DataEngApi(request.user)

  data = api.create_cluster(
      cloud_provider='aws',
      cluster_name=cluster_name,
      cdh_version=cdh_version,
      public_key=public_key,
      instance_type=instance_type,
      environment_name=environment_name,
      namespace_name=namespace_name,
      workers_group_size=workers_group_size
  )

  if data:
    response['status'] = 0
    response['data'] = data
  else:
    # Bug fix: `data` is falsy in this branch, so the old data['details']
    # lookup could never succeed and would raise instead of reporting.
    response['message'] = 'Data Engineering: %s' % data

  return JsonResponse(response)
def apps(self, filters):
  """List Altus Data Engineering jobs for the job browser.

  `filters` may contain a 'time' entry ({'time_unit', 'time_value'}) that
  restricts the listing to jobs created within the given window.
  """
  kwargs = {}

  if 'time' in filters:
    time_filter = filters['time']
    if time_filter['time_unit'] == 'minutes':
      delta = timedelta(minutes=int(time_filter['time_value']))
    elif time_filter['time_unit'] == 'hours':
      delta = timedelta(hours=int(time_filter['time_value']))
    else:
      # Any other unit is treated as days.
      delta = timedelta(days=int(time_filter['time_value']))
    kwargs['creation_date_after'] = (datetime.today() - delta).strftime(DATE_FORMAT)

  # TODO: filter on 'cluster_crn'

  api = DataEngApi(self.user)
  jobs = api.list_jobs(**kwargs)

  return {
    'apps': [{
      'id': app['jobId'],
      'name': app['creationDate'],
      'status': app['status'],
      'apiStatus': self._api_status(app['status']),
      'type': app['jobType'],
      'user': '',
      'progress': 100,
      'duration': 10 * 3600,
      'submitted': app['creationDate']
    } for app in jobs['jobs']],
    # Bug fix: len(jobs) counted the keys of the API response dict,
    # not the number of jobs returned.
    'total': len(jobs['jobs'])
  }
def apps(self, filters):
  """List Altus Data Engineering jobs for the job browser.

  `filters` may contain a 'time' entry ({'time_unit', 'time_value'}) that
  restricts the listing to jobs created within the given window.
  """
  kwargs = {}

  if 'time' in filters:
    time_filter = filters['time']
    if time_filter['time_unit'] == 'minutes':
      delta = timedelta(minutes=int(time_filter['time_value']))
    elif time_filter['time_unit'] == 'hours':
      delta = timedelta(hours=int(time_filter['time_value']))
    else:
      # Any other unit is treated as days.
      delta = timedelta(days=int(time_filter['time_value']))
    kwargs['creation_date_after'] = (datetime.today() - delta).strftime(DATE_FORMAT)

  # TODO: filter on 'cluster_crn'

  api = DataEngApi(self.user)
  jobs = api.list_jobs(**kwargs)

  return {
    'apps': [{
      'id': app['jobId'],
      'name': app['jobName'],
      'status': app['status'],
      'apiStatus': self._api_status(app['status']),
      'type': 'Altus %(jobType)s' % app,
      'user': '',
      # Show indeterminate progress while running, complete otherwise.
      'progress': 50 if self._api_status(app['status']) == 'RUNNING' else 100,
      'duration': 10 * 3600,
      'submitted': app['creationDate'],
      'canWrite': True
    } for app in jobs['jobs']],
    # Bug fix: len(jobs) counted the keys of the API response dict,
    # not the number of jobs returned.
    'total': len(jobs['jobs'])
  }
def apps(self, filters):
  """List Altus Data Engineering clusters for the job browser."""
  api = DataEngApi(self.user)
  jobs = api.list_clusters()

  return {
    'apps': [{
      'id': app['crn'],
      'name': '%(clusterName)s' % app,
      'status': app['status'],
      'apiStatus': self._api_status(app['status']),
      'type': '%(serviceType)s %(workersGroupSize)s %(instanceType)s %(cdhVersion)s' % app,
      # Assumes cluster names are prefixed "<user>-..." — TODO confirm.
      'user': app['clusterName'].split('-', 1)[0],
      'progress': 100,
      'queue': 'group',
      'duration': 1,
      'submitted': app['creationDate']
    } for app in jobs['clusters']],
    # Bug fix: len(jobs) counted the keys of the API response dict,
    # not the number of clusters returned.
    'total': len(jobs['clusters'])
  }
def action(self, appid, action):
  """Apply an action to the given Altus cluster ids.

  Only 'kill' is supported: each cluster id in `appid` is deleted.
  Returns {'message', 'status'} where status is -1 if any deletion errored.
  """
  message = {'message': '', 'status': 0}

  if action.get('action') == 'kill':
    api = DataEngApi(self.user)

    for cluster_id in appid:
      outcome = api.delete_cluster(cluster_id)
      error = outcome.get('error')

      if error:
        message['message'] = error
        message['status'] = -1
      elif outcome.get('contents') and message.get('status') != -1:
        # Keep the last successful payload, but never mask an earlier error.
        message['message'] = outcome.get('contents')

  return message
def action(self, appid, action):
  """Apply an action to the given Altus cluster ids.

  Only 'kill' is supported: each cluster id in `appid` is deleted.
  Returns {'message', 'status'} where status is -1 if any deletion errored.
  """
  message = {'message': '', 'status': 0}

  if action.get('action') == 'kill':
    api = DataEngApi(self.user)

    for _id in appid:
      result = api.delete_cluster(_id)
      if result.get('error'):
        message['message'] = result.get('error')
        message['status'] = -1
      elif result.get('contents') and message.get('status') != -1:
        # Keep the last successful payload, but never mask an earlier error.
        message['message'] = result.get('contents')

  # Idiom fix: dropped the stray C-style trailing semicolon on the return.
  return message
def apps(self, filters):
  """List Altus Data Engineering clusters, newest first."""
  api = DataEngApi(self.user)
  jobs = api.list_clusters()

  return {
    'apps': [{
      'id': app['crn'],
      'name': '%(clusterName)s' % app,
      'status': app['status'],
      'apiStatus': self._api_status(app['status']),
      'type': 'Altus %(serviceType)s %(workersGroupSize)s %(instanceType)s %(cdhVersion)s' % app,
      # Assumes cluster names are prefixed "<user>-..." — TODO confirm.
      'user': app['clusterName'].split('-', 1)[0],
      'progress': 100,
      'queue': 'group',
      'duration': 1,
      'submitted': app['creationDate'],
      'canWrite': True
    } for app in sorted(jobs['clusters'], key=lambda a: a['creationDate'], reverse=True)],
    # Bug fix: len(jobs) counted the keys of the API response dict,
    # not the number of clusters returned.
    'total': len(jobs['clusters'])
  }
def cancel(self, notebook, snippet):
  """Terminate the Altus job behind the snippet handle, if it was submitted.

  Returns {'status': 0} on success, or {'status': -1, 'message': ...} when
  the snippet never received a job id (i.e. submission failed).
  """
  if snippet['result']['handle'].get('id'):
    job_id = snippet['result']['handle']['id']
    DataEngApi(self.user).terminate_job(job_id=job_id)
    response = {'status': 0}
  else:
    # Typo fix in the user-facing message: "submition" -> "submission".
    response = {
      'status': -1,
      'message': _('Could not cancel because of unsuccessful submission.')
    }

  return response
def get_context_computes(request, interface):
  """Return the compute clusters usable by the given interface.

  Direct (classic) clusters are listed for the SQL/scheduler interfaces;
  Altus Analytic DB and Data Engineering clusters are fetched from the
  cloud APIs when at least one 'altus' cluster is configured.
  """
  response = {}
  computes = []

  clusters = get_clusters(request.user).values()
  has_altus = any(cluster['type'] == 'altus' for cluster in clusters)

  if interface in ('hive', 'impala', 'oozie', 'jobs', 'report'):
    computes.extend([{
        'id': cluster['id'],
        'name': cluster['name'],
        'namespace': cluster['id'],
        'interface': interface,
        'type': cluster['type']
      } for cluster in clusters if cluster.get('type') == 'direct'
    ])

  if interface in ('impala', 'jobs', 'report') and has_altus:
    computes.extend([{
        'id': cluster.get('crn'),
        'name': cluster.get('clusterName'),
        'status': cluster.get('status'),
        'namespace': cluster.get('namespaceCrn', cluster.get('crn')),
        'type': 'altus-adb'
      } for cluster in AnalyticDbApi(request.user).list_clusters()['clusters']
        if cluster.get('status') == 'CREATED'
    ])

  if interface in ('oozie', 'jobs', 'spark2') and has_altus:
    computes.extend([{
        'id': cluster.get('crn'),
        'name': cluster.get('clusterName'),
        'status': cluster.get('status'),
        'environmentType': cluster.get('environmentType'),
        'serviceType': cluster.get('serviceType'),
        'namespace': cluster.get('namespaceCrn'),
        'type': 'altus-de'
      } for cluster in DataEngApi(request.user).list_clusters()['clusters']
    ])
    # TODO if interface == 'spark2' keep only SPARK type

  response[interface] = computes
  response['status'] = 0

  return JsonResponse(response)
def get_context_computes(request, interface):
  """Return the compute clusters usable by the given interface.

  Direct (classic) clusters are matched on their configured interface;
  Altus Data Warehouse and Data Engineering clusters are fetched from the
  cloud APIs when at least one Altus cluster is configured.
  """
  response = {}
  computes = []

  clusters = get_clusters(request.user).values()
  has_altus_clusters = [cluster for cluster in clusters if 'altus' in cluster['type']]

  computes.extend([{
      'id': cluster['id'],
      'name': cluster['name'],
      'namespace': cluster['id'],
      'interface': interface,
      'type': cluster['type']
    } for cluster in clusters
      if cluster.get('type') == 'direct' and cluster['interface'] in (interface, 'all')
  ])

  if has_altus_clusters:
    if interface == 'impala' or interface == 'report':
      if IS_K8S_ONLY.get():
        dw_clusters = DataWarehouse2Api(request.user).list_clusters()['clusters']
      else:
        dw_clusters = AnalyticDbApi(request.user).list_clusters()['clusters']

      computes.extend([{
          'id': cluster.get('crn'),
          'name': cluster.get('clusterName'),
          'status': cluster.get('status'),
          'namespace': cluster.get('namespaceCrn', cluster.get('crn')),
          # Idiom fix: replaced the fragile `cond and value or ''` chain
          # (breaks when the formatted value is falsy) with a conditional
          # expression; behavior is otherwise identical.
          'compute_end_point': '%(publicHost)s' % cluster['coordinatorEndpoint'] if IS_K8S_ONLY.get() else '',
          'type': 'altus-dw'
        } for cluster in dw_clusters
          if (cluster.get('status') == 'CREATED' and cluster.get('cdhVersion') >= 'CDH515')
            or (IS_K8S_ONLY.get() and 'TERMINAT' not in cluster['status'])
      ])

    if interface == 'oozie' or interface == 'spark2':
      computes.extend([{
          'id': cluster.get('crn'),
          'name': cluster.get('clusterName'),
          'status': cluster.get('status'),
          'environmentType': cluster.get('environmentType'),
          'serviceType': cluster.get('serviceType'),
          'namespace': cluster.get('namespaceCrn'),
          'type': 'altus-de'
        } for cluster in DataEngApi(request.user).list_clusters()['clusters']
      ])
      # TODO if interface == 'spark2' keep only SPARK type

  response[interface] = computes
  response['status'] = 0

  return JsonResponse(response)
def check_status(self, notebook, snippet):
  """Poll the Altus job referenced by the snippet handle.

  Returns {'status': 'running'} while the job is in a running state and
  {'status': 'available'} once it has finished; raises QueryError for
  failed or terminated jobs.
  """
  job_id = snippet['result']['handle']['id']

  listing = DataEngApi(self.user).list_jobs(job_ids=[job_id])
  job = listing['jobs'][0]

  status = job['status']

  if status in ('failed', 'terminated'):
    raise QueryError(_('Job was %s') % status)

  if status in RUNNING_STATES:
    return {'status': 'running'}

  return {'status': 'available'}
def execute(self, notebook, snippet):
  """Submit the snippet statement as a Hive job on the configured cluster.

  Raises QueryError if the submitted job is not in a running state;
  otherwise returns the job id and crn for later polling.
  """
  statement = snippet['statement']

  submission = DataEngApi(self.user).submit_hive_job(self.cluster_name, statement, params=None, job_xml=None)
  job = submission['jobs'][0]

  if job['status'] not in RUNNING_STATES:
    raise QueryError('Submission failure', handle=job['status'])

  return {
    'id': job['jobId'],
    'crn': job['crn'],
    'has_result_set': False,
  }
def app(self, appid):
  """Describe a single Altus Data Engineering job for the job browser."""
  job = DataEngApi(self.user).describe_job(job_id=appid)['job']

  return {
    'id': job['jobId'],
    'name': job['jobId'],
    'status': job['status'],
    'apiStatus': self._api_status(job['status']),
    'progress': 50,
    'duration': 10 * 3600,
    'submitted': job['creationDate'],
    'type': 'dataeng-job-%s' % job['jobType'],
    # Expose the raw API payload for the details pane.
    'properties': {'properties': job},
  }
def get_context_computes(request, interface):
  """Return the compute clusters usable by the given interface.

  Direct (classic) clusters feed the SQL/scheduler interfaces; Altus
  Analytic DB and Data Engineering clusters come from the cloud APIs.
  For the 'jobs' interface, each configured 'altus' cluster fans out into
  one 'Altus DE' and one 'Altus Data Warehouse' compute entry.
  """
  response = {}
  computes = []

  clusters = get_clusters(request.user).values()

  if interface in ('hive', 'impala', 'oozie', 'report'):
    computes.extend([{
        'id': cluster['id'],
        'name': cluster['name'],
        'namespace': cluster['id'],
        'interface': interface,
        'type': cluster['type']
      } for cluster in clusters if cluster.get('type') == 'direct'
    ])

  if interface in ('impala', 'report'):
    if any(cluster['type'] == 'altus' for cluster in clusters):
      computes.extend([{
          'id': cluster.get('crn'),
          'name': cluster.get('clusterName'),
          'status': cluster.get('status'),
          'namespace': cluster.get('namespaceCrn', cluster.get('crn')),
          'type': 'altus-dw'
        } for cluster in AnalyticDbApi(request.user).list_clusters()['clusters']
          if cluster.get('status') == 'CREATED' and cluster.get('cdhVersion') >= 'CDH515'
      ])

  if interface in ('oozie', 'spark2'):
    if any(cluster['type'] == 'altus' for cluster in clusters):
      computes.extend([{
          'id': cluster.get('crn'),
          'name': cluster.get('clusterName'),
          'status': cluster.get('status'),
          'environmentType': cluster.get('environmentType'),
          'serviceType': cluster.get('serviceType'),
          'namespace': cluster.get('namespaceCrn'),
          'type': 'altus-de'
        } for cluster in DataEngApi(request.user).list_clusters()['clusters']
      ])
    # TODO if interface == 'spark2' keep only SPARK type

  if interface == 'jobs':
    for source in clusters:
      compute = {
        'id': source.get('id'),
        'name': source.get('name'),
        'status': 'CREATED',
        'environmentType': source.get('type'),
        'serviceType': source.get('interface'),
        'namespace': '',
        'type': source.get('type')
      }
      if compute.get('type') == 'altus':
        # Fan out: one configured Altus entry yields a DE and a DW compute.
        compute['name'] = 'Altus DE'
        compute['type'] = 'altus-de'
        computes.append(compute)
        compute = compute.copy()
        compute['name'] = 'Altus Data Warehouse'
        compute['type'] = 'altus-dw'
        computes.append(compute)

  response[interface] = computes
  response['status'] = 0

  return JsonResponse(response)