def close_statement(*args, **kwargs):
    """Close the statement behind a notebook snippet and clean up task state.

    args[0] is the notebook dict, args[1] the snippet dict. Returns
    {'status': 0} on success, {'status': -1} when the download task has not
    reached PROGRESS yet or ended in an exception state. Raises QueryExpired
    when the task is unknown to the result backend (PENDING).
    """
    notebook = args[0]
    snippet = args[1]
    result = download_to_file.AsyncResult(notebook['uuid'])
    state = result.state
    status = 0
    if state == states.PENDING:
        # PENDING means the backend has no record of this task any more.
        raise QueryExpired()
    elif state == 'SUBMITTED' or states.state(state) < states.state('PROGRESS'):
        status = -1
    elif state in states.EXCEPTION_STATES:
        status = -1
    if status == 0:
        info = result.info
        # Copy the handle so the async close task sees a stable snapshot.
        snippet['result']['handle'] = info.get('handle', {}).copy()
        # Fixed task_id keeps the async close idempotent per notebook.
        close_statement_async.apply_async(args=args, kwargs=kwargs, task_id=_close_statement_async_id(notebook))
    # Drop the stored result and per-notebook artifacts regardless of status.
    result.forget()
    _cleanup(notebook)
    return {'status': status}
def test_lt(self):
    """Earlier lifecycle states must compare strictly less than later ones."""
    ordered_pairs = [
        (states.PENDING, states.SUCCESS),
        (states.RECEIVED, states.FAILURE),
        (states.STARTED, states.REVOKED),
        (states.state("CRASHED"), states.SUCCESS),
        (states.state("CRASHED"), states.FAILURE),
    ]
    for lower, higher in ordered_pairs:
        self.assertLess(lower, higher)
    # Unknown states rank above REVOKED in precedence ordering.
    self.assertTrue(states.REVOKED < states.state("CRASHED"))
def get_jobs(notebook, snippet, logs, **kwargs):
    """Return the jobs for a snippet, re-fetching the updated guid from the
    download_to_file task result.

    Raises QueryExpired when the task is unknown (PENDING); returns [] when
    the query has not reached PROGRESS yet or ended in an exception state.
    """
    # Re-implement to fetch updated guid in download_to_file from DB.
    result = download_to_file.AsyncResult(notebook['uuid'])
    state = result.state
    if state == states.PENDING:
        raise QueryExpired()
    elif state == 'SUBMITTED' or states.state(state) < states.state('PROGRESS'):
        return []
    elif state in states.EXCEPTION_STATES:
        result.maybe_reraise()
        return []
    info = result.info
    snippet['result']['handle'] = info.get('handle', {}).copy()
    request = _get_request(**kwargs)
    api = get_api(request, snippet)
    # Insidious problem where each call in the hive api transforms the
    # guid/secret to binary form. get_log does the transform, but not
    # get_jobs. get_jobs is called after get_log so it is usually not an
    # issue; our get_log implementation doesn't do the transform.
    if hasattr(api, '_get_handle'):
        # This is specific to Impala, should be handled in hiveserver2.
        api._get_handle(snippet)
    return api.get_jobs(notebook, snippet, logs)
def get_log(notebook, snippet, startFrom=None, size=None, postdict=None, user_id=None):
    """Return the query log text for a notebook, optionally skipping the
    first `startFrom` lines.

    Returns '' when the task has not produced output yet, failed, or when
    results are cached (no log file is written in that mode).
    Raises QueryExpired when the task is unknown (PENDING).
    """
    result = download_to_file.AsyncResult(notebook['uuid'])
    state = result.state
    if state == states.PENDING:
        raise QueryExpired()
    elif state == 'SUBMITTED' or states.state(state) < states.state('PROGRESS'):
        return ''
    elif state in states.EXCEPTION_STATES:
        return ''
    if TASK_SERVER.RESULT_CACHE.get():
        # Cached-result mode keeps no log file to read from.
        return ''
    else:
        if not startFrom:
            with storage.open(_log_key(notebook), 'r') as f:
                return f.read()
        else:
            # Skip the first `startFrom` lines and return the rest.
            count = 0
            output = string_io()
            with storage.open(_log_key(notebook), 'r') as f:
                for line in f:
                    count += 1
                    if count <= startFrom:
                        continue
                    output.write(line)
            return output.getvalue()
def wait_for_command(request, app_name, task_id, after):
    """Poll a background task; redirect when done, else render a wait page."""
    res = AsyncResult(task_id)
    description = ""
    if app_name != '_':
        # Record (or look up) the TaskLog entry for this task.
        app = models.App.objects.get(name=app_name)
        task, created = models.TaskLog.objects.get_or_create(
            task_id=task_id,
            defaults={'app': app, 'when': datetime.now()},
        )
        description = task.description
    if res.state == state(SUCCESS):
        # Finished: hand off to the follow-up view.
        target = reverse(after, kwargs={'app_name': app_name, 'task_id': task_id})
        return redirect(target)
    # Strip ANSI colour codes before display; append traceback on failure.
    log = ansi_escape.sub("", get_log(res))
    if res.state == state(FAILURE):
        log += str(res.traceback)
    context = {
        'app': app_name,
        'task_id': task_id,
        'log': log,
        'state': res.state,
        'running': res.state in [state(PENDING), state(STARTED)],
        'description': description,
    }
    return render(request, 'command_wait.html', context)
def get_log(notebook, snippet, startFrom=None, size=None, postdict=None, user_id=None):
    """Return the query log for a notebook from the task's log file.

    When `startFrom` is given, the first `startFrom` lines are skipped.
    Returns '' when the task has not reached PROGRESS or ended in an
    exception state; raises QueryExpired when the task is unknown (PENDING).
    """
    result = download_to_file.AsyncResult(notebook['uuid'])
    state = result.state
    if state == states.PENDING:
        raise QueryExpired()
    elif state == 'SUBMITTED' or states.state(state) < states.state('PROGRESS'):
        return ''
    elif state in states.EXCEPTION_STATES:
        result.maybe_reraise()
        return ''
    info = result.info
    if not startFrom:
        with open(info.get('log_path'), 'r') as f:
            return f.read()
    # Collect lines after the skip point; ''.join avoids the quadratic
    # cost of the previous `data += line` concatenation loop.
    lines = []
    with open(info.get('log_path'), 'r') as f:
        for line_no, line in enumerate(f, 1):
            if line_no > startFrom:
                lines.append(line)
    return ''.join(lines)
def test_gt(self):
    """Later lifecycle states must compare strictly greater than earlier ones."""
    pairs = [
        (states.SUCCESS, states.PENDING),
        (states.FAILURE, states.RECEIVED),
        (states.REVOKED, states.STARTED),
        (states.SUCCESS, states.state("CRASHED")),
        (states.FAILURE, states.state("CRASHED")),
    ]
    for higher, lower in pairs:
        self.assertGreater(higher, lower)
    # Unknown states outrank REVOKED, so this comparison is False.
    self.assertFalse(states.REVOKED > states.state("CRASHED"))
def update(self, state, timestamp, fields):
    """Apply an event to this task, merging logically-older states."""
    worker = self.worker
    if worker:
        worker.on_heartbeat(timestamp=timestamp)
    incoming = states.state(state)
    current = states.state(self.state)
    if incoming < current:
        # Event happens-before the stored state: merge instead of overwrite.
        self.merge(state, timestamp, fields)
        return
    self.state = state
    self.timestamp = timestamp
    super(Task, self).update(fields)
def check_letsencrypt(request, app_name, task_id):
    """Poll the Let's Encrypt task; redirect to the app page once the
    certificate was retrieved, otherwise keep rendering the log page.
    """
    res = AsyncResult(task_id)
    log = get_log(res)
    # Idiomatic substring test (was: log.find(...) != -1).
    if "Certificate retrieved successfully" in log:
        # Invalidate the cached certificate listing before redirecting.
        clear_cache("letsencrypt:ls")
        return redirect(reverse('app_info', args=[app_name]))
    return render(request, 'command_logs.html', {
        'app': app_name,
        'task_id': task_id,
        'log': log,
        'state': res.state,
        'running': res.state in [state(PENDING), state(STARTED)],
    })
def update(self, state, timestamp, fields):
    """Apply an event, merging older states unless RETRY is involved."""
    if self.worker:
        self.worker.on_heartbeat(timestamp=timestamp)
    # RETRY never merges: a retried task legitimately moves "backwards".
    neither_retry = state != states.RETRY and self.state != states.RETRY
    if neither_retry and states.state(state) < states.state(self.state):
        self.merge(state, timestamp, fields)
    else:
        self.state = state
        self.timestamp = timestamp
        super(Task, self).update(fields)
def after_return(self, status, retval, task_id, args, kwargs, einfo):
    """Ensure the task status is finalized after completion, otherwise our
    signals may not fire.
    """
    # We assume anything past STARTED has already been handled elsewhere.
    if states.state(status) > states.state(states.STARTED):
        return
    # Mark completion: FAILURE if an exception occurred, SUCCESS otherwise.
    failed = einfo or isinstance(retval, Exception)
    final_state = states.FAILURE if failed else states.SUCCESS
    self.update_state(state=final_state, meta=retval)
def fetch_result_size(*args, **kwargs):
    """Return {'rows': <count>} for the notebook's download task."""
    notebook = args[0]
    result = download_to_file.AsyncResult(notebook['uuid'])
    state = result.state
    if state == states.PENDING:
        # Unknown task: the query result has expired.
        raise QueryExpired()
    if state == 'SUBMITTED' or states.state(result.state) < states.state('PROGRESS'):
        # Nothing counted yet.
        return {'rows': 0}
    if state in states.EXCEPTION_STATES:
        result.maybe_reraise()
        return {'rows': 0}
    info = result.info
    return {'rows': info.get('row_counter', 0)}
def progress(notebook, snippet, logs=None, **kwargs):
    """Return the query progress as reported by the snippet's api."""
    result = download_to_file.AsyncResult(notebook['uuid'])
    state = result.state
    if state == states.PENDING:
        raise QueryExpired()
    if state == 'SUBMITTED' or states.state(state) < states.state('PROGRESS'):
        # Too early to have real progress information.
        return 1
    if state in states.EXCEPTION_STATES:
        return 1
    snippet['result']['handle'] = result.info.get('handle', {}).copy()
    api = get_api(_get_request(**kwargs), snippet)
    return api.progress(notebook, snippet, logs=logs)
def get_jobs(notebook, snippet, logs, **kwargs):
    """Re-implementation that fetches the updated guid for the
    download_to_file task from its stored result.
    """
    result = download_to_file.AsyncResult(notebook['uuid'])
    state = result.state
    if state == states.PENDING:
        raise QueryExpired()
    if state == 'SUBMITTED' or states.state(state) < states.state('PROGRESS'):
        # No jobs exist before the query reaches PROGRESS.
        return []
    if state in states.EXCEPTION_STATES:
        return []
    snippet['result']['handle'] = result.info.get('handle', {}).copy()
    api = get_api(_get_request(**kwargs), snippet)
    return api.get_jobs(notebook, snippet, logs)
def update_state(self, state, task_id, defaults):
    """Atomically create or update the Task row for `task_id`.

    select_for_update serializes concurrent event writers. When the incoming
    state is logically older than the stored one, fields protected by the
    merge rules are kept instead of being overwritten.
    """
    with transaction.atomic():
        obj, created = self.select_for_update_or_create(
            task_id=task_id,
            defaults=defaults,
        )
        if created:
            return obj
        if states.state(state) < states.state(obj.state):
            # Out-of-order event: protect the merge-rule fields.
            keep = Task.merge_rules[states.RECEIVED]
        else:
            keep = {}
        for key, value in defaults.items():
            if key not in keep:
                setattr(obj, key, value)
        # NOTE(review): update_fields includes kept keys too; those are saved
        # with their existing values, which is harmless but slightly wasteful.
        obj.save(update_fields=tuple(defaults.keys()))
        return obj
def get_log(res):
    """Return the captured log text for a task, or "" when none exists.

    Logs are only written to redis once the task moves past PENDING.
    """
    key = tasks.task_key(res.id)
    if res.state <= state(PENDING):
        # No log is stored before the task has started.
        return ""
    raw = redis.get(key)
    if raw is None:  # fixed: identity comparison instead of `== None`
        return ""
    return raw.decode('utf-8')
def update(self, state, timestamp, fields):
    """Update state from new event.

    :param state: State from event.
    :param timestamp: Timestamp from event.
    :param fields: Event data.
    """
    if self.worker:
        self.worker.on_heartbeat(timestamp=timestamp)
    # RETRY is exempt from the happens-before check: a retried task
    # legitimately moves back to an "earlier" state.
    if state != states.RETRY and self.state != states.RETRY and \
            states.state(state) < states.state(self.state):
        # this state logically happens-before the current state, so merge.
        self.merge(state, timestamp, fields)
    else:
        self.state = state
        self.timestamp = timestamp
        super(Task, self).update(fields)
def update_task(self, state, **kwargs):
    """Create or update a TaskState row; events without a name are dropped."""
    defaults = kwargs.pop('defaults', None) or {}
    if not defaults.get('name'):
        # Nameless events carry nothing worth persisting.
        return
    obj, created = self.TaskState.objects.get_or_create(defaults=defaults, **kwargs)
    if created:
        return obj
    if states.state(state) < states.state(obj.state):
        # Out-of-order event: don't clobber merge-protected fields.
        keep = Task.merge_rules[states.RECEIVED]
        defaults = {k: v for k, v in defaults.items() if k not in keep}
    for field, value in defaults.items():
        setattr(obj, field, value)
    obj.save()
    return obj
def fetch_result(notebook, snippet, rows, start_over, **kwargs):
    """Read up to `rows` result rows for a notebook from the task's CSV file.

    Resumes from the persisted progress offset unless `start_over` is set.
    Returns a dict with 'data', 'meta' (column descriptors), 'type' and
    'has_more'. Raises QueryExpired when the task is unknown (PENDING).
    """
    result = download_to_file.AsyncResult(notebook['uuid'])
    state = result.state
    data = []
    cols = []
    results = {'has_more': False, 'data': data, 'meta': cols, 'type': 'table'}
    if state == states.PENDING:
        raise QueryExpired()
    elif state in states.EXCEPTION_STATES:
        result.maybe_reraise()
        return results
    elif state not in [states.SUCCESS, 'AVAILABLE']:
        # Not finished producing results yet: nothing to read.
        return results
    info = result.info
    skip = 0
    if not start_over:
        # Resume where the previous fetch left off.
        with open(info.get('progress_path'), 'r') as f:
            skip = int(f.read())
    target = skip + rows
    with open(info.get('file_path'), 'r') as f:
        # NOTE(review): the bytes delimiter implies Python 2 csv semantics.
        csv_reader = csv.reader(f, delimiter=','.encode('utf-8'))
        # First row is the header: each cell is "name|type".
        first = next(csv_reader)
        for col in first:
            split = col.split('|')
            if len(split) > 1:
                cols.append({'name': split[0], 'type': split[1], 'comment': None})
            else:
                cols.append({'name': split[0], 'type': 'STRING_TYPE', 'comment': None})
        count = 0
        for row in csv_reader:
            count += 1
            if count <= skip:
                continue
            data.append(row)
            if count >= target:
                break
    # Persist the new offset for the next incremental fetch.
    with open(info.get('progress_path'), 'w') as f:
        f.write(str(count))
    results['has_more'] = count < info.get('row_counter') or state == states.state('PROGRESS')
    return results
def update_task(self, state, **kwargs):
    """Update an existing TaskState row, creating it on first sight."""
    manager = self.TaskState.objects
    defaults = kwargs.pop("defaults", None) or {}
    try:
        obj = manager.get(**kwargs)
    except ObjectDoesNotExist:
        # First event for this task; ignore it if it carries no name.
        if not defaults.get("name"):
            return
        obj, _created = manager.get_or_create(defaults=defaults, **kwargs)
        return obj
    if states.state(state) < states.state(obj.state):
        # Older event: keep merge-protected fields untouched.
        protected = Task.merge_rules[states.RECEIVED]
        defaults = {k: v for k, v in defaults.items() if k not in protected}
    for field, value in defaults.items():
        setattr(obj, field, value)
    obj.save()
    return obj
def close_statement(*args, **kwargs):
    """Close the statement behind a snippet and delete the task's files.

    args[0] is the notebook dict, args[1] the snippet dict. Returns
    {'status': 0} on success, {'status': -1} when the task never reached
    PROGRESS or ended in an exception state. Raises QueryExpired when the
    task is unknown (PENDING).
    """
    notebook = args[0]
    snippet = args[1]
    result = download_to_file.AsyncResult(notebook['uuid'])
    state = result.state
    if state == states.PENDING:
        raise QueryExpired()
    elif state == 'SUBMITTED' or states.state(result.state) < states.state('PROGRESS'):
        return {'status': -1}
    elif state in states.EXCEPTION_STATES:
        result.maybe_reraise()
        return {'status': -1}
    info = result.info
    snippet['result']['handle'] = info.get('handle', {})
    # Fixed task_id keeps the async close idempotent per notebook.
    close_statement_async.apply_async(args=args, kwargs=kwargs, task_id=_close_statement_async_id(notebook))
    result.forget()
    # Remove the task's on-disk artifacts (raises OSError if already gone).
    os.remove(info.get('file_path'))
    os.remove(info.get('log_path'))
    os.remove(info.get('progress_path'))
    return {'status': 0}
def fetch_result(notebook, snippet, rows, start_over, **kwargs):
    """Fetch up to `rows` rows of a query result via the task result cache.

    Rows already delivered are tracked in the Celery cache so a follow-up
    call resumes where the previous one stopped, unless `start_over` is set.
    Raises QueryExpired when the task is unknown (PENDING).
    """
    task_id = _get_query_key(notebook, snippet)
    result = download_to_file.AsyncResult(task_id)
    state = result.state
    data = []
    cols = []
    results = {'has_more': False, 'data': data, 'meta': cols, 'type': 'table'}
    if state == states.PENDING:
        raise QueryExpired()
    elif state in states.EXCEPTION_STATES:
        result.maybe_reraise()
        return results
    elif state not in [states.SUCCESS, 'AVAILABLE']:
        # Not finished producing results yet: nothing to read.
        return results
    info = result.info
    skip = 0
    if not start_over:
        skip = caches[CACHES_CELERY_KEY].get(_fetch_progress_key(notebook, snippet), default=0)
    target = skip + rows
    if info.get('handle', {}).get('has_result_set', False):
        # Result cells can be arbitrarily large.
        csv.field_size_limit(sys.maxsize)
        count = 0
        headers, csv_reader = _get_data(task_id)
        # Header cells are "name|type" pairs; a missing type means string.
        for col in headers:
            split = col.split('|')
            split_type = split[1] if len(split) > 1 else 'STRING_TYPE'
            cols.append({'name': split[0], 'type': split_type, 'comment': None})
        for row in csv_reader:
            count += 1
            if count <= skip:  # TODO: seek(skip) or [skip:]
                continue
            data.append(row)
            if count >= target:
                break
        # Persist the new offset for the next incremental fetch.
        caches[CACHES_CELERY_KEY].set(_fetch_progress_key(notebook, snippet), count, timeout=None)
        results['has_more'] = count < info.get('row_counter') or state == states.state('PROGRESS')
    return results
def update_task(self, state, **kwargs):
    """Persist a task event: update the matching TaskState row or create it."""
    defaults = kwargs.pop("defaults", None) or {}
    store = self.TaskState.objects
    try:
        obj = store.get(**kwargs)
    except ObjectDoesNotExist:
        if not defaults.get("name"):
            # Nothing useful to record for an unnamed task.
            return
        obj, _ = store.get_or_create(defaults=defaults, **kwargs)
        return obj
    if states.state(state) < states.state(obj.state):
        # Event is logically older: respect the merge rules.
        keep = Task.merge_rules[states.RECEIVED]
        defaults = {key: val for key, val in defaults.items() if key not in keep}
    for key, val in defaults.items():
        setattr(obj, key, val)
    obj.save()
    return obj
def update_task(self, state, **kwargs):
    """get_or_create the TaskState row for an event and apply its fields."""
    defaults = kwargs.pop('defaults', None) or {}
    if not defaults.get('name'):
        # Drop events that don't identify a task by name.
        return
    obj, created = self.TaskState.objects.get_or_create(defaults=defaults, **kwargs)
    if not created:
        if states.state(state) < states.state(obj.state):
            # Event arrived out of order: protect merge-rule fields.
            keep = Task.merge_rules[states.RECEIVED]
            defaults = {k: v for k, v in defaults.items() if k not in keep}
        for attr, value in defaults.items():
            setattr(obj, attr, value)
        obj.save()
    return obj
def fetch_result(notebook, snippet, rows, start_over, **kwargs):
    """Fetch up to `rows` rows of a query result from shared storage.

    Progress is kept in the Celery cache keyed by notebook, so repeated
    calls stream the result incrementally unless `start_over` is set.
    Raises QueryExpired when the task is unknown (PENDING).
    """
    result = download_to_file.AsyncResult(notebook['uuid'])
    state = result.state
    data = []
    cols = []
    results = {'has_more': False, 'data': data, 'meta': cols, 'type': 'table'}
    if state == states.PENDING:
        raise QueryExpired()
    elif state in states.EXCEPTION_STATES:
        result.maybe_reraise()
        return results
    elif state not in [states.SUCCESS, 'AVAILABLE']:
        # Not finished producing results yet: nothing to read.
        return results
    info = result.info
    skip = 0
    if not start_over:
        skip = caches[CACHES_CELERY_KEY].get(_fetch_progress_key(notebook), default=0)
    target = skip + rows
    if info.get('handle', {}).get('has_result_set', False):
        # Result cells can be arbitrarily large.
        csv.field_size_limit(sys.maxsize)
        count = 0
        with storage.open(_result_key(notebook)) as f:
            # NOTE(review): bytes delimiter implies Python 2 csv semantics.
            csv_reader = csv.reader(f, delimiter=','.encode('utf-8'))
            first = next(csv_reader, None)
            if first:  # else no data to read
                # Header cells are "name|type"; missing type means string.
                for col in first:
                    split = col.split('|')
                    split_type = split[1] if len(split) > 1 else 'STRING_TYPE'
                    cols.append({'name': split[0], 'type': split_type, 'comment': None})
                for row in csv_reader:
                    count += 1
                    if count <= skip:
                        continue
                    data.append(row)
                    if count >= target:
                        break
        # Persist the new offset for the next incremental fetch.
        caches[CACHES_CELERY_KEY].set(_fetch_progress_key(notebook), count, timeout=None)
        results['has_more'] = count < info.get('row_counter') or state == states.state('PROGRESS')
    return results
def update_task(self, state, **kwargs):
    """Create or update a TaskState row, normalizing datetimes to aware.

    Events without a task name are dropped. When the incoming state is
    logically older than the stored one, merge-protected fields are kept.
    """
    objects = self.TaskState.objects
    defaults = kwargs.pop('defaults', None) or {}
    if not defaults.get('name'):
        return
    obj, created = objects.get_or_create(defaults=defaults, **kwargs)
    if created:
        return obj
    else:
        if states.state(state) < states.state(obj.state):
            # Out-of-order event: don't clobber merge-protected fields.
            keep = Task.merge_rules[states.RECEIVED]
            defaults = dict(
                (k, v) for k, v in defaults.items() if k not in keep)
        for k, v in defaults.items():
            setattr(obj, k, v)
        for datefield in ('eta', 'expires', 'tstamp'):
            # Brute force trying to fix #183: coerce naive datetimes to aware.
            setattr(obj, datefield, maybe_make_aware(getattr(obj, datefield)))
        obj.save()
        return obj
def show_log(request, task_id):
    """Render the stored log (plus traceback on failure) for a task."""
    res = AsyncResult(task_id)
    task = models.TaskLog.objects.get(task_id=task_id)
    # Strip ANSI colour codes before display.
    log = ansi_escape.sub("", get_log(res))
    if res.state == state(FAILURE):
        log += str(res.traceback)
    context = {
        'app': task.app.name,
        'task_id': task_id,
        'log': log,
        'state': res.state,
        'running': False,
        'description': task.description,
    }
    return render(request, 'command_wait.html', context)
def update_task(self, state, **kwargs):
    """Create or update a TaskState row from a monitor event.

    Nameless events are ignored. Out-of-order (logically older) events do
    not overwrite merge-protected fields. The datetime fields are coerced
    to timezone-aware values before saving.
    """
    objects = self.TaskState.objects
    defaults = kwargs.pop('defaults', None) or {}
    if not defaults.get('name'):
        return
    obj, created = objects.get_or_create(defaults=defaults, **kwargs)
    if created:
        return obj
    else:
        if states.state(state) < states.state(obj.state):
            # Older event: keep merge-protected fields as stored.
            keep = Task.merge_rules[states.RECEIVED]
            defaults = dict(
                (k, v) for k, v in defaults.items() if k not in keep
            )
        for k, v in defaults.items():
            setattr(obj, k, v)
        for datefield in ('eta', 'expires', 'tstamp'):
            # Brute force trying to fix #183
            setattr(obj, datefield, maybe_make_aware(getattr(obj, datefield)))
        obj.save()
        return obj
def update_tasklist(ids, tid):
    """Record the finished Celery task `tid` on the Tasks row(s) `ids`.

    Returns the task status when the update succeeds; on any error a
    fallback status is stored instead. When the task is not ready (or after
    the fallback path) res.ready() is returned.
    """
    res = AsyncResult(id=tid)
    if res.ready():
        print(res.status)
        try:
            Tasks.objects.filter(id=ids).update(
                task_id=tid,
                task_status=res.status,
                tasktime=datetime.datetime.now())
            return res.status
        except Exception as e:
            # Best effort: log the error and store a fallback status
            # rather than letting the update failure propagate.
            print(e)
            Tasks.objects.filter(id=ids).update(
                task_id=tid,
                task_status=state('任务与发送成功'),
                tasktime=datetime.datetime.now())
    return (res.ready())
def test_lte(self, r, l):
    """state(r) must compare less-than-or-equal to state(l)."""
    lhs = states.state(r)
    rhs = states.state(l)
    assert lhs <= rhs
def test_gte(self, r, l):
    """state(r) must compare greater-than-or-equal to state(l)."""
    lhs = states.state(r)
    rhs = states.state(l)
    assert lhs >= rhs
def test_gt(self):
    """Later states compare strictly greater than earlier ones."""
    expectations = [
        (states.SUCCESS, states.PENDING),
        (states.FAILURE, states.RECEIVED),
        (states.REVOKED, states.STARTED),
        (states.SUCCESS, "CRASHED"),
        (states.FAILURE, "CRASHED"),
    ]
    for bigger, smaller in expectations:
        self.assertGreater(state(bigger), state(smaller))
    # Unknown states outrank REVOKED, so this comparison is False.
    self.assertFalse(state(states.REVOKED) > state("CRASHED"))
def test_lt(self):
    """Earlier states compare strictly less than later ones."""
    for smaller, bigger in [
        (states.PENDING, states.SUCCESS),
        (states.RECEIVED, states.FAILURE),
        (states.STARTED, states.REVOKED),
        ("CRASHED", states.SUCCESS),
        ("CRASHED", states.FAILURE),
    ]:
        self.assertLess(state(smaller), state(bigger))
    # Unknown states sort above REVOKED in both strict and weak order.
    self.assertTrue(state(states.REVOKED) < state("CRASHED"))
    self.assertTrue(state(states.REVOKED) <= state("CRASHED"))
    self.assertTrue(state("CRASHED") >= state(states.REVOKED))
def progress_updater(size, total):
    """Progress reporter for checksum verification."""
    meta = dict(size=size, total=total)
    current_task.update_state(state=state('PROGRESS'), meta=meta)
def progress_updater(size, total):
    """Report checksum-verification progress on the current task."""
    current_task.update_state(
        meta={'size': size, 'total': total},
        state=state('PROGRESS'),
    )
def test_gt(self):
    """Later states compare strictly greater; unknown states cap at top."""
    cases = (
        (states.SUCCESS, states.PENDING),
        (states.FAILURE, states.RECEIVED),
        (states.REVOKED, states.STARTED),
        (states.SUCCESS, 'CRASHED'),
        (states.FAILURE, 'CRASHED'),
    )
    for high, low in cases:
        self.assertGreater(states.state(high), states.state(low))
    # REVOKED does not outrank an unknown state.
    self.assertLessEqual(states.state(states.REVOKED), states.state('CRASHED'))
def test_lt(self):
    """Earlier states compare strictly less than later ones."""
    ordered = (
        (states.PENDING, states.SUCCESS),
        (states.RECEIVED, states.FAILURE),
        (states.STARTED, states.REVOKED),
        ('CRASHED', states.SUCCESS),
        ('CRASHED', states.FAILURE),
    )
    for low, high in ordered:
        self.assertLess(state(low), state(high))
    # Unknown states sort above REVOKED in strict and weak comparisons.
    self.assertTrue(state(states.REVOKED) < state('CRASHED'))
    self.assertTrue(state(states.REVOKED) <= state('CRASHED'))
    self.assertTrue(state('CRASHED') >= state(states.REVOKED))
def test_gt(self):
    """Later states compare strictly greater than earlier ones."""
    for high, low in (
        (states.SUCCESS, states.PENDING),
        (states.FAILURE, states.RECEIVED),
        (states.REVOKED, states.STARTED),
        (states.SUCCESS, 'CRASHED'),
        (states.FAILURE, 'CRASHED'),
    ):
        self.assertGreater(state(high), state(low))
    # Unknown states outrank REVOKED, so the comparison is False.
    self.assertFalse(state(states.REVOKED) > state('CRASHED'))