def async_job(self, data):
    """Wrap *data* in a fresh GenericDataObject and hand it to the TaskEngine.

    Any failure is logged through CommonLogger instead of propagating, so a
    bad payload can never break the caller's request path.
    """
    async_job_time = time.time()  # NOTE(review): captured but never read -- kept for interface parity; confirm before removing
    try:
        # return_attribute_dict() was called twice in the original; it is a
        # plain getter, so call it once and reuse the dict.
        attrs = data.return_attribute_dict()
        new_data_object = GenericDataObject(attrs)
        new_data_object.data = attrs
        data = ''  # drop the original reference early, as the original did
        new_data_object.source = self.source
        new_data_object.resource = self
        # Original bound the result to an unused `AxolTask` local; dropped.
        TaskEngine().run_task(new_data_object)
    except Exception as e:  # broad by design: async jobs must never raise
        CommonLogger.log(e, 'axol_resource', 'async_job')
def async_job(self, data):
    """Wrap *data* in a fresh GenericDataObject and hand it to the TaskEngine.

    Failures are logged through CommonLogger rather than raised, so the
    caller is never interrupted by a bad payload.
    """
    async_job_time = time.time()  # NOTE(review): captured but never read -- kept for interface parity; confirm before removing
    try:
        # Call the getter once instead of twice, as the original did.
        attrs = data.return_attribute_dict()
        new_data_object = GenericDataObject(attrs)
        new_data_object.data = attrs
        data = ""  # drop the original reference early, as the original did
        new_data_object.source = self.source
        new_data_object.resource = self
        # Original bound the result to an unused `AxolTask` local; dropped.
        TaskEngine().run_task(new_data_object)
    except Exception as e:  # broad by design: async jobs must never raise
        CommonLogger.log(e, "axol_resource", "async_job")
def insert(data_object, table_space):
    """Build a CQL ``begin batch``/``APPLY BATCH`` insert statement.

    Emits one ``insert into <table_space> ...`` per server entry found in
    *data_object* (after conversion via GenericDataObject), prepending an
    ``insert_time`` (epoch milliseconds) and ``time_string`` column to each
    row.  Returns the full batch query as a string.

    WARNING(review): column names and values are spliced directly into the
    query text -- this is injection-prone if any value can come from
    untrusted input; prefer prepared statements.
    """
    insert_time = int(time.time()) * 1000  # epoch milliseconds
    time_string = str(time.time())
    data_object = GenericDataObject().convert(data_object)
    query = 'begin batch '
    for server in data_object:
        if server != 'method':  # 'method' is metadata, not a server record
            cols = ['insert_time', 'time_string']
            values = [str(insert_time), time_string]
            for key, value in data_object[server].items():
                cols.append(key)
                values.append(value)
            # str(tuple(...)) renders the column list; the quotes are
            # stripped because CQL column names are bare identifiers here.
            command = 'insert into %s %s values %s;' % (
                table_space,
                str(tuple(cols)).replace('\'', ''),
                tuple(values))
            query = query + command
    query = query + ' APPLY BATCH'
    return query
# Chunk of a health-metrics run_task: for every server/type pair it pulls a
# cached blob (DW.cache_key_get), rebuilds a GenericDataObject from it, and
# either prints a RED FLAG when per-server warnings exist or computes
# health_indicator via CommonMath.adaptive_filtration.
# NOTE(review): eval() on cache contents is unsafe if the cache can ever hold
# untrusted data -- confirm writers to this cache are trusted.
# NOTE(review): this chunk looks truncated (the visible text ends at the
# except-branch return); a fuller tail of the same method appears duplicated
# further down in this file.
def run_task(self, task): print 'RUN TASK 1 ##############' print task for server in task.value: for _type in self.sources: key = '%s_%s' % (_type, str(server)) try: cache_data = DW.cache_key_get(key) if type(eval(str(cache_data))) is dict: try: task.value[server][_type] = GenericDataObject( eval(str(cache_data))) if task.value[server][_type].warnings[ server] != None: print 'RED FLAG: %s %s' % (server, _type) print task.value[server][_type].warnings else: task.value[server][ _type].health_indicator = CommonMath.adaptive_filtration( task.value[server] [_type].normalized_indicator, task.value[server][_type].multiplier, task.value[server] [_type].threshold_red, task.value[server][_type].scale) except Exception, e: print 'ERROR 1: %s' % e except Exception, e: CommonLogger.log(e, 'create_health_metrics', 'run_task-<%s-%s>' % (server, _type)) print 'ERROR 2: %s' % e return jsonify({'response': {'error': str(e)}})
def request_admin_api():
    """Flask view for the resource-admin endpoint.

    GET  -> returns the admin help structure.
    Other methods -> expects JSON containing ``server_name``, ``action`` and
    ``network``; supports ``remove`` (strip the matched host from every role
    in the cached ``roledefs`` and persist the result) and ``list``.
    """
    print(request.method)
    if request.method == 'GET':
        return jsonify(ResourceAdmin()._show_help())
    else:
        print(request.json)
        if not 'server_name' in request.json \
                or not 'action' in request.json \
                or not 'network' in request.json:
            return jsonify({'error': 'missing required data'})
        response_object = GenericDataObject(request.json)
        roledefs = literal_eval(DW.cache_key_get('roledefs'))
        if response_object.action == 'remove':
            changed_roles = {}
            try:
                # Resolve the host address for the named server/network.
                for name in roledefs:
                    if response_object.server_name in name:
                        host = roledefs[name][response_object.network]
                # BUG(review): if no role matched above, `host` is unbound
                # here and the resulting NameError is swallowed into the
                # error response below -- confirm intended behaviour.
                for name in roledefs:
                    if host in roledefs[name]:
                        roledefs[name].remove(host)
                        changed_roles[name] = roledefs[name]
                DW.cache_key_set('roledefs', roledefs)
            except Exception as e:
                # str(e): exception objects are not JSON-serializable, so the
                # original jsonify({'error': e}) would itself fail.
                return jsonify({'error': str(e)})
            return jsonify({'host': host, 'changed_roles': changed_roles})
        elif response_object.action == 'list':
            return jsonify(roledefs)
def process_gdo(self, task, server, host, times):
    """Build a GenericDataObject for one server/host pair.

    Records how long each stage took into *times* (``match_name_time``,
    optionally ``calculate_new_fields_time``, and the overall ``gdo_time``),
    attaches the timing dict to the object, and returns it.
    """
    started = time.time()

    name_started = time.time()
    resolved_name = self._match_name(host)
    times['match_name_time'] = time.time() - name_started

    gdo = GenericDataObject(server)
    gdo.host = host
    gdo.name = resolved_name
    gdo.source = task.source
    gdo.warnings = {}

    # Let the owning resource enrich the object when it knows how to.
    if hasattr(task.resource, 'calculate_new_fields'):
        enrich_started = time.time()
        gdo = task.resource.calculate_new_fields(gdo)
        times['calculate_new_fields_time'] = time.time() - enrich_started

    times['gdo_time'] = time.time() - started
    gdo.times = times
    return gdo
# Fragment: starts mid-try inside the run_task loop seen earlier (RED FLAG
# printing / adaptive_filtration branch), then the loop tail wraps each
# server entry in a named GenericDataObject, re-validates thresholds via
# self.validate_thresholds, and returns the task.  The trailing text begins
# the definition of validate_thresholds/test_against_thresholds.
# NOTE(review): this span cannot stand alone -- it is a duplicated chunk of
# the same method; left byte-identical.
print 'RED FLAG: %s %s' % (server, _type) print task.value[server][_type].warnings else: task.value[server][_type].health_indicator = CommonMath.adaptive_filtration( task.value[server][_type].normalized_indicator, task.value[server][_type].multiplier, task.value[server][_type].threshold_red, task.value[server][_type].scale ) except Exception, e: print 'ERROR 1: %s' % e except Exception, e: CommonLogger.log(e, 'create_health_metrics', 'run_task-<%s-%s>' % (server, _type)) print 'ERROR 2: %s' % e return jsonify({'response': {'error': str(e)}}) new_obj = GenericDataObject(task.value[server]) new_obj.name = server task.value[server] = new_obj try: for server in task.value: task.value[server] = self.validate_thresholds(task.value[server]) except Exception, e: print 'ERROR 3: %s' % e print 'RUN TASK 2 ##############' print task return task def validate_thresholds(self, data_object): def test_against_thresholds(sub_object): if type(sub_object) is GenericDataObject: sub_object.warnings = {}
# Fragment: an alternatively wrapped duplicate of the preceding chunk --
# same run_task tail (adaptive_filtration assignment, error handling,
# per-server GenericDataObject rewrap, validate_thresholds pass, return)
# followed by the start of validate_thresholds/test_against_thresholds.
# NOTE(review): starts mid-statement; cannot be safely reformatted in
# isolation, so the code is left byte-identical.
task.value[server][ _type].health_indicator = CommonMath.adaptive_filtration( task.value[server] [_type].normalized_indicator, task.value[server][_type].multiplier, task.value[server] [_type].threshold_red, task.value[server][_type].scale) except Exception, e: print 'ERROR 1: %s' % e except Exception, e: CommonLogger.log(e, 'create_health_metrics', 'run_task-<%s-%s>' % (server, _type)) print 'ERROR 2: %s' % e return jsonify({'response': {'error': str(e)}}) new_obj = GenericDataObject(task.value[server]) new_obj.name = server task.value[server] = new_obj try: for server in task.value: task.value[server] = self.validate_thresholds( task.value[server]) except Exception, e: print 'ERROR 3: %s' % e print 'RUN TASK 2 ##############' print task return task def validate_thresholds(self, data_object): def test_against_thresholds(sub_object): if type(sub_object) is GenericDataObject:
# Module-level initialisation fragment: the tail of a celery config-dict
# update (pool restarts, result expiry, task time limit, broker limits),
# then cache wrapper construction (DatabaseWrapper), persisting `roledefs`
# into the cache, TaskEngine wiring to celery, a commented-out
# Elasticsearch setup, and creation of the LOG GenericDataObject.
# NOTE(review): starts mid-dict-literal, so the code is left byte-identical.
'CELERYD_POOL_RESTARTS': False, 'CELERY_TASK_RESULT_EXPIRES': 15, 'CELERYD_TASK_TIME_LIMIT': 15, 'BROKER_POOL_LIMIT': 0, 'BROKER_CONNECTION_TIMEOUT': 1, }) # Cache instantiation #-----------------------------------------------------------------------# #DW = DatabaseWrapper('elasticache') DW = DatabaseWrapper(cache) print 'INIT CACHE: %s' % DW.service # Store roledefs #-----------------------------------------------------------------------# DW.cache_key_set('roledefs', roledefs) # Task Engine instantiation #-----------------------------------------------------------------------# TE = TaskEngine() TE.set_engine(celery) # Elasticsearch configuration #-----------------------------------------------------------------------# # print 'CONFIGURING ELASTICSEARCH' # ES = Elasticsearch(node) # Logger obj creation #-----------------------------------------------------------------------# LOG = GenericDataObject(log_params)