def get_pattern(self):
    """Parse this schedule's serialized cron line into its components.

    Returns None when no cron line is available, otherwise the tuple
    (schedule_id, ss, mm, hh, DD, MM, YYYY, dayofweek, list_jid, action,
    full_date, state, alarm) where state is 0 while the schedule is still
    pending (a waiting-time string exists) and 1 otherwise.

    NOTE(review): the positional indexing below assumes the cron line was
    produced by Crontab.create() — "mm hh DD MM dow sleep ss; ... -j jid
    -s action -n id" — confirm no other producer writes these lines.
    """
    cron = self.serialization()
    if not cron:
        return None
    # Schedule id is the number following the "-n " flag in the command part.
    schedule_id = int(re.search('(?<=-n )\d+', cron).group(0))
    # All 1-2 digit runs, in order of appearance: indices 0-4 are the five
    # cron fields (minute, hour, day, month, weekday); index 5 is the
    # "sleep <ss>" seconds value appended by the task command.
    number_pattern = re.compile("\d{1,2}")
    list_time = re.findall(number_pattern, cron)
    ss = list_time[5]
    mm = list_time[0]
    hh = list_time[1]
    DD = list_time[2]
    MM = list_time[3]
    # Calendar year (original comment: "nam lich") — taken from the current
    # clock because the cron line itself carries no year.
    YYYY = DateTime().get_year()
    dayofweek = list_time[4]
    full_date = 0
    action = None
    alarm = ''
    state = 1
    # Job ids are assumed to be the only 3-10 digit numbers in the line —
    # TODO confirm a 3+ digit schedule_id cannot leak into this list.
    jid_pattern = re.compile("\d{3,10}")
    list_jid = re.findall(jid_pattern, cron)
    # Action word follows the "-s " flag (e.g. start/abort).
    action = re.search('(?<=-s )\w+', cron).group(0)
    full_date = DateTime().convert_date_pattern_2_unix_timestamp(
        ss, mm, hh, DD, MM, YYYY)
    # A non-empty waiting-time string means the schedule is still in the
    # future -> state 0 (pending).
    alarm = self.get_waiting_time(full_date)
    if alarm:
        state = 0
    return schedule_id, ss, mm, hh, DD, MM, YYYY, dayofweek, list_jid, action, full_date, state, alarm
def get_job_detail_by_job_id(self, arr_job_id):
    """Return a list of detail dicts for every job whose JId appears in
    arr_job_id (a collection of ints), parsed from the device's job XML.

    Start/end dates are converted to unix timestamps, or '' when absent.
    """
    dom = minidom.parseString(self.get_job_xml())
    workflows = Workflow(self.name).get_workflow()
    details = []
    for node in dom.getElementsByTagName('jGetList:JItem'):
        # Attribute presence is probed via the string repr of items() —
        # legacy hack reproduced as-is to keep behaviour identical.
        attr_dump = str(node.attributes.items())
        jid = node.attributes['JId'].value if "'JId'" in attr_dump else '-1'
        if int(jid) not in arr_job_id:
            continue
        (state, status, jid, prog, start_date, end_date,
         ver, job_name, wf_id_ref, wf_name) = self.parse_dom_object(node, workflows)
        start_ts = DateTime().conver_UTC_2_unix_timestamp(start_date) if start_date else ''
        end_ts = DateTime().conver_UTC_2_unix_timestamp(end_date) if end_date else ''
        details.append({
            'jname': job_name,
            'wid': wf_id_ref,
            'wname': wf_name,
            'state': state,
            'status': status,
            'jid': jid,
            'prog': prog,
            'startdate': start_ts,
            'ver': ver,
            'enddate': end_ts,
        })
    return details
def profile_json(request):
    """Django view: return the authenticated user's profile as a
    one-element JSON array; redirect anonymous users to the login page.

    NOTE(review): `is_authenticated()` is called as a method — this is the
    pre-1.10 Django API; confirm the project's Django version.
    """
    if not request.user.is_authenticated():
        return HttpResponseRedirect('/accounts/login')
    account = User.objects.get(pk=int(request.user.id))
    to_ts = DateTime().conver_human_creadeble_2_unix_timetamp_local
    payload = [{
        'username': account.username,
        'email': account.email,
        'first_name': account.first_name,
        'last_name': account.last_name,
        'is_staff': account.is_staff,
        'is_active': account.is_active,
        'date_joined': to_ts(str(account.date_joined)),
        'last_login': to_ts(str(account.last_login)),
    }]
    return HttpResponse(json.dumps(payload),
                        content_type='application/json',
                        status=200)
def parse_xml_2_query(self, xml):
    """Build the VALUES fragment of a bulk INSERT from a job-list XML blob.

    Returns a string of "(...)," tuples (note the trailing comma — the
    caller is expected to trim it).

    NOTE(review): values are spliced with %-formatting rather than bound
    parameters; State/Status and the host name are trusted here — confirm
    they are sanitized upstream before this runs against a database.
    """
    rows = []
    for item in minidom.parseString(xml).getElementsByTagName('jGetList:JItem'):
        State, Status, JId, Prog, StartDate, EndDate, Ver = self.parse_dom_object(item)
        rows.append("(%d,'%s','%s','%s',%d,%d,%d,%d)," % (
            int(JId),
            self.host['host'],
            State,
            Status,
            int(Prog),
            int(Ver),
            DateTime().conver_UTC_2_unix_timestamp(StartDate),
            DateTime().conver_UTC_2_unix_timestamp(EndDate)))
    return ''.join(rows)
def get_new_id(self, request):
    """Create a Stream from the JSON request body and return its new id.

    Returns None when the body is not valid JSON. Required body keys:
    name, stream_type, source, stream_key (KeyError propagates if absent,
    as before).
    """
    try:
        data = json.loads(request.body)
    except Exception:
        return None
    # Normalize the name: trim, drop inner whitespace, ensure the extension.
    name = data['name'].strip().replace(" ", "")
    if name.find(EXTENTION) < 0:
        name = name + EXTENTION
    user = User.objects.get(id=request.user.id)
    new_stream = Stream(name=name,
                        user=user,
                        create_time=DateTime().get_now(),
                        stream_type=data['stream_type'].strip(),
                        source=data['source'],
                        stream_key=data['stream_key'].strip(),
                        description='')
    new_stream.save()
    # BUGFIX: return the pk assigned by save() directly. The old
    # Stream.objects.get(create_time=now) re-query was racy and raised
    # MultipleObjectsReturned whenever two streams shared a timestamp.
    return new_stream.id
def get_history_auto_return_main(thomson_host, jobid):
    """Search today's and yesterday's logstash indices for messages saying
    the tool auto-returned (host, jobid) to the main source within the
    last 5 minutes. Returns the raw Elasticsearch hit list.
    """
    must_clauses = [
        {"match": {"host.keyword": "thomson"}},
        {"match": {"message": "tool just"}},
        {"match": {"message": "the main source"}},
        {"match": {"message": "%s" % (thomson_host)}},
        {"match": {"message": "%d" % (jobid)}},
    ]
    time_filter = {"range": {"@timestamp": {"gte": "now-5m", "lte": "now"}}}
    query = {
        "from": 0,
        "size": 1000,
        "sort": [{"@timestamp": {"order": "desc"}}],
        "_source": ["message"],
        "query": {"bool": {"must": must_clauses, "filter": time_filter}},
    }
    clock = DateTime.DateTime()
    index = "logstash-%s,logstash-%s" % (clock.get_now_as_logtash_fortmat(),
                                         clock.get_yesterday_as_logtash_fortmat())
    result = Elasticsearch([{'host': host, 'port': port}]).search(
        index=index, body=query)
    return result['hits']['hits']
def get_date_time(self):
    """Extract and validate a schedule timestamp from self.data['date_time'].

    Accepts 'YYYY/MM/DD hh:mm:ss' with '/', '.' or '-' as date separator.
    Returns (date_time, error): error is '' on success, otherwise a
    human-readable message; date_time is '' when nothing matched.
    """
    date_time = ''
    error = ''
    try:
        raw = self.data['date_time']
        # Raw string avoids the invalid-escape-sequence pitfall in regexes.
        pattern = re.compile(r"\d{4}[/.-]\d{2}[/.-]\d{2} \d{2}:\d{2}:\d{2}")
        date_time = re.findall(pattern, raw)[0]
        schedule_datetime = DateTime().conver_human_creadeble_2_unix_timetamp(date_time)
        # The schedule must lie at least one minute in the future.
        if schedule_datetime <= DateTime().get_now() + 60:
            # BUGFIX: corrected wording of the validation message
            # (was "Schedule must grater than now 1 minutes!").
            error = "Schedule must be greater than now 1 minutes!"
        return date_time, error
    except Exception:
        # Broad catch is deliberate: any parse/lookup failure is reported
        # as invalid input rather than crashing the request.
        return date_time, "Invalid data datetime!"
def get_waiting_time(self, schedule_time):
    """Return the time left until schedule_time (unix seconds) as
    'N day(s) HH:MM:SS', or None when the moment has already passed."""
    remaining = schedule_time - DateTime().get_now()
    if remaining <= 0:
        return None
    minutes, seconds = divmod(remaining, 60)
    hours, minutes = divmod(minutes, 60)
    days, hours = divmod(hours, 24)
    return "%d day(s) %02d:%02d:%02d" % (days, hours, minutes, seconds)
def get_new_id(self, request):
    """Create a Schedule from the request parameters and return its new id."""
    host = settings.THOMSON_HOST['thomson-hcm']['host']
    user = User.objects.get(id=request.user.id)
    action = RequestGetParam(request).get_action()
    # NOTE(review): the `error` values returned below are never checked and
    # the second assignment clobbers the first — confirm validation happens
    # upstream before this method is called.
    jobid_list, error = RequestGetParam(request).get_job_id('thomson-hcm')
    schedule_time, error = RequestGetParam(request).get_date_time()
    description = RequestGetParam(request).get_description()
    schedule_time = DateTime().conver_human_creadeble_2_unix_timetamp(
        schedule_time)
    new_schedule = Schedule(user=user,
                            create_time=DateTime().get_now(),
                            schedule_time=schedule_time,
                            action=action,
                            host=host,
                            description=description)
    new_schedule.save()
    # BUGFIX: return the pk assigned by save() directly. The old
    # Schedule.objects.get(create_time=now) re-query was racy and raised
    # MultipleObjectsReturned whenever two schedules shared a timestamp.
    return new_schedule.id
def write_history(self, user_name, action, stream_id):
    """Persist a History row recording `action` by `user_name` on the
    stream identified by stream_id. Always returns 1."""
    stream = Stream.objects.get(pk=stream_id)
    History(stream=stream,
            date_time=DateTime().get_now(),
            action=action,
            messages=self.create_message(user_name, action, stream.name)).save()
    return 1
def write_history(self, user_name, action, schedule_id):
    """Persist a History row for a schedule action and mirror it to
    rsyslog. Always returns 1."""
    cron_line = Crontab().get_cron_by_id(schedule_id)
    schedule_row = Schedule.objects.get(id=int(schedule_id))
    History(schedule=schedule_row,
            date_time=DateTime().get_now(),
            host=settings.THOMSON_HOST['thomson-hcm']['host'],
            messages=self.create_message(user_name, action, cron_line)).save()
    self.write_rsyslog(user_name, action, schedule_id)
    return 1
def write_rsyslog(self, user_name, action, schedule_id):
    """Emit a JSON audit record for a schedule action through the
    'thomson-tool' logger (warning level). Always returns 1."""
    cron_line = Crontab().get_cron_by_id(schedule_id)
    # OrderedDict + sort_keys=False keeps the field order stable in the log.
    record = OrderedDict([
        ("sev", "Info"),
        ("host", settings.THOMSON_HOST['thomson-hcm']['host']),
        ("datetime", DateTime().get_now()),
        ("action", action),
        ("desc", self.create_message(user_name, action, cron_line)),
    ])
    logger.getLogger("thomson-tool").warning(json.dumps(record, sort_keys=False))
    print("write log schedule!")
    return 1
def create(self, date_time, jobid, action, schedule_id):
    """Build a crontab task line that fires once at `date_time`.

    date_time: a unix timestamp as a digit string, or a human-readable
    datetime string. Returns the crontab line, or None when a
    human-readable value cannot be parsed.
    """
    if not date_time.isdigit():
        try:
            date_time = DateTime().conver_human_creadeble_2_unix_timetamp(
                date_time)
        except Exception:
            return None
    # BUGFIX: the digit branch previously left date_time as a string, so
    # datetime.fromtimestamp() raised TypeError; convert explicitly.
    dt = datetime.fromtimestamp(int(date_time))
    # Cron fields: minute hour day-of-month month day-of-week; the seconds
    # component is handled by a leading "sleep ss" in the command itself.
    # isocalendar()[2] is 1..7 (Mon..Sun) — cron also accepts 7 as Sunday.
    task = """%s %s %s %s %s sleep %s; /bin/python /script/crontabSMP/job.py -j %s -s %s -n %s""" % (
        dt.minute, dt.hour, dt.day, dt.month, dt.isocalendar()[2],
        dt.second, jobid, action, str(schedule_id))
    return task
def get_schedule(self, thomson_name):
    """Serialize this crontab entry as a JSON array with job details,
    action, schedule/server timestamps, state and a readable message.
    Returns '[]' (as JSON) when the task cannot be serialized."""
    result = []
    if CrontabDetail(self.task).serialization():
        (schedule_id, ss, mm, hh, DD, MM, YYYY, dayofweek, list_jid,
         action, full_date, state, alarm) = self.get_pattern()
        jids = [int(jid) for jid in list_jid]
        result.append({
            'list_job': Job(thomson_name).get_job_detail_by_job_id(jids),
            'action': action,
            'schedule_date': full_date,
            'svr_date': int(DateTime().get_now()),
            'state': int(state),
            'message': self.human_readable(),
        })
    return json.dumps(result)
def get_datetime(self):
    """Query the device for its current date/time and timezone, returning a
    one-element JSON array: [{'dateAndTime': <unix ts or 1>, 'timeZone': ...}].

    Falls back to 1 / 'Asia/Ho_Chi_Minh' when the response lacks the
    corresponding attributes.
    """
    from setting.xmlReq import DateAndTimeReq
    headers = DateAndTimeReq.HEADERS
    body = DateAndTimeReq.BODY
    print(body)
    response_xml = self.get_response(headers, body)
    print(response_xml)
    dom = minidom.parseString(response_xml)
    item = dom.getElementsByTagName('GetDateAndTime:RspOkGetDate')[0]
    # Attribute presence probed via the string repr of items() — legacy
    # hack reproduced as-is to keep behaviour identical.
    attr_dump = str(item.attributes.items())
    date_and_time = item.attributes['DateAndTime'].value if \
        "'DateAndTime'" in attr_dump else ""
    olson_tz = item.attributes['OlsonTZ'].value if \
        "'OlsonTZ'" in attr_dump else ""
    payload = [{
        'dateAndTime': DateTime().conver_UTC_2_unix_timestamp(date_and_time)
                       if date_and_time else 1,
        'timeZone': olson_tz if olson_tz else "Asia/Ho_Chi_Minh",
    }]
    return json.dumps(payload)
def get_the_last_active_backup_log_by_jobid(thomson_host, jobid):
    """Fetch the single most recent 'input:backup' log line matching
    (thomson_host, jobid) from today's and yesterday's logstash indices.
    Returns the raw Elasticsearch hit list (at most one element)."""
    must_clauses = [
        {"match": {"message": "%s" % (thomson_host)}},
        {"match": {"message": "%d" % (jobid)}},
        {"match": {"message": "input:backup"}},
    ]
    query = {
        "from": 0,
        "size": 1,
        "_source": ["message"],
        "sort": [{"@timestamp": {"order": "desc"}}],
        "query": {"bool": {"must": must_clauses}},
    }
    clock = DateTime.DateTime()
    index = "logstash-%s,logstash-%s" % (clock.get_now_as_logtash_fortmat(),
                                         clock.get_yesterday_as_logtash_fortmat())
    result = Elasticsearch([{'host': host, 'port': port}]).search(
        index=index, body=query)
    return result['hits']['hits']
def create_message(self, user_name='', action='', stream_name=''):
    """Compose the human-readable audit sentence for a stream action,
    stamped with the current time."""
    stamp = DateTime().get_now_as_human_creadeble()
    return 'At %s user %s %s stream name %s.' % (
        stamp, user_name, action, stream_name)
def abort(self, user):
    """Send an Abort request for this job, log the action, and return "OK".

    The device response is intentionally not parsed — the caller only
    needs an acknowledgement.
    """
    from setting.xmlReq.JobDetailReq import ABORT_HEADERS, ABORT_BODY
    request_body = ABORT_BODY.replace('JobID', str(self.jid))
    Thomson(self.name).get_response(ABORT_HEADERS, request_body)
    History().create_log(thomson_name=self.name,
                         user=user,
                         action='abort',
                         jid=self.jid,
                         datetime=DateTime().get_now())
    return "OK"
def start(self, user):
    """Send a Start request for this job, log the action, and return
    {'status': 'OK', 'nid': <node id>} (nid falls back to 0 when the node
    lookup fails)."""
    from setting.xmlReq.JobDetailReq import START_HEADERS, START_BODY
    request_body = START_BODY.replace('JobID', str(self.jid))
    Thomson(self.name).get_response(START_HEADERS, request_body)
    History().create_log(thomson_name=self.name,
                         user=user,
                         action='start',
                         jid=self.jid,
                         datetime=DateTime().get_now())
    # Give the backend a moment to register the job before resolving its node.
    time.sleep(0.5)
    try:
        node_id = dbNodeDetail(self.name).get_node_by_job(self.jid)
    except Exception:
        node_id = 0
    return {'status': 'OK', 'nid': node_id}
def query_by_ident(self, ident=None, time="24h", size=0, ip=0):
    """Return raw Elasticsearch hits for log lines related to `ident`.

    time: lookback window from now — minute (m), hour (h), day (d), ...
          (NOTE: this parameter shadows the stdlib `time` module name
          inside this function.)
    ip: ip address to match in Thomson-TOOL messages (0 keeps self.ip).
    size: max number of hits (0 keeps self.size).
    Returns the 'hits' array of the Elasticsearch response.
    """
    # Zero/falsy arguments fall back to the instance defaults.
    self.size = size if size else self.size
    self.ip = ip if ip else self.ip
    now = DateTime.DateTime()
    timesindex = now.get_date_as_human_creadeble()
    # Query shape: (Monitor origin/<ident> OR Monitor 4500) OR
    # (thomson host + ident, OR Thomson-TOOL + self.ip, OR any LiveStream),
    # all restricted to the last `time` window.
    query = {
        "sort": [{"@timestamp": "desc"}],
        "from": 0,
        "size": self.size,
        "_source": ["message"],
        "query": {
            "bool": {
                "must": [{
                    "bool": {
                        "should": [{
                            # Branch 1: Monitor lines — either mentioning
                            # 'origin' and the ident, or the literal 4500.
                            "bool": {
                                "should": [{
                                    "bool": {
                                        "must": [{
                                            "match": {"ident.keyword": "Monitor"}
                                        }, {
                                            "match": {"message": "origin"}
                                        }, {
                                            "match": {"message": "%s" % (ident)}
                                        }]
                                    }
                                }, {
                                    "bool": {
                                        "must": [{
                                            "match": {"ident.keyword": "Monitor"}
                                        }, {
                                            "match": {"message": "4500"}
                                        }]
                                    }
                                }]
                            }
                        }, {
                            # Branch 2: thomson-host lines with this ident,
                            # Thomson-TOOL lines mentioning our ip, or any
                            # LiveStream line.
                            "bool": {
                                "should": [{
                                    "bool": {
                                        "must": [{
                                            "match": {"host.keyword": "thomson"}
                                        }, {
                                            "match": {"ident": "%s" % (ident)}
                                        }]
                                    }
                                }, {
                                    "bool": {
                                        "must": [{
                                            "match": {"ident.keyword": "Thomson-TOOL"}
                                        }, {
                                            "match": {"message": "%s" % (self.ip)}
                                        }]
                                    }
                                }, {
                                    "terms": {"ident.keyword": ["LiveStream"]}
                                }]
                            }
                        }]
                    }
                }],
                "filter": {
                    "range": {
                        "@timestamp": {
                            "gte": "now-%s" % (time),
                            "lte": "now"
                        }
                    }
                }
            }
        }
    }
    # Daily index name uses dots, e.g. logstash-2020.01.02.
    index = "logstash-%s" % (timesindex.replace('-', '.'))
    elast = Elasticsearch([{
        'host': self.host,
        'port': self.port
    }]).search(
        index=index,
        body=query,
    )
    return elast['hits']['hits']