def _executeInternal(self):
    from common import SystemStatusManager
    SystemStatusManager.setKafkaComponentStatus(MediationStatusProducer.name,
                                                MediationStatusProducer.instance().status,
                                                AppConfig.getCurrentTime())
    SystemStatusManager.setKafkaComponentStatus(MediationDataConsumer.name,
                                                MediationDataConsumer.instance().status,
                                                AppConfig.getCurrentTime())
def _getLatestCompleteTicTime(self, time):
    granularity = self.granularity
    minuteOfDay = time.hour * 60 + time.minute
    minutesOverInterval = minuteOfDay % granularity
    if granularity == 60:
        latestClosedIntervalTime = time - datetime.timedelta(minutes=minutesOverInterval + granularity)
        latestClosedIntervalTime = latestClosedIntervalTime.replace(second=0, microsecond=0)
        return util.getTicTime(latestClosedIntervalTime.astimezone(AppConfig.getTimezone()), granularity)
    else:
        # For non-hourly granularities, step back on the naive wall-clock time
        # and localize the result afterwards.
        naiveTime = time.replace(tzinfo=None)
        latestClosedIntervalNaiveTime = naiveTime - datetime.timedelta(minutes=minutesOverInterval + granularity)
        latestClosedIntervalNaiveTime = latestClosedIntervalNaiveTime.replace(second=0, microsecond=0)
        localized = AppConfig.getTimezone().localize(latestClosedIntervalNaiveTime).astimezone(AppConfig.getTimezone())
        return util.getTicTime(localized, granularity)
def execute(self):
    self.prepare()
    from common import AppConfig
    result = list(self.coll.aggregate(self.query))
    resultDict = {}
    for i in result:
        group = i["_id"]
        date = datetime.datetime(group["year"], group["month"], group["dayOfMonth"],
                                 int(group["hour"]), int(group["minute"]))
        date = AppConfig.getTimezone().localize(date)
        i["_id"] = date
        resultDict[date] = i
    granularityDelta = datetime.timedelta(minutes=self.granularity)
    # Pad missing ticks with zeroed metrics so the result is a dense series.
    nullObject = {}
    for metric in self.metrics:
        nullObject[metric] = 0
    for date in self.dates:
        d = date
        while d < date + datetime.timedelta(days=1):
            if d not in resultDict:
                # TODO: check if some results can have just a few of the metrics.
                resultDict[d] = {**nullObject, "_id": d}
            d += granularityDelta
    result = sorted(resultDict.values(), key=lambda x: x["_id"])
    return result
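# A minimal standalone sketch of the gap-filling step above (hypothetical
# example; metric names and granularity are illustrative, not taken from the
# real configuration). Any tick missing from the aggregation result is padded
# with zeroed metrics, so downstream consumers always receive a dense,
# evenly spaced series.
import datetime

granularity = 60
nullObject = {"tickDifference": 0, "dayDifference": 0}
start = datetime.datetime(2017, 3, 26)
resultDict = {start: {"_id": start, "tickDifference": 5, "dayDifference": 2}}
d = start
while d < start + datetime.timedelta(hours=3):
    if d not in resultDict:
        resultDict[d] = {**nullObject, "_id": d}
    d += datetime.timedelta(minutes=granularity)
print(sorted(resultDict))  # 00:00, 01:00 and 02:00 are all present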
def init():
    global db, dataDb, _client
    _client = MongoClient("mongodb://localhost/", tz_aware=True)
    mongoConfig = AppConfig.getMongoConfig()
    _client.admin.authenticate(mongoConfig["user"], mongoConfig["password"],
                               mechanism='SCRAM-SHA-1')
    dataDb = _client["mediation_data"]
    db = _client["mediation"]
def run(self):
    try:
        # Drain the queue; queue.Empty from get_nowait() ends the loop
        # (assumes the standard queue module is imported at module level).
        while True:
            job = self.queue.get_nowait()
            FlowAnalyzer(job).run(AppConfig.getCurrentTime())
    except queue.Empty:
        pass
    except Exception:
        logging.exception("Flow analysis failed.")
def getTicTime(time, granularity):
    minuteOfDay = time.hour * 60 + time.minute
    minutesOverInterval = minuteOfDay % granularity
    # Floor to the start of the current interval on the naive wall-clock time,
    # then localize the result in the application timezone.
    naiveTime = time.replace(tzinfo=None)
    latestClosedIntervalNaiveTime = naiveTime - datetime.timedelta(minutes=minutesOverInterval)
    latestClosedIntervalNaiveTime = latestClosedIntervalNaiveTime.replace(second=0, microsecond=0)
    return AppConfig.getTimezone().localize(latestClosedIntervalNaiveTime)
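# Usage sketch for getTicTime (hypothetical, standalone; assumes the application
# timezone is a pytz zone such as Europe/Prague, which AppConfig presumably
# wraps). Flooring 10:37:12 to a 15-minute granularity yields 10:30:00.
import datetime
import pytz

tz = pytz.timezone("Europe/Prague")
time = tz.localize(datetime.datetime(2017, 6, 1, 10, 37, 12))
granularity = 15
minutesOverInterval = (time.hour * 60 + time.minute) % granularity  # 637 % 15 == 7
floored = time.replace(tzinfo=None) - datetime.timedelta(minutes=minutesOverInterval)
floored = floored.replace(second=0, microsecond=0)
print(tz.localize(floored))  # 2017-06-01 10:30:00+02:00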
def _setStatusMetadata(self, flowStatus, flow):
    ticTime = flowStatus["ticTime"]
    if not flow["options"]["enabled"]:
        return {"status": status.DISABLED}
    # Mark as N_A once the flow has gone unanalyzed for two granularity
    # intervals (plus a 3-minute slack per interval).
    granDelta = datetime.timedelta(minutes=flow["options"]["granularity"] + 3)
    if ticTime + 2 * granDelta < AppConfig.getCurrentTime():
        return {"status": status.NA}
    return flowStatus
def sendEmail(self, component, body):
    emailConfig = IntegrationConfig.getEmailConfig()
    if emailConfig is None:
        return
    if not self.canSendEmail(component):
        return
    msg = MIMEText(body + "\ntime:" + util.dateToTimeString(AppConfig.getCurrentTime())
                   + "\ncomponent:" + component)
    msg['Subject'] = 'Mediation monitoring: ' + component
    msg['From'] = emailConfig["from"]
    msg['To'] = emailConfig["to"]
    s = smtplib.SMTP(emailConfig["smtpHostname"])
    s.login(emailConfig["login"], emailConfig["password"])
    s.send_message(msg)
    self.lastComponentEmailTime[component] = AppConfig.getCurrentTime()
    s.quit()
def shouldSchedule(self, flow):
    granularity = flow["options"]["granularity"]
    lastExecution = self.lastExecutions[flow["gName"]]
    if lastExecution["status"] == status.NA:
        return True
    lastTicTime = lastExecution["ticTime"]
    return lastTicTime < AppConfig.getCurrentTime() - datetime.timedelta(minutes=2 * granularity)
def execute(self):
    timezone = AppConfig.getTimezone()

    def convertToTimezone(x):
        x["_id"] = x["anyDate"].astimezone(timezone)
        return x

    res = list(map(convertToTimezone, self._executeQuery()))
    resultDict = self.aggregate(res)
    return resultDict
def execute(self):
    try:
        logging.debug("Running " + self.name + " on thread " + str(threading.get_ident()))
        self._executeInternal()
        from common import SystemStatusManager
        SystemStatusManager.saveExecutorSuccessfulExecution(self.name, AppConfig.getCurrentTime())
    except Exception:
        logging.exception("Executing failed.")
def logStatusChangeEvent(flow, message, ticTime, newStatus):
    currentTime = AppConfig.getCurrentTime()
    obj = {
        "flowName": flow["name"],
        "lobName": flow["lobName"],
        "country": flow["country"],
        "time": currentTime,
        "message": message,
        "newStatus": newStatus,
        "ticTime": ticTime
    }
    logging.debug("flow: " + flow["gName"] + " message: " + message)
    mongo.events().insert_one(obj)
def _executeMongoAggregateQuery(self):
    result = list(self.coll.aggregate(self.query))
    resultDict = {}
    from common import AppConfig
    appTimezone = AppConfig.getTimezone()
    for i in result:
        # Group keys come back in UTC; rebuild the datetime as UTC first and
        # only then convert it to the application timezone.
        group = i["_id"]
        utcDate = datetime.datetime(group["year"], group["month"], group["dayOfMonth"],
                                    int(group["hour"]), int(group["minute"]), 0, 0, utc)
        date = utcDate.astimezone(appTimezone)
        i["anyDate"] = i["anyDate"].astimezone(appTimezone)
        i["_id"] = date
        resultDict[date] = i
    return resultDict
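# Sketch of the group-key reconstruction above (hypothetical, standalone).
# Mongo $group date parts are in UTC, so the fields are reassembled as a UTC
# datetime first and only then converted to the application timezone; on a
# DST-change day the local result can even land on the following date.
import datetime
import pytz

group = {"year": 2017, "month": 3, "dayOfMonth": 26, "hour": 23, "minute": 0}
utcDate = datetime.datetime(group["year"], group["month"], group["dayOfMonth"],
                            group["hour"], group["minute"], 0, 0, pytz.utc)
print(utcDate.astimezone(pytz.timezone("Europe/Prague")))  # 2017-03-27 01:00:00+02:00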
def getNextTic(d, granularity):
    """Return the next tick after `d`, re-snapped to a tick boundary when the
    step crosses a DST offset change."""
    prevOffsetSeconds = d.utcoffset().total_seconds()
    from common import AppConfig
    newTic = (d + datetime.timedelta(minutes=granularity)).astimezone(AppConfig.getTimezone())
    newOffsetSeconds = newTic.utcoffset().total_seconds()
    if prevOffsetSeconds != newOffsetSeconds:
        if prevOffsetSeconds < newOffsetSeconds:
            # Spring forward: floor to the latest tick boundary.
            newTic = getTicTime(newTic, granularity)
        else:
            # Fall back: round up to the next tick boundary.
            newTic = roundToNextTicTime(newTic, granularity)
    return getTicTime(newTic, granularity)
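# A sketch of the DST edge getNextTic guards against (hypothetical, standalone;
# assumes a pytz timezone). Adding a fixed timedelta across the spring-forward
# boundary changes the UTC offset, so the result must be re-snapped to a tick.
import datetime
import pytz

tz = pytz.timezone("Europe/Prague")
beforeJump = tz.localize(datetime.datetime(2017, 3, 26, 1, 0))  # +01:00
afterJump = tz.normalize(beforeJump + datetime.timedelta(minutes=60))
print(beforeJump.utcoffset(), afterJump.utcoffset())  # 1:00:00 2:00:00
print(afterJump)  # 2017-03-26 03:00:00+02:00 (the wall clock skipped 02:00)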
def kafkaServers():
    return AppConfig.getIntegrationConfig().get("kafka", {}).get("servers", None)
def getEmailConfig():
    return AppConfig.getIntegrationConfig().get("email", None)
def inputTopic():
    return AppConfig.getIntegrationConfig().get("kafka", {}).get("inputTopic", None)
def jsStringToDate(string):
    return AppConfig.getTimezone().localize(dateutil.parser.parse(string))
def canSendEmail(self, component):
    lastTime = self.lastComponentEmailTime.get(component, None)
    if lastTime is None:
        return True
    return AppConfig.getCurrentTime() - lastTime > MIN_DELAY
def threadsCount():
    return AppConfig.getMediationConfig().get("threadsCount", 1)
def _analyzeFlow(self, flow, lastExecution):
    analyzer = FlowAnalyzer(flow)
    analyzer.run(AppConfig.getCurrentTime())
    previousStatus = lastExecution["status"]
    newStatus = analyzer.status
    self.statusManager.saveStatus(flow, previousStatus, newStatus,
                                  analyzer.difference, analyzer.ticTime)
def __init__(self):
    super().__init__(DiscoverFlowsExecutor.name, DiscoverFlowsExecutor.interval)
    self.toDate = AppConfig.getCurrentTime()
    self.fromDate = self.toDate - datetime.timedelta(days=7)
def _statusIsExpired(time, maxSeconds=60 * 5):
    return AppConfig.getCurrentTime() - time > datetime.timedelta(seconds=maxSeconds)
resulttic["expected"] = tic["expected"] resulttic["dayAverage"] = tic["dayAverage"] resultData.append(resulttic) self.metrics = [ "tickDifference", "dayDifference", "expected", "dayAverage" ] return resultData if __name__ == "__main__": gran = 120 flow = { 'lobName': 'ACI', 'dataPath': 'CZ.ACI.inputs.GSM', 'country': 'CZ', 'gName': 'CZ_ACI_GSM', 'name': 'GSM', 'options': { 'softAlarmLevel': 0.75, 'hardAlarmLevel': 0.51, 'minimalExpectation': 1, 'enabled': True, 'difference': 'day', 'granularity': 480 }, 'type': 'inputs' } dates = [ AppConfig.getTimezone().localize(datetime.datetime(2017, 3, 26, 0, 0)) ] FlowLevelDateRangeQuery(flow, dates, gran).execute()
def getNextDay(d):
    dayDelta = datetime.timedelta(days=1)
    return AppConfig.getTimezone().localize(d.replace(tzinfo=None) + dayDelta)
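# A sketch of why getNextDay adds the day on the naive wall-clock time
# (hypothetical, standalone; assumes a pytz timezone). On a DST-change day a
# plain timedelta drifts by the skipped hour, while re-localizing keeps
# midnight at midnight.
import datetime
import pytz

tz = pytz.timezone("Europe/Prague")
d = tz.localize(datetime.datetime(2017, 3, 26, 0, 0))  # spring-forward day
print(tz.localize(d.replace(tzinfo=None) + datetime.timedelta(days=1)))
# 2017-03-27 00:00:00+02:00 (still midnight)
print(tz.normalize(d + datetime.timedelta(days=1)))
# 2017-03-27 01:00:00+02:00 (plain timedelta lands an hour late)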
import csv
import pytz

import mediation.data_receiver.DataReceiverConfig as config
import mediation.data_receiver.DataReceiverUtil as util
from common import AppConfig
from .data_insertor import DataInsertor

# util.stringToDate already returns a datetime localized to the application
# timezone, so no further tzinfo manipulation is needed here; calling
# replace(tzinfo=...) with a pytz zone would silently attach a wrong offset.
LATEST_DATE = util.stringToDate("20.02.16 00:00:00")


def isValidFlow(flow):
    return (flow["date"] > LATEST_DATE
            and flow["country"] in config.COUNTRIES
            and flow["lob"] not in config.IGNORE_LOBS)


class FileParser:
    """Deprecated."""

    def __init__(self):
        self.batchSize = 100000

    def parseInputs(self, inputFile):
        inputsList = []
        dataInsertor = DataInsertor()
        with open(inputFile, 'r') as csvfile:
            spamreader = csv.reader(csvfile, delimiter=';', quotechar='"')
            for row in spamreader:
                try:
                    input = self.createInputRow(row)
                    if isValidFlow(input):
def stringToDate(dateTimeStr):
    return AppConfig.getTimezone().localize(datetime.strptime(dateTimeStr, "%d.%m.%y %H:%M:%S"))
def currentTime():
    return jsonify({"currentTime": util.dateToTimeString(AppConfig.getCurrentTime())})
def handle_invalid_usage(error):
    """Exception handler which returns JSON with a message and status code."""
    if isinstance(error, StatusException):
        response = jsonify({"message": error.message})
        response.status_code = error.status
    else:
        response = jsonify({"message": str(error)})
        response.status_code = 500
        traceback.print_exc()
    return response


from mediation.api.data import dataAPI
from mediation.api.config import configAPI
from mediation.api.status import lobsStatus
from mediation.api.flows import flowsAPI
from common.api import appAPI, StatusException
from zookeeper.api import zookeeperAPI

# Registration of all endpoints
app.register_blueprint(dataAPI, url_prefix="/mediation/data")
app.register_blueprint(configAPI, url_prefix="/mediation/config")
app.register_blueprint(lobsStatus, url_prefix="/mediation/status")
app.register_blueprint(flowsAPI, url_prefix="/mediation/flows")
app.register_blueprint(appAPI, url_prefix="/app")
app.register_blueprint(zookeeperAPI, url_prefix="/zookeeper")

app.run(debug=AppConfig.getFlaskConfig().get("debug", False),
        host="0.0.0.0", port=5000, threaded=True)
def _executeInternal(self):
    clusterStatus = self.analyzer.run()
    self.checkStatusChange(StatusManager.getClusterStatus(), clusterStatus)
    StatusManager.saveClusterStatus(clusterStatus, AppConfig.getCurrentTime())