Code Example #1
def test_stringToDate(self):
    self.assertEqual(util.stringToDate("01.02.2017"),
                     UtilTest.localizeTime(datetime.datetime(2017, 2, 1)))
    self.assertEqual(util.stringToDate("01.03.2017"),
                     UtilTest.localizeTime(datetime.datetime(2017, 3, 1)))
    self.assertEqual(util.stringToDate("01.05.2017"),
                     UtilTest.localizeTime(datetime.datetime(2017, 5, 1)))
    self.assertEqual(util.stringToDate("01.06.2017"),
                     UtilTest.localizeTime(datetime.datetime(2017, 6, 1)))
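
These assertions pin down the contract of util.stringToDate: it takes a "DD.MM.YYYY" string and returns a timezone-aware datetime equal to the localized naive date. A minimal sketch of such a function is shown below; the pytz dependency and the Europe/Prague zone are assumptions, not taken from the project.

import datetime
import pytz

_TZ = pytz.timezone("Europe/Prague")  # assumed zone, not from the project

def stringToDate(string):
    # Parse "DD.MM.YYYY" and attach the local time zone.
    naive = datetime.datetime.strptime(string, "%d.%m.%Y")
    return _TZ.localize(naive)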
Code Example #2
def __init__(self, date, flows, granularity=0):
  self.flows = flows
  self.flow = flows[0]
  self.date = date
  self.granularity = granularity
  # Collect comparable past days to compare the current traffic against.
  self.dates = SimilarPastDaysFinder(self.flow).findSimilarPastDays(date)
  country = MediationConfig.getCountryByName(self.flow["country"])
  self.options = self.flow["options"]
  self.adjustment = None
  lazyDayDifference = self.flow["options"].get("lazyDayDifference", None)
  if lazyDayDifference is not None:
    # On a configured "lazy day" the expected difference is adjusted.
    for lazyDayStr in country["lazyDays"]:
      lazyDay = util.stringToDate(lazyDayStr)
      if lazyDay.date() == date.date():
        self.adjustment = lazyDayDifference
        break
Code Example #3
    (1, 1),
    (17, 4),
    (14, 4),
    (1, 5),
    (8, 5),
    (5, 7),
    (6, 7),
    (28, 9),
    (28, 10),
    (17, 11),
    (24, 12),
    (25, 12),
    (26, 12),
]

INITIAL_DATE = util.stringToDate("01.08.2016")

IGNORE_DAYS = [(24, 12), (31, 12), (1, 1)]


def _getPastHolidays(lobName, date, days):
    # Walk backwards from `date`, collecting past days that are fixed-date
    # holidays and not in IGNORE_DAYS, until enough days are found or the
    # start of the data set (INITIAL_DATE) is reached.
    resultDays = []
    dayToTest = date
    while len(resultDays) <= days and dayToTest > INITIAL_DATE:
        dayToTest -= datetime.timedelta(days=1)
        if _isHoliday(dayToTest) and _isUsualDay(dayToTest):
            resultDays.append(dayToTest)

    return resultDays
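
The loop above depends on two helpers that this snippet does not show. The sketch below is a guess at their shape; HOLIDAYS is a hypothetical name standing in for the unnamed (day, month) list at the top of the example.

def _isHoliday(date):
    # Fixed-date holiday such as (1, 1) or (24, 12); HOLIDAYS is hypothetical.
    return (date.day, date.month) in HOLIDAYS

def _isUsualDay(date):
    # Exclude the year-end days listed in IGNORE_DAYS.
    return (date.day, date.month) not in IGNORE_DAYS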
Code Example #4
File: data.py Project: froxCZ/t-mobile-monitoring
def dataQueryV2():
    """
Endpoint for getting traffic data.
POST body:
{
  "from":"01.02.2017",
  "to":"15.02.2017",
  "country":"CZ",
  "lobName":"ACI",
  "flowName":["GSM"],
  "forwards":[],
  "granularity":0
}

Response:
{
  "data": [
    {
      "GSM": 188385532,
      "_id": "2017-02-01T00:00:00+01:00",
      "dayAverage": 1162595297.6666667,
      "dayDifference": 1.023,
      "expected": 161627916,
      "status": "OK",
      "tickDifference": 1.166
    },
    ...
  ],
   "metadata": {
    "flowName": "GSM",
    "granularity": 480,
    "metrics": {
      "GSM": {
        "type": "traffic"
      },
      "dayAverage": {
        "type": "traffic"
      },
      "dayDifference": {
        "type": "difference"
      },
      "expected": {
        "type": "traffic"
      },
      "status": {
        "type": "other"
      },
      "tickDifference": {
        "type": "difference"
      }
    }
  }
}
"""
    searchParam = request.get_json()
    fromDate = util.stringToDate(searchParam["from"])
    toDate = util.stringToDate(searchParam["to"])
    country = searchParam["country"]
    lobName = searchParam["lobName"]
    lobConfig = MediationConfig.getLobWithCountry(country, lobName)
    flows = []
    granularity = searchParam.get("granularity", 0)
    flows.append(lobConfig["flows"][searchParam["flowName"]])
    response = {}

    # Query the traffic data and add to metric list
    mongoQuery = data_query.DateRangeGroupQuery(fromDate,
                                                toDate,
                                                flows,
                                                granularity=granularity)
    data = mongoQuery.execute()
    metrics = {}
    metricsList = []
    flowName = mongoQuery.metrics[0]
    metricsList.append(flowName)

    metadata = mongoQuery.metadata
    if len(flows) == 1:
        # Run outage detection analysis
        metric = metricsList[0]
        flowLevelQuery = data_query.FlowLevelDateRangeQuery(
            fromDate, toDate, flows, metadata["granularity"], data)
        flowLevelData = flowLevelQuery.execute()
        data = util.merge2DateLists(flowLevelData, None, data, None)
        metricsList.extend(flowLevelQuery.metrics)
        outageQuery = data_query.OutageDateRangeQuery(fromDate, toDate,
                                                      flows[0],
                                                      metadata["granularity"])
        outageQuery.setPrecomputedData(data, metric)
        outageList = outageQuery.execute()
        data = util.merge2DateLists(outageList, [outageQuery.metric], data,
                                    None)
        metricsList.append(outageQuery.metric)

    # Create metadata info describing the type of each metric
    for metric in metricsList:
        if metric == flowName or metric in ("dayAverage", "expected"):
            metricType = "traffic"
        elif "Difference" in metric:
            metricType = "difference"
        else:
            metricType = "other"
        metrics[metric] = {"type": metricType}
    response["data"] = data
    response["metadata"] = {
        **{
            "metrics": metrics
        },
        **metadata, "flowName": flowName
    }
    return jsonify(response)
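
A possible client call for the endpoint documented in the docstring above. The URL is an assumption (the Flask route is not shown in this snippet), and flowName is sent as a plain string because the handler uses it directly as a key into lobConfig["flows"], even though the docstring example shows a list.

import requests  # sketch only

body = {
    "from": "01.02.2017",
    "to": "15.02.2017",
    "country": "CZ",
    "lobName": "ACI",
    "flowName": "GSM",
    "granularity": 0
}
resp = requests.post("http://localhost:5000/dataQuery", json=body)  # assumed URL
payload = resp.json()
print(payload["metadata"]["flowName"], len(payload["data"]))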
Code Example #5
    dates = list(
        map(lambda x: x.strftime("%d.%m %H:%M"),
            util.dateDataListToList(data, "_id")))
    data1 = util.dateDataListToList(data, flowName)
    expected = util.dateDataListToList(data, "expected")
    dataTicks = []
    dataTickPos = []
    # Keep every date as a tick; positions are the running indices.
    for i in range(len(dates)):
        dataTicks.append(dates[i])
        dataTickPos.append(i)
    return data1, expected, dataTicks, dataTickPos


fromDate = util.stringToDate("30.01.2017")
toDate = util.stringToDate("12.02.2017")
# lobName = "ICG"
# flowName = "CENTREX01"
lobName = "GSM"
flowName = "MSSBRN1B"
lob = MediationConfig.getLobWithCountry("CZ", lobName)
flow = lob["flows"][flowName]
flows = [flow]
granularity = 240

data = []
expected = []
ticks = []
pos = []
d, e, t, p = getData(flows, util.stringToDate("28.09.2016"),
Code Example #6
# lob1Data = util.dateDataListToList(q1.execute(), flowName)
# lob2Data = util.dateDataListToList(q2.execute(), flowName)
# lob3Data = util.dateDataListToList(q3.execute(), flowName)
#
# ticks = util.dateDataListToList(q1.execute(), "_id")
# plt.figure(figsize=(12, 6))
# plt.plot(lob2Data, color="blue", label="21.10")
# plt.plot(lob1Data, color="red", label="27.10")
# plt.plot(lob3Data, color="green", label="17.11")
#
# #plt.xticks(ticks, rotation='vertical')
# plt.title(flowName)
# plt.legend(loc='upper left')
# plt.show()

fromDate = util.stringToDate("02.03.2017")
toDate = util.stringToDate("03.03.2017")
lobName = "GSM"
flowName = "MSSBRN1A"
lob = MediationConfig.getLobWithCountry("CZ", lobName)
flow = lob["flows"][flowName]
flows = [flow]
granularity = 5

response = {}
mongoQuery = data_query.DateRangeGroupQuery(fromDate,
                                            toDate,
                                            flows,
                                            granularity=granularity)
data = mongoQuery.execute()
metrics = {}
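
A possible continuation of this snippet, modelled on the commented-out plotting code above: turn the queried date list into a plain value list and plot it. This is a sketch, not part of the original file.

import matplotlib.pyplot as plt

values = util.dateDataListToList(data, flowName)
plt.figure(figsize=(12, 6))
plt.plot(values, color="blue", label=flowName)
plt.title(flowName)
plt.legend(loc='upper left')
plt.show()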
Code Example #7
import matplotlib.pyplot as plt

from common import util
from mediation import MediationConfig
from mediation import data_query

fromDate = util.stringToDate("21.12.2016")
toDate = util.stringToDate("30.12.2016")
lobName = "GSM"
flowName = "MSSBRN1A"
lob = MediationConfig.getLobWithCountry("CZ", lobName)
flow = lob["flows"][flowName]
flows = [flow]
granularity = 60

response = {}
mongoQuery = data_query.DateRangeGroupQuery(fromDate,
                                            toDate,
                                            flows,
                                            granularity=granularity)
data = mongoQuery.execute()
metrics = {}
metricsList = mongoQuery.metrics
metadata = mongoQuery.metadata
if len(flows) == 1:
    metric = metricsList[0]
    flowLevelQuery = data_query.FlowLevelDateRangeQuery(
        fromDate, toDate, flows, metadata["granularity"], data)
    flowLevelData = flowLevelQuery.execute()
    data = util.merge2DateLists(flowLevelData, None, data, None)
    metricsList.extend(flowLevelQuery.metrics)