def __load_dynatrace_data(self, start, end: datetime):
    """Fetch Dynatrace problems between *start* and *end* and persist them.

    Problems are processed in ascending order of their end time. Each
    problem's epoch timestamps are rewritten as ISO-8601 strings before
    being handed to the CSV gateway, keyed by the original (raw) start
    timestamp.
    """
    feed = self.dynatrace_gateway.get_problems(start, end)
    by_end_time = sorted(
        feed["result"]["problems"],
        key=lambda p: DatetimeUtils.convert_from_timestamp(int(p["endTime"])),
    )
    for problem in by_end_time:
        # Key on the raw epoch start time, captured before conversion.
        entry_key = "{}".format(problem["startTime"])
        self.logger.info("problem {}".format(problem["displayName"]))
        for field in ("startTime", "endTime"):
            problem[field] = DatetimeUtils.convert_from_timestamp(
                int(problem[field])
            ).isoformat()
        self.csv_gateway.create_problem_entry(entry_key, problem)
def get_service_methods_totals(self, start, end):
    """Query Dynatrace for the total request count per service method.

    Builds a COUNT-aggregated timeseries query over [start, end] for the
    configured service tag and returns the decoded JSON response.

    Raises:
        ValueError: with the HTTP status code when the API does not
            answer 200.
    """
    url = self.host + (
        "timeseries/com.dynatrace.builtin:servicemethod.requests"
        "?includeData=true&aggregationType=COUNT"
        "&startTimestamp={}&endTimestamp={}"
        "&queryMode=TOTAL&tag={}&includeParentIds=true"
    ).format(
        DatetimeUtils.convert_to_timestamp(start),
        DatetimeUtils.convert_to_timestamp(end),
        self.service_tag,
    )
    headers = {"Authorization": self.token, "accept": "application/json"}
    # NOTE(review): verify=False disables TLS certificate checking — confirm
    # this is intentional for the target environment.
    response = requests.get(url, headers=headers, verify=False, proxies=PROXY_DIR)
    if response.status_code != 200:
        raise ValueError(response.status_code)
    return response.json()
def get_problems(self, start, end):
    """Fetch the CLOSED problem feed for the configured tag.

    Converts *start*/*end* to epoch timestamps, queries the Dynatrace
    problem feed, and returns the decoded JSON payload.

    Raises:
        ValueError: with the HTTP status code on any non-2xx response.
    """
    url = self.host + "problem/feed?tag={}&status=CLOSED&startTimestamp={}&endTimestamp={}".format(
        self.service_tag,
        DatetimeUtils.convert_to_timestamp(start),
        DatetimeUtils.convert_to_timestamp(end),
    )
    headers = {"Authorization": self.token, "accept": "application/json"}
    response = requests.get(url, headers=headers, verify=False, proxies=PROXY_DIR)
    # Accept any 2xx (this endpoint's check is looser than the 200-only
    # checks used by the timeseries calls).
    if response.status_code > 299:
        raise ValueError(response.status_code)
    return response.json()
def get_service_method_latency(self, start, end, entity, percentile):
    """Query Dynatrace for a response-time percentile of one service method.

    Args:
        start, end: query window (converted via DatetimeUtils to epoch).
        entity: Dynatrace entity id of the service method.
        percentile: percentile to request; fractions (< 1) are scaled to
            the 0-100 range, then truncated to int.

    Returns:
        Decoded JSON response from the timeseries endpoint.

    Raises:
        ValueError: with the HTTP status code when the API does not
            answer 200.
    """
    if percentile < 1:
        percentile = percentile * 100
    percentile = int(percentile)
    start_stamp = DatetimeUtils.convert_to_timestamp(start)
    end_stamp = DatetimeUtils.convert_to_timestamp(end)
    # BUG FIX: the format arguments were previously
    # (start, end, entity, percentile, service_tag), which filled the
    # tag= placeholder with the percentile and percentile= with the tag.
    # Arguments now match the placeholder order in the URL template.
    target_url = self.host + (
        "timeseries/com.dynatrace.builtin:servicemethod.responsetime"
        "?includeData=true&aggregationType=PERCENTILE"
        "&startTimestamp={}&endTimestamp={}"
        "&queryMode=TOTAL&entity={}&tag={}&percentile={}"
        "&includeParentIds=true"
    ).format(start_stamp, end_stamp, entity, self.service_tag, percentile)
    response = requests.get(
        target_url,
        headers={"Authorization": self.token, "accept": "application/json"},
        verify=False,
        proxies=PROXY_DIR,
    )
    if response.status_code != 200:
        raise ValueError(response.status_code)
    return response.json()
def get_problem_anchor(self):
    """Return the datetime from which problem collection should resume.

    If the problem directory already holds files, the anchor is the day
    (midnight) of the lexicographically greatest filename, interpreted as
    an epoch timestamp with its ".json" suffix stripped. Otherwise the
    anchor defaults to January 1st of the current year.
    """
    if not os.listdir(self.current_problem_dir):
        today = datetime.now()
        return datetime(today.year, 1, 1, 0, 0, 0)
    _, _, files = next(walk(self.current_problem_dir))
    newest = max(files).replace(".json", "")
    stamp = DatetimeUtils.convert_from_timestamp(int(newest))
    # Truncate to midnight of that day.
    return datetime(stamp.year, stamp.month, stamp.day)
def parse(services: dict, target, prefix):
    """Turn a Dynatrace timeseries dataResult into service-method entities.

    Args:
        services: shared cache of ServiceEntity objects keyed by entity id;
            mutated in place as new services are discovered.
        target: raw API payload containing a "dataResult" mapping with
            "entities" and "dataPoints".
        prefix: forwarded to each new ServiceMethodEntity.

    Returns:
        List of ServiceMethodEntity objects that received at least one
        data point.

    Raises:
        ValueError: if the same timestamp appears twice for one method.
    """
    data = target["dataResult"]

    # First pass: register every method and any not-yet-seen service.
    methods = {}
    for entity_id, label in data["entities"].items():
        if "SERVICE_METHOD" in entity_id:
            method = ServiceMethodEntity(prefix)
            method.name = label
            method.method_id = entity_id
            methods[entity_id] = method
        elif "SERVICE" in entity_id and entity_id not in services:
            services[entity_id] = ServiceEntity(entity_id, label)

    # Second pass: attach data points. Keys look like "SERVICE-x, METHOD-y".
    for pair_key, samples in data["dataPoints"].items():
        service_id, method_id = pair_key.split(",")
        method = methods[method_id.strip()]
        method.set_service(services[service_id])
        for sample in samples:
            # sample[0] = epoch timestamp, sample[1] = value (skip falsy).
            if not sample[1]:
                continue
            date_key = DatetimeUtils.convert_from_timestamp(sample[0])
            if date_key in method.points:
                raise ValueError('duplicate entry')
            method.points[date_key] = sample[1]

    return [m for m in methods.values() if m.points]