class Resources:

    def __init__(self, access_token, from_email):
        self.schedules = None
        self.teams = None
        self.session = APISession(access_token, default_from=from_email)

    def get_schedules(self):
        if self.schedules is None:
            log.info("Retrieving all schedules on the account. This could take several minutes.")
            print("Retrieving all schedules on the account. This could take several minutes.")
            self.schedules = self.session.list_all('schedules')
            log.info("Retrieving a list of users for each schedule. This could take several minutes.")
            for schedule in self.schedules:
                schedule['details'] = self.session.rget(schedule.get('self'))
        return self.schedules

    def get_teams(self):
        if self.teams is None:
            log.info("Retrieving all teams on the account. This could take several minutes.")
            print("Retrieving all teams on the account. This could take several minutes.")
            self.teams = self.session.list_all('teams')
            log.info("Retrieving a list of users for each team. This could take several minutes.")
            for team in self.teams:
                team['users'] = self.session.list_all('users', params={'team_ids[]': team.get('id')})
        return self.teams
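# A minimal usage sketch for the Resources class above (added for illustration, not part
# of the original source). Assumes pdpyras is installed; the token and e-mail are
# placeholders, and `log` is the module-level logger the class writes to.
import logging

from pdpyras import APISession

log = logging.getLogger(__name__)

resources = Resources('REST_API_TOKEN', 'user@example.com')  # placeholder credentials
schedules = resources.get_schedules()         # first call paginates through the REST API
schedules_cached = resources.get_schedules()  # later calls reuse the cached list
teams = resources.get_teams()                 # same lazy-fetch pattern for teams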
def service_iter_print(api_key):
    api_session = APISession(api_key)
    # all_services = list(api_session.iter_all("services"))
    count = 1
    for service in api_session.iter_all("services"):
        print(count, service)
        count += 1
def test_pdpyras_route():
    session = APISession(ENV.get("PAGERDUTY_REST_API_KEY"))
    # Using requests.Session.get:
    response = session.get('/users?total=true')
    if response.ok:
        total_users = response.json()['total']
        return f"Account has {total_users} users."
def service_iter_selected(api_key, service_ids):
    api_session = APISession(api_key)
    all_services = []
    for service_id in service_ids:
        service = api_session.rget('/services/%s' % service_id)
        all_services.append(service)
    return all_services
def is_token_valid(api_token):
    session = APISession(api_token)
    try:
        # Request a single user as a lightweight check that the token is accepted.
        response = session.get("/users?limit=1&offset=0")
        if response.status_code == 200:
            return True
        return False
    except PDClientError:
        return False
def is_ep_valid(api_token, ep_id):
    session = APISession(api_token)
    try:
        response = session.rget("/escalation_policies/" + ep_id)
        if response:
            return True
        return False
    except PDClientError:
        return False


def is_service_valid(api_token, service_id):
    session = APISession(api_token)
    try:
        response = session.rget("/services/" + service_id)
        if response:
            return True
        return False
    except PDClientError:
        return False
def create(self, user_token, log_file):
    basicConfig(filename=log_file, level=DEBUG,
                format='%(asctime)s %(levelname)-8s %(message)s',
                datefmt='%Y-%m-%d %H:%M:%S')
    lf = open(log_file, 'a+')
    time_now = strftime('%Y-%m-%dT%H:%M:%S-00', gmtime(int(time())))
    data = {
        "name": self.name,
        "description": self.description + time_now,
        "point_of_contact": self.point_of_contact,
        "response_play": self.response_play,
        self.relationship: self.linked_services
    }
    try:
        lf.write("\n[%s] [%s]: ===============Creating Business Services=================="
                 % (datetime.now(), 'Info'))
        lf.write("\n[%s] [%s]: Payload before POST: %s" % (datetime.now(), 'Info', data))
        session = APISession(user_token)
        provision_biz_service = session.rpost("/business_services",
                                              json={'business_service': data})
        info("===== The provisioned business service was: =====")
        # print(provision_biz_service.json())
        self.payload = provision_biz_service
        self.id = self.payload["id"]
        lf.write("\n[%s] [%s]: Metrics value: %s" % (datetime.now(), 'Info', self.payload))
        # print("BIZ PAYLOAD:", self.payload)
        lf.write("\n[%s] [%s]: =================== BUSINESS SERVICE CREATED ===================="
                 % (datetime.now(), 'Info'))
    except PDClientError as e:
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response.url))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response.text))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', print_exc()))
    lf.close()
def get_ep(api_token):
    while True:
        ep_id = input("Enter a default Escalation Policy ID: ")
        try:
            session = APISession(api_token)
            ep_object = session.rget("/escalation_policies/%s" % ep_id)
            break
        except PDClientError as e:
            print(e.response)
            print(e.response.url)
            print(e.response.text)
            print("An error occurred. Please try again.\n")
    return ep_object
def get_escalation_policies(pd_session: APISession) -> List[Dict[str, Any]]:
    all_escalation_policies: List[Dict[str, Any]] = []
    params = {"include[]": ["services", "teams", "targets"]}
    for escalation_policy in pd_session.iter_all("escalation_policies", params=params):
        all_escalation_policies.append(escalation_policy)
    return all_escalation_policies
def create_incident(appDict):
    """
    Logic to create a PagerDuty incident
    :param appDict:
    :return: the json string from the PD API
    """
    payload = build_incident_payload(appDict)
    LOG.debug(payload)

    # build url
    session = APISession(appDict['api_token'], name=appDict['resilient_client'],
                         default_from=appDict['from_email'])
    resp = session.post(INCIDENT_FRAGMENT, payload)

    return resp.json()
def get_service(api_token, service_id):
    try:
        session = APISession(api_token)
        service = session.rget("/services/%s" % service_id)
        if not service['last_incident_timestamp']:
            print("The selected service '%s' [%s] has no incidents"
                  % (service['summary'], service['id']))
            service = None
    except PDClientError as e:
        # print(e.response)
        # print(e.response.url)
        # print(e.response.text)
        service = None
    return service
def get_pagerduty():
    global pagerduty
    if pagerduty is None:
        api_token = os.environ['PAGERDUTY_API_TOKEN']
        user_email_from = os.environ['PAGERDUTY_USER_EMAIL']
        pagerduty = APISession(api_token, default_from=user_email_from)
    return pagerduty
def selftest_function(opts):
    """
    Placeholder for selftest function. An example use would be to test package API connectivity.
    Suggested return values are unimplemented, success, or failure.
    """
    # Read configuration variables from the app.config file
    options = opts.get("pagerduty", {})

    try:
        session = APISession(options['api_token'])
        session.get('users')
        return {"state": "Success"}
    except Exception as err:
        LOG.info("Failed to connect to PagerDuty: {}".format(err))
        return {"state": "Failed", "reason": str(err)}
def get_pagerduty():
    global pagerduty
    if pagerduty is None:
        pagerduty = APISession(
            settings.PAGERDUTY_API_TOKEN,
            default_from=settings.PAGERDUTY_USER_EMAIL,
        )
    return pagerduty
def main():
    api_token = os.getenv('API_TOKEN')
    assignee_user_id = os.getenv('ASSIGNEE_USER_ID')
    cf_build_id = os.getenv('CF_BUILD_ID')
    cf_build_url = os.getenv('CF_BUILD_URL')
    event_source = os.getenv('EVENT_SOURCE')
    event_summary = os.getenv('EVENT_SUMMARY')
    from_email = os.getenv('FROM_EMAIL')
    service_id = os.getenv('SERVICE_ID')
    title = os.getenv('TITLE')
    pagerduty_type = os.getenv('PAGERDUTY_ALERT_TYPE')

    if pagerduty_type == 'incident':
        session = APISession(api_token, default_from=from_email)
        payload = {
            'type': 'incident',
            'title': '{}'.format(title),
            'service': {
                'id': '{}'.format(service_id),
                'type': 'service_reference'
            },
            'assignments': [
                {
                    'assignee': {
                        'id': '{}'.format(assignee_user_id),
                        'type': 'user_reference'
                    }
                }
            ],
        }
        pd_incident = session.rpost('incidents', json=payload)
    elif pagerduty_type == 'change_event':
        session = ChangeEventsAPISession(api_token)
        pd_change_event = session.submit(
            summary='{}'.format(event_summary),
            source='{}'.format(event_source),
            custom_details={"Build ID": '{}'.format(cf_build_id)},
            links=[{'href': '{}'.format(cf_build_url)}]
        )
def create_note(appDict, incident_id, note):
    """
    Create a PagerDuty note
    :param appDict:
    :param incident_id:
    :param note:
    :return: the json string from the PD API
    """
    payload = build_note_payload(note)

    # build url
    url = NOTE_FRAGMENT.format(incident_id)
    session = APISession(appDict['api_token'], name=appDict['resilient_client'],
                         default_from=appDict['from_email'])
    resp = session.post(url, payload)

    return resp.json()
def incident_iter_selected(api_key, duration, service_ids, integrations, all_tags):
    api_session = APISession(api_key)
    durations = {"0": 30, "1": 60, "2": 90, "3": 120, "4": 150, "5": 180,
                 "6": 210, "7": 240, "8": 270, "9": 300, "10": 330,
                 "11": 360, "12": 440, "13": 720, "14": 900, "15": 1080}
    incidents = get_incidents(durations[duration], api_session, service_ids,
                              integrations, all_tags)
    print("Found %s incidents for Service %s over %s months with integration: %s" % (
        str(len(incidents)), service_ids[0], str(int(duration) + 1), integrations))
    return incidents
def find_element_by_name(appDict, element, name):
    """
    Find the internal ID for a PagerDuty element (policy, service, priority, etc.)
    :param appDict:
    :param element: escalation_policies, service, priority, etc.
    :param name:
    :return: id of the element or None
    """
    session = APISession(appDict['api_token'])
    try:
        rtn_element = session.find(element, name.strip().lower())
        LOG.debug(rtn_element)
        return rtn_element['id'] if rtn_element else None
    except PDClientError as err:
        LOG.error(str(err))
        return None
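# A hypothetical usage sketch for find_element_by_name above (added for illustration,
# not part of the original source). It assumes the module-level LOG logger the helper
# writes to; the token and the policy name are placeholders. pdpyras' APISession.find()
# matches case-insensitively on the resource's name, which is why the helper
# lower-cases its input.
app_config = {'api_token': 'REST_API_TOKEN'}  # placeholder config dict
policy_id = find_element_by_name(app_config, 'escalation_policies', 'Default Policy')
if policy_id is None:
    print('No escalation policy named "Default Policy" was found.')
else:
    print('Escalation policy id:', policy_id)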
def get_subdomain(auth_token):
    account = APISession(auth_token)
    try:
        subdomain = account.subdomain
    except Exception:
        subdomain = None
    if subdomain is None:
        subdomain = 'NETWORK_ERR_OR_TOKEN_INVALID'
    print(subdomain)
    return subdomain
def update_incident(appDict, incident_id, status, priority, resolution):
    """
    Update an incident. Used to raise the severity or to close the incident.
    :param appDict:
    :param incident_id:
    :param status:
    :param priority:
    :param resolution:
    :return: the json string from the PD API
    """
    payload = build_update_payload(appDict, status, priority, resolution)

    # build url
    url = UPDATE_FRAGMENT.format(incident_id)
    session = APISession(appDict['api_token'], name=appDict['resilient_client'],
                         default_from=appDict['from_email'])
    resp = session.put(url, payload)

    return resp.json()
def create(self, api_token, log_file):
    # basicConfig(filename=log_file, level=DEBUG, format='%(asctime)s %(levelname)-8s %(message)s',
    #             datefmt='%Y-%m-%d %H:%M:%S')
    lf = open(log_file, 'a+')
    data = {
        "condition": self.condition,
        "actions": [[self.actions, self.service_id]]
    }
    print("\n[%s] [%s]: Event Rules payload before POST: %s" % (datetime.now(), 'Info', data))
    # print(data)
    try:
        session = APISession(api_token)
        provision_event_rules = session.post("/event_rules", json=data)
        lf.write("\n[%s] [%s]: ===== The provisioned event rule was: ====="
                 % (datetime.now(), 'Info'))
        self.payload = provision_event_rules.json()
        lf.write("\n[%s] [%s]: Event Rules Payload: %s" % (datetime.now(), 'Info', self.payload))
        print("\n[%s] [%s]: Event Rules Payload: %s" % (datetime.now(), 'Info', self.payload))
        self.id = self.payload["id"]
        lf.write("\n[%s] [%s]: Event Rules ID: %s" % (datetime.now(), 'Info', self.id))
        lf.write("\n[%s] [%s]: =================== EVENT RULE CREATED ===================="
                 % (datetime.now(), 'Info'))
    except PDClientError as e:
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response.url))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response.text))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', print_exc()))
    lf.close()
def save_incident_to_csv():
    # Initialize Session connection with pagerduty
    session = APISession(api_token)

    # Fetch Incidents list
    incidents = session.list_all('incidents')

    # Generate Column titles
    columns = [
        'incident_number',
        'id',
        'title',
        'description',
        'created_at',
        'last_status_change_at',
        'status',
        'incident_key',
        'summary',
        'is_mergeable',
        'assigned_via',
        'urgency',
        'type',
        'html_url'
    ]

    # Save to file
    with open('incidents.csv', 'w+') as file:
        file.write(F"{','.join(columns)}\n")

        # Format data and write to file.
        file.write(''.join([
            F"{','.join([str(incident.get(column, '')) for column in columns])}\n"
            for incident in incidents
        ]))

    # Return the saved file name
    return 'incidents.csv'
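# A variant sketch of the CSV export above (added for illustration, not part of the
# original source). It uses the standard-library csv module so that fields containing
# commas or newlines, such as incident titles, are quoted correctly. `api_token` is a
# placeholder for the same module-level token the original function relies on.
import csv

from pdpyras import APISession

api_token = 'REST_API_TOKEN'  # placeholder


def save_incidents_to_csv_quoted(filename='incidents.csv'):
    session = APISession(api_token)
    columns = [
        'incident_number', 'id', 'title', 'description', 'created_at',
        'last_status_change_at', 'status', 'incident_key', 'summary',
        'is_mergeable', 'assigned_via', 'urgency', 'type', 'html_url'
    ]
    with open(filename, 'w', newline='') as f:
        writer = csv.writer(f)  # handles quoting and escaping for us
        writer.writerow(columns)
        for incident in session.list_all('incidents'):
            writer.writerow([str(incident.get(column, '')) for column in columns])
    return filename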
def assign_dependent_services(self, user_token, service_id, log_file):
    lf = open(log_file, 'a+')
    try:
        session = APISession(user_token)
        data = {
            "relationships": [{
                "supporting_service": {
                    "id": self.id,
                    "type": "business_service"
                },
                "dependent_service": {
                    "id": service_id,
                    "type": "service"
                }
            }]
        }
        lf.write("\n[%s] [%s]: ===============Associating Dependent Services=================="
                 % (datetime.now(), 'Info'))
        lf.write("\n[%s] [%s]: TESTING TECH SVC: %s" % (datetime.now(), 'Info', data))
        assign_tech_services = session.post("/service_dependencies/associate", json=data)
        self.payload = assign_tech_services
        lf.write("\n[%s] [%s]: Payload: %s" % (datetime.now(), 'Info', self.payload))
        lf.write("\n[%s] [%s]: =================== DEPENDENT SERVICES ASSOCIATED ===================="
                 % (datetime.now(), 'Info'))
    except PDClientError as e:
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response.url))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response.text))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', print_exc()))
    lf.close()  # close the log file handle, matching the sibling methods
def assign_impact_metrics(self, user_token, impact_id, log_file):
    # basicConfig(filename=log_file, level=DEBUG, format='%(asctime)s %(levelname)-8s %(message)s',
    #             datefmt='%Y-%m-%d %H:%M:%S')
    lf = open(log_file, 'a+')
    try:
        session = APISession(user_token)
        data = {
            "business_services": [{
                "id": self.id,
                "type": "business_service_reference"
            }]
        }
        lf.write("\n[%s] [%s]: ===============Associating Impact Metrics=================="
                 % (datetime.now(), 'Info'))
        lf.write("\n[%s] [%s]: Payload before PUT: %s" % (datetime.now(), 'Info', data))
        assign_impact_metric = session.put(
            "business_services/impact_metrics_associations/" + impact_id, json=data)
        lf.write("\n[%s] [%s]: Metrics value: %s"
                 % (datetime.now(), 'Info', assign_impact_metric.json()))
        self.payload = assign_impact_metric.json()
        lf.write("\n[%s] [%s]: =================== IMPACT METRIC ASSOCIATED ===================="
                 % (datetime.now(), 'Info'))
    except PDClientError as e:
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response.url))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response.text))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', print_exc()))
    lf.close()
def assign_dependent_services_old(self, user_token, dependent_services, log_file):
    # basicConfig(filename=log_file, level=DEBUG, format='%(asctime)s %(levelname)-8s %(message)s',
    #             datefmt='%Y-%m-%d %H:%M:%S')
    lf = open(log_file, 'a+')
    try:
        session = APISession(user_token)
        data = {
            "business_service": {
                "dependent_services": dependent_services
            }
        }
        lf.write("\n[%s] [%s]: ===============Associating Supporting Services=================="
                 % (datetime.now(), 'Info'))
        lf.write("\n[%s] [%s]: TESTING TECH SVC: %s" % (datetime.now(), 'Info', data))
        assign_tech_services = session.rput("/business_services/" + self.id, json=data)
        # print(assign_tech_services.json())
        self.payload = assign_tech_services
        lf.write("\n[%s] [%s]: Payload: %s" % (datetime.now(), 'Info', self.payload))
        # self.dependent_services = self.payload["business_service"]["dependent_services"]
        lf.write("\n[%s] [%s]: =================== DEPENDENT SERVICES ASSOCIATED ===================="
                 % (datetime.now(), 'Info'))
    except PDClientError as e:
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response.url))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response.text))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', print_exc()))
    lf.close()
def create(self, api_token, log_file):
    """
    (self, str, str) -> NoneType
    Given an API token and the path to the log file,
    create a PagerDuty service associated with the API token.
    """
    # Logfile for writing service API logs
    lf = open(log_file, 'a+')
    data = {"name": self.name,
            "description": self.description,
            "status": self.status,
            "escalation_policy": {"id": self.escalation_policy,
                                  "type": "escalation_policy_reference"},
            "alert_creation": self.alert_creation,
            "alert_grouping": self.alert_grouping}
    if self.time_grouping:
        data["alert_grouping_timeout"] = self.time_grouping
    lf.write("\n[%s] [%s] ===== Provisioning Service =====" % (datetime.now(), 'Info'))
    lf.write("\n[%s] [%s] Payload before provisioning: %s" % (datetime.now(), 'Info', data))
    try:
        session = APISession(api_token)
        provision_service = session.rpost("/services", json={'service': data})
        lf.write("\n[%s] [%s] Provisioned Service: %s"
                 % (datetime.now(), 'Info', provision_service['name']))
        self.provisioned_name = provision_service['name']
        self.id = provision_service['id']
        self.html_url = provision_service['html_url']
        self.payload = provision_service
        lf.write("\n[%s] [%s] =================== SERVICE CREATED ===================="
                 % (datetime.now(), 'Info'))
    except PDClientError as e:
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response.url))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response.text))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', print_exc()))
    lf.close()
def deprovision(global_api, user_api, api_logs, log_file, template):
    global_session = APISession(global_api)
    user_session = APISession(user_api)
    for key in api_logs:
        lf = open(log_file, 'a+')
        if "service" in api_logs[key]:
            service_id = api_logs[key]["service"]["id"]
            lf.write("\n[%s] [%s]: Deleting components for Service: %s [id: %s]"
                     % (datetime.now(), 'Info', api_logs[key]['service']['name'], service_id))
        if "event_rules" in api_logs[key]:
            event_rules_id = api_logs[key]["event_rules"]["id"]
            delete_pd_field(global_session, event_rules_id, "event_rules", "Event Rules", log_file)
        if "service" in api_logs[key]:
            delete_pd_field(global_session, service_id, "services", "Service", log_file)
        if "business_service" in api_logs[key]:
            biz_service_id = api_logs[key]["business_service"]["id"]
            delete_pd_field(user_session, biz_service_id, "business_services", "Business Service", log_file)
        if "impact_metrics" in api_logs[key]:
            impact_metrics_id = api_logs[key]["impact_metrics"]["id"]
            delete_pd_field(user_session, impact_metrics_id, "business_impact_metrics", "Impact Metrics", log_file)
        lf.write("\n[%s] [%s]: Deleted all components for Service: %s [id: %s]"
                 % (datetime.now(), 'Info', api_logs[key]['service']['name'], service_id))
        lf.close()
def get_team_members(
    pd_session: APISession,
    teams: List[Dict[str, Any]],
) -> List[Dict[str, str]]:
    relations: List[Dict[str, str]] = []
    for team in teams:
        team_id = team["id"]
        for member in pd_session.iter_all(f"teams/{team_id}/members"):
            relations.append(
                {
                    "team": team_id,
                    "user": member["user"]["id"],
                    "role": member["role"]
                },
            )
    return relations
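# A minimal usage sketch (added for illustration, not part of the original source)
# tying get_escalation_policies and get_team_members together. Both helpers accept an
# already-constructed APISession; the token below is a placeholder.
from typing import Any, Dict, List

from pdpyras import APISession

session = APISession('REST_API_TOKEN')  # placeholder token
policies = get_escalation_policies(session)
teams: List[Dict[str, Any]] = session.list_all('teams')
memberships = get_team_members(session, teams)
print(f"{len(policies)} escalation policies, {len(memberships)} team memberships")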
def create(self, user_token, log_file):
    # basicConfig(filename=log_file, level=DEBUG, format='%(asctime)s %(levelname)-8s %(message)s',
    #             datefmt='%Y-%m-%d %H:%M:%S')
    lf = open(log_file, 'a+')
    data = {"name": self.name,
            "description": self.description,
            "aggregation_types": self.aggregation_types,
            "precision": self.precision,
            "unit_short": self.unit_short,
            "y_range_max": self.y_range_max,
            "y_range_min": self.y_range_min}
    try:
        session = APISession(user_token)
        provision_impact_metric = session.post("/business_impact_metrics",
                                               json={'business_impact_metric': data})
        lf.write("\n[%s] [%s]: ===== The provisioned business impact metric was: ====="
                 % (datetime.now(), 'Info'))
        lf.write("\n[%s] [%s]: Business Impact Metrics response: %s"
                 % (datetime.now(), 'Info', str(provision_impact_metric.json())))
        self.payload = provision_impact_metric.json()
        self.id = provision_impact_metric.json()["business_impact_metric"]["id"]
        self.provisioned_name = provision_impact_metric.json()["business_impact_metric"]["name"]
        lf.write("\n[%s] [%s]: =================== BUSINESS IMPACT METRIC CREATED ===================="
                 % (datetime.now(), 'Info'))
    except PDClientError as e:
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response.url))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', e.response.text))
        lf.write("\n[%s] [%s]: %s" % (datetime.now(), 'Error', print_exc()))
    lf.close()