def test_postprocess(self):
    """Validate profiling and error logging in ``APISession.postprocess``.

    Exercises: profiler-key canonicalization for nested index endpoints and
    individual resource endpoints, and error/debug logging on a 5xx response.
    """
    logger = MagicMock()
    # Successful POST to a nested index endpoint: the profiler key should
    # replace the user ID and trailing index with placeholders.
    response = Response(201, json.dumps({'key': 'value'}), method='POST')
    response.url = 'https://api.pagerduty.com/users/PCWKOPZ/contact_methods'
    sess = pdpyras.APISession('apikey')
    sess.postprocess(response)
    # Nested index endpoint
    self.assertEqual(
        1,
        sess.api_call_counts['post:users/{id}/contact_methods/{index}'])
    self.assertEqual(
        1.5,
        sess.api_time['post:users/{id}/contact_methods/{index}'])
    response.url = 'https://api.pagerduty.com/users/PCWKOPZ'
    response.request.method = 'GET'
    sess.postprocess(response)
    # Individual resource access endpoint
    self.assertEqual(1, sess.api_call_counts['get:users/{id}'])
    self.assertEqual(1.5, sess.api_time['get:users/{id}'])
    # A 500 response on a fresh session with a mock logger should emit one
    # error-level and one debug-level log entry.
    response = Response(500, json.dumps({'key': 'value'}), method='GET')
    response.url = 'https://api.pagerduty.com/users/PCWKOPZ/contact_methods'
    sess = pdpyras.APISession('apikey')
    sess.log = logger
    sess.postprocess(response)
    if not (sys.version_info.major == 3 and sys.version_info.minor == 5):
        # These assertion methods are not available in Python 3.5
        logger.error.assert_called_once()
        logger.debug.assert_called_once()
    # Make sure we have correct logging params / number of params: applying
    # the %-format string to its own arguments raises if they don't match.
    logger.error.call_args[0][0] % logger.error.call_args[0][1:]
    logger.debug.call_args[0][0] % logger.debug.call_args[0][1:]
def test_profiler_key(self):
    """profiler_key should canonicalize a method+URL pair into a normalized
    lowercase key with IDs and trailing indices replaced by placeholders."""
    session = pdpyras.APISession('token')
    url = 'https://api.pagerduty.com/users/PCWKOPZ/contact_methods'
    expected = 'post:users/{id}/contact_methods/{index}'
    self.assertEqual(expected, session.profiler_key('POST', url))
def prompt_for_creds(self) -> None:
    """Obtain a PagerDuty API key, build the session, and verify the key.

    Raises IntegrationException if a lightweight read call fails.
    """
    key = self.env_or_prompt("PAGERDUTY_API_KEY", "PagerDuty API Key")
    self.session = pdpyras.APISession(key)
    # Smoke-test the credentials with an inexpensive read request.
    try:
        self.session.get("users")
    except pdpyras.PDClientError:
        raise IntegrationException("Invalid API Key")
def mass_update_incidents(args):
    """Resolve or acknowledge all incidents matching the given filters.

    Builds search filters in the module-level PARAMETERS dict from the
    command-line arguments, then updates each matching incident via ``rput``.
    Prints the API error body (if any) and re-raises on failure.
    """
    session = pdpyras.APISession(args.api_key,
        default_from=args.requester_email)
    if args.user_id:
        PARAMETERS['user_ids[]'] = args.user_id.split(',')
    if args.service_id:
        PARAMETERS['service_ids[]'] = args.service_id.split(',')
    if args.action == 'resolve':
        PARAMETERS['statuses[]'] = ['triggered', 'acknowledged']
    elif args.action == 'acknowledge':
        PARAMETERS['statuses[]'] = ['triggered']
    try:
        for incident in session.list_all('incidents', params=PARAMETERS):
            session.rput(
                incident['self'],
                json={
                    'type': 'incident_reference',
                    'id': incident['id'],
                    # acknowledged or resolved
                    'status': '{0}d'.format(args.action),
                })
    except pdpyras.PDClientError as e:
        if e.response is not None:
            print(e.response.text)
        # Fix: bare "raise" instead of "raise e" preserves the original
        # traceback instead of re-raising from this frame.
        raise
def main():
    """ For each user, add them to every team they are not already a member of """
    # Load a dictConfig-style logging configuration if one exists on disk;
    # otherwise fall back to a basic stderr configuration.
    if os.path.exists(LOGGER_FILE):
        with open(LOGGER_FILE, 'rt') as logger_file:
            logging_config = json.load(logger_file)
            logging.config.dictConfig(logging_config)
    else:
        logging.basicConfig(level=logging.INFO,
            format='%(asctime)s - %(levelname)s - %(message)s')
    # Parse configuration
    args = parse_args()
    with open(args.secrets or SECRETS_FILE) as secrets:
        secrets = json.load(secrets)
    if args.source is None:
        logging.fatal("No source email address supplied. Exiting.")
        exit(1)
    if args.target is None:
        logging.fatal("No target email address supplied. Exiting.")
        exit(2)
    # instantiate a PagerDuty Python REST API Sessions object
    pdapi = pdpyras.APISession(secrets.get('pagerduty').get('key'))
    # Find source user in PagerDuty API by email address
    user_source = pdapi.find('users', args.source, attribute='email')
    if user_source is None:
        logging.fatal("Supplied source email address (%s) did not match a user. Exiting.", args.source)
        exit(3)
    else:
        logging.info("Found source user %s (%s) with %d Teams",
            user_source.get('name'), user_source.get('email'),
            len(user_source.get('teams')))
        for team in user_source.get('teams'):
            logging.debug("Source Team: %s: %s", team.get('id'),
                team.get('summary'))
    # Find target user(s) in PagerDuty API by email address
    for target in args.target:
        try:
            user_target = pdapi.find('users', target, attribute='email')
        except pdpyras.PDClientError as error:
            logging.critical(error)
            exit(4)
        if user_target is None:
            logging.error("Supplied target email address (%s) did not match a user.", target)
        else:
            logging.info("Found target user %s (%s) with %d Teams",
                user_target.get('name'), user_target.get('email'),
                len(user_target.get('teams')))
            for team in user_source.get('teams'):
                # scan the target user's teams for this team id - don't add them if it's already present
                if not any(d['id'] == team.get('id') for d in user_target.get('teams')):
                    # PUT /teams/{team_id}/users/{user_id} adds the user to
                    # the team.
                    method = '/teams/{:s}/users/{:s}'.format(team.get('id'),
                        user_target.get('id'))
                    response = pdapi.request('PUT', method)
                    if response.ok:
                        logging.info("User %s (%s) added to Team %s: %s",
                            user_target.get('name'), user_target.get('email'),
                            team.get('id'), team.get('summary'))
                    else:
                        logging.warning("User%s (%s) WAS NOT added to Team %s: %s",
                            user_target.get('name'), user_target.get('email'),
                            team.get('id'), team.get('summary'))
                else:
                    logging.info("User %s is already a member of %s",
                        user_target.get('name'), team.get('summary'))
def main():
    """Delete the schedule overrides listed in a CSV file.

    The CSV's first column is the schedule ID and the second is the override
    ID; any further columns are ignored. Failures are reported per-row and
    processing continues.
    """
    parser = argparse.ArgumentParser(description="Deletes overrides listed "\
        "in a CSV file. The first column should be the schedule ID and the "\
        "second should be the override ID. More columns can be included after "
        "the first.")
    parser.add_argument('-k', '--api-key', type=str, required=True,
        dest='api_key', help="REST API key")
    parser.add_argument('-f', '--csv-file', type=argparse.FileType('r'),
        dest="csv_file", help="Path to input CSV file. Data should begin in "\
        "the very first row; no column names.")
    args = parser.parse_args()
    session = pdpyras.APISession(args.api_key)
    for row in csv.reader(args.csv_file):
        # Only the first two columns are used; extras are ignored.
        schedule_id, override_id = row[:2]
        try:
            session.rdelete('/schedules/%s/overrides/%s'%(
                schedule_id, override_id
            ))
            print("Deleted override "+override_id)
        except pdpyras.PDClientError as e:
            # Report the failure and keep going with the remaining rows.
            error = 'Network error'
            if e.response is not None:
                error = e.response.text
            print("Could not delete override %s; %s"%(override_id, error))
            continue
def test_iter_cursor(self, get):
    """Exercise cursor-based pagination in ``APISession.iter_cursor``.

    Covers: envelope-name guessing (wrong and right), explicit envelope
    names, cursor parameter exchange, and end-of-records termination.
    """
    sess = pdpyras.APISession('token')
    sess.log = MagicMock() # Or go with self.debug(sess) to see output
    # Builds one page of a cursor-paginated response body:
    page = lambda e, r, c: json.dumps({e: r, 'next_cursor': c})
    wrong_envelope_name = [Response(200, page('stuffs', [1, 2, 3], None))]
    get.side_effect = wrong_envelope_name
    # Note, for this next test and the one after it, we must send a lambda
    # to assertRaises because the method returns a generator
    #
    # Test: guessing the envelope name, incorrect
    self.assertRaises(ValueError, lambda p: list(sess.iter_cursor(p)),
        '/things/stuff')
    get.reset_mock()
    # Test: taking user's input for the envelope name, incorrect
    get.side_effect = wrong_envelope_name
    self.assertRaises(
        ValueError,
        lambda p: list(sess.iter_cursor(p, attribute="thing")),
        '/stuff/things')
    get.reset_mock()
    # Test: guessing the envelope name, correct guess, cursor parameter
    # exchange, stop iteration when records run out, etc.
    get.side_effect = [
        Response(200, page('numbers', [1, 2, 3], 2)),
        Response(200, page('numbers', [4, 5, 6], 5)),
        Response(200, page('numbers', [7, 8, 9], None))
    ]
    self.assertEqual(list(sess.iter_cursor('/sequence/numbers')),
        list(range(1, 10)))
    # It should send the next_cursor body parameter from the second to
    # last response as the cursor query parameter in the final request
    self.assertEqual(get.mock_calls[-1][2]['params']['cursor'], 5)
def _check_pager_duty(self) -> List:
    """Fetch all triggered/acknowledged incidents for the configured
    PagerDuty teams and users, exhausting pagination."""
    session = pdpyras.APISession(self.conf['pd_api_key'])
    logger.info('Requesting incidents to PagerDuty...')
    filters = {
        'team_ids[]': self.conf['pd_teams'],
        'user_ids[]': self.conf['pd_users'],
        'statuses[]': ['triggered', 'acknowledged'],
    }
    return session.list_all('incidents', params=filters)
def test_rerole_users(self):
    """
    Test doing a "live" re-roling of users in an account w/API calls

    This is a very high-level test that depends on the notion that rerole
    operations are reversible using CSV files to back-up and restore.
    """
    global api_key
    # Skipped entirely unless a live API key was supplied.
    if not api_key:
        return
    # Per-step rollback CSV file names (base roles / team roles):
    rbf = lambda i: 'rollback_%d.csv' % i
    rbtrf = lambda i: 'rollback_teams_%d.csv' % i
    rerole_users.session = pdpyras.APISession(api_key)
    args = MagicMock()
    args.assume_yes = True
    args.skip_roles = ['owner']
    # Step 1: Run a rerole then the inverse (WARNING: can mess up account)
    args.rollback_file = open(rbf(1), 'w')
    args.rollback_teamroles_file = open(rbtrf(1), 'w')
    args.all_users = True
    args.new_base_role = 'observer'
    args.new_team_role = 'responder'
    rerole_users.rerole_users(args) # Go
    # Step 2: Roll back (CSV file should trump all)
    args.all_users = False
    args.rollback_file.close()
    args.rollback_teamroles_file.close()
    args.roles_file = open(rbf(1), 'r')
    args.teamroles_file = open(rbtrf(1), 'r')
    args.rollback_file = open(rbf(2), 'w')
    args.rollback_teamroles_file = open(rbtrf(2), 'w')
    rerole_users.rerole_users(args) # Go
    args.roles_file.close()
    args.teamroles_file.close()
    # Step 3: Roll forward
    # NOTE(review): the rollback files from step 2 onward are never closed
    # explicitly before being re-read; presumably rerole_users flushes or the
    # interpreter finalizes them in time -- confirm.
    args.roles_file = open(rbf(2), 'r')
    args.teamroles_file = open(rbtrf(2), 'r')
    args.rollback_file = open(rbf(3), 'w')
    args.rollback_teamroles_file = open(rbtrf(3), 'w')
    rerole_users.rerole_users(args) # Go
    # Step 4: Roll back again and verify that the files are identical
    args.roles_file = open(rbf(3), 'r')
    args.teamroles_file = open(rbtrf(3), 'r')
    args.rollback_file = open(rbf(4), 'w')
    args.rollback_teamroles_file = open(rbtrf(4), 'w')
    rerole_users.rerole_users(args) # Go
    args.roles_file.close()
    args.teamroles_file.close()
    # If all was done properly, the rollback files in step 1 and 3 should be
    # identical (they reflect the roles in the account at the beginning)
    for rolefile in (rbf, rbtrf):
        self.assertEqual(
            open(rolefile(1), 'r').read(),
            open(rolefile(3), 'r').read())
def test_find(self, iter_all):
    """find() should query the index and return the record whose attribute
    matches the search term exactly."""
    sess = pdpyras.APISession('token')
    fixture_users = [
        {'type':'user', 'name': 'Someone Else',
            'email':'*****@*****.**'},
        {'type':'user', 'name': 'Space Person',
            'email':'[email protected] '},
        {'type':'user', 'name': 'Someone Personson',
            'email':'*****@*****.**'},
    ]
    iter_all.return_value = iter(fixture_users)
    found = sess.find('users', '*****@*****.**', attribute='email')
    self.assertEqual('Someone Personson', found['name'])
    # The search term should have been passed through as the query param:
    iter_all.assert_called_with('users', params={'query':'*****@*****.**'})
def get_session(self) -> pdpyras.APISession:
    """Build and cache a `pdpyras.APISession` for REST API access.

    `pdpyras.APISession` extends `requests.Session` from the Requests HTTP
    library; usage documentation lives at:

    https://pagerduty.github.io/pdpyras/#data-access-abstraction
    """
    self._session = pdpyras.APISession(self.token)
    return self._session
def test_persist(self, iterator, creator):
    """persist() should only create the resource when no match exists."""
    user = {
        "name": "User McUserson",
        "email": "*****@*****.**",
        "type": "user"
    }
    # A matching user already exists: no create call expected.
    iterator.return_value = iter([user])
    sess = pdpyras.APISession('apiKey')
    sess.persist('users', 'email', user)
    creator.assert_not_called()
    # No matching user: persist should create one via the index endpoint.
    iterator.return_value = iter([])
    sess.persist('users', 'email', user)
    creator.assert_called_with('users', json=user)
def test_rget(self, get):
    """rget should unwrap the resource envelope on success and raise
    PDClientError on a non-success HTTP status."""
    response200 = Response(200, '{"user":{"type":"user_reference",'
        '"email":"*****@*****.**","summary":"User McUserson"}}')
    get.return_value = response200
    s = pdpyras.APISession('token')
    self.assertEqual(
        {"type":"user_reference","email":"*****@*****.**",
            "summary":"User McUserson"},
        s.rget('/users/P123ABC'))
    # This is (forcefully) valid JSON but no matter; it should raise
    # PDClientError nonetheless
    response404 = Response(404, '{"user": {"email": "*****@*****.**"}}')
    get.reset_mock()
    get.return_value = response404
    self.assertRaises(pdpyras.PDClientError, s.rget, '/users/P123ABC')
def test_iter_all(self, get):
    """Exercise offset/limit pagination in ``APISession.iter_all``.

    Covers: query parameter generation across pages, the item_hook callback,
    and both behaviors (stop vs. raise) on a non-success HTTP status.
    """
    sess = pdpyras.APISession('token')
    sess.log = MagicMock() # Or go with self.debug(sess) to see output
    # Builds one page of a classic paginated index response:
    page = lambda n, t: {
        'users': [{'id':i} for i in range(10*n, 10*(n+1))],
        'total': t,
        'more': n<(t/10)-1
    }
    # NOTE(review): iter_param is defined but never used in this test.
    iter_param = lambda p: json.dumps({
        'limit':10, 'total': True, 'offset': 0
    })
    get.side_effect = [
        Response(200, json.dumps(page(0, 30))),
        Response(200, json.dumps(page(1, 30))),
        Response(200, json.dumps(page(2, 30))),
    ]
    # A URL that already carries a query string, to verify param merging:
    weirdurl='https://api.pagerduty.com/users?number=1'
    hook = MagicMock()
    items = list(sess.iter_all(weirdurl, item_hook=hook, total=True,
        page_size=10))
    self.assertEqual(3, get.call_count)
    self.assertEqual(30, len(items))
    get.assert_has_calls(
        [
            call(weirdurl, params={'limit':10, 'total':1, 'offset':0}),
            call(weirdurl, params={'limit':10, 'total':1, 'offset':10}),
            call(weirdurl, params={'limit':10, 'total':1, 'offset':20}),
        ],
    )
    # item_hook receives (item, ordinal, total):
    hook.assert_any_call({'id':14}, 15, 30)
    get.reset_mock()
    # Test stopping iteration on non-success status
    error_encountered = [
        Response(200, json.dumps(page(0, 50))),
        Response(200, json.dumps(page(1, 50))),
        Response(200, json.dumps(page(2, 50))),
        Response(400, json.dumps(page(3, 50))), # break
        Response(200, json.dumps(page(4, 50))),
    ]
    get.side_effect = copy.deepcopy(error_encountered)
    sess.raise_if_http_error = False
    new_items = list(sess.iter_all(weirdurl))
    self.assertEqual(items, new_items)
    get.reset_mock()
    # Now test raising an exception:
    get.side_effect = copy.deepcopy(error_encountered)
    sess.raise_if_http_error = True
    self.assertRaises(pdpyras.PDClientError, list, sess.iter_all(weirdurl))
def mass_update_incidents(args):
    """Resolve or acknowledge all incidents matching the given filters.

    Builds search filters in the module-level PARAMETERS dict from the
    command-line arguments (users, services, action, optional date range),
    then updates each matching incident via ``rput``. Honors ``--dry-run``.
    """
    session = pdpyras.APISession(args.api_key,
        default_from=args.requester_email)
    # Identify this script in requests. Fix: header value previously read
    # "pupblic-support-scripts" (typo for the repository name).
    session.headers.update({"X-SOURCE-SCRIPT":
        "public-support-scripts/mass_update_incidents"})
    if args.user_id:
        PARAMETERS['user_ids[]'] = args.user_id.split(',')
        print("Acting on incidents assigned to user(s): "+args.user_id)
    if args.service_id:
        PARAMETERS['service_ids[]'] = args.service_id.split(',')
        print("Acting on incidents corresponding to service ID(s): " +
            args.service_id)
    if args.action == 'resolve':
        PARAMETERS['statuses[]'] = ['triggered', 'acknowledged']
        print("Resolving incidents")
    elif args.action == 'acknowledge':
        PARAMETERS['statuses[]'] = ['triggered']
        print("Acknowledging incidents")
    if args.date_range is not None:
        sinceuntil = args.date_range.split(',')
        if len(sinceuntil) != 2:
            raise ValueError("Date range must be two ISO8601-formatted time "
                "stamps separated by a comma.")
        PARAMETERS['since'] = sinceuntil[0]
        PARAMETERS['until'] = sinceuntil[1]
        print("Getting incidents for date range: "+" to ".join(sinceuntil))
    else:
        PARAMETERS['date_range'] = 'all'
        print("Getting incidents of all time")
    print("Parameters: "+str(PARAMETERS))
    try:
        print("Please be patient as this can take a while for large volumes "
            "of incidents.")
        for incident in session.list_all('incidents', params=PARAMETERS):
            print("* Incident {}: {}".format(incident['id'], args.action))
            if args.dry_run:
                continue
            # Light throttling between write calls:
            time.sleep(0.25)
            self_url = f"https://api.pagerduty.com/incidents/{incident['id']}"
            session.rput(self_url, json={
                'type': 'incident_reference',
                'id': incident['id'],
                # acknowledged or resolved
                'status': '{0}d'.format(args.action),
            })
    except pdpyras.PDClientError as e:
        if e.response is not None:
            print(e.response.text)
        # Fix: bare "raise" preserves the original traceback (was "raise e").
        raise
def test_profile(self):
    """Validate per-endpoint call counting/timing in ``APISession.profile``."""
    response = Response(201, json.dumps({'key': 'value'}), method='POST')
    response.url = 'https://api.pagerduty.com/users/PCWKOPZ/contact_methods'
    sess = pdpyras.APISession('apikey')
    sess.profile(response)
    # Nested index endpoint: the user ID and trailing index should be
    # canonicalized to placeholders in the profiler key.
    self.assertEqual(
        1,
        sess.api_call_counts['post:users/{id}/contact_methods/{index}'])
    self.assertEqual(
        1.5,
        sess.api_time['post:users/{id}/contact_methods/{index}'])
    response.url = 'https://api.pagerduty.com/users/PCWKOPZ'
    response.request.method = 'GET'
    sess.profile(response)
    # Individual resource access endpoint
    self.assertEqual(1, sess.api_call_counts['get:users/{id}'])
    self.assertEqual(1.5, sess.api_time['get:users/{id}'])
def test_resource_path(self, put_method):
    """rput should accept a resource dict in place of a URL and resolve the
    request URL from the dict's 'self' property."""
    sess = pdpyras.APISession('some-key')
    resource_url = 'https://api.pagerduty.com/users/PSOMEUSR'
    user = {
        'id': 'PSOMEUSR',
        'type': 'user',
        'self': resource_url,
        'name': 'User McUserson',
        'email': '*****@*****.**'
    }
    put_method.return_value = Response(200, json.dumps({'user': user}),
        method='PUT', url=resource_url)
    # Note: the resource dict itself is passed as the first argument, not a
    # URL string.
    sess.rput(user, json=user)
    put_method.assert_called_with(resource_url, json={'user': user})
def main():
    """Export all schedule overrides in a time window to a CSV file.

    Output columns: schedule ID, override ID, and a human-readable tag
    identifying the overriding user and time span.
    """
    ap = argparse.ArgumentParser(
        description="Gets all overrides in a "
        "schedule and export to a CSV file. The first column of output will "
        "be the schedule ID and the second should be the override ID, and the "
        "third will be a column identifying the user and time.")
    ap.add_argument('-k', '--api-key', type=str, required=True,
        dest='api_key', help="REST API key")
    ap.add_argument('-f', '--csv-file', type=argparse.FileType('w'),
        dest="csv_file", help="Output CSV file. Data will begin in the very "\
        "first row; no column names.")
    ap.add_argument('-s', '--start', required=True, help="Start date of search")
    ap.add_argument('-e', '--end', required=True, help="End date of search")
    ap.add_argument(
        '-c', '--schedules', default=[], action='append',
        help="IDs of schedules in which to find overrides. If unspecified, "
        "all schedules will be included.")
    args = ap.parse_args()
    session = pdpyras.APISession(args.api_key)
    window = {'since': args.start, 'until': args.end}
    writer = csv.writer(args.csv_file)
    schedules = args.schedules
    if not args.schedules:
        # No explicit schedule IDs given; include every schedule.
        print("Getting schedules...")
        schedules = [s['id'] for s in session.iter_all('schedules')]
    for sid in schedules:
        for override in session.iter_all('/schedules/%s/overrides' % sid,
                params=window):
            # Third column: who covered, and when.
            idtag = "%s: %s to %s" % (override['user']['summary'],
                override['start'], override['end'])
            writer.writerow((sid, override['id'], idtag))
def mass_update_incidents(args):
    """Acknowledge or resolve every incident matching the filters in
    ``args``, updating each via an incident_reference ``rput``."""
    session = pdpyras.APISession(args.api_key,
        default_from=args.requester_email)
    # Populate the shared search filters from the command-line arguments:
    if args.user_id:
        PARAMETERS['user_ids[]'] = args.user_id.split(',')
    if args.service_id:
        PARAMETERS['service_ids[]'] = args.service_id.split(',')
    status_filters = {
        'resolve': ['triggered', 'acknowledged'],
        'acknowledge': ['triggered'],
    }
    if args.action in status_filters:
        PARAMETERS['statuses[]'] = status_filters[args.action]
    new_status = '{0}d'.format(args.action)  # acknowledged or resolved
    for incident in session.iter_all('incidents', params=PARAMETERS):
        update_body = {
            'type': 'incident_reference',
            'id': incident['id'],
            'status': new_status,
        }
        session.rput(incident['self'], json=update_body)
def no_sms(args):
    """Delete every SMS notification rule and SMS contact method for all
    users in the account, printing each deletion as it happens."""
    session = pdpyras.APISession(args.api_key)
    user_iter = session.iter_all(
        'users',
        params={'include[]': ['contact_methods', 'notification_rules']})
    for user in user_iter:
        # Remove SMS-delivering notification rules first, then the SMS
        # contact methods themselves.
        for rule in user['notification_rules']:
            if rule['contact_method']['type'] != 'sms_contact_method':
                continue
            print('{name}: deleting notification rule {id}'.format(
                name=user['name'], id=rule['id']))
            session.delete(rule['self'])
        for method in user['contact_methods']:
            if method['type'] != 'sms_contact_method':
                continue
            print('{name}: deleting contact method {id}'.format(
                name=user['name'], id=method['id']))
            session.delete(method['self'])
def test_persist(self, iterator, creator, updater): user = { "name": "User McUserson", "email": "*****@*****.**", "type": "user" } # Do not create if the user exists already (default) iterator.return_value = iter([user]) sess = pdpyras.APISession('apiKey') sess.persist('users', 'email', user) creator.assert_not_called() # Call session.rpost to create if the user does not exist iterator.return_value = iter([]) sess.persist('users', 'email', user) creator.assert_called_with('users', json=user) # Call session.rput to update an existing user if update is True iterator.return_value = iter([user]) new_user = dict(user) new_user.update({ 'job_title': 'Testing the app', 'self': 'https://api.pagerduty.com/users/PCWKOPZ' }) sess.persist('users', 'email', new_user, update=True) updater.assert_called_with(new_user['self'], json=new_user)
def create_recurring_maintenance_windows(args):
    """Create a series of recurring maintenance windows via the REST API.

    Each window lasts ``args.duration_minutes`` minutes and successive
    windows are offset by ``args.period_hours`` hours. Honors ``--dry-run``.
    """
    sref = lambda s: {'type': 'service_reference', 'id': s}
    session = pdpyras.APISession(args.api_key, default_from=args.requester)
    start_date = dateparser.parse(args.first_maint_window_date)
    end_date = dateparser.parse(args.first_maint_window_date) + \
        timedelta(minutes=args.duration_minutes)
    # NOTE(review): the dates are advanced *before* the first creation, so a
    # window at first_maint_window_date itself is never created and only
    # num_repetitions-1 windows result -- confirm this is intentional.
    # Loop variable renamed from "iter", which shadowed the builtin.
    for repetition in range(1, args.num_repetitions, 1):
        start_date = start_date + timedelta(hours=args.period_hours)
        end_date = end_date + timedelta(hours=args.period_hours)
        print("Creating a %d-minute maintenance window starting %s." % (
            args.duration_minutes, start_date))
        if args.dry_run:
            continue
        try:
            mw = session.rpost('maintenance_windows', json={
                'type': 'maintenance_window',
                'start_time': start_date.isoformat(),
                'end_time': end_date.isoformat(),
                'description': args.description,
                'services': [sref(s_id) for s_id in args.service_ids]
            })
        except pdpyras.PDClientError as e:
            msg = "API Error: "
            if e.response is not None:
                msg += "HTTP %d: %s" % (e.response.status_code,
                    e.response.text)
            print(msg)
    if args.dry_run:
        # Fix: this notice used to print unconditionally, claiming nothing
        # was created even after a successful live run.
        print(
            "(Note: no maintenance windows actually created because -n/--dry-run "
            "was given)")
def remove_all_future_maintenance_windows(args):
    """Delete every maintenance window scheduled in the future, optionally
    restricted to specific services. Honors ``--dry-run``."""
    session = pdpyras.APISession(args.api_key)
    # Prints per-deletion progress: ordinal, total and window summary.
    progress_printer = lambda o, i, n: (print("Deleting %d/%d: %s"%(
        i, n, o['summary']
    )))
    mw_params = {"filter":"future"}
    if len(args.service_ids):
        mw_params['service_ids[]'] = args.service_ids
    for mw in session.iter_all("maintenance_windows",
            item_hook=progress_printer, params=mw_params, total=True):
        if args.dry_run:
            continue
        try:
            session.delete(mw['self'])
        # Fix: was bare "PDClientError", a NameError -- the exception class
        # is referenced as pdpyras.PDClientError everywhere else.
        except pdpyras.PDClientError as e:
            message = "API Error: %s"%e
            if e.response is not None:
                message += " HTTP %d: %s"%(e.response.status_code,
                    e.response.text)
            print(message)
            continue
    if args.dry_run:
        print("(Didn't actually delete anything, since -n/--dry-run was given)")
def create_overrides():
    """For shift in find_shifts(), create an override to replace
    vacationing_user with replacement_user."""
    ap = argparse.ArgumentParser(
        description="For a given user going on "
        "vacation, and another given user who will fill their shoes while "
        "away, create overrides on all the vacationing user's schedules, such "
        "that the replacement user covers all the shifts that the vacationing "
        "user will be gone.")
    ap.add_argument('-v', '--vacationer', required=True, help="Login email "
        "address of the user who is going on vacation.")
    ap.add_argument(
        '-u', '--substitute', required=True, help="Login email "
        "address of the user who is covering the shifts of the vacationing "
        "user.")
    ap.add_argument('-k', '--api-key', required=True, help="PagerDuty REST API "
        "key to use for operations.")
    ap.add_argument('-s', '--start', required=True, help="Start date of the "
        "vacation.")
    ap.add_argument('-e', '--end', required=True, help="End date of the "
        "vacation.")
    ap.add_argument(
        '-c', '--schedules', default=[], action='append',
        help="IDs of schedules in which to create overrides. If unspecified, "
        "all schedules will be included.")
    args = ap.parse_args()
    session = pdpyras.APISession(args.api_key)
    # Resolve both users from their login emails:
    vacationing_user = session.find('users', args.vacationer, attribute='email')
    replacement_user = session.find('users', args.substitute, attribute='email')
    if None in (vacationing_user, replacement_user):
        print("Invalid login email specified for the vacationing user and/or "
            "substitute user.")
        return
    schedules = args.schedules
    if not args.schedules:
        print("Getting schedules...")
        schedules = [s['id'] for s in session.iter_all('schedules')]
    print("Looking for shifts that will require coverage...")
    shifts = find_shifts(session, vacationing_user['id'], args.start, args.end,
        schedules)
    for dates, schedule in shifts.items():
        start, end = dates
        print("Creating override on schedule %s (%s) from %s to %s..."
            % (schedule['id'], schedule['summary'], start, end))
        create_response = session.post(
            '/schedules/%s/overrides' % schedule['id'],
            json={
                'override': {
                    "start": start,
                    "end": end,
                    "user": {
                        "id": replacement_user['id'],
                        "type": "user_reference"
                    }
                }
            })
        if not create_response.ok:
            # Fix: previously read "e.response.status_code", but no name "e"
            # exists here (NameError on any failed request); use the response
            # we just received.
            message = "HTTP error: %d" % create_response.status_code
            print("Error creating override; " + message)
            continue
        print("Success.")
def main():
    """Mass-update user login emails (and optionally email contact methods)
    in a PagerDuty account, from a CSV file, a query, or across all users."""
    global session
    ap = argparse.ArgumentParser(description="Mass-update email addresses of "
        "users in a PagerDuty account.")
    ap.add_argument('-k', '--api-key', required=True, help="API key")
    # "CSV",
    # description="CSV-based update: users to update and how to make the "
    # "update is specified in a CSV file",
    # "Query",
    # description="Query-based update: users to update given by a query and "
    # "replacement pattern"
    query_or_csv_group = ap.add_mutually_exclusive_group(required=True)
    query_or_csv_group.add_argument(
        '-f', '--csv-file', default=None,
        type=argparse.FileType('r'),
        help="CSV file specifying users to update "
        "and their new email addresses. Must have two columns: the first is "
        "the login email of the user, and the second is the email address to "
        "which it should be changed. Input is expected to start at the first "
        "line; there should be no column titles row.")
    query_or_csv_group.add_argument(
        '-q', '--query', type=str,
        help="Query to use for matching users to be updated. The query is run "
        "against the users' login emails.")
    query_or_csv_group.add_argument('-a', '--all-users', action='store_true',
        default=False, help="Perform replacement over all users.")
    ap.add_argument(
        '-e', '--find', dest='find_pattern', default=None,
        help="Regex to match in the email addresses. If this is unspecified "
        "and -q is given, the query itself will be used as the search pattern, "
        "and a plain text find/replace will be performed instead of regex.")
    ap.add_argument('-r', '--replace', dest='replacement_pattern', default=None,
        help="Replacement pattern to use for emails.")
    ap.add_argument(
        '-n', '--dry-run', default=False, action='store_true',
        help="Don't actually make changes, but print out each change that "
        "would be made.")
    ap.add_argument(
        '-o', '--no-contact-methods', default=False, action='store_true',
        help="Do not update email contact methods as well "
        "as login email addresses. By default this is False and contact "
        "methods will be included.")
    args = ap.parse_args()
    # Ensure the combination of options actually defines an operation:
    if args.all_users and None in (args.find_pattern, args.replacement_pattern)\
            or (args.query and args.replacement_pattern is None):
        print(
            "There is insufficient information to determine what you want to "
            "do.\n- If using the --all-users option, you must also provide "
            "--find and --replace to specify how the emails should "
            "be updated\n- If using --query, you must at least provide "
            "--replace")
        return
    session = pdpyras.APISession(args.api_key)
    replace_emails(args)
def main():
    """Parse command-line options and run the user re-roling operation.

    Fixes: removed two dead assignments (an overwritten ``helptxt`` and a
    ``cont = False`` immediately clobbered by ``input()``).
    """
    global session
    parser = argparse.ArgumentParser(description="Give users new roles. In "\
        "cases where the users' new roles are given on a per-user basis, i.e. "\
        "in a CSV, that will take precedence over roles specified via command "\
        "line arguments; both can be used. Note, this script cannot yet be "\
        "used to specify team roles on a per-team basis.")
    # General options
    helptxt = "PagerDuty full-access REST API key"
    parser.add_argument('-k', '--api-key', required=True, help=helptxt)
    parser.add_argument('-r', '--new-role', help="The users' new role.",
        dest='new_base_role', default=None)
    helptxt = "Roles such that, if a user has that role, they will be skipped "\
        "in the re-roling. To specify multiple roles, include this option "\
        "several times. This will always include the account owner (who "\
        "cannot be demoted through the API) and stakeholders (because "\
        "changing the stakeholder count can affect billing)."
    parser.add_argument('--skip-users-with-role', '-s', dest="skip_roles",
        action='append', default=['owner'], help=helptxt)
    # Team role setting options
    teamrole_args = parser.add_mutually_exclusive_group()
    helptxt = "New team-level role to set for users on their respective teams."
    teamrole_args.add_argument('-e', '--team-role', dest='new_team_role',
        required=False, default=None, help=helptxt)
    helptxt = "Derive the users' new team roles from their current default "\
        "roles, which will be replaced with the role specified by the "\
        "--new-role option. For instance, if a given user is a manager "\
        "and --new-role is observer, they will have manager access to "\
        "their teams but a new base role of observer."
    teamrole_args.add_argument('-u', '--auto-team-roles', dest='adapt_roles',
        required=False, default=False, action='store_true', help=helptxt)
    # Input file options
    from_file = parser.add_mutually_exclusive_group(required=True)
    helptxt = "Rerole all users in the account."
    from_file.add_argument('-a', '--all-users', dest='all_users',
        default=False, action='store_true', help=helptxt)
    helptxt = "File specifying list of user roles. The file should be a CSV "\
        "with user login email as the first column and role as the second, "\
        "and optionally the third column the team role to give them."
    from_file.add_argument('-o', '--roles-from-file', dest='roles_file',
        default=None, type=argparse.FileType('r'), help=helptxt)
    helptxt="File specifying per-user per-team roles to set. The file should "\
        "be a CSV with the user login email as the first colum, the team "\
        "role as the second column, and the name of the team on which the "\
        "user has that role as the third. Note, you can include an individual "\
        "user multiple times in the same file to set distinct roles on any "\
        "number of teams."
    from_file.add_argument('-t', '--team-roles-from-file',
        dest='teamroles_file', type=argparse.FileType('r'), default=None,
        help=helptxt)
    # Output (save) file option:
    helptxt = "File to which the prior user base roles should be written. "\
        "Files written to with this option can then be used to reset the "\
        "permissions to the previous state before having run the rerole "\
        "script, via the --roles-from-file option."
    parser.add_argument('-b', '--rollback-file', dest='rollback_file',
        default=None, type=argparse.FileType('w'), help=helptxt)
    helptxt = "File to which the prior user team roles should be written. "\
        "Files written to with this option can then be used to reset the "\
        "fine-grained per-user-per-team roles to the previous state before "\
        "having run the rerole script, via the --team-roles-from-file option."
    parser.add_argument('-m', '--rollback-teamroles-file',
        dest='rollback_teamroles_file', type=argparse.FileType('w'),
        help=helptxt)
    helptxt="Assume a yes answer to all prompts i.e. to proceed in the case "\
        "that no backup file was specified."
    parser.add_argument('-y', '--yes-to-all', default=False, dest='assume_yes',
        action='store_true', help=helptxt)
    args = parser.parse_args()
    # Print a conspicuous warning message to avoid making a big mistake
    if args.all_users and not args.assume_yes:
        cont = input("THIS WILL UPDATE ALL USERS IN THE ACCOUNT. Are you sure "
            "you want to proceed? ")
        cont = cont.strip().lower() == 'y'
        if not cont:
            print("Aborted.")
            return
    session = pdpyras.APISession(args.api_key)
    rerole_users(args)
def get_contact_methods(user_id, session):
    """Print each of a user's contact methods to standard output."""
    for contact_method in session.iter_all('users/%s/contact_methods' % user_id):
        # Substring checks match the full type names, e.g.
        # 'phone_contact_method', 'sms_contact_method', etc.
        if 'phone' in contact_method['type']:
            sys.stdout.write("Phone: ")
            sys.stdout.write(
                '%s %s' % (contact_method['country_code'],
                    contact_method['address']))
        elif 'sms' in contact_method['type']:
            sys.stdout.write("SMS: ")
            sys.stdout.write(
                '%s %s' % (contact_method['country_code'],
                    contact_method['address']))
        elif 'email' in contact_method['type']:
            sys.stdout.write("Email: ")
            sys.stdout.write(contact_method['address'])
        elif 'push_notification' in contact_method['type']:
            sys.stdout.write("Push: ")
            sys.stdout.write(contact_method['label'])
        sys.stdout.write("\n")

if __name__ == '__main__':
    ap = argparse.ArgumentParser(description="Retrieves contact info for all "
        "users in a PagerDuty account")
    ap.add_argument('-k', '--api-key', required=True, help="REST API key")
    args = ap.parse_args()
    session = pdpyras.APISession(args.api_key)
    # get_users is defined elsewhere in this file; it drives per-user calls
    # to get_contact_methods above.
    get_users(session)
def test_truncated_token(self):
    """trunc_token should expose only the last four characters of the key."""
    session = pdpyras.APISession('abcd1234')
    self.assertEqual('*1234', session.trunc_token)
def test_subdomain(self, rget):
    """subdomain should be derived from a user's html_url and then cached."""
    rget.return_value = [{'html_url': 'https://something.pagerduty.com'}]
    session = pdpyras.APISession('key')
    # Access twice: the second read must come from the cached value, so the
    # underlying rget is invoked exactly once.
    for _ in range(2):
        self.assertEqual('something', session.subdomain)
    rget.assert_called_once_with('users', params={'limit': 1})
def test_resource_envelope(self):
    """Exhaustively exercise the ``resource_envelope`` decorator.

    Covers: envelope unwrapping on success, invalid-JSON handling, missing
    envelope keys, error-status propagation, index-endpoint envelope naming,
    request-body validation, and auto-enveloping for create and multi-update.
    """
    do_http_things = MagicMock()
    response = MagicMock()
    do_http_things.return_value = response
    my_self = pdpyras.APISession('some_key')
    self.debug(my_self)
    dummy_session = MagicMock()
    # Restores all mocks to a clean state between scenarios:
    def reset_mocks():
        do_http_things.reset_mock()
        response.reset_mock()
        do_http_things.return_value = response
        dummy_session.reset_mock()
    # OK response, good JSON: JSON-decode and unpack response
    response.ok = True
    response.json.return_value = {'service': {'name': 'value'}}
    do_http_things.__name__ = 'rput' # just for instance
    self.assertEqual(
        pdpyras.resource_envelope(do_http_things)(my_self,
            '/services/PTHINGY'),
        {'name': 'value'})
    reset_mocks()
    # OK response, bad JSON: raise exception.
    response.ok = True
    do_http_things.__name__ = 'rput' # just for instance
    response.json.side_effect = [ValueError('Bad JSON!')]
    self.assertRaises(pdpyras.PDClientError,
        pdpyras.resource_envelope(do_http_things), my_self, '/services')
    reset_mocks()
    # OK response, but ruh-roh we hit an anti-pattern (probably won't exist
    # except maybe in beta/reverse-engineered endpoints; this design is thus
    # anticipatory rather than practical). Raise exception.
    do_http_things.reset_mock()
    response.reset_mock()
    response.json = MagicMock()
    response.ok = True
    do_http_things.return_value = response
    do_http_things.__name__ = 'rput' # just for instance
    response.json.return_value = {'nope': 'nopenope'}
    self.assertRaises(pdpyras.PDClientError,
        pdpyras.resource_envelope(do_http_things), my_self, '/services')
    reset_mocks()
    # Not OK response, raise (daisy-chained w/raise_on_error decorator)
    response.reset_mock()
    response.ok = False
    do_http_things.__name__ = 'rput' # just for instance
    self.assertRaises(pdpyras.PDClientError,
        pdpyras.resource_envelope(do_http_things), my_self, '/services')
    reset_mocks()
    # GET /<index>: use a different envelope name
    response.ok = True
    users_array = [{
        "type": "user",
        "email": "*****@*****.**",
        "summary": "User McUserson"
    }]
    response.json.return_value = {'users': users_array}
    do_http_things.__name__ = 'rget'
    dummy_session.url = 'https://api.pagerduty.com'
    self.assertEqual(
        users_array,
        pdpyras.resource_envelope(do_http_things)(dummy_session, '/users',
            query='user'))
    reset_mocks()
    # Test request body JSON envelope stuff in post/put
    # Response body validation
    do_http_things.__name__ = 'rpost'
    user_payload = {'email': '*****@*****.**', 'name': 'User McUserson'}
    self.assertRaises(pdpyras.PDClientError,
        pdpyras.resource_envelope(do_http_things), dummy_session, '/users',
        json=user_payload)
    reset_mocks()
    # Add type property; should work now and automatically pack the user
    # object into a JSON object inside the envelope.
    user_payload['type'] = 'user'
    do_http_things.__name__ = 'rpost'
    response.ok = True
    created_user = user_payload.copy()
    created_user['id'] = 'P456XYZ'
    response.json.return_value = {'user': created_user}
    self.assertEqual(
        created_user,
        pdpyras.resource_envelope(do_http_things)(dummy_session, '/users',
            json=user_payload))
    do_http_things.assert_called_with(dummy_session, '/users',
        json={'user': user_payload})
    reset_mocks()
    # Test auto-envelope functionality for multi-update
    incidents = [{'id': 'PABC123'}, {'id': 'PDEF456'}]
    do_http_things.__name__ = 'rput'
    response.ok = True
    updated_incidents = copy.deepcopy(incidents)
    response.json.return_value = {'incidents': updated_incidents}
    self.assertEqual(
        updated_incidents,
        pdpyras.resource_envelope(do_http_things)(dummy_session, '/incidents',
            json=incidents))
    # The final value of the json parameter passed to the method (which goes
    # straight to put) should be the plural resource name
    self.assertEqual(do_http_things.mock_calls[0][2]['json'],
        {'incidents': incidents})