def test_get_all_pages_prints_attribute_last_item_in_paging_message(self):
    """%%last_item%% in the page message is replaced with the message_attribute
    value of the final item on each page."""
    self.mock_method.return_value.execute.side_effect = self.simple_3_page_response
    template = 'Last item in page: %%last_item%%'
    with patch.object(gapi.sys.stderr, 'write') as mock_write:
        gapi.get_all_pages(
            self.mock_service,
            self.mock_method_name,
            page_message=template,
            message_attribute='position')
    written = [args[0][0] for args in mock_write.call_args_list]
    # Pages 1 and 2 must each have produced an interpolated message.
    for page in self.simple_3_page_response[:2]:
        expected = template.replace('%%last_item%%',
                                    page['items'][-1]['position'])
        self.assertIn(expected, written)
    # The raw placeholder must never leak into the output.
    for message in written:
        self.assertNotIn('%%last_item', message)
def test_get_all_pages_passes_additional_kwargs_to_service_method(self):
    """Extra keyword arguments are forwarded verbatim to the service method."""
    self.mock_method.return_value.execute.return_value = self.empty_items_response
    gapi.get_all_pages(
        self.mock_service, self.mock_method_name, my_param_1=1, my_param_2=2)
    forwarded = self.mock_method.call_args[1]
    self.assertEqual(forwarded.get('my_param_1'), 1)
    self.assertEqual(forwarded.get('my_param_2'), 2)
def test_get_all_pages_uses_default_max_page_size(self):
    """Without an explicit page size, the API's known maximum is requested."""
    api_id = list(gapi.MAX_RESULTS_API_EXCEPTIONS.keys())[0]
    api_max = gapi.MAX_RESULTS_API_EXCEPTIONS[api_id]
    self.mock_method.return_value.methodId = api_id
    # Minimal discovery document exposing the method's maxResults maximum.
    self.mock_service._rootDesc = {
        'resources': {
            'someResource': {
                'methods': {
                    'someMethod': {
                        'id': api_id,
                        'parameters': {
                            'maxResults': {
                                'maximum': api_max
                            }
                        }
                    }
                }
            }
        }
    }
    self.mock_method.return_value.execute.return_value = self.empty_items_response
    gapi.get_all_pages(self.mock_service, self.mock_method_name)
    sent_kwargs = self.mock_method.call_args[1]
    self.assertIn('maxResults', sent_kwargs)
    self.assertEqual(sent_kwargs['maxResults'], api_max)
def test_get_all_pages_prints_attribute_total_items_in_paging_message(self):
    """%%total_items%% is replaced with the cumulative item count per page."""
    self.mock_method.return_value.execute.side_effect = self.simple_3_page_response
    template = 'Total number of items discovered: %%total_items%%'
    with patch.object(gapi.sys.stderr, 'write') as mock_write:
        gapi.get_all_pages(
            self.mock_service, self.mock_method_name, page_message=template)
    written = [args[0][0] for args in mock_write.call_args_list]
    # Each page's message reflects the running total of items so far.
    running_total = 0
    for page in self.simple_3_page_response:
        running_total += len(page['items'])
        expected = template.replace('%%total_items%%', str(running_total))
        self.assertIn(expected, written)
    # The raw placeholder must never leak into the output.
    for message in written:
        self.assertNotIn('%%total_items', message)
def test_get_all_pages_max_page_size_overrided(self):
    """An explicit pageSize argument wins over the computed default."""
    self.mock_method.return_value.execute.return_value = self.empty_items_response
    gapi.get_all_pages(
        self.mock_service, self.mock_method_name, pageSize=123456)
    sent_kwargs = self.mock_method.call_args[1]
    self.assertIn('pageSize', sent_kwargs)
    self.assertEqual(123456, sent_kwargs['pageSize'])
def test_get_all_pages_includes_next_pagetoken_in_request(self):
    """The nextPageToken from one page is sent as pageToken on the next request."""
    first_page = {'items': ['1-1', '1-2', '1-3'], 'nextPageToken': 'someToken'}
    second_page = {'items': ['2-1', '2-2', '2-3']}
    self.mock_method.return_value.execute.side_effect = [first_page, second_page]
    gapi.get_all_pages(self.mock_service, self.mock_method_name, pageSize=100)
    self.assertEqual(self.mock_method.call_count, 2)
    second_call_kwargs = self.mock_method.call_args_list[1][1]
    self.assertIn('pageToken', second_call_kwargs)
    self.assertEqual(second_call_kwargs['pageToken'],
                     first_page['nextPageToken'])
def test_get_all_pages_prints_paging_message(self):
    """A plain page_message is written to stderr while paging."""
    self.mock_method.return_value.execute.side_effect = self.simple_3_page_response
    message = 'A simple string displayed during paging'
    with patch.object(gapi.sys.stderr, 'write') as mock_write:
        gapi.get_all_pages(
            self.mock_service, self.mock_method_name, page_message=message)
    written = [args[0][0] for args in mock_write.call_args_list]
    self.assertIn(message, written)
def test_get_all_pages_passes_throw_and_retry_reasons(self, mock_call):
    """throw_reasons and retry_reasons are forwarded to the underlying call."""
    throw_for = MagicMock()
    retry_for = MagicMock()
    mock_call.return_value = self.empty_items_response
    gapi.get_all_pages(
        self.mock_service,
        self.mock_method_name,
        throw_reasons=throw_for,
        retry_reasons=retry_for)
    forwarded = mock_call.call_args[1]
    self.assertEqual(forwarded.get('throw_reasons'), throw_for)
    self.assertEqual(forwarded.get('retry_reasons'), retry_for)
def test_get_all_pages_ends_paging_message_with_newline(self):
    """After the final page, a trailing '\\r\\n' follows the last page message."""
    self.mock_method.return_value.execute.side_effect = self.simple_3_page_response
    message = 'A simple string displayed during paging'
    with patch.object(gapi.sys.stderr, 'write') as mock_write:
        gapi.get_all_pages(
            self.mock_service, self.mock_method_name, page_message=message)
    written = [args[0][0] for args in mock_write.call_args_list]

    def index_after_last(value):
        # Position just past the final occurrence of value in the output.
        return len(written) - written[::-1].index(value)

    self.assertGreater(index_after_last('\r\n'), index_after_last(message))
def printHolds():
    """Print Vault holds as a CSV report (gam print holds).

    Optional argv arguments:
      todrive            upload the resulting CSV to Drive
      matter(s) <list>   comma-separated matter names/IDs to restrict to
    """
    v = buildGAPIObject()
    todrive = False
    csvRows = []
    initialTitles = ['matterId', 'holdId', 'name', 'corpus', 'updateTime']
    titles = initialTitles[:]
    matters = []
    matterIds = []
    i = 3  # argv[0:3] are the command words themselves
    while i < len(sys.argv):
        myarg = sys.argv[i].lower().replace('_', '')
        if myarg == 'todrive':
            todrive = True
            i += 1
        elif myarg in ['matter', 'matters']:
            matters = sys.argv[i + 1].split(',')
            i += 2
        else:
            controlflow.invalid_argument_exit(myarg, "gam print holds")
    if not matters:
        # No matters specified: enumerate every matter, keeping only OPEN ones.
        fields = 'matters(matterId,state),nextPageToken'
        matters_results = gapi.get_all_pages(v.matters(), 'list', 'matters',
                                             view='BASIC', fields=fields)
        for matter in matters_results:
            matterState = matter['state']
            matterId = matter['matterId']
            if matterState != 'OPEN':
                print(f'ignoring matter {matterId} in state {matterState}')
                continue
            matterIds.append(matterId)
    else:
        for matter in matters:
            matterIds.append(getMatterItem(v, matter))
    for matterId in matterIds:
        sys.stderr.write(f'Retrieving holds for matter {matterId}\n')
        holds = gapi.get_all_pages(v.matters().holds(), 'list', 'holds',
                                   matterId=matterId)
        for hold in holds:
            display.add_row_titles_to_csv_file(
                utils.flatten_json(hold, flattened={'matterId': matterId}),
                csvRows, titles)
    display.sort_csv_titles(initialTitles, titles)
    display.write_csv_file(csvRows, titles, 'Vault Holds', todrive)
def printMatters():
    """Print Vault matters as a CSV report (gam print matters).

    Optional argv arguments:
      todrive                     upload the resulting CSV to Drive
      <PROJECTION_CHOICES_MAP key> select the view detail level (default FULL)

    NOTE(review): this file defines printMatters twice; in Python the later
    definition replaces this one at import time.
    """
    v = buildGAPIObject()
    todrive = False
    csvRows = []
    initialTitles = ['matterId', 'name', 'description', 'state']
    titles = initialTitles[:]
    view = 'FULL'
    i = 3  # argv[0:3] are the command words themselves
    while i < len(sys.argv):
        myarg = sys.argv[i].lower().replace('_', '')
        if myarg == 'todrive':
            todrive = True
            i += 1
        elif myarg in PROJECTION_CHOICES_MAP:
            view = PROJECTION_CHOICES_MAP[myarg]
            i += 1
        else:
            controlflow.invalid_argument_exit(myarg, "gam print matters")
    __main__.printGettingAllItems('Vault Matters', None)
    page_message = gapi.got_total_items_msg('Vault Matters', '...\n')
    matters = gapi.get_all_pages(v.matters(), 'list', 'matters',
                                 page_message=page_message, view=view)
    for matter in matters:
        display.add_row_titles_to_csv_file(utils.flatten_json(matter),
                                           csvRows, titles)
    display.sort_csv_titles(initialTitles, titles)
    display.write_csv_file(csvRows, titles, 'Vault Matters', todrive)
def test_get_all_pages_non_default_items_field_name(self):
    """The items= argument selects which response field holds the page items."""
    field_name = 'things'
    fake_response = {field_name: [{}, {}, {}]}
    self.mock_method.return_value.execute.return_value = fake_response
    fetched = gapi.get_all_pages(
        self.mock_service, self.mock_method_name, items=field_name)
    self.assertEqual(fetched, fake_response[field_name])
def transferSecCals(users):
    """Transfer ownership of users' secondary calendars to a target user.

    argv[5] is the target user. Optional argv arguments:
      keepuser                  do not remove the source user's access afterwards
      sendnotifications <bool>  whether ACL changes send email notifications
    """
    target_user = sys.argv[5]
    remove_source_user = sendNotifications = True
    i = 6
    while i < len(sys.argv):
        myarg = sys.argv[i].lower().replace('_', '')
        if myarg == 'keepuser':
            remove_source_user = False
            i += 1
        elif myarg == 'sendnotifications':
            sendNotifications = __main__.getBoolean(sys.argv[i + 1], myarg)
            i += 2
        else:
            controlflow.invalid_argument_exit(sys.argv[i],
                                              "gam <users> transfer seccals")
    if remove_source_user:
        # target_cal is only needed to drop the source user's ACL; built up
        # front so we can bail out before touching anything if it fails.
        target_user, target_cal = buildCalendarGAPIObject(target_user)
        if not target_cal:
            return
    for user in users:
        user, source_cal = buildCalendarGAPIObject(user)
        if not source_cal:
            continue
        calendars = gapi.get_all_pages(source_cal.calendarList(), 'list',
                                       'items', soft_errors=True,
                                       minAccessRole='owner', showHidden=True,
                                       fields='items(id),nextPageToken')
        for calendar in calendars:
            calendarId = calendar['id']
            # Only secondary (group) calendars are transferred; the primary
            # calendar's id is the user's email address, not a group id.
            if calendarId.find('@group.calendar.google.com') != -1:
                body = {
                    'role': 'owner',
                    'scope': {
                        'type': 'user',
                        'value': target_user
                    }
                }
                gapi.call(source_cal.acl(), 'insert', calendarId=calendarId,
                          body=body, sendNotifications=sendNotifications)
                if remove_source_user:
                    body = {
                        'role': 'none',
                        'scope': {
                            'type': 'user',
                            'value': user
                        }
                    }
                    gapi.call(target_cal.acl(), 'insert',
                              calendarId=calendarId, body=body,
                              sendNotifications=sendNotifications)
def printEvents():
    """Print events from one calendar as CSV (gam calendar <email> printevents).

    argv[2] is the calendar's email address; subsequent argv arguments select
    optional event-list filters (query, time windows, deleted/hidden, etc.).
    """
    calendarId, cal = buildCalendarDataGAPIObject(sys.argv[2])
    if not cal:
        return
    # Optional events.list filters; None means "not set".
    q = showDeleted = showHiddenInvitations = timeMin = \
        timeMax = timeZone = updatedMin = None
    toDrive = False
    titles = []
    csvRows = []
    i = 4
    while i < len(sys.argv):
        myarg = sys.argv[i].lower().replace('_', '')
        if myarg == 'query':
            q = sys.argv[i + 1]
            i += 2
        elif myarg == 'includedeleted':
            showDeleted = True
            i += 1
        elif myarg == 'includehidden':
            showHiddenInvitations = True
            i += 1
        elif myarg == 'after':
            timeMin = utils.get_time_or_delta_from_now(sys.argv[i + 1])
            i += 2
        elif myarg == 'before':
            timeMax = utils.get_time_or_delta_from_now(sys.argv[i + 1])
            i += 2
        elif myarg == 'timezone':
            timeZone = sys.argv[i + 1]
            i += 2
        elif myarg == 'updated':
            updatedMin = utils.get_time_or_delta_from_now(sys.argv[i + 1])
            i += 2
        elif myarg == 'todrive':
            toDrive = True
            i += 1
        else:
            controlflow.invalid_argument_exit(
                sys.argv[i], "gam calendar <email> printevents")
    page_message = gapi.got_total_items_msg(f'Events for {calendarId}', '')
    results = gapi.get_all_pages(cal.events(), 'list', 'items',
                                 page_message=page_message,
                                 calendarId=calendarId, q=q,
                                 showDeleted=showDeleted,
                                 showHiddenInvitations=showHiddenInvitations,
                                 timeMin=timeMin, timeMax=timeMax,
                                 timeZone=timeZone, updatedMin=updatedMin)
    for result in results:
        row = {'calendarId': calendarId}
        display.add_row_titles_to_csv_file(
            utils.flatten_json(result, flattened=row), csvRows, titles)
    display.sort_csv_titles(['calendarId', 'id', 'summary', 'status'], titles)
    display.write_csv_file(csvRows, titles, 'Calendar Events', toDrive)
def test_get_all_pages_returns_all_items(self):
    """Items from every page are concatenated, in order, into one list."""
    pages = [
        {'items': ['1-1', '1-2', '1-3'], 'nextPageToken': '2'},
        {'items': ['2-1', '2-2', '2-3'], 'nextPageToken': '3'},
        {'items': ['3-1', '3-2', '3-3']},
    ]
    self.mock_method.return_value.execute.side_effect = pages
    all_items = gapi.get_all_pages(self.mock_service, self.mock_method_name)
    expected = pages[0]['items'] + pages[1]['items'] + pages[2]['items']
    self.assertListEqual(all_items, expected)
def test_get_all_pages_prints_paging_message_inline(self):
    """A carriage return is emitted between successive page messages."""
    self.mock_method.return_value.execute.side_effect = self.simple_3_page_response
    message = 'A simple string displayed during paging'
    with patch.object(gapi.sys.stderr, 'write') as mock_write:
        gapi.get_all_pages(
            self.mock_service, self.mock_method_name, page_message=message)
    written = [args[0][0] for args in mock_write.call_args_list]
    positions = [idx for idx, text in enumerate(written) if text == message]
    # The message must have been printed for more than one page.
    self.assertGreater(len(positions), 1)
    # A '\r' between the first two messages keeps the output on one line.
    between = written[positions[0]:positions[1]]
    self.assertIn('\r', between)
def convertMatterNameToID(v, nameOrID):
    """Resolve a Vault matter name or uid:<id> to a matter ID.

    Returns the matter ID string, or None when no matter matches the name.
    """
    nameOrID = nameOrID.lower()
    uid_match = UID_PATTERN.match(nameOrID)
    if uid_match:
        return uid_match.group(1)
    all_matters = gapi.get_all_pages(
        v.matters(), 'list', 'matters', view='BASIC',
        fields='matters(matterId,name),nextPageToken')
    return next((matter['matterId'] for matter in all_matters
                 if matter['name'].lower() == nameOrID), None)
def convertExportNameToID(v, nameOrID, matterId):
    """Resolve a Vault export name or uid:<id> within a matter to an export ID.

    Exits with a system error when no export in the matter matches the name.
    """
    nameOrID = nameOrID.lower()
    uid_match = UID_PATTERN.match(nameOrID)
    if uid_match:
        return uid_match.group(1)
    all_exports = gapi.get_all_pages(
        v.matters().exports(), 'list', 'exports', matterId=matterId,
        fields='exports(id,name),nextPageToken')
    for candidate in all_exports:
        if candidate['name'].lower() == nameOrID:
            return candidate['id']
    controlflow.system_error_exit(
        4, f'could not find export name {nameOrID} '
           f'in matter {matterId}')
def convertHoldNameToID(v, nameOrID, matterId):
    """Resolve a Vault hold name or uid:<id> within a matter to a hold ID.

    Exits with a system error when no hold in the matter matches the name.
    """
    nameOrID = nameOrID.lower()
    uid_match = UID_PATTERN.match(nameOrID)
    if uid_match:
        return uid_match.group(1)
    all_holds = gapi.get_all_pages(
        v.matters().holds(), 'list', 'holds', matterId=matterId,
        fields='holds(holdId,name),nextPageToken')
    for candidate in all_holds:
        if candidate['name'].lower() == nameOrID:
            return candidate['holdId']
    controlflow.system_error_exit(
        4, f'could not find hold name {nameOrID} '
           f'in matter {matterId}')
def printExports():
    """Print Vault exports as a CSV report (gam print exports).

    Optional argv arguments:
      todrive            upload the resulting CSV to Drive
      matter(s) <list>   comma-separated matter names/IDs to restrict to
    """
    v = buildGAPIObject()
    todrive = False
    csvRows = []
    initialTitles = ['matterId', 'id', 'name', 'createTime', 'status']
    titles = initialTitles[:]
    matters = []
    matterIds = []
    i = 3  # argv[0:3] are the command words themselves
    while i < len(sys.argv):
        myarg = sys.argv[i].lower().replace('_', '')
        if myarg == 'todrive':
            todrive = True
            i += 1
        elif myarg in ['matter', 'matters']:
            matters = sys.argv[i+1].split(',')
            i += 2
        else:
            controlflow.invalid_argument_exit(myarg, "gam print exports")
    if not matters:
        # No matters given: use every OPEN matter.
        fields = 'matters(matterId),nextPageToken'
        matters_results = gapi.get_all_pages(
            v.matters(), 'list', 'matters', view='BASIC', state='OPEN',
            fields=fields)
        for matter in matters_results:
            matterIds.append(matter['matterId'])
    else:
        for matter in matters:
            matterIds.append(getMatterItem(v, matter))
    for matterId in matterIds:
        sys.stderr.write(f'Retrieving exports for matter {matterId}\n')
        exports = gapi.get_all_pages(
            v.matters().exports(), 'list', 'exports', matterId=matterId)
        for export in exports:
            display.add_row_titles_to_csv_file(utils.flatten_json(
                export, flattened={'matterId': matterId}), csvRows, titles)
    display.sort_csv_titles(initialTitles, titles)
    display.write_csv_file(csvRows, titles, 'Vault Exports', todrive)
def download_bucket():
    """Download every object in the Cloud Storage bucket named in argv[3].

    Lists the bucket (names, ids, md5 hashes), then fetches each object,
    passing its MD5 (hex) so the download can be integrity-checked.
    """
    bucket = sys.argv[3]
    s = build_gapi()
    page_message = gapi.got_total_items_msg('Files', '...')
    fields = 'nextPageToken,items(name,id,md5Hash)'
    objects = gapi.get_all_pages(s.objects(), 'list', 'items',
                                 page_message=page_message, bucket=bucket,
                                 projection='noAcl', fields=fields)
    # enumerate replaces the manual counter; total is hoisted out of the loop
    # instead of calling len(objects) once per iteration.
    total = len(objects)
    for i, object_ in enumerate(objects, start=1):
        print(f'{i}/{total}')
        # The API reports md5Hash base64-encoded; the downloader expects hex.
        expectedMd5 = base64.b64decode(object_['md5Hash']).hex()
        get_cloud_storage_object(s, bucket, object_['name'],
                                 expectedMd5=expectedMd5)
def printShowCalendars(users, csvFormat):
    """Show (text) or print (CSV) the calendar list of each user.

    Args:
      users: iterable of user identifiers to process.
      csvFormat: truthy for CSV output ('print'), falsy for text ('show').
    """
    if csvFormat:
        todrive = False
        titles = []
        csvRows = []
    i = 5  # argv[0:5] are the command words themselves
    while i < len(sys.argv):
        myarg = sys.argv[i].lower()
        if csvFormat and myarg == 'todrive':
            todrive = True
            i += 1
        else:
            controlflow.invalid_argument_exit(
                myarg, f"gam <users> {['show', 'print'][csvFormat]} calendars")
    i = 0  # reused as the per-user progress counter
    count = len(users)
    for user in users:
        i += 1
        user, cal = buildCalendarGAPIObject(user)
        if not cal:
            continue
        result = gapi.get_all_pages(cal.calendarList(), 'list', 'items',
                                    soft_errors=True)
        jcount = len(result)
        if not csvFormat:
            print(f'User: {user}, Calendars:{display.current_count(i, count)}')
            if jcount == 0:
                continue
            j = 0
            for userCalendar in result:
                j += 1
                _showCalendar(userCalendar, j, jcount)
        else:
            if jcount == 0:
                continue
            for userCalendar in result:
                row = {'primaryEmail': user}
                display.add_row_titles_to_csv_file(
                    utils.flatten_json(userCalendar, flattened=row),
                    csvRows, titles)
    if csvFormat:
        display.sort_csv_titles(['primaryEmail', 'id'], titles)
        display.write_csv_file(csvRows, titles, 'Calendars', todrive)
def showCalSettings(users):
    """Print each user's calendar settings as sorted key/value pairs."""
    count = len(users)
    for i, user in enumerate(users, start=1):
        user, cal = buildCalendarGAPIObject(user)
        if not cal:
            continue
        feed = gapi.get_all_pages(
            cal.settings(), 'list', 'items', soft_errors=True)
        if not feed:
            continue
        current_count = display.current_count(i, count)
        print(f'User: {user}, Calendar Settings:{current_count}')
        settings = {setting['id']: setting['value'] for setting in feed}
        for attr, value in sorted(settings.items()):
            print(f' {attr}: {value}')
def getCrOSDeviceEntity(i, cd):
    """Parse a CrOS device specifier from argv starting at index i.

    Returns a (next_argv_index, devices) tuple where devices is either the
    resolved device list or the raw comma/space-separated ids from argv.
    """
    myarg = sys.argv[i].lower()
    if myarg == 'cros_sn':
        return i + 2, __main__.getUsersToModify('cros_sn', sys.argv[i + 1])
    if myarg == 'query':
        return i + 2, __main__.getUsersToModify('crosquery', sys.argv[i + 1])
    if myarg.startswith('query:'):
        query = sys.argv[i][6:]
        # An orgunitpath: prefix selects by OU instead of a free-form query.
        if query[:12].lower() == 'orgunitpath:':
            list_kwargs = {'orgUnitPath': query[12:]}
        else:
            list_kwargs = {'query': query}
        devices = gapi.get_all_pages(
            cd.chromeosdevices(), 'list', 'chromeosdevices',
            customerId=GC_Values[GC_CUSTOMER_ID],
            fields='nextPageToken,chromeosdevices(deviceId)', **list_kwargs)
        return i + 1, [device['deviceId'] for device in devices]
    return i + 1, sys.argv[i].replace(',', ' ').split()
def printShowACLs(csvFormat):
    """Show (text) or print (CSV) the ACL rules of the calendar in argv[2].

    Args:
      csvFormat: truthy for CSV output ('printacl'), falsy for text ('showacl').
    """
    calendarId, cal = buildCalendarDataGAPIObject(sys.argv[2])
    if not cal:
        return
    toDrive = False
    i = 4  # argv[0:4] are the command words themselves
    while i < len(sys.argv):
        myarg = sys.argv[i].lower().replace('_', '')
        if csvFormat and myarg == 'todrive':
            toDrive = True
            i += 1
        else:
            action = ['showacl', 'printacl'][csvFormat]
            message = f"gam calendar <email> {action}"
            controlflow.invalid_argument_exit(sys.argv[i], message)
    acls = gapi.get_all_pages(cal.acl(), 'list', 'items',
                              calendarId=calendarId)
    i = 0  # reused as the per-rule progress counter
    if csvFormat:
        titles = []
        rows = []
    else:
        count = len(acls)
    for rule in acls:
        i += 1
        if csvFormat:
            row = utils.flatten_json(rule, None)
            # Grow the title list with any keys not yet seen.
            for key in row:
                if key not in titles:
                    titles.append(key)
            rows.append(row)
        else:
            formatted_acl = formatACLRule(rule)
            current_count = display.current_count(i, count)
            print(
                f'Calendar: {calendarId}, ACL: {formatted_acl}{current_count}')
    if csvFormat:
        display.write_csv_file(rows, titles, f'{calendarId} Calendar ACLs',
                               toDrive)
def printMatters():
    """Print Vault matters as CSV (gam print matters), optionally by state.

    Optional argv arguments:
      todrive                      upload the resulting CSV to Drive
      <PROJECTION_CHOICES_MAP key> select the view detail level (default FULL)
      matterstate <state>          only matters in the given state

    NOTE(review): this file defines printMatters twice; this later definition
    is the one in effect at import time.
    """
    v = buildGAPIObject()
    todrive = False
    csvRows = []
    initialTitles = ['matterId', 'name', 'description', 'state']
    titles = initialTitles[:]
    view = 'FULL'
    state = None
    i = 3  # argv[0:3] are the command words themselves
    while i < len(sys.argv):
        myarg = sys.argv[i].lower().replace('_', '')
        if myarg == 'todrive':
            todrive = True
            i += 1
        elif myarg in PROJECTION_CHOICES_MAP:
            view = PROJECTION_CHOICES_MAP[myarg]
            i += 1
        elif myarg == 'matterstate':
            # Valid states are taken from the API's discovery schema.
            valid_states = gapi.get_enum_values_minus_unspecified(
                v._rootDesc['schemas']['Matter']['properties']['state'][
                    'enum'])
            state = sys.argv[i+1].upper()
            if state not in valid_states:
                controlflow.expected_argument_exit(
                    'state', ', '.join(valid_states), state)
            i += 2
        else:
            controlflow.invalid_argument_exit(myarg, "gam print matters")
    __main__.printGettingAllItems('Vault Matters', None)
    page_message = gapi.got_total_items_msg('Vault Matters', '...\n')
    matters = gapi.get_all_pages(
        v.matters(), 'list', 'matters', page_message=page_message, view=view,
        state=state)
    for matter in matters:
        display.add_row_titles_to_csv_file(
            utils.flatten_json(matter), csvRows, titles)
    display.sort_csv_titles(initialTitles, titles)
    display.write_csv_file(csvRows, titles, 'Vault Matters', todrive)
def doPrintCrosDevices():
    """Print ChromeOS devices as a CSV report (gam print cros).

    Supports field selection, projections, ordering, date-range filtering of
    the per-device list fields (active time ranges, recent users, device
    files, CPU/disk/RAM reports), and optional upload of the CSV to Drive.
    """

    def _getSelectedLists(myarg):
        # Map a list-selection argument onto the selectedLists flags.
        if myarg in CROS_ACTIVE_TIME_RANGES_ARGUMENTS:
            selectedLists['activeTimeRanges'] = True
        elif myarg in CROS_RECENT_USERS_ARGUMENTS:
            selectedLists['recentUsers'] = True
        elif myarg in CROS_DEVICE_FILES_ARGUMENTS:
            selectedLists['deviceFiles'] = True
        elif myarg in CROS_CPU_STATUS_REPORTS_ARGUMENTS:
            selectedLists['cpuStatusReports'] = True
        elif myarg in CROS_DISK_VOLUME_REPORTS_ARGUMENTS:
            selectedLists['diskVolumeReports'] = True
        elif myarg in CROS_SYSTEM_RAM_FREE_REPORTS_ARGUMENTS:
            selectedLists['systemRamFreeReports'] = True

    cd = gapi.directory.buildGAPIObject()
    todrive = False
    fieldsList = []
    fieldsTitles = {}
    titles = []
    csvRows = []
    display.add_field_to_csv_file('deviceid', CROS_ARGUMENT_TO_PROPERTY_MAP,
                                  fieldsList, fieldsTitles, titles)
    projection = orderBy = sortOrder = orgUnitPath = None
    queries = [None]  # a single None query means "all devices"
    noLists = sortHeaders = False
    selectedLists = {}
    startDate = endDate = None
    listLimit = 0  # 0 means no limit on list-field rows
    i = 3  # argv[0:3] are the command words themselves
    while i < len(sys.argv):
        myarg = sys.argv[i].lower().replace('_', '')
        if myarg in ['query', 'queries']:
            queries = __main__.getQueries(myarg, sys.argv[i+1])
            i += 2
        elif myarg == 'limittoou':
            orgUnitPath = __main__.getOrgUnitItem(sys.argv[i+1])
            i += 2
        elif myarg == 'todrive':
            todrive = True
            i += 1
        elif myarg == 'nolists':
            noLists = True
            selectedLists = {}
            i += 1
        elif myarg == 'listlimit':
            listLimit = __main__.getInteger(sys.argv[i+1], myarg, minVal=0)
            i += 2
        elif myarg in CROS_START_ARGUMENTS:
            startDate = _getFilterDate(sys.argv[i+1])
            i += 2
        elif myarg in CROS_END_ARGUMENTS:
            endDate = _getFilterDate(sys.argv[i+1])
            i += 2
        elif myarg == 'orderby':
            orderBy = sys.argv[i+1].lower().replace('_', '')
            validOrderBy = ['location', 'user', 'lastsync', 'notes',
                            'serialnumber', 'status', 'supportenddate']
            if orderBy not in validOrderBy:
                controlflow.expected_argument_exit(
                    "orderby", ", ".join(validOrderBy), orderBy)
            # Translate the CLI spelling to the API's camelCase field name.
            if orderBy == 'location':
                orderBy = 'annotatedLocation'
            elif orderBy == 'user':
                orderBy = 'annotatedUser'
            elif orderBy == 'lastsync':
                orderBy = 'lastSync'
            elif orderBy == 'serialnumber':
                orderBy = 'serialNumber'
            elif orderBy == 'supportenddate':
                orderBy = 'supportEndDate'
            i += 2
        elif myarg in SORTORDER_CHOICES_MAP:
            sortOrder = SORTORDER_CHOICES_MAP[myarg]
            i += 1
        elif myarg in PROJECTION_CHOICES_MAP:
            projection = PROJECTION_CHOICES_MAP[myarg]
            sortHeaders = True
            if projection == 'FULL':
                fieldsList = []
            else:
                fieldsList = CROS_BASIC_FIELDS_LIST[:]
            i += 1
        elif myarg == 'allfields':
            projection = 'FULL'
            sortHeaders = True
            fieldsList = []
            i += 1
        elif myarg == 'sortheaders':
            sortHeaders = True
            i += 1
        elif myarg in CROS_LISTS_ARGUMENTS:
            _getSelectedLists(myarg)
            i += 1
        elif myarg in CROS_ARGUMENT_TO_PROPERTY_MAP:
            display.add_field_to_fields_list(
                myarg, CROS_ARGUMENT_TO_PROPERTY_MAP, fieldsList)
            i += 1
        elif myarg == 'fields':
            fieldNameList = sys.argv[i+1]
            for field in fieldNameList.lower().replace(',', ' ').split():
                if field in CROS_LISTS_ARGUMENTS:
                    _getSelectedLists(field)
                elif field in CROS_ARGUMENT_TO_PROPERTY_MAP:
                    display.add_field_to_fields_list(
                        field, CROS_ARGUMENT_TO_PROPERTY_MAP, fieldsList)
                else:
                    controlflow.invalid_argument_exit(
                        field, "gam print cros fields")
            i += 2
        else:
            controlflow.invalid_argument_exit(sys.argv[i], "gam print cros")
    if selectedLists:
        # Requesting any list field forces the FULL projection.
        noLists = False
        projection = 'FULL'
        for selectList in selectedLists:
            display.add_field_to_fields_list(
                selectList, CROS_ARGUMENT_TO_PROPERTY_MAP, fieldsList)
    if fieldsList:
        fieldsList.append('deviceId')
        # Dotted sub-fields become slash-separated in the fields expression.
        fields = f'nextPageToken,chromeosdevices({",".join(set(fieldsList))})'.replace(
            '.', '/')
    else:
        fields = None
    for query in queries:
        __main__.printGettingAllItems('CrOS Devices', query)
        page_message = gapi.got_total_items_msg('CrOS Devices', '...\n')
        all_cros = gapi.get_all_pages(cd.chromeosdevices(), 'list',
                                      'chromeosdevices',
                                      page_message=page_message, query=query,
                                      customerId=GC_Values[GC_CUSTOMER_ID],
                                      projection=projection,
                                      orgUnitPath=orgUnitPath,
                                      orderBy=orderBy, sortOrder=sortOrder,
                                      fields=fields)
        for cros in all_cros:
            _checkTPMVulnerability(cros)
        if not noLists and not selectedLists:
            # Default path: flatten each device record wholesale.
            for cros in all_cros:
                if 'notes' in cros:
                    cros['notes'] = cros['notes'].replace('\n', '\\n')
                if 'autoUpdateExpiration' in cros:
                    cros['autoUpdateExpiration'] = utils.formatTimestampYMD(
                        cros['autoUpdateExpiration'])
                for cpuStatusReport in cros.get('cpuStatusReports', []):
                    tempInfos = cpuStatusReport.get('cpuTemperatureInfo', [])
                    for tempInfo in tempInfos:
                        tempInfo['label'] = tempInfo['label'].strip()
                display.add_row_titles_to_csv_file(utils.flatten_json(
                    cros, listLimit=listLimit), csvRows, titles)
            continue  # done with this query; move to the next one
        # Detailed path: one CSV row per index across the selected lists.
        for cros in all_cros:
            if 'notes' in cros:
                cros['notes'] = cros['notes'].replace('\n', '\\n')
            if 'autoUpdateExpiration' in cros:
                cros['autoUpdateExpiration'] = utils.formatTimestampYMD(
                    cros['autoUpdateExpiration'])
            # Scalar attributes form the base row shared by every list row.
            row = {}
            for attrib in cros:
                if attrib not in set(['kind', 'etag', 'tpmVersionInfo',
                                      'recentUsers', 'activeTimeRanges',
                                      'deviceFiles', 'cpuStatusReports',
                                      'diskVolumeReports',
                                      'systemRamFreeReports']):
                    row[attrib] = cros[attrib]
            if selectedLists.get('activeTimeRanges'):
                timergs = cros.get('activeTimeRanges', [])
            else:
                timergs = []
            activeTimeRanges = _filterTimeRanges(timergs, startDate, endDate)
            if selectedLists.get('recentUsers'):
                recentUsers = cros.get('recentUsers', [])
            else:
                recentUsers = []
            if selectedLists.get('deviceFiles'):
                device_files = cros.get('deviceFiles', [])
            else:
                device_files = []
            deviceFiles = _filterCreateReportTime(device_files, 'createTime',
                                                  startDate, endDate)
            if selectedLists.get('cpuStatusReports'):
                cpu_reports = cros.get('cpuStatusReports', [])
            else:
                cpu_reports = []
            cpuStatusReports = _filterCreateReportTime(cpu_reports,
                                                       'reportTime',
                                                       startDate, endDate)
            if selectedLists.get('diskVolumeReports'):
                diskVolumeReports = cros.get('diskVolumeReports', [])
            else:
                diskVolumeReports = []
            if selectedLists.get('systemRamFreeReports'):
                ram_reports = cros.get('systemRamFreeReports', [])
            else:
                ram_reports = []
            systemRamFreeReports = _filterCreateReportTime(ram_reports,
                                                           'reportTime',
                                                           startDate, endDate)
            if noLists or (not activeTimeRanges and
                           not recentUsers and
                           not deviceFiles and
                           not cpuStatusReports and
                           not diskVolumeReports and
                           not systemRamFreeReports):
                display.add_row_titles_to_csv_file(row, csvRows, titles)
                continue
            lenATR = len(activeTimeRanges)
            lenRU = len(recentUsers)
            lenDF = len(deviceFiles)
            lenCSR = len(cpuStatusReports)
            lenDVR = len(diskVolumeReports)
            lenSRFR = len(systemRamFreeReports)
            max_len = max(lenATR, lenRU, lenDF, lenCSR, lenDVR, lenSRFR)
            # NOTE(review): this `i` shadows the argv index used earlier in
            # the function; harmless here since parsing is done, but fragile.
            for i in range(min(max_len, listLimit or max_len)):
                nrow = row.copy()
                if i < lenATR:
                    nrow['activeTimeRanges.date'] = \
                        activeTimeRanges[i]['date']
                    nrow['activeTimeRanges.activeTime'] = \
                        str(activeTimeRanges[i]['activeTime'])
                    active_time = activeTimeRanges[i]['activeTime']
                    nrow['activeTimeRanges.duration'] = \
                        utils.formatMilliSeconds(active_time)
                    nrow['activeTimeRanges.minutes'] = active_time // 60000
                if i < lenRU:
                    nrow['recentUsers.type'] = recentUsers[i]['type']
                    nrow['recentUsers.email'] = recentUsers[i].get('email')
                    if not nrow['recentUsers.email']:
                        if nrow['recentUsers.type'] == 'USER_TYPE_UNMANAGED':
                            nrow['recentUsers.email'] = 'UnmanagedUser'
                        else:
                            nrow['recentUsers.email'] = 'Unknown'
                if i < lenDF:
                    nrow['deviceFiles.type'] = deviceFiles[i]['type']
                    nrow['deviceFiles.createTime'] = \
                        deviceFiles[i]['createTime']
                if i < lenCSR:
                    nrow['cpuStatusReports.reportTime'] = \
                        cpuStatusReports[i]['reportTime']
                    tempInfos = cpuStatusReports[i].get('cpuTemperatureInfo',
                                                        [])
                    for tempInfo in tempInfos:
                        label = tempInfo["label"].strip()
                        base = 'cpuStatusReports.cpuTemperatureInfo.'
                        nrow[f'{base}{label}'] = tempInfo['temperature']
                    cpu_field = 'cpuUtilizationPercentageInfo'
                    cpu_reports = cpuStatusReports[i][cpu_field]
                    cpu_pcts = [str(x) for x in cpu_reports]
                    nrow[f'cpuStatusReports.{cpu_field}'] = ','.join(cpu_pcts)
                if i < lenDVR:
                    volumeInfo = diskVolumeReports[i]['volumeInfo']
                    j = 0
                    vfield = 'diskVolumeReports.volumeInfo.'
                    for volume in volumeInfo:
                        nrow[f'{vfield}{j}.volumeId'] = \
                            volume['volumeId']
                        nrow[f'{vfield}{j}.storageFree'] = \
                            volume['storageFree']
                        nrow[f'{vfield}{j}.storageTotal'] = \
                            volume['storageTotal']
                        j += 1
                if i < lenSRFR:
                    nrow['systemRamFreeReports.reportTime'] = \
                        systemRamFreeReports[i]['reportTime']
                    ram_reports = systemRamFreeReports[i]['systemRamFreeInfo']
                    ram_info = [str(x) for x in ram_reports]
                    # NOTE(review): 'systenRamFreeReports' below is a typo in
                    # the CSV column name; kept as-is to preserve output —
                    # confirm before renaming, downstream consumers may rely
                    # on the misspelled header.
                    nrow['systenRamFreeReports.systemRamFreeInfo'] = \
                        ','.join(ram_info)
                display.add_row_titles_to_csv_file(nrow, csvRows, titles)
    if sortHeaders:
        display.sort_csv_titles(['deviceId', ], titles)
    display.write_csv_file(csvRows, titles, 'CrOS', todrive)
def showUsage():
    """Print a date-range usage report (gam report usage) as CSV.

    argv[3] selects 'customer' or 'user'; further arguments select the date
    range, parameters, skipped dates/days of week, org unit, and user set.
    One usageReports query is issued per (date, kwargs) pair.
    """
    rep = buildGAPIObject()
    throw_reasons = [gapi.errors.ErrorReason.INVALID,
                     gapi.errors.ErrorReason.BAD_REQUEST]
    todrive = False
    report = sys.argv[3].lower()
    titles = ['date']
    if report == 'customer':
        endpoint = rep.customerUsageReports()
        kwargs = [{}]
    elif report == 'user':
        endpoint = rep.userUsageReport()
        kwargs = [{'userKey': 'all'}]
        titles.append('user')
    else:
        controlflow.expected_argument_exit(
            'usage', ['user', 'customer'], report)
    customerId = GC_Values[GC_CUSTOMER_ID]
    if customerId == MY_CUSTOMER:
        customerId = None
    parameters = []
    filters = None  # NOTE(review): assigned but never used below
    start_date = end_date = orgUnitId = None
    skip_day_numbers = []
    skip_dates = []
    i = 4  # argv[0:4] are the command words themselves
    while i < len(sys.argv):
        myarg = sys.argv[i].lower().replace('_', '')
        if myarg == 'startdate':
            start_date = parse(sys.argv[i+1])
            i += 2
        elif myarg == 'enddate':
            end_date = parse(sys.argv[i+1])
            i += 2
        elif myarg == 'todrive':
            todrive = True
            i += 1
        elif myarg in ['orgunit', 'org', 'ou']:
            if report != 'user':
                controlflow.invalid_argument_exit(myarg,
                                                  f'gam usage {report}')
            _, orgUnitId = __main__.getOrgUnitId(sys.argv[i+1])
            i += 2
        elif myarg == 'parameters':
            parameters = sys.argv[i+1].split(',')
            i += 2
        elif myarg == 'skipdates':
            skips = sys.argv[i+1].split(',')
            skip_dates = [utils.get_yyyymmdd(d) for d in skips]
            i += 2
        elif myarg == 'skipdaysofweek':
            skipdaynames = sys.argv[i+1].split(',')
            dow = [d.lower() for d in calendar.day_abbr]
            skip_day_numbers = [dow.index(d) for d in skipdaynames
                                if d in dow]
            i += 2
        elif myarg in usergroup_types:
            if report != 'user':
                controlflow.invalid_argument_exit(myarg,
                                                  f'gam usage {report}')
            entity_type = myarg
            entity = sys.argv[i+1]
            users = __main__.getUsersToModify(entity_type, entity)
            kwargs = [{'userKey': user} for user in users]
            # NOTE(review): only two argv slots are consumed here but the
            # index advances by 3, skipping the following argument — confirm
            # whether this should be i += 2.
            i += 3
        else:
            controlflow.invalid_argument_exit(sys.argv[i], "gam usage")
    # Default range: the last month up to now.
    if not start_date:
        start_date = datetime.datetime.now() + relativedelta(months=-1)
    if not end_date:
        end_date = datetime.datetime.now()
    if orgUnitId:
        for i in range(len(kwargs)):
            # NOTE(review): kwargs[i-1] starts at index -1; every element is
            # still updated over the full loop, but kwargs[i] would be clearer.
            kwargs[i-1]['orgUnitID'] = orgUnitId
    one_day = datetime.timedelta(days=1)
    usage_on_date = start_date
    titles.extend(parameters)
    csvRows = []
    # NOTE(review): 'intValue' appears twice in this list; harmless but
    # redundant.
    vtypes = ['intValue', 'stringValue', 'intValue', 'boolValue',
              'datetimeValue']
    while usage_on_date <= end_date:
        use_date = usage_on_date.strftime('%Y-%m-%d')
        if usage_on_date.weekday() in skip_day_numbers or \
           use_date in skip_dates:
            usage_on_date += one_day
            continue
        usage_on_date += one_day
        try:
            for kwarg in kwargs:
                try:
                    usage = gapi.get_all_pages(
                        endpoint, 'get', 'usageReports',
                        throw_reasons=throw_reasons, customerId=customerId,
                        date=use_date, parameters=','.join(parameters),
                        **kwarg)
                except gapi.errors.GapiBadRequestError:
                    continue
                for entity in usage:
                    row = {'date': use_date}
                    if 'userEmail' in entity['entity']:
                        row['user'] = entity['entity']['userEmail']
                    for item in entity['parameters']:
                        if 'name' not in item:
                            continue
                        name = item['name']
                        if name == 'cros:device_version_distribution':
                            # Expand the version histogram into one column
                            # per Chrome version.
                            for cros_ver in item['msgValue']:
                                v = cros_ver['version_number']
                                column_name = f'cros:num_devices_chrome_{v}'
                                if column_name not in titles:
                                    titles.append(column_name)
                                row[column_name] = cros_ver['num_devices']
                        else:
                            # NOTE(review): if no vtype key matches, `value`
                            # keeps its result from a previous item — confirm
                            # whether a default should be set per item.
                            for vtype in vtypes:
                                if vtype in item:
                                    value = item[vtype]
                                    break
                            row[name] = value
                    csvRows.append(row)
        except gapi.errors.GapiInvalidError:
            continue
    display.write_csv_file(
        csvRows, titles, f'Usage Reports', todrive)
def showReport():
    """Print a Reports API report (user usage, customer usage, or an
    application activity report) as CSV.

    Command line shape: ``gam report <app|customer|user> [options...]``;
    arguments are read from sys.argv starting at index 3. The report name
    is validated against the applicationName enum from the API discovery
    document plus the special values 'customer' and 'user'.
    """
    rep = buildGAPIObject()
    throw_reasons = [gapi.errors.ErrorReason.INVALID]
    report = sys.argv[2].lower()
    report = REPORT_CHOICE_MAP.get(report.replace('_', ''), report)
    # Valid application names come from the discovery doc enum, minus the
    # "unspecified" entry, plus the two usage-report pseudo-reports.
    valid_apps = gapi.get_enum_values_minus_unspecified(
        rep._rootDesc['resources']['activities']['methods']['list']
        ['parameters']['applicationName']['enum']) + ['customer', 'user']
    if report not in valid_apps:
        controlflow.expected_argument_exit("report",
                                           ", ".join(sorted(valid_apps)),
                                           report)
    customerId = GC_Values[GC_CUSTOMER_ID]
    if customerId == MY_CUSTOMER:
        customerId = None
    filters = parameters = actorIpAddress = startTime = endTime = \
        eventName = orgUnitId = None
    # Default report date is today; adjusted backwards on "data not
    # available" API errors below.
    tryDate = datetime.date.today().strftime(YYYYMMDD_FORMAT)
    to_drive = False
    userKey = 'all'
    fullDataRequired = None
    i = 3
    while i < len(sys.argv):
        myarg = sys.argv[i].lower()
        if myarg == 'date':
            tryDate = utils.get_yyyymmdd(sys.argv[i + 1])
            i += 2
        elif myarg in ['orgunit', 'org', 'ou']:
            _, orgUnitId = __main__.getOrgUnitId(sys.argv[i + 1])
            i += 2
        elif myarg == 'fulldatarequired':
            # Empty list means "all parameters must have full data";
            # otherwise only the listed parameters are required.
            fullDataRequired = []
            fdr = sys.argv[i + 1].lower()
            if fdr and fdr != 'all':
                fullDataRequired = fdr.replace(',', ' ').split()
            i += 2
        elif myarg == 'start':
            startTime = utils.get_time_or_delta_from_now(sys.argv[i + 1])
            i += 2
        elif myarg == 'end':
            endTime = utils.get_time_or_delta_from_now(sys.argv[i + 1])
            i += 2
        elif myarg == 'event':
            eventName = sys.argv[i + 1]
            i += 2
        elif myarg == 'user':
            userKey = __main__.normalizeEmailAddressOrUID(sys.argv[i + 1])
            i += 2
        elif myarg in ['filter', 'filters']:
            filters = sys.argv[i + 1]
            i += 2
        elif myarg in ['fields', 'parameters']:
            parameters = sys.argv[i + 1]
            i += 2
        elif myarg == 'ip':
            actorIpAddress = sys.argv[i + 1]
            i += 2
        elif myarg == 'todrive':
            to_drive = True
            i += 1
        else:
            controlflow.invalid_argument_exit(sys.argv[i], "gam report")
    if report == 'user':
        # Retry loop: on GapiInvalidError (data not yet available for
        # tryDate) _adjust_date moves the date back and we try again.
        while True:
            try:
                if fullDataRequired is not None:
                    warnings = gapi.get_items(rep.userUsageReport(), 'get',
                                              'warnings',
                                              throw_reasons=throw_reasons,
                                              date=tryDate, userKey=userKey,
                                              customerId=customerId,
                                              orgUnitID=orgUnitId,
                                              fields='warnings')
                    fullData, tryDate = _check_full_data_available(
                        warnings, tryDate, fullDataRequired)
                    if fullData < 0:
                        print('No user report available.')
                        sys.exit(1)
                    if fullData == 0:
                        # tryDate was moved back; retry with the new date.
                        continue
                page_message = gapi.got_total_items_msg('Users', '...\n')
                usage = gapi.get_all_pages(rep.userUsageReport(), 'get',
                                           'usageReports',
                                           page_message=page_message,
                                           throw_reasons=throw_reasons,
                                           date=tryDate, userKey=userKey,
                                           customerId=customerId,
                                           orgUnitID=orgUnitId,
                                           filters=filters,
                                           parameters=parameters)
                break
            except gapi.errors.GapiInvalidError as e:
                tryDate = _adjust_date(str(e))
        if not usage:
            print('No user report available.')
            sys.exit(1)
        titles = ['email', 'date']
        csvRows = []
        # Value keys checked, in priority order, for each report parameter.
        ptypes = ['intValue', 'boolValue', 'datetimeValue', 'stringValue']
        for user_report in usage:
            if 'entity' not in user_report:
                continue
            row = {'email': user_report['entity']['userEmail'],
                   'date': tryDate}
            for item in user_report.get('parameters', []):
                if 'name' not in item:
                    continue
                name = item['name']
                if not name in titles:
                    titles.append(name)
                # for/else: first matching value type wins; empty cell
                # when the item carries none of the known value keys.
                for ptype in ptypes:
                    if ptype in item:
                        row[name] = item[ptype]
                        break
                else:
                    row[name] = ''
            csvRows.append(row)
        display.write_csv_file(csvRows, titles,
                               f'User Reports - {tryDate}', to_drive)
    elif report == 'customer':
        # Same retry-on-unavailable-date pattern as the user branch.
        while True:
            try:
                if fullDataRequired is not None:
                    warnings = gapi.get_items(rep.customerUsageReports(),
                                              'get', 'warnings',
                                              throw_reasons=throw_reasons,
                                              customerId=customerId,
                                              date=tryDate,
                                              fields='warnings')
                    fullData, tryDate = _check_full_data_available(
                        warnings, tryDate, fullDataRequired)
                    if fullData < 0:
                        print('No customer report available.')
                        sys.exit(1)
                    if fullData == 0:
                        continue
                usage = gapi.get_all_pages(rep.customerUsageReports(), 'get',
                                           'usageReports',
                                           throw_reasons=throw_reasons,
                                           customerId=customerId,
                                           date=tryDate,
                                           parameters=parameters)
                break
            except gapi.errors.GapiInvalidError as e:
                tryDate = _adjust_date(str(e))
        if not usage:
            print('No customer report available.')
            sys.exit(1)
        titles = ['name', 'value', 'client_id']
        csvRows = []
        auth_apps = list()
        # A customer usage report returns a single entity; flatten its
        # parameters into name/value rows.
        for item in usage[0]['parameters']:
            if 'name' not in item:
                continue
            name = item['name']
            if 'intValue' in item:
                value = item['intValue']
            elif 'msgValue' in item:
                if name == 'accounts:authorized_apps':
                    # Authorized apps become their own rows, appended at
                    # the bottom of the CSV (see loop after this one).
                    for subitem in item['msgValue']:
                        app = {}
                        for an_item in subitem:
                            if an_item == 'client_name':
                                app['name'] = 'App: ' + \
                                    subitem[an_item].replace('\n', '\\n')
                            elif an_item == 'num_users':
                                app['value'] = f'{subitem[an_item]} users'
                            elif an_item == 'client_id':
                                app['client_id'] = subitem[an_item]
                        auth_apps.append(app)
                    continue
                # Other msgValue parameters are flattened into a single
                # space-separated "value:count" style string.
                values = []
                for subitem in item['msgValue']:
                    if 'count' in subitem:
                        mycount = myvalue = None
                        for key, value in list(subitem.items()):
                            if key == 'count':
                                mycount = value
                            else:
                                myvalue = value
                        if mycount and myvalue:
                            values.append(f'{myvalue}:{mycount}')
                        value = ' '.join(values)
                    elif 'version_number' in subitem \
                            and 'num_devices' in subitem:
                        values.append(f'{subitem["version_number"]}:'
                                      f'{subitem["num_devices"]}')
                    else:
                        continue
                    value = ' '.join(sorted(values, reverse=True))
            csvRows.append({'name': name, 'value': value})
        for app in auth_apps:  # put apps at bottom
            csvRows.append(app)
        display.write_csv_file(csvRows, titles,
                               f'Customer Report - {tryDate}',
                               todrive=to_drive)
    else:
        # Activity report for a specific application (admin, login, drive,
        # ...): list activities and flatten each event into a CSV row.
        page_message = gapi.got_total_items_msg('Activities', '...\n')
        activities = gapi.get_all_pages(rep.activities(), 'list', 'items',
                                        page_message=page_message,
                                        applicationName=report,
                                        userKey=userKey,
                                        customerId=customerId,
                                        actorIpAddress=actorIpAddress,
                                        startTime=startTime,
                                        endTime=endTime,
                                        eventName=eventName,
                                        filters=filters,
                                        orgUnitID=orgUnitId)
        if activities:
            titles = ['name']
            csvRows = []
            for activity in activities:
                events = activity['events']
                del activity['events']
                activity_row = utils.flatten_json(activity)
                purge_parameters = True
                for event in events:
                    # Promote each recognized parameter shape to a
                    # top-level event key; unknown shapes keep the raw
                    # 'parameters' list in the output.
                    for item in event.get('parameters', []):
                        if set(item) == set(['value', 'name']):
                            event[item['name']] = item['value']
                        elif set(item) == set(['intValue', 'name']):
                            if item['name'] in ['start_time', 'end_time']:
                                val = item.get('intValue')
                                if val is not None:
                                    val = int(val)
                                    # 62135683200 is the offset (in
                                    # seconds) these fields carry relative
                                    # to the Unix epoch; smaller values
                                    # are left unset.
                                    if val >= 62135683200:
                                        event[item['name']] = \
                                            datetime.datetime.fromtimestamp(
                                                val-62135683200).isoformat()
                            else:
                                event[item['name']] = item['intValue']
                        elif set(item) == set(['boolValue', 'name']):
                            event[item['name']] = item['boolValue']
                        elif set(item) == set(['multiValue', 'name']):
                            event[item['name']] = ' '.join(item['multiValue'])
                        elif item['name'] == 'scope_data':
                            # Collect multi-message scope parts into
                            # space-joined per-name values; 'scope_name'
                            # is renamed to 'scope'.
                            parts = {}
                            for message in item['multiMessageValue']:
                                for mess in message['parameter']:
                                    value = mess.get(
                                        'value',
                                        ' '.join(mess.get('multiValue', [])))
                                    parts[mess['name']] = parts.get(
                                        mess['name'], []) + [value]
                            for part, v in parts.items():
                                if part == 'scope_name':
                                    part = 'scope'
                                event[part] = ' '.join(v)
                        else:
                            purge_parameters = False
                    if purge_parameters:
                        event.pop('parameters', None)
                    row = utils.flatten_json(event)
                    row.update(activity_row)
                    for item in row:
                        if item not in titles:
                            titles.append(item)
                    csvRows.append(row)
            display.sort_csv_titles(['name', ], titles)
            display.write_csv_file(csvRows, titles,
                                   f'{report.capitalize()} Activity Report',
                                   to_drive)
def doPrintCrosActivity():
    """Print ChromeOS device activity (active time ranges, recent users,
    and/or device files) as CSV.

    Command line shape: ``gam print crosactivity [options...]``; arguments
    are read from sys.argv starting at index 3. When none of the selection
    flags are given, both active time ranges and recent users are shown.
    """
    cd = gapi.directory.buildGAPIObject()
    todrive = False
    titles = ['deviceId', 'annotatedAssetId', 'annotatedLocation',
              'serialNumber', 'orgUnitPath']
    csvRows = []
    fieldsList = ['deviceId', 'annotatedAssetId', 'annotatedLocation',
                  'serialNumber', 'orgUnitPath']
    startDate = endDate = None
    selectActiveTimeRanges = selectDeviceFiles = selectRecentUsers = False
    listLimit = 0  # 0 means "no limit" (see `listLimit or lenX` below)
    delimiter = ','
    orgUnitPath = None
    queries = [None]  # a single None query lists all devices
    i = 3
    while i < len(sys.argv):
        myarg = sys.argv[i].lower().replace('_', '')
        if myarg in ['query', 'queries']:
            queries = __main__.getQueries(myarg, sys.argv[i+1])
            i += 2
        elif myarg == 'limittoou':
            orgUnitPath = __main__.getOrgUnitItem(sys.argv[i+1])
            i += 2
        elif myarg == 'todrive':
            todrive = True
            i += 1
        elif myarg in CROS_ACTIVE_TIME_RANGES_ARGUMENTS:
            selectActiveTimeRanges = True
            i += 1
        elif myarg in CROS_DEVICE_FILES_ARGUMENTS:
            selectDeviceFiles = True
            i += 1
        elif myarg in CROS_RECENT_USERS_ARGUMENTS:
            selectRecentUsers = True
            i += 1
        elif myarg == 'both':
            selectActiveTimeRanges = selectRecentUsers = True
            i += 1
        elif myarg == 'all':
            selectActiveTimeRanges = selectDeviceFiles = True
            selectRecentUsers = True
            i += 1
        elif myarg in CROS_START_ARGUMENTS:
            startDate = _getFilterDate(sys.argv[i+1])
            i += 2
        elif myarg in CROS_END_ARGUMENTS:
            endDate = _getFilterDate(sys.argv[i+1])
            i += 2
        elif myarg == 'listlimit':
            listLimit = __main__.getInteger(sys.argv[i+1], myarg, minVal=0)
            i += 2
        elif myarg == 'delimiter':
            delimiter = sys.argv[i+1]
            i += 2
        else:
            controlflow.invalid_argument_exit(
                sys.argv[i], "gam print crosactivity")
    # Default selection when no flag was given: time ranges + recent users.
    if not selectActiveTimeRanges and \
       not selectDeviceFiles and \
       not selectRecentUsers:
        selectActiveTimeRanges = selectRecentUsers = True
    # Request only the fields needed for the selected report sections.
    if selectRecentUsers:
        fieldsList.append('recentUsers')
        display.add_titles_to_csv_file(['recentUsers.email', ], titles)
    if selectActiveTimeRanges:
        fieldsList.append('activeTimeRanges')
        titles_to_add = ['activeTimeRanges.date',
                         'activeTimeRanges.duration',
                         'activeTimeRanges.minutes']
        display.add_titles_to_csv_file(titles_to_add, titles)
    if selectDeviceFiles:
        fieldsList.append('deviceFiles')
        titles_to_add = ['deviceFiles.type', 'deviceFiles.createTime']
        display.add_titles_to_csv_file(titles_to_add, titles)
    fields = f'nextPageToken,chromeosdevices({",".join(fieldsList)})'
    for query in queries:
        __main__.printGettingAllItems('CrOS Devices', query)
        page_message = gapi.got_total_items_msg('CrOS Devices', '...\n')
        all_cros = gapi.get_all_pages(cd.chromeosdevices(), 'list',
                                      'chromeosdevices',
                                      page_message=page_message, query=query,
                                      customerId=GC_Values[GC_CUSTOMER_ID],
                                      projection='FULL', fields=fields,
                                      orgUnitPath=orgUnitPath)
        for cros in all_cros:
            # Base row: everything except the list-valued attributes,
            # which are fanned out into their own rows below.
            row = {}
            skip_attribs = ['recentUsers', 'activeTimeRanges', 'deviceFiles']
            for attrib in cros:
                if attrib not in skip_attribs:
                    row[attrib] = cros[attrib]
            if selectActiveTimeRanges:
                activeTimeRanges = _filterTimeRanges(
                    cros.get('activeTimeRanges', []), startDate, endDate)
                lenATR = len(activeTimeRanges)
                # listLimit == 0 means "take them all".
                num_ranges = min(lenATR, listLimit or lenATR)
                for activeTimeRange in activeTimeRanges[:num_ranges]:
                    newrow = row.copy()
                    newrow['activeTimeRanges.date'] = activeTimeRange['date']
                    active_time = activeTimeRange['activeTime']
                    newrow['activeTimeRanges.duration'] = \
                        utils.formatMilliSeconds(active_time)
                    # activeTime is in milliseconds; convert to minutes.
                    newrow['activeTimeRanges.minutes'] = \
                        activeTimeRange['activeTime']//60000
                    csvRows.append(newrow)
            if selectRecentUsers:
                # Recent users are joined into one delimiter-separated
                # cell on a single row (not one row per user).
                recentUsers = cros.get('recentUsers', [])
                lenRU = len(recentUsers)
                num_ranges = min(lenRU, listLimit or lenRU)
                recent_users = []
                for recentUser in recentUsers[:num_ranges]:
                    useremail = recentUser.get("email")
                    if not useremail:
                        # Unmanaged accounts have no email address.
                        if recentUser["type"] == "USER_TYPE_UNMANAGED":
                            useremail = 'UnmanagedUser'
                        else:
                            useremail = 'Unknown'
                    recent_users.append(useremail)
                row['recentUsers.email'] = delimiter.join(recent_users)
                csvRows.append(row)
            if selectDeviceFiles:
                deviceFiles = _filterCreateReportTime(
                    cros.get('deviceFiles', []), 'createTime',
                    startDate, endDate)
                lenDF = len(deviceFiles)
                num_ranges = min(lenDF, listLimit or lenDF)
                for deviceFile in deviceFiles[:num_ranges]:
                    newrow = row.copy()
                    newrow['deviceFiles.type'] = deviceFile['type']
                    create_time = deviceFile['createTime']
                    newrow['deviceFiles.createTime'] = create_time
                    csvRows.append(newrow)
    display.write_csv_file(csvRows, titles, 'CrOS Activity', todrive)