def test_open_file_expands_user_file_path(self, mock_open):
    file_path = '~/some/path/containing/tilde/shortcut/to/home'
    fileutils.open_file(file_path)
    opened_path = mock_open.call_args[0][0]
    home_path = os.environ.get('HOME')
    self.assertIsNotNone(home_path)
    self.assertIn(home_path, opened_path)
def test_open_file_strips_utf_bom_in_utf(self):
    bom_prefixed_data = '\ufefffoobar'
    fake_file = io.StringIO(bom_prefixed_data)
    mock_open = MagicMock(spec=open, return_value=fake_file)
    with patch.object(fileutils, 'open', mock_open):
        f = fileutils.open_file(self.fake_path, strip_utf_bom=True)
        self.assertEqual('foobar', f.read())
def test_open_file_strips_utf_bom_in_binary(self):
    bom_prefixed_data = '\ufefffoobar'.encode()
    fake_file = io.BytesIO(bom_prefixed_data)
    mock_open = MagicMock(spec=open, return_value=fake_file)
    with patch.object(fileutils, 'open', mock_open):
        f = fileutils.open_file(self.fake_path, mode='rb', strip_utf_bom=True)
        self.assertEqual(b'foobar', f.read())
def md5_matches_file(local_file, expected_md5, exitOnError):
    f = fileutils.open_file(local_file, 'rb')
    hash_md5 = md5()
    for chunk in iter(lambda: f.read(4096), b''):
        hash_md5.update(chunk)
    actual_hash = hash_md5.hexdigest()
    if exitOnError and actual_hash != expected_md5:
        controlflow.system_error_exit(
            6, f'actual hash was {actual_hash}. Exiting on corrupt file.')
    return actual_hash == expected_md5
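# A minimal usage sketch for md5_matches_file; the file name and digest below
# are hypothetical placeholders. With exitOnError=False the helper reports the
# comparison result instead of exiting the process on a mismatch.
def verify_download_example():
    # 'backup.zip' and the digest are made-up illustrations.
    expected = '9e107d9d372bb6826bd81d3542a419d6'
    if md5_matches_file('backup.zip', expected, False):
        print('hash verified')
    else:
        print('hash mismatch; consider re-downloading')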
def test_open_file_strip_utf_bom_when_no_bom_in_data(self):
    no_bom_data = 'This data has no BOM'
    fake_file = io.StringIO(no_bom_data)
    mock_open = MagicMock(spec=open, return_value=fake_file)
    with patch.object(fileutils, 'open', mock_open):
        f = fileutils.open_file(self.fake_path, strip_utf_bom=True)
        # Since there was no opening BOM, we should be back at the beginning
        # of the file.
        self.assertEqual(fake_file.tell(), 0)
        self.assertEqual(f.read(), no_bom_data)
def downloadExport():
    verifyFiles = True
    extractFiles = True
    v = buildGAPIObject()
    s = gapi_storage.build_gapi()
    matterId = getMatterItem(v, sys.argv[3])
    exportId = convertExportNameToID(v, sys.argv[4], matterId)
    targetFolder = GC_Values[GC_DRIVE_DIR]
    i = 5
    while i < len(sys.argv):
        myarg = sys.argv[i].lower().replace('_', '')
        if myarg == 'targetfolder':
            targetFolder = os.path.expanduser(sys.argv[i + 1])
            if not os.path.isdir(targetFolder):
                os.makedirs(targetFolder)
            i += 2
        elif myarg == 'noverify':
            verifyFiles = False
            i += 1
        elif myarg == 'noextract':
            extractFiles = False
            i += 1
        else:
            controlflow.invalid_argument_exit(sys.argv[i],
                                              'gam download export')
    export = gapi.call(v.matters().exports(),
                       'get',
                       matterId=matterId,
                       exportId=exportId)
    for s_file in export['cloudStorageSink']['files']:
        bucket = s_file['bucketName']
        s_object = s_file['objectName']
        filename = os.path.join(targetFolder, s_object.replace('/', '-'))
        print(f'saving to {filename}')
        request = s.objects().get_media(bucket=bucket, object=s_object)
        f = fileutils.open_file(filename, 'wb')
        downloader = googleapiclient.http.MediaIoBaseDownload(f, request)
        done = False
        while not done:
            status, done = downloader.next_chunk()
            sys.stdout.write(f' Downloaded: {status.progress():>7.2%}\r')
            sys.stdout.flush()
        sys.stdout.write('\n Download complete. Flushing to disk...\n')
        fileutils.close_file(f, True)
        if verifyFiles:
            expected_hash = s_file['md5Hash']
            sys.stdout.write(f' Verifying file hash is {expected_hash}...')
            sys.stdout.flush()
            utils.md5_matches_file(filename, expected_hash, True)
            print('VERIFIED')
        if extractFiles and re.search(r'\.zip$', filename):
            gam.extract_nested_zip(filename, targetFolder)
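# Reading the argument parser above, the command shape appears to be:
#   gam download export <matter> <export> [targetfolder <path>] [noverify] [noextract]
# where <matter> and <export> come from sys.argv[3] and sys.argv[4]; the
# bracketed options match the myarg branches and are an inferred summary, not
# documented syntax.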
def get_cloud_storage_object(s,
                             bucket,
                             object_,
                             local_file=None,
                             expectedMd5=None):
    if not local_file:
        local_file = object_
    if os.path.exists(local_file):
        sys.stdout.write(' File already exists. ')
        sys.stdout.flush()
        if expectedMd5:
            sys.stdout.write(f'Verifying {expectedMd5} hash...')
            sys.stdout.flush()
            if utils.md5_matches_file(local_file, expectedMd5, False):
                print('VERIFIED')
                return
            print('not verified. Downloading again and over-writing...')
        else:
            return  # nothing to verify, just assume we're good.
    print(f'saving to {local_file}')
    request = s.objects().get_media(bucket=bucket, object=object_)
    file_path = os.path.dirname(local_file)
    # dirname is empty when local_file has no directory component; only
    # create directories when there is actually a path to create.
    if file_path and not os.path.exists(file_path):
        os.makedirs(file_path)
    f = fileutils.open_file(local_file, 'wb')
    downloader = googleapiclient.http.MediaIoBaseDownload(f, request)
    done = False
    while not done:
        status, done = downloader.next_chunk()
        sys.stdout.write(f' Downloaded: {status.progress():>7.2%}\r')
        sys.stdout.flush()
    sys.stdout.write('\n Download complete. Flushing to disk...\n')
    fileutils.close_file(f, True)
    if expectedMd5:
        f = fileutils.open_file(local_file, 'rb')
        sys.stdout.write(f' Verifying file hash is {expectedMd5}...')
        sys.stdout.flush()
        utils.md5_matches_file(local_file, expectedMd5, True)
        print('VERIFIED')
        fileutils.close_file(f)
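# A minimal call sketch for get_cloud_storage_object, assuming 's' is the
# authorized Cloud Storage service built the same way downloadExport builds
# it; the bucket, object, path, and digest values are hypothetical.
def download_object_example():
    s = gapi_storage.build_gapi()
    get_cloud_storage_object(s,
                             'export-bucket-1234',
                             'matter/export/part-0001.zip',
                             local_file='exports/part-0001.zip',
                             expectedMd5='9e107d9d372bb6826bd81d3542a419d6')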
def test_open_file_strips_utf_bom_in_non_utf(self):
    bom_prefixed_data = b'\xef\xbb\xbffoobar'.decode('iso-8859-1')
    # We need to trick the method under test into believing that a StringIO
    # instance is a file with an encoding. Since StringIO does not usually
    # have an encoding, we'll mock it and add our own encoding, but send the
    # other methods in use (read and seek) back to the real StringIO object.
    real_stringio = io.StringIO(bom_prefixed_data)
    mock_file = MagicMock(spec=io.StringIO)
    mock_file.read.side_effect = real_stringio.read
    mock_file.seek.side_effect = real_stringio.seek
    mock_file.encoding = 'iso-8859-1'
    mock_open = MagicMock(spec=open, return_value=mock_file)
    with patch.object(fileutils, 'open', mock_open):
        f = fileutils.open_file(self.fake_path, strip_utf_bom=True)
        self.assertEqual('foobar', f.read())
def test_open_file_opens_correct_mode(self, mock_open):
    fileutils.open_file(self.fake_path)
    self.assertEqual('r', mock_open.call_args[0][1])
def changeAttendees(users):
    do_it = True
    i = 5
    allevents = False
    start_date = end_date = None
    csv_file = None
    while len(sys.argv) > i:
        myarg = sys.argv[i].lower()
        if myarg == 'csv':
            csv_file = sys.argv[i + 1]
            i += 2
        elif myarg == 'dryrun':
            do_it = False
            i += 1
        elif myarg == 'start':
            start_date = utils.get_time_or_delta_from_now(sys.argv[i + 1])
            i += 2
        elif myarg == 'end':
            end_date = utils.get_time_or_delta_from_now(sys.argv[i + 1])
            i += 2
        elif myarg == 'allevents':
            allevents = True
            i += 1
        else:
            controlflow.invalid_argument_exit(
                sys.argv[i], 'gam <users> update calattendees')
    if not csv_file:
        controlflow.system_error_exit(
            3,
            'csv is a required argument for "gam <users> update calattendees".'
        )
    attendee_map = {}
    f = fileutils.open_file(csv_file)
    csvFile = csv.reader(f)
    for row in csvFile:
        attendee_map[row[0].lower()] = row[1].lower()
    fileutils.close_file(f)
    for user in users:
        sys.stdout.write(f'Checking user {user}\n')
        user, cal = buildCalendarGAPIObject(user)
        if not cal:
            continue
        page_token = None
        while True:
            events_page = gapi.call(cal.events(),
                                    'list',
                                    calendarId=user,
                                    pageToken=page_token,
                                    timeMin=start_date,
                                    timeMax=end_date,
                                    showDeleted=False,
                                    showHiddenInvitations=False)
            print(f'Got {len(events_page.get("items", []))}')
            for event in events_page.get('items', []):
                if event['status'] == 'cancelled':
                    # print(' skipping cancelled event')
                    continue
                try:
                    event_summary = event['summary']
                except (KeyError, UnicodeEncodeError, UnicodeDecodeError):
                    event_summary = event['id']
                try:
                    organizer = event['organizer']['email'].lower()
                    if not allevents and organizer != user:
                        # print(f' skipping not-my-event {event_summary}')
                        continue
                except KeyError:
                    pass  # no email for organizer
                needs_update = False
                try:
                    # iterate over a copy, since we remove and append
                    # attendees while looping
                    for attendee in list(event['attendees']):
                        try:
                            if attendee['email'].lower() in attendee_map:
                                old_email = attendee['email'].lower()
                                new_email = attendee_map[old_email]
                                print(f' SWITCHING attendee {old_email} to '
                                      f'{new_email} for {event_summary}')
                                event['attendees'].remove(attendee)
                                event['attendees'].append({'email': new_email})
                                needs_update = True
                        except KeyError:
                            # no email for that attendee
                            pass
                except KeyError:
                    continue  # no attendees
                if needs_update:
                    body = {'attendees': event['attendees']}
                    print(f'UPDATING {event_summary}')
                    if do_it:
                        gapi.call(cal.events(),
                                  'patch',
                                  calendarId=user,
                                  eventId=event['id'],
                                  sendNotifications=False,
                                  body=body)
                    else:
                        print(' not pulling the trigger.')
                # else:
                #     print(f' no update needed for {event_summary}')
            try:
                page_token = events_page['nextPageToken']
            except KeyError:
                break
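# Hedged sketch of the attendee CSV consumed above: no header row, the old
# address in column one and its replacement in column two (per the
# attendee_map construction). The addresses are hypothetical placeholders.
#
#   old.address@example.com,new.address@example.com
#   departed.user@example.com,successor@example.com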
def move():
    cbcm = build()
    body = {'resource_ids': []}
    i = 3
    resource_ids = []
    batch_size = 600
    while i < len(sys.argv):
        myarg = sys.argv[i].lower().replace('_', '')
        if myarg == 'ids':
            resource_ids.extend(sys.argv[i + 1].split(','))
            i += 2
        elif myarg == 'query':
            query = sys.argv[i + 1]
            page_message = gapi.got_total_items_msg('Browsers', '...\n')
            browsers = gapi.get_all_pages(
                cbcm.chromebrowsers(),
                'list',
                'browsers',
                page_message=page_message,
                customer=GC_Values[GC_CUSTOMER_ID],
                query=query,
                projection='BASIC',
                fields='browsers(deviceId),nextPageToken')
            ids = [browser['deviceId'] for browser in browsers]
            resource_ids.extend(ids)
            i += 2
        elif myarg == 'file':
            with fileutils.open_file(sys.argv[i + 1],
                                     strip_utf_bom=True) as filed:
                for row in filed:
                    rid = row.strip()
                    if rid:
                        resource_ids.append(rid)
            i += 2
        elif myarg == 'csvfile':
            drive, fname_column = os.path.splitdrive(sys.argv[i + 1])
            if fname_column.find(':') == -1:
                controlflow.system_error_exit(
                    2, 'Expected csvfile FileName:FieldName')
            (filename, column) = fname_column.split(':')
            with fileutils.open_file(drive + filename) as filed:
                input_file = csv.DictReader(filed, restval='')
                if column not in input_file.fieldnames:
                    controlflow.csv_field_error_exit(column,
                                                     input_file.fieldnames)
                for row in input_file:
                    rid = row[column].strip()
                    if rid:
                        resource_ids.append(rid)
            i += 2
        elif myarg in ['ou', 'orgunit', 'org']:
            org_unit = gapi_directory_orgunits.getOrgUnitItem(sys.argv[i + 1])
            body['org_unit_path'] = org_unit
            i += 2
        elif myarg == 'batchsize':
            batch_size = int(sys.argv[i + 1])
            i += 2
        else:
            controlflow.invalid_argument_exit(sys.argv[i],
                                              'gam move browsers')
    if 'org_unit_path' not in body:
        controlflow.missing_argument_exit('ou', 'gam move browsers')
    elif not resource_ids:
        controlflow.missing_argument_exit('query or ids', 'gam move browsers')
    # split moves into batches of at most batch_size (default 600) devices
    for chunk in range(0, len(resource_ids), batch_size):
        body['resource_ids'] = resource_ids[chunk:chunk + batch_size]
        print(f' moving {len(body["resource_ids"])} browsers to '
              f'{body["org_unit_path"]}')
        gapi.call(cbcm.chromebrowsers(),
                  'moveChromeBrowsersToOu',
                  customer=GC_Values[GC_CUSTOMER_ID],
                  body=body)
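# Pieced together from the parser above, the command shape appears to be:
#   gam move browsers (ids <id,id,...> | query <query> | file <path> |
#                      csvfile <FileName:FieldName>) ou <orgunit> [batch_size <n>]
# ou (or orgunit/org) is required, at least one id source must be given, and
# the id sources can be combined. This summary is inferred from the code, not
# quoted from documentation.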
def sync():
    ci = gapi_cloudidentity.build_dwd()
    device_types = gapi.get_enum_values_minus_unspecified(
        ci._rootDesc['schemas']['GoogleAppsCloudidentityDevicesV1Device']
        ['properties']['deviceType']['enum'])
    customer = _get_device_customerid()
    device_filter = None
    csv_file = None
    serialnumber_column = 'serialNumber'
    devicetype_column = 'deviceType'
    static_devicetype = None
    assettag_column = None
    unassigned_missing_action = 'delete'
    assigned_missing_action = 'donothing'
    missing_actions = ['delete', 'wipe', 'donothing']
    i = 3
    while i < len(sys.argv):
        myarg = sys.argv[i].lower().replace('_', '')
        if myarg in ['filter', 'query']:
            device_filter = sys.argv[i + 1]
            i += 2
        elif myarg == 'csvfile':
            csv_file = sys.argv[i + 1]
            i += 2
        elif myarg == 'serialnumbercolumn':
            serialnumber_column = sys.argv[i + 1]
            i += 2
        elif myarg == 'devicetypecolumn':
            devicetype_column = sys.argv[i + 1]
            i += 2
        elif myarg == 'staticdevicetype':
            static_devicetype = sys.argv[i + 1].upper()
            if static_devicetype not in device_types:
                controlflow.expected_argument_exit('device_type',
                                                   ', '.join(device_types),
                                                   sys.argv[i + 1])
            i += 2
        elif myarg in {'assettagcolumn', 'assetidcolumn'}:
            assettag_column = sys.argv[i + 1]
            i += 2
        elif myarg == 'unassignedmissingaction':
            unassigned_missing_action = sys.argv[i + 1].lower().replace(
                '_', '')
            if unassigned_missing_action not in missing_actions:
                controlflow.expected_argument_exit(
                    'unassigned_missing_action', ', '.join(missing_actions),
                    sys.argv[i + 1])
            i += 2
        elif myarg == 'assignedmissingaction':
            assigned_missing_action = sys.argv[i + 1].lower().replace('_', '')
            if assigned_missing_action not in missing_actions:
                controlflow.expected_argument_exit(
                    'assigned_missing_action', ', '.join(missing_actions),
                    sys.argv[i + 1])
            i += 2
        else:
            controlflow.invalid_argument_exit(sys.argv[i], 'gam sync devices')
    if not csv_file:
        controlflow.system_error_exit(
            3, 'csvfile is a required argument for "gam sync devices".')
    f = fileutils.open_file(csv_file)
    input_file = csv.DictReader(f, restval='')
    if serialnumber_column not in input_file.fieldnames:
        controlflow.csv_field_error_exit(serialnumber_column,
                                         input_file.fieldnames)
    if not static_devicetype and devicetype_column not in input_file.fieldnames:
        controlflow.csv_field_error_exit(devicetype_column,
                                         input_file.fieldnames)
    if assettag_column and assettag_column not in input_file.fieldnames:
        controlflow.csv_field_error_exit(assettag_column,
                                         input_file.fieldnames)
    local_devices = {}
    for row in input_file:
        # upper() is very important for comparison since Google
        # always returns uppercase serials
        local_device = {
            'serialNumber': row[serialnumber_column].strip().upper()
        }
        if static_devicetype:
            local_device['deviceType'] = static_devicetype
        else:
            local_device['deviceType'] = row[devicetype_column].strip()
        sndt = f"{local_device['serialNumber']}-{local_device['deviceType']}"
        if assettag_column:
            local_device['assetTag'] = row[assettag_column].strip()
            sndt += f"-{local_device['assetTag']}"
        local_devices[sndt] = local_device
    fileutils.close_file(f)
    page_message = gapi.got_total_items_msg('Company Devices', '...\n')
    device_fields = ['serialNumber', 'deviceType', 'lastSyncTime', 'name']
    if assettag_column:
        device_fields.append('assetTag')
    fields = f'nextPageToken,devices({",".join(device_fields)})'
    remote_devices = {}
    remote_device_map = {}
    result = gapi.get_all_pages(ci.devices(),
                                'list',
                                'devices',
                                customer=customer,
                                page_message=page_message,
                                pageSize=100,
                                filter=device_filter,
                                view='COMPANY_INVENTORY',
                                fields=fields)
    for remote_device in result:
        sn = remote_device['serialNumber']
        last_sync = remote_device.pop('lastSyncTime', NEVER_TIME_NOMS)
        name = remote_device.pop('name')
        sndt = f"{remote_device['serialNumber']}-{remote_device['deviceType']}"
        if assettag_column:
            if 'assetTag' not in remote_device:
                remote_device['assetTag'] = ''
            sndt += f"-{remote_device['assetTag']}"
        remote_devices[sndt] = remote_device
        remote_device_map[sndt] = {'name': name}
        if last_sync == NEVER_TIME_NOMS:
            remote_device_map[sndt]['unassigned'] = True
    devices_to_add = []
    for sndt, device in iter(local_devices.items()):
        if sndt not in remote_devices:
            devices_to_add.append(device)
    missing_devices = []
    for sndt, device in iter(remote_devices.items()):
        if sndt not in local_devices:
            missing_devices.append(device)
    print(f'Need to add {len(devices_to_add)} and '
          f'remove {len(missing_devices)} devices...')
    for add_device in devices_to_add:
        print(f'Creating {add_device["serialNumber"]}')
        try:
            result = gapi.call(
                ci.devices(),
                'create',
                customer=customer,
                throw_reasons=[gapi_errors.ErrorReason.FOUR_O_NINE],
                body=add_device)
            print(f' created {result["response"]["deviceType"]} device '
                  f'{result["response"]["name"]} with serial '
                  f'{result["response"]["serialNumber"]}')
        except googleapiclient.errors.HttpError:
            print(f' {add_device["serialNumber"]} already exists')
    for missing_device in missing_devices:
        sn = missing_device['serialNumber']
        sndt = f"{sn}-{missing_device['deviceType']}"
        if assettag_column:
            sndt += f"-{missing_device['assetTag']}"
        name = remote_device_map[sndt]['name']
        unassigned = remote_device_map[sndt].get('unassigned')
        action = (unassigned_missing_action
                  if unassigned else assigned_missing_action)
        if action == 'donothing':
            pass
        else:
            if action == 'delete':
                kwargs = {'customer': customer}
            else:
                kwargs = {'body': {'customer': customer}}
            gapi.call(ci.devices(), action, name=name, **kwargs)
            print(f'{action}d {sn}')
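# Hedged sketch of an input file for the csvfile argument above, assuming the
# default serialNumber/deviceType column names plus an assetTag column. The
# serials and asset tags are invented, and the deviceType values shown are
# assumed examples; real values must match the API's deviceType enum, which
# the staticdevicetype branch validates against.
#
#   serialNumber,deviceType,assetTag
#   5CD9999XYZ,WINDOWS,A-1001
#   C02ABC123,MAC_OS,A-1002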
def test_open_file_utf8_encoding_specified(self, mock_open):
    fileutils.open_file(self.fake_path, encoding='UTF-8')
    self.assertEqual(fileutils.UTF8_SIG, mock_open.call_args[1]['encoding'])
def test_open_file_exits_on_io_error(self, mock_open):
    mock_open.side_effect = IOError('Fake IOError')
    with self.assertRaises(SystemExit) as context:
        fileutils.open_file(self.fake_path)
    self.assertEqual(context.exception.code, 6)
def test_open_file_default_system_encoding(self, mock_open):
    fileutils.open_file(self.fake_path)
    self.assertEqual(fileutils.GM_Globals[fileutils.GM_SYS_ENCODING],
                     mock_open.call_args[1]['encoding'])
def test_open_file_encoding_for_binary(self, mock_open):
    fileutils.open_file(self.fake_path, mode='b')
    self.assertIsNone(mock_open.call_args[1]['encoding'])
def test_open_file_stdin(self, mock_stdin):
    mock_stdin.read.return_value = 'some stdin content'
    f = fileutils.open_file('-', mode='r')
    self.assertIsInstance(f, fileutils.io.StringIO)
    self.assertEqual(f.getvalue(), mock_stdin.read.return_value)
def test_open_file_stdout(self):
    f = fileutils.open_file('-', mode='w')
    self.assertEqual(fileutils.sys.stdout, f)
def test_open_file_opens_correct_path(self, mock_open):
    f = fileutils.open_file(self.fake_path)
    self.assertEqual(self.fake_path, mock_open.call_args[0][0])
    self.assertEqual(mock_open.return_value, f)