def downloadExport():
    """Download all files of a Vault export to a local folder.

    Command shape: gam download export <matter> <export>
        [targetfolder <path>] [noverify] [noextract]

    For each file in the export's cloudStorageSink: stream it to disk,
    optionally verify its md5 hash, and optionally extract it if it is a zip.
    Exits via controlflow on an unknown argument.
    """
    verifyFiles = True
    extractFiles = True
    v = buildGAPIObject()
    s = storage.build_gapi()
    matterId = getMatterItem(v, sys.argv[3])
    exportId = convertExportNameToID(v, sys.argv[4], matterId)
    targetFolder = GC_Values[GC_DRIVE_DIR]
    i = 5
    while i < len(sys.argv):
        myarg = sys.argv[i].lower().replace('_', '')
        if myarg == 'targetfolder':
            targetFolder = os.path.expanduser(sys.argv[i + 1])
            if not os.path.isdir(targetFolder):
                os.makedirs(targetFolder)
            i += 2
        elif myarg == 'noverify':
            verifyFiles = False
            i += 1
        elif myarg == 'noextract':
            extractFiles = False
            i += 1
        else:
            controlflow.invalid_argument_exit(sys.argv[i],
                                              "gam download export")
    export = gapi.call(v.matters().exports(), 'get',
                       matterId=matterId, exportId=exportId)
    for s_file in export['cloudStorageSink']['files']:
        bucket = s_file['bucketName']
        s_object = s_file['objectName']
        # Flatten the object path into a single local filename.
        filename = os.path.join(targetFolder, s_object.replace('/', '-'))
        # BUG FIX: previously printed the literal text '(unknown)' instead of
        # the destination path (compare get_cloud_storage_object).
        print(f'saving to {filename}')
        request = s.objects().get_media(bucket=bucket, object=s_object)
        f = fileutils.open_file(filename, 'wb')
        downloader = googleapiclient.http.MediaIoBaseDownload(f, request)
        done = False
        while not done:
            status, done = downloader.next_chunk()
            sys.stdout.write(' Downloaded: {0:>7.2%}\r'.format(
                status.progress()))
            sys.stdout.flush()
        sys.stdout.write('\n Download complete. Flushing to disk...\n')
        fileutils.close_file(f, True)
        if verifyFiles:
            expected_hash = s_file['md5Hash']
            sys.stdout.write(f' Verifying file hash is {expected_hash}...')
            sys.stdout.flush()
            # Third arg True: exit on mismatch inside md5_matches_file.
            utils.md5_matches_file(filename, expected_hash, True)
            print('VERIFIED')
        if extractFiles and re.search(r'\.zip$', filename):
            __main__.extract_nested_zip(filename, targetFolder)
def get_cloud_storage_object(s, bucket, object_, local_file=None,
                             expectedMd5=None):
    """Download one Cloud Storage object to a local file.

    Args:
      s: built Cloud Storage API service object.
      bucket: name of the bucket holding the object.
      object_: object name within the bucket.
      local_file: destination path; defaults to the object name.
      expectedMd5: optional hash; when given, an existing file is kept if it
        verifies, and the downloaded file is verified (exiting on mismatch).
    """
    if not local_file:
        local_file = object_
    if os.path.exists(local_file):
        sys.stdout.write(' File already exists. ')
        sys.stdout.flush()
        if expectedMd5:
            sys.stdout.write(f'Verifying {expectedMd5} hash...')
            sys.stdout.flush()
            if utils.md5_matches_file(local_file, expectedMd5, False):
                print('VERIFIED')
                return
            print('not verified. Downloading again and over-writing...')
        else:
            return  # nothing to verify, just assume we're good.
    print(f'saving to {local_file}')
    request = s.objects().get_media(bucket=bucket, object=object_)
    file_path = os.path.dirname(local_file)
    # BUG FIX: dirname is '' when local_file has no directory component;
    # os.makedirs('') raises FileNotFoundError, so only create a real path.
    if file_path and not os.path.exists(file_path):
        os.makedirs(file_path)
    f = fileutils.open_file(local_file, 'wb')
    downloader = googleapiclient.http.MediaIoBaseDownload(f, request)
    done = False
    while not done:
        status, done = downloader.next_chunk()
        sys.stdout.write(f' Downloaded: {status.progress():>7.2%}\r')
        sys.stdout.flush()
    sys.stdout.write('\n Download complete. Flushing to disk...\n')
    fileutils.close_file(f, True)
    if expectedMd5:
        # NOTE(review): md5_matches_file takes the path, so this extra open
        # looks redundant — kept as-is in case fileutils relies on it; confirm.
        f = fileutils.open_file(local_file, 'rb')
        sys.stdout.write(f' Verifying file hash is {expectedMd5}...')
        sys.stdout.flush()
        utils.md5_matches_file(local_file, expectedMd5, True)
        print('VERIFIED')
        fileutils.close_file(f)
def test_close_file_with_error(self):
    """close_file reports failure (False) when the file's close() raises."""
    failing_file = MagicMock()
    failing_file.close.side_effect = IOError()
    result = fileutils.close_file(failing_file)
    self.assertFalse(result)
    self.assertEqual(failing_file.close.call_count, 1)
def test_close_file_closes_file_successfully(self):
    """close_file returns True and calls close() exactly once on success."""
    fake_file = MagicMock()
    result = fileutils.close_file(fake_file)
    self.assertTrue(result)
    self.assertEqual(fake_file.close.call_count, 1)
def changeAttendees(users):
    """Remap attendee email addresses on users' calendar events.

    Command shape: gam <users> update calattendees csv <file>
        [dryrun] [start <time>] [end <time>] [allevents]

    The CSV maps old address (column 0) to new address (column 1), both
    lower-cased. For each user, each matching event has the old attendee
    removed and the new one added; 'dryrun' reports without patching.
    Exits via controlflow on an unknown argument.
    """
    do_it = True
    i = 5
    allevents = False
    start_date = end_date = None
    while len(sys.argv) > i:
        myarg = sys.argv[i].lower()
        if myarg == 'csv':
            csv_file = sys.argv[i + 1]
            i += 2
        elif myarg == 'dryrun':
            do_it = False
            i += 1
        elif myarg == 'start':
            start_date = utils.get_time_or_delta_from_now(sys.argv[i + 1])
            i += 2
        elif myarg == 'end':
            end_date = utils.get_time_or_delta_from_now(sys.argv[i + 1])
            i += 2
        elif myarg == 'allevents':
            allevents = True
            i += 1
        else:
            controlflow.invalid_argument_exit(
                sys.argv[i], "gam <users> update calattendees")
    # NOTE(review): csv_file is only bound when the 'csv' argument is given;
    # without it the open below raises NameError — confirm intended handling.
    attendee_map = {}
    f = fileutils.open_file(csv_file)
    csvFile = csv.reader(f)
    for row in csvFile:
        attendee_map[row[0].lower()] = row[1].lower()
    fileutils.close_file(f)
    for user in users:
        sys.stdout.write(f'Checking user {user}\n')
        user, cal = buildCalendarGAPIObject(user)
        if not cal:
            continue
        page_token = None
        while True:
            events_page = gapi.call(cal.events(), 'list',
                                    calendarId=user,
                                    pageToken=page_token,
                                    timeMin=start_date,
                                    timeMax=end_date,
                                    showDeleted=False,
                                    showHiddenInvitations=False)
            print(f'Got {len(events_page.get("items", []))}')
            for event in events_page.get('items', []):
                if event['status'] == 'cancelled':
                    continue
                try:
                    event_summary = event['summary']
                except (KeyError, UnicodeEncodeError, UnicodeDecodeError):
                    event_summary = event['id']
                try:
                    organizer = event['organizer']['email'].lower()
                    if not allevents and organizer != user:
                        continue
                except KeyError:
                    pass  # no email for organizer
                needs_update = False
                try:
                    # BUG FIX: iterate over a snapshot of the attendee list.
                    # The original removed/appended entries in
                    # event['attendees'] while iterating it, which can skip
                    # attendees that should have been remapped.
                    for attendee in list(event['attendees']):
                        try:
                            old_email = attendee['email'].lower()
                        except KeyError:
                            continue  # no email for that attendee
                        if old_email in attendee_map:
                            new_email = attendee_map[old_email]
                            print(f' SWITCHING attendee {old_email} to '
                                  f'{new_email} for {event_summary}')
                            event['attendees'].remove(attendee)
                            event['attendees'].append({'email': new_email})
                            needs_update = True
                except KeyError:
                    continue  # no attendees
                if needs_update:
                    body = {'attendees': event['attendees']}
                    print(f'UPDATING {event_summary}')
                    if do_it:
                        gapi.call(cal.events(), 'patch',
                                  calendarId=user,
                                  eventId=event['id'],
                                  sendNotifications=False,
                                  body=body)
                    else:
                        print(' not pulling the trigger.')
            try:
                page_token = events_page['nextPageToken']
            except KeyError:
                break