Ejemplo n.º 1
0
 def test_open_file_expands_user_file_path(self, mock_open):
     """open_file() should expand a leading ~ to the user's home directory."""
     tilde_path = '~/some/path/containing/tilde/shortcut/to/home'
     fileutils.open_file(tilde_path)
     # First positional argument handed to the patched open() call.
     path_given_to_open = mock_open.call_args[0][0]
     home = os.environ.get('HOME')
     self.assertIsNotNone(home)
     self.assertIn(home, path_given_to_open)
Ejemplo n.º 2
0
 def test_open_file_strips_utf_bom_in_utf(self):
     """A UTF BOM at the start of a text stream should be stripped on open."""
     data_with_bom = u'\ufefffoobar'
     fake_file = io.StringIO(data_with_bom)
     patched_open = MagicMock(spec=open, return_value=fake_file)
     with patch.object(fileutils, 'open', patched_open):
         opened = fileutils.open_file(self.fake_path, strip_utf_bom=True)
         # The BOM must be consumed, leaving only the payload.
         self.assertEqual('foobar', opened.read())
Ejemplo n.º 3
0
Archivo: utils.py Proyecto: rlucian/GAM
def md5_matches_file(local_file, expected_md5, exitOnError):
  """Checks whether a local file's MD5 hash matches an expected value.

  Args:
    local_file: String, path to the local file to hash.
    expected_md5: String, the expected MD5 hex digest.
    exitOnError: Boolean, if True the program exits with code 6 when the
      hashes do not match.

  Returns:
    Boolean, True if the file's MD5 hex digest equals expected_md5.
  """
  f = fileutils.open_file(local_file, 'rb')
  hash_md5 = md5()
  try:
    # Hash in 4 KiB chunks so large files never need to fit in memory.
    for chunk in iter(lambda: f.read(4096), b""):
      hash_md5.update(chunk)
  finally:
    # BUG FIX: the original leaked this handle; always release it.
    fileutils.close_file(f)
  actual_hash = hash_md5.hexdigest()
  if exitOnError and actual_hash != expected_md5:
    controlflow.system_error_exit(
        6, f'actual hash was {actual_hash}. Exiting on corrupt file.')
  return actual_hash == expected_md5
Ejemplo n.º 4
0
 def test_open_file_strips_utf_bom_in_binary(self):
     """In binary mode the 3-byte UTF-8 BOM should be stripped on open."""
     data_with_bom = u'\ufefffoobar'.encode('UTF-8')
     fake_file = io.BytesIO(data_with_bom)
     patched_open = MagicMock(spec=open, return_value=fake_file)
     with patch.object(fileutils, 'open', patched_open):
         opened = fileutils.open_file(self.fake_path,
                                      mode='rb',
                                      strip_utf_bom=True)
         # Only the payload bytes should remain after the BOM is consumed.
         self.assertEqual(b'foobar', opened.read())
Ejemplo n.º 5
0
    def test_open_file_strip_utf_bom_when_no_bom_in_data(self):
        """strip_utf_bom must leave BOM-less files untouched and rewound."""
        plain_text = 'This data has no BOM'
        fake_file = io.StringIO(plain_text)
        patched_open = MagicMock(spec=open, return_value=fake_file)

        with patch.object(fileutils, 'open', patched_open):
            result = fileutils.open_file(self.fake_path, strip_utf_bom=True)
            # No BOM was found, so the read position must be rewound to the
            # start of the file and the full contents still readable.
            self.assertEqual(fake_file.tell(), 0)
            self.assertEqual(result.read(), plain_text)
Ejemplo n.º 6
0
def downloadExport():
    """Downloads the files of a Vault export from Cloud Storage.

    Command-line driven (gam download export): reads the matter and export
    identifiers from sys.argv[3]/sys.argv[4], then optional arguments
    (targetfolder <dir>, noverify, noextract). Each exported object is saved
    locally, optionally MD5-verified, and .zip files are optionally
    extracted.
    """
    verifyFiles = True
    extractFiles = True
    v = buildGAPIObject()
    s = storage.build_gapi()
    matterId = getMatterItem(v, sys.argv[3])
    exportId = convertExportNameToID(v, sys.argv[4], matterId)
    targetFolder = GC_Values[GC_DRIVE_DIR]
    i = 5
    while i < len(sys.argv):
        myarg = sys.argv[i].lower().replace('_', '')
        if myarg == 'targetfolder':
            targetFolder = os.path.expanduser(sys.argv[i + 1])
            if not os.path.isdir(targetFolder):
                os.makedirs(targetFolder)
            i += 2
        elif myarg == 'noverify':
            verifyFiles = False
            i += 1
        elif myarg == 'noextract':
            extractFiles = False
            i += 1
        else:
            # Exits the program; no i increment needed afterwards.
            controlflow.invalid_argument_exit(sys.argv[i],
                                              "gam download export")
    export = gapi.call(v.matters().exports(),
                       'get',
                       matterId=matterId,
                       exportId=exportId)
    for s_file in export['cloudStorageSink']['files']:
        bucket = s_file['bucketName']
        s_object = s_file['objectName']
        # Flatten the object path so it maps to a single local file name.
        filename = os.path.join(targetFolder, s_object.replace('/', '-'))
        # BUG FIX: previously printed a literal placeholder instead of the
        # destination path (cf. get_cloud_storage_object's message).
        print(f'saving to {filename}')
        request = s.objects().get_media(bucket=bucket, object=s_object)
        f = fileutils.open_file(filename, 'wb')
        downloader = googleapiclient.http.MediaIoBaseDownload(f, request)
        done = False
        while not done:
            status, done = downloader.next_chunk()
            sys.stdout.write(' Downloaded: {0:>7.2%}\r'.format(
                status.progress()))
            sys.stdout.flush()
        sys.stdout.write('\n Download complete. Flushing to disk...\n')
        fileutils.close_file(f, True)
        if verifyFiles:
            expected_hash = s_file['md5Hash']
            sys.stdout.write(f' Verifying file hash is {expected_hash}...')
            sys.stdout.flush()
            # exitOnError=True: a corrupt file aborts the whole download.
            utils.md5_matches_file(filename, expected_hash, True)
            print('VERIFIED')
        if extractFiles and re.search(r'\.zip$', filename):
            __main__.extract_nested_zip(filename, targetFolder)
Ejemplo n.º 7
0
def get_cloud_storage_object(s,
                             bucket,
                             object_,
                             local_file=None,
                             expectedMd5=None):
    """Downloads one Cloud Storage object to a local file.

    Args:
        s: Cloud Storage service object.
        bucket: String, name of the bucket holding the object.
        object_: String, the object name.
        local_file: Optional string path to save to; defaults to object_.
        expectedMd5: Optional expected MD5 hex digest. When given, an
            existing local file is verified first (re-downloading on
            mismatch) and the fresh download is verified, exiting the
            program on a corrupt file.
    """
    if not local_file:
        local_file = object_
    if os.path.exists(local_file):
        sys.stdout.write(' File already exists. ')
        sys.stdout.flush()
        if expectedMd5:
            sys.stdout.write(f'Verifying {expectedMd5} hash...')
            sys.stdout.flush()
            if utils.md5_matches_file(local_file, expectedMd5, False):
                print('VERIFIED')
                return
            print('not verified. Downloading again and over-writing...')
        else:
            return  # nothing to verify, just assume we're good.
    print(f'saving to {local_file}')
    request = s.objects().get_media(bucket=bucket, object=object_)
    file_path = os.path.dirname(local_file)
    # BUG FIX: guard against an empty dirname (plain file name in the
    # current directory) -- os.makedirs('') raises.
    if file_path and not os.path.exists(file_path):
        os.makedirs(file_path)
    f = fileutils.open_file(local_file, 'wb')
    downloader = googleapiclient.http.MediaIoBaseDownload(f, request)
    done = False
    while not done:
        status, done = downloader.next_chunk()
        sys.stdout.write(f' Downloaded: {status.progress():>7.2%}\r')
        sys.stdout.flush()
    sys.stdout.write('\n Download complete. Flushing to disk...\n')
    fileutils.close_file(f, True)
    if expectedMd5:
        # md5_matches_file takes a path and opens the file itself; the
        # previous version also opened a second, unused handle here.
        sys.stdout.write(f' Verifying file hash is {expectedMd5}...')
        sys.stdout.flush()
        utils.md5_matches_file(local_file, expectedMd5, True)
        print('VERIFIED')
Ejemplo n.º 8
0
def create_data_map_from_output(o_sch):
    """Builds the data map and baseline-bid uniques from the output file.

    Args:
        o_sch: Output-schema object exposing parse_output_line(line), which
            returns a (line_data_map, baseline_bid_map) pair per line.

    Returns:
        Tuple (data_map, baseline_bid_map_uniques) merged across all lines
        of Configuration.output_file.
    """
    logging.info('Configuration.output_file = {}'.format(Configuration.output_file))
    file = fileutils.open_file(Configuration.output_file)

    __dataMap = {}
    __baseline_bid_map_uniques = {}
    logging.info('Constructing the datamap from the input file (output of bid mapping func)')

    try:
        for line in file:
            __line_data_map, __baseline_bid_map = o_sch.parse_output_line(line.strip())
            __mergeLineDataWithExistingData(__line_data_map, __dataMap)
            __merge_baseline_bid_data_with_uniques(__baseline_bid_map, __baseline_bid_map_uniques)
    finally:
        # BUG FIX: the original leaked this handle; always close it.
        file.close()

    # Truncate the repr so huge maps don't flood the debug log.
    logging.debug('__dataMap=%s',str(__dataMap)[:20])

    return __dataMap, __baseline_bid_map_uniques
Ejemplo n.º 9
0
    def test_open_file_strips_utf_bom_in_non_utf(self):
        """BOM bytes decoded under a non-UTF codec are still stripped."""
        data = b'\xef\xbb\xbffoobar'.decode('iso-8859-1')

        # open_file needs a file object that reports an encoding, which a
        # plain StringIO lacks. Wrap a real StringIO in a mock that adds an
        # encoding attribute while delegating read/seek to the real object.
        backing = io.StringIO(data)
        fake_file = MagicMock(spec=io.StringIO)
        fake_file.read.side_effect = backing.read
        fake_file.seek.side_effect = backing.seek
        fake_file.encoding = 'iso-8859-1'

        patched_open = MagicMock(spec=open, return_value=fake_file)
        with patch.object(fileutils, 'open', patched_open):
            result = fileutils.open_file(self.fake_path, strip_utf_bom=True)
            self.assertEqual('foobar', result.read())
Ejemplo n.º 10
0
 def test_open_file_stdout(self):
     """Opening '-' in write mode should hand back sys.stdout itself."""
     result = fileutils.open_file('-', mode='w')
     self.assertEqual(fileutils.sys.stdout, result)
Ejemplo n.º 11
0
 def test_open_file_utf8_encoding_specified(self, mock_open):
     """Requesting UTF-8 should be upgraded to the BOM-aware UTF8_SIG codec."""
     fileutils.open_file(self.fake_path, encoding='UTF-8')
     encoding_used = mock_open.call_args[1]['encoding']
     self.assertEqual(fileutils.UTF8_SIG, encoding_used)
Ejemplo n.º 12
0
def changeAttendees(users):
    """Swaps attendee email addresses on users' calendar events.

    Reads an old->new email mapping from a CSV file (one "old,new" pair per
    row) and patches every matching event. Extra sys.argv arguments:
    csv <file>, dryrun, start <time>, end <time>, allevents.

    Args:
        users: Iterable of user email addresses whose calendars to update.
    """
    do_it = True
    i = 5
    allevents = False
    start_date = end_date = None
    while len(sys.argv) > i:
        myarg = sys.argv[i].lower()
        if myarg == 'csv':
            csv_file = sys.argv[i + 1]
            i += 2
        elif myarg == 'dryrun':
            do_it = False
            i += 1
        elif myarg == 'start':
            start_date = utils.get_time_or_delta_from_now(sys.argv[i + 1])
            i += 2
        elif myarg == 'end':
            end_date = utils.get_time_or_delta_from_now(sys.argv[i + 1])
            i += 2
        elif myarg == 'allevents':
            allevents = True
            i += 1
        else:
            controlflow.invalid_argument_exit(
                sys.argv[i], "gam <users> update calattendees")
    # NOTE(review): csv_file is unbound if the 'csv' argument was omitted;
    # the resulting NameError matches the original behavior.
    attendee_map = {}
    f = fileutils.open_file(csv_file)
    csvFile = csv.reader(f)
    for row in csvFile:
        attendee_map[row[0].lower()] = row[1].lower()
    fileutils.close_file(f)
    for user in users:
        sys.stdout.write(f'Checking user {user}\n')
        user, cal = buildCalendarGAPIObject(user)
        if not cal:
            continue
        page_token = None
        while True:
            events_page = gapi.call(cal.events(),
                                    'list',
                                    calendarId=user,
                                    pageToken=page_token,
                                    timeMin=start_date,
                                    timeMax=end_date,
                                    showDeleted=False,
                                    showHiddenInvitations=False)
            print(f'Got {len(events_page.get("items", []))}')
            for event in events_page.get('items', []):
                if event['status'] == 'cancelled':
                    # skipping cancelled event
                    continue
                try:
                    event_summary = event['summary']
                except (KeyError, UnicodeEncodeError, UnicodeDecodeError):
                    event_summary = event['id']
                try:
                    organizer = event['organizer']['email'].lower()
                    if not allevents and organizer != user:
                        # skipping not-my-event
                        continue
                except KeyError:
                    pass  # no email for organizer
                needs_update = False
                try:
                    # BUG FIX: iterate over a copy -- the original looped over
                    # the live list while removing from it, which skips the
                    # element following each removal.
                    for attendee in list(event['attendees']):
                        try:
                            if attendee['email'].lower() in attendee_map:
                                old_email = attendee['email'].lower()
                                new_email = attendee_map[
                                    attendee['email'].lower()]
                                print(f' SWITCHING attendee {old_email} to ' \
                                    f'{new_email} for {event_summary}')
                                event['attendees'].remove(attendee)
                                event['attendees'].append({'email': new_email})
                                needs_update = True
                        except KeyError:  # no email for that attendee
                            pass
                except KeyError:
                    continue  # no attendees
                if needs_update:
                    body = {}
                    body['attendees'] = event['attendees']
                    print(f'UPDATING {event_summary}')
                    if do_it:
                        gapi.call(cal.events(),
                                  'patch',
                                  calendarId=user,
                                  eventId=event['id'],
                                  sendNotifications=False,
                                  body=body)
                    else:
                        print(' not pulling the trigger.')
            try:
                page_token = events_page['nextPageToken']
            except KeyError:
                break
Ejemplo n.º 13
0
 def test_open_file_default_system_encoding(self, mock_open):
     """With no explicit encoding, the system default encoding is used."""
     fileutils.open_file(self.fake_path)
     encoding_used = mock_open.call_args[1]['encoding']
     expected = fileutils.GM_Globals[fileutils.GM_SYS_ENCODING]
     self.assertEqual(expected, encoding_used)
Ejemplo n.º 14
0
 def test_open_file_encoding_for_binary(self, mock_open):
     """Binary mode must not pass any text encoding through to open()."""
     fileutils.open_file(self.fake_path, mode='b')
     encoding_arg = mock_open.call_args[1]['encoding']
     self.assertIsNone(encoding_arg)
Ejemplo n.º 15
0
 def test_open_file_opens_correct_mode(self, mock_open):
     """The default mode handed to open() should be plain read ('r')."""
     fileutils.open_file(self.fake_path)
     mode_used = mock_open.call_args[0][1]
     self.assertEqual('r', mode_used)
Ejemplo n.º 16
0
 def test_open_file_exits_on_io_error(self, mock_open):
     """An IOError raised by open() should exit the program with code 6."""
     mock_open.side_effect = IOError('Fake IOError')
     with self.assertRaises(SystemExit) as raised:
         fileutils.open_file(self.fake_path)
     self.assertEqual(raised.exception.code, 6)
Ejemplo n.º 17
0
 def test_open_file_stdin(self, mock_stdin):
     """Opening '-' for reading should buffer stdin into a StringIO."""
     stdin_content = 'some stdin content'
     mock_stdin.read.return_value = stdin_content
     result = fileutils.open_file('-', mode='r')
     self.assertIsInstance(result, fileutils.io.StringIO)
     self.assertEqual(result.getvalue(), stdin_content)
Ejemplo n.º 18
0
 def __init__(self, output_schema_file_location):
     """Reads the schema definition from the first line of the given file.

     Args:
         output_schema_file_location: Path of the file whose first line
             holds the output schema definition.
     """
     f = fileutils.open_file(output_schema_file_location)
     try:
         # Only the first line carries the schema definition.
         schema_line = f.readline().strip()
     finally:
         # BUG FIX: the original leaked this handle (and shadowed the
         # builtin `str` with its local variable).
         f.close()
     self.construct_schema(schema_line)
Ejemplo n.º 19
0
 def test_open_file_opens_correct_path(self, mock_open):
     """open_file should pass the given path to open() and return its file."""
     result = fileutils.open_file(self.fake_path)
     path_used = mock_open.call_args[0][0]
     self.assertEqual(self.fake_path, path_used)
     self.assertEqual(mock_open.return_value, result)