Example #1
def downloadExport():
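    # Download the files of a Vault export (matter and export named on the
    # command line) from their Cloud Storage sink into a local folder,
    # optionally verifying MD5 hashes and extracting ZIP archives.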
    verifyFiles = True
    extractFiles = True
    v = buildGAPIObject()
    s = gapi_storage.build_gapi()
    matterId = getMatterItem(v, sys.argv[3])
    exportId = convertExportNameToID(v, sys.argv[4], matterId)
    targetFolder = GC_Values[GC_DRIVE_DIR]
    i = 5
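    # parse optional arguments: targetfolder <path>, noverify, noextract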
    while i < len(sys.argv):
        myarg = sys.argv[i].lower().replace('_', '')
        if myarg == 'targetfolder':
            targetFolder = os.path.expanduser(sys.argv[i + 1])
            if not os.path.isdir(targetFolder):
                os.makedirs(targetFolder)
            i += 2
        elif myarg == 'noverify':
            verifyFiles = False
            i += 1
        elif myarg == 'noextract':
            extractFiles = False
            i += 1
        else:
            controlflow.invalid_argument_exit(sys.argv[i],
                                              'gam download export')
    export = gapi.call(v.matters().exports(),
                       'get',
                       matterId=matterId,
                       exportId=exportId)
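    # download every object listed in the export's Cloud Storage sink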
    for s_file in export['cloudStorageSink']['files']:
        bucket = s_file['bucketName']
        s_object = s_file['objectName']
        filename = os.path.join(targetFolder, s_object.replace('/', '-'))
        print(f'saving to {filename}')
        request = s.objects().get_media(bucket=bucket, object=s_object)
        f = fileutils.open_file(filename, 'wb')
        downloader = googleapiclient.http.MediaIoBaseDownload(f, request)
        done = False
        while not done:
            status, done = downloader.next_chunk()
            sys.stdout.write(f' Downloaded: {status.progress():>7.2%}\r')
            sys.stdout.flush()
        sys.stdout.write('\n Download complete. Flushing to disk...\n')
        fileutils.close_file(f, True)
        if verifyFiles:
            expected_hash = s_file['md5Hash']
            sys.stdout.write(f' Verifying file hash is {expected_hash}...')
            sys.stdout.flush()
            utils.md5_matches_file(filename, expected_hash, True)
            print('VERIFIED')
        if extractFiles and re.search(r'\.zip$', filename):
            gam.extract_nested_zip(filename, targetFolder)
Example #2
def get_cloud_storage_object(s,
                             bucket,
                             object_,
                             local_file=None,
                             expectedMd5=None):
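    # Download a single Cloud Storage object to local_file (defaults to the
    # object name). If the file already exists and matches expectedMd5, the
    # download is skipped; otherwise the object is downloaded in chunks and
    # optionally verified afterwards.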
    if not local_file:
        local_file = object_
    if os.path.exists(local_file):
        sys.stdout.write(' File already exists. ')
        sys.stdout.flush()
        if expectedMd5:
            sys.stdout.write(f'Verifying {expectedMd5} hash...')
            sys.stdout.flush()
            if utils.md5_matches_file(local_file, expectedMd5, False):
                print('VERIFIED')
                return
            print('not verified. Downloading again and over-writing...')
        else:
            return  # nothing to verify, just assume we're good.
    print(f'saving to {local_file}')
    request = s.objects().get_media(bucket=bucket, object=object_)
    file_path = os.path.dirname(local_file)
    if not os.path.exists(file_path):
        os.makedirs(file_path)
    f = fileutils.open_file(local_file, 'wb')
    downloader = googleapiclient.http.MediaIoBaseDownload(f, request)
    done = False
    while not done:
        status, done = downloader.next_chunk()
        sys.stdout.write(f' Downloaded: {status.progress():>7.2%}\r')
        sys.stdout.flush()
    sys.stdout.write('\n Download complete. Flushing to disk...\n')
    fileutils.close_file(f, True)
    if expectedMd5:
        f = fileutils.open_file(local_file, 'rb')
        sys.stdout.write(f' Verifying file hash is {expectedMd5}...')
        sys.stdout.flush()
        utils.md5_matches_file(local_file, expectedMd5, True)
        print('VERIFIED')
        fileutils.close_file(f)
Example #3
def changeAttendees(users):
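    # Read an old,new attendee email mapping from a CSV file and patch each
    # user's calendar events, replacing mapped attendees. 'dryrun' reports
    # changes without applying them; 'allevents' also updates events the user
    # does not organize; 'start'/'end' limit the event time range.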
    do_it = True
    i = 5
    allevents = False
    start_date = end_date = None
    while len(sys.argv) > i:
        myarg = sys.argv[i].lower()
        if myarg == 'csv':
            csv_file = sys.argv[i + 1]
            i += 2
        elif myarg == 'dryrun':
            do_it = False
            i += 1
        elif myarg == 'start':
            start_date = utils.get_time_or_delta_from_now(sys.argv[i + 1])
            i += 2
        elif myarg == 'end':
            end_date = utils.get_time_or_delta_from_now(sys.argv[i + 1])
            i += 2
        elif myarg == 'allevents':
            allevents = True
            i += 1
        else:
            controlflow.invalid_argument_exit(
                sys.argv[i], 'gam <users> update calattendees')
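    # old email -> new email mapping loaded from the CSV file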
    attendee_map = {}
    f = fileutils.open_file(csv_file)
    csvFile = csv.reader(f)
    for row in csvFile:
        attendee_map[row[0].lower()] = row[1].lower()
    fileutils.close_file(f)
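    # walk each user's calendar, paging through the event list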
    for user in users:
        sys.stdout.write(f'Checking user {user}\n')
        user, cal = buildCalendarGAPIObject(user)
        if not cal:
            continue
        page_token = None
        while True:
            events_page = gapi.call(cal.events(),
                                    'list',
                                    calendarId=user,
                                    pageToken=page_token,
                                    timeMin=start_date,
                                    timeMax=end_date,
                                    showDeleted=False,
                                    showHiddenInvitations=False)
            print(f'Got {len(events_page.get("items", []))}')
            for event in events_page.get('items', []):
                if event['status'] == 'cancelled':
                    # print(' skipping cancelled event')
                    continue
                try:
                    event_summary = event['summary']
                except (KeyError, UnicodeEncodeError, UnicodeDecodeError):
                    event_summary = event['id']
                try:
                    organizer = event['organizer']['email'].lower()
                    if not allevents and organizer != user:
                        # print(f' skipping not-my-event {event_summary}')
                        continue
                except KeyError:
                    pass  # no email for organizer
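                # swap any attendee whose address appears in the mapping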
                needs_update = False
                try:
                    for attendee in event['attendees']:
                        try:
                            if attendee['email'].lower() in attendee_map:
                                old_email = attendee['email'].lower()
                                new_email = attendee_map[
                                    attendee['email'].lower()]
                                print(f' SWITCHING attendee {old_email} to '
                                      f'{new_email} for {event_summary}')
                                event['attendees'].remove(attendee)
                                event['attendees'].append({'email': new_email})
                                needs_update = True
                        except KeyError:  # no email for that attendee
                            pass
                except KeyError:
                    continue  # no attendees
                if needs_update:
                    body = {}
                    body['attendees'] = event['attendees']
                    print(f'UPDATING {event_summary}')
                    if do_it:
                        gapi.call(cal.events(),
                                  'patch',
                                  calendarId=user,
                                  eventId=event['id'],
                                  sendNotifications=False,
                                  body=body)
                    else:
                        print(' not pulling the trigger.')
                # else:
                #  print(f' no update needed for {event_summary}')
            try:
                page_token = events_page['nextPageToken']
            except KeyError:
                break
Example #4
    def test_close_file_with_error(self):
        mock_file = MagicMock()
        mock_file.close.side_effect = IOError()
        self.assertFalse(fileutils.close_file(mock_file))
        self.assertEqual(mock_file.close.call_count, 1)
Example #5
    def test_close_file_closes_file_successfully(self):
        mock_file = MagicMock()
        self.assertTrue(fileutils.close_file(mock_file))
        self.assertEqual(mock_file.close.call_count, 1)
Example #6
def sync():
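    # Sync Cloud Identity company devices against a local CSV inventory:
    # devices present in the CSV but not in Cloud Identity are created;
    # devices in Cloud Identity but not in the CSV are deleted, wiped or
    # left alone, depending on the configured missing actions.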
    ci = gapi_cloudidentity.build_dwd()
    device_types = gapi.get_enum_values_minus_unspecified(
        ci._rootDesc['schemas']['GoogleAppsCloudidentityDevicesV1Device']
        ['properties']['deviceType']['enum'])
    customer = _get_device_customerid()
    device_filter = None
    csv_file = None
    serialnumber_column = 'serialNumber'
    devicetype_column = 'deviceType'
    static_devicetype = None
    assettag_column = None
    unassigned_missing_action = 'delete'
    assigned_missing_action = 'donothing'
    missing_actions = ['delete', 'wipe', 'donothing']
    i = 3
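    # parse command-line arguments (filter, CSV file and column names, missing actions)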
    while i < len(sys.argv):
        myarg = sys.argv[i].lower().replace('_', '')
        if myarg in ['filter', 'query']:
            device_filter = sys.argv[i + 1]
            i += 2
        elif myarg == 'csvfile':
            csv_file = sys.argv[i + 1]
            i += 2
        elif myarg == 'serialnumbercolumn':
            serialnumber_column = sys.argv[i + 1]
            i += 2
        elif myarg == 'devicetypecolumn':
            devicetype_column = sys.argv[i + 1]
            i += 2
        elif myarg == 'staticdevicetype':
            static_devicetype = sys.argv[i + 1].upper()
            if static_devicetype not in device_types:
                controlflow.expected_argument_exit('device_type',
                                                   ', '.join(device_types),
                                                   sys.argv[i + 1])
            i += 2
        elif myarg in {'assettagcolumn', 'assetidcolumn'}:
            assettag_column = sys.argv[i + 1]
            i += 2
        elif myarg == 'unassignedmissingaction':
            unassigned_missing_action = sys.argv[i + 1].lower().replace(
                '_', '')
            if unassigned_missing_action not in missing_actions:
                controlflow.expected_argument_exit('unassigned_missing_action',
                                                   ', '.join(missing_actions),
                                                   sys.argv[i + 1])
            i += 2
        elif myarg == 'assignedmissingaction':
            assigned_missing_action = sys.argv[i + 1].lower().replace('_', '')
            if assigned_missing_action not in missing_actions:
                controlflow.expected_argument_exit('assigned_missing_action',
                                                   ', '.join(missing_actions),
                                                   sys.argv[i + 1])
            i += 2
        else:
            controlflow.invalid_argument_exit(sys.argv[i], 'gam sync devices')
    if not csv_file:
        controlflow.system_error_exit(
            3, 'csvfile is a required argument for "gam sync devices".')
    f = fileutils.open_file(csv_file)
    input_file = csv.DictReader(f, restval='')
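    # make sure the required columns are present in the CSV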
    if serialnumber_column not in input_file.fieldnames:
        controlflow.csv_field_error_exit(serialnumber_column,
                                         input_file.fieldnames)
    if not static_devicetype and devicetype_column not in input_file.fieldnames:
        controlflow.csv_field_error_exit(devicetype_column,
                                         input_file.fieldnames)
    if assettag_column and assettag_column not in input_file.fieldnames:
        controlflow.csv_field_error_exit(assettag_column,
                                         input_file.fieldnames)
    local_devices = {}
    for row in input_file:
        # upper() is important for comparison since Google
        # always returns uppercase serials
        local_device = {
            'serialNumber': row[serialnumber_column].strip().upper()
        }
        if static_devicetype:
            local_device['deviceType'] = static_devicetype
        else:
            local_device['deviceType'] = row[devicetype_column].strip()
        sndt = f"{local_device['serialNumber']}-{local_device['deviceType']}"
        if assettag_column:
            local_device['assetTag'] = row[assettag_column].strip()
            sndt += f"-{local_device['assetTag']}"
        local_devices[sndt] = local_device
    fileutils.close_file(f)
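    # fetch the current company device inventory from Cloud Identity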
    page_message = gapi.got_total_items_msg('Company Devices', '...\n')
    device_fields = ['serialNumber', 'deviceType', 'lastSyncTime', 'name']
    if assettag_column:
        device_fields.append('assetTag')
    fields = f'nextPageToken,devices({",".join(device_fields)})'
    remote_devices = {}
    remote_device_map = {}
    result = gapi.get_all_pages(ci.devices(),
                                'list',
                                'devices',
                                customer=customer,
                                page_message=page_message,
                                pageSize=100,
                                filter=device_filter,
                                view='COMPANY_INVENTORY',
                                fields=fields)
    for remote_device in result:
        sn = remote_device['serialNumber']
        last_sync = remote_device.pop('lastSyncTime', NEVER_TIME_NOMS)
        name = remote_device.pop('name')
        sndt = f"{remote_device['serialNumber']}-{remote_device['deviceType']}"
        if assettag_column:
            if 'assetTag' not in remote_device:
                remote_device['assetTag'] = ''
            sndt += f"-{remote_device['assetTag']}"
        remote_devices[sndt] = remote_device
        remote_device_map[sndt] = {'name': name}
        if last_sync == NEVER_TIME_NOMS:
            remote_device_map[sndt]['unassigned'] = True
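    # diff the CSV inventory against the remote inventory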
    devices_to_add = []
    for sndt, device in local_devices.items():
        if sndt not in remote_devices:
            devices_to_add.append(device)
    missing_devices = []
    for sndt, device in remote_devices.items():
        if sndt not in local_devices:
            missing_devices.append(device)
    print(
        f'Need to add {len(devices_to_add)} and remove {len(missing_devices)} devices...'
    )
    for add_device in devices_to_add:
        print(f'Creating {add_device["serialNumber"]}')
        try:
            result = gapi.call(
                ci.devices(),
                'create',
                customer=customer,
                throw_reasons=[gapi_errors.ErrorReason.FOUR_O_NINE],
                body=add_device)
            print(
                f' created {result["response"]["deviceType"]} device {result["response"]["name"]} with serial {result["response"]["serialNumber"]}'
            )
        except googleapiclient.errors.HttpError:
            print(f' {add_device["serialNumber"]} already exists')
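    # devices in Cloud Identity but not in the CSV get the configured
    # unassigned/assigned missing action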
    for missing_device in missing_devices:
        sn = missing_device['serialNumber']
        sndt = f"{sn}-{missing_device['deviceType']}"
        if assettag_column:
            sndt += f"-{missing_device['assetTag']}"
        name = remote_device_map[sndt]['name']
        unassigned = remote_device_map[sndt].get('unassigned')
        action = unassigned_missing_action if unassigned else assigned_missing_action
        if action == 'donothing':
            pass
        else:
            if action == 'delete':
                kwargs = {'customer': customer}
            else:
                kwargs = {'body': {'customer': customer}}
            gapi.call(ci.devices(), action, name=name, **kwargs)
            print(f'{action}d {sn}')