def init():
    """Authorize against the Google Sheets API and expose a global service.

    Loads cached credentials from ./configs/token.pickle when present,
    refreshes or re-runs the installed-app OAuth flow when needed, persists
    the resulting credentials, and publishes the module-level globals
    SPREADSHEET_ID, SAMPLE_RANGE_NAME and service.
    """
    httplib2shim.patch()
    # If modifying these scopes, delete the file token.pickle.
    scopes = ['https://www.googleapis.com/auth/spreadsheets']
    # The ID and range of a sample spreadsheet.
    global SPREADSHEET_ID
    global SAMPLE_RANGE_NAME
    SPREADSHEET_ID = '17OF43aYLapmC25ngYOswO_J3wpWJbnQoAUiE8a-vsIQ'
    SAMPLE_RANGE_NAME = 'Response'

    token_path = './configs/token.pickle'
    creds = None
    # token.pickle stores the user's access and refresh tokens; it is created
    # automatically the first time the authorization flow completes.
    if os.path.exists(token_path):
        with open(token_path, 'rb') as token_file:
            creds = pickle.load(token_file)

    # No (valid) credentials available: refresh if possible, otherwise run the
    # local-server OAuth flow, then persist for the next run.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                './configs/credentials.json', scopes)
            creds = flow.run_local_server(port=0)
        with open(token_path, 'wb') as token_file:
            pickle.dump(creds, token_file)

    global service
    service = build('sheets', 'v4', credentials=creds)
    print(colored("google sheets finished init", "blue"))
def main():
    """ Main entry point """
    # Monkey-patch httplib2 before any Google API client is built (the shim
    # replaces httplib2's connection handling; see httplib2shim docs).
    httplib2shim.patch()
    # Project-specific root-logger setup for this Splunk TA / modular input.
    logging.setup_root_logger('splunk_ta_google-cloudplatform',
                              'google_cloud_monitoring')
    # Delegate scheme printing and the run loop to the shared TA framework.
    tacommon.main(print_scheme, run)
def __init__(self, source, mip=1, dim=512, is_test=False):
    """Wrap a CloudVolume data source.

    source -- either a known alias ('basil', 'pinky40_unaligned') or a
              full gs:// CloudVolume path.
    mip    -- mip level to open the volume at.
    dim    -- patch dimension at the opened mip level.
    is_test -- flag stored for downstream use.
    """
    httplib2shim.patch()
    # Map friendly aliases onto their full bucket paths; anything else is
    # assumed to already be a valid CloudVolume path.
    aliases = {
        'basil': 'gs://neuroglancer/basil_v0/raw_image',
        'pinky40_unaligned': 'gs://neuroglancer/pinky40_v11/image',
    }
    source = aliases.get(source, source)

    self.vol = cv.CloudVolume(source, mip=mip)
    self.dim = dim
    # First-scale metadata: full volume size and voxel offset.
    self.vol_info = self.vol.info['scales'][0]
    self.vol_size = self.vol_info['size']
    self.vol_offsets = self.vol_info['voxel_offset']
    # Dimension expressed at mip 0 resolution (each mip halves resolution).
    self.adj_dim = self.dim * 2 ** self.vol.mip
    self.is_test = is_test
def main():
    """CLI entry point: build the parser, dispatch to the chosen subcommand."""
    # Monkeypatches httplib2 to avoid issues with connection polling and oauth2client
    # (https://github.com/googleapis/google-api-python-client/issues/218)
    import httplib2shim
    httplib2shim.patch()

    parser = ArgumentParser()
    subparsers = parser.add_subparsers()
    subparsers.required = True
    subparsers.dest = 'command'
    # Each command object registers its own subparser (and sets args.func).
    for command in COMMANDS:
        command.register_parser(subparsers)
    args = parser.parse_args()

    # BUG FIX: the original unconditionally called connect() and then, when an
    # api_key was supplied, threw that connection away and connected again.
    # Connect exactly once, with the api_key when present.
    if 'api_key' in args:
        service = connect(args.api_key)
    else:
        service = connect()
    args.func(service=service, args=args)
import random
import string
import logging
from pprint import pformat

import cwltool.draft2tool
from cwltool.pathmapper import MapperEnt

from poll import PollThread
from pipeline import Pipeline, PipelineJob
from gce_fsaccess import GCEFsAccess

log = logging.getLogger('funnel')

import httplib2shim
httplib2shim.patch()

# oauth2client / apiclient are optional at import time; callers are expected
# to cope when GoogleCredentials/build are absent.
try:
    from oauth2client.client import GoogleCredentials
    from apiclient.discovery import build
except ImportError:
    pass

BASE_MOUNT = "/mnt/data"


def find_index(s, sub):
    """Return the index of *sub* in *s*, or None when it is not present.

    BUG FIX: the original used a bare ``except:`` with a dangling ``None``
    expression (no ``return``), silently swallowing every exception type
    including KeyboardInterrupt. Only ValueError (raised by ``str.index`` /
    ``list.index`` on a miss) should map to None.
    """
    try:
        return s.index(sub)
    except ValueError:
        return None
# Since the HTTP library used by the Google API Client library is not
# thread-safe, we leverage https://github.com/GoogleCloudPlatform/httplib2shim
# resolves the following:
# - https://github.com/nccgroup/ScoutSuite/issues/443
# - https://github.com/nccgroup/ScoutSuite/issues/665
import httplib2shim
httplib2shim.patch()

from googleapiclient import discovery


class GCPBaseFacade:
    """Base facade holding a lazily-built googleapiclient discovery client."""

    def __init__(self, client_name: str, client_version: str):
        # Service name/version used when the client is first built.
        self._client_name = client_name
        self._client_version = client_version
        self._client = None  # built lazily by _build_client()

    def _build_client(self) -> discovery.Resource:
        return self._build_arbitrary_client(self._client_name, self._client_version)

    def _build_arbitrary_client(self, client_name, client_version, force_new=False):
        """
        :param client_name: name of the service
        :param client_version: version of the client to create
        :param force_new: whether to create a new client - useful to create arbitrary clients from facades
        :return: the discovery client (cached unless force_new is set)
        """
        if force_new:
            return discovery.build(client_name, client_version,
                                   cache_discovery=False, cache=MemoryCache())
        if not self._client:
            self._client = discovery.build(client_name, client_version,
                                           cache_discovery=False, cache=MemoryCache())
        # BUG FIX: the original fell off the end of this branch and returned
        # None whenever the cached client path was taken, so _build_client()
        # never yielded a usable Resource after the first build.
        return self._client
class MultiFolderClone():
    """Clone a Google Drive folder tree into one or more destination folders
    using a pool of service-account credentials, rotating accounts to dodge
    per-account quota limits.

    NOTE(review): the defaults below are *class attributes*; the mutable ones
    (dest, accounts, files_to_copy, bad_drives, google_opts, statistics,
    error_codes) are shared across instances unless rebound in __init__ —
    confirm single-instance usage before reusing this class.
    """
    patch()  # httplib2shim patch applied at class-definition time

    # --- configuration defaults (overridable via **options in __init__) ---
    source = ''
    dest = []
    accounts = []
    path = 'accounts'          # directory of service-account *.json files
    width = 2                  # tree-drawing width for progress display
    thread_count = None
    skip_bad_dests = False
    drive_to_use = 1
    files_to_copy = []
    encoding = None
    threads = None             # BoundedSemaphore, created in clone()
    id_whitelist = None
    id_blacklist = None
    name_whitelist = None
    name_blacklist = None
    share_publicly = False
    file_copy_error = 0
    bad_drives = []
    google_opts = ['trashed = false']
    override_thread_check = False
    verbose = False
    max_retries = 3
    sleep_time = 1
    dont_recurse = False
    statistics = {
        'folders': 0,
        'files': 0,
        'total_accounts': 0,
        'quotad_accounts': 0,
        'errors': {},
    }
    # Map of Drive API error reasons -> whether the error is retryable.
    error_codes = {
        'dailyLimitExceeded': True,
        'userRateLimitExceeded': True,
        'rateLimitExceeded': True,
        'sharingRateLimitExceeded': True,
        'appNotAuthorizedToFile': True,
        'insufficientFilePermissions': True,
        'domainPolicy': True,
        'backendError': True,
        'internalError': True,
        'badRequest': False,
        'invalidSharingRequest': False,
        'authError': False,
        'notFound': False,
        'failedPrecondition': True
    }

    def __init__(self, source, dest, **options):
        """Store the source folder id, destination folder id(s), and any
        recognized keyword options; then discover service-account files.

        Raises ValueError when the accounts path contains no *.json files.
        """
        self.source = source
        self.dest = dest
        if isinstance(dest, str):
            self.dest = [dest]
        # Each option is copied onto the instance only when explicitly given,
        # coerced to its expected type.
        if options.get('encoding') is not None:
            self.encoding = options['encoding']
        if options.get('thread_count') is not None:
            self.thread_count = int(options['thread_count'])
        if options.get('skip_bad_dests') is not None:
            self.skip_bad_dests = bool(options['skip_bad_dests'])
        if options.get('path') is not None:
            self.path = str(options['path'])
        if options.get('width') is not None:
            self.width = int(options['width'])
        if options.get('sleep_time') is not None:
            self.sleep_time = int(options['sleep_time'])
        if options.get('max_retries') is not None:
            self.max_retries = int(options['max_retries'])
        if options.get('id_whitelist') is not None:
            self.id_whitelist = list(options['id_whitelist'])
        if options.get('name_whitelist') is not None:
            self.name_whitelist = list(options['name_whitelist'])
        if options.get('id_blacklist') is not None:
            self.id_blacklist = list(options['id_blacklist'])
        if options.get('name_blacklist') is not None:
            self.name_blacklist = list(options['name_blacklist'])
        if options.get('override_thread_check') is not None:
            self.override_thread_check = bool(options['override_thread_check'])
        if options.get('verbose') is not None:
            self.verbose = bool(options['verbose'])
        if options.get('google_opts') is not None:
            self.google_opts = list(options['google_opts'])
        if options.get('no_recursion') is not None:
            self.dont_recurse = bool(options['no_recursion'])
        if options.get('share_publicly') is not None:
            self.share_publicly = bool(options['share_publicly'])
        self.accounts = glob(self.path + '/*.json')
        if not self.accounts:
            raise ValueError('The path provided (%s) has no accounts.' % self.path)

    def _add_error_stats(self, reason):
        # Tally error reasons into the statistics dict.
        if reason in self.statistics['errors']:
            self.statistics['errors'][reason] += 1
        else:
            self.statistics['errors'][reason] = 1

    def _create_drive(self):
        """Build a Drive v3 service from a randomly chosen service account.

        Retries (with a fresh random account) on HttpError; returns a
        (account_path, drive_service) tuple.
        """
        while True:
            random_acc = random.choice(self.accounts)
            try:
                credentials = Credentials.from_service_account_file(
                    random_acc, scopes=["https://www.googleapis.com/auth/drive"])
                random_drive = build("drive", "v3", credentials=credentials)
            except HttpError:
                print("#Error SA Error")
            else:
                break
        return (random_acc, random_drive)

    def _log(self, line):
        # Print only when verbose mode is enabled.
        if self.verbose:
            print(line)

    def _apicall(self, request):
        """Execute a Drive API request with retry/backoff.

        Returns the response on success, None when retries are exhausted or
        the error is non-retryable, False on quota-type errors (caller
        rotates to another account), True for cannotCopyFile (counted, then
        skipped). Raises RuntimeError when the Shared Drive is full.
        """
        resp = None
        tries = 0
        while True:
            tries += 1
            if tries > self.max_retries:
                return None
            try:
                resp = request.execute()
            except HttpError as error:
                # Drive errors carry a JSON payload with a machine-readable
                # "reason"; an unparsable payload is treated as transient.
                try:
                    error_details = json.loads(error.content.decode('utf-8'))
                except json.decoder.JSONDecodeError:
                    time.sleep(self.sleep_time)
                    continue
                reason = error_details['error']['errors'][0]['reason']
                # self._add_error_stats(reason)
                if reason == 'userRateLimitExceeded':
                    return False
                elif reason == 'storageQuotaExceeded':
                    print(
                        'Got storageQuotaExceeded error. You are not using a Shared Drive.'
                    )
                    return False
                elif reason == "cannotCopyFile":
                    self.file_copy_error += 1
                    return True
                elif reason == 'teamDriveFileLimitExceeded':
                    raise RuntimeError(
                        'The Shared Drive is full. No more files can be copied to it.'
                    )
                elif self.error_codes[reason]:
                    # Retryable per the error_codes table.
                    time.sleep(self.sleep_time)
                    continue
                else:
                    return None
            except (socket.error, ProtocolError, TransportError) as err:
                # Network-level hiccups: sleep and retry.
                reason = str(err)
                # self._add_error_stats(reason)
                time.sleep(self.sleep_time)
                continue
            else:
                return resp

    def _ls(self, service, parent, searchTerms=[]):
        # List all children of `parent` matching google_opts + searchTerms,
        # following nextPageToken pagination.
        # NOTE(review): mutable default argument — safe only because callers
        # never mutate searchTerms; confirm before refactoring.
        files = []
        resp = {'nextPageToken': None}
        while 'nextPageToken' in resp:
            resp = self._apicall(service.files().list(
                q=' and '.join(['"%s" in parents' % parent] +
                               self.google_opts + searchTerms),
                fields='files(md5Checksum,id,name),nextPageToken',
                pageSize=1000,
                supportsAllDrives=True,
                includeItemsFromAllDrives=True,
                pageToken=resp['nextPageToken']))
            files += resp['files']
        return files

    def _lsd(self, service, parent):
        # Folders only.
        return self._ls(
            service,
            parent,
            searchTerms=[
                'mimeType contains "application/vnd.google-apps.folder"'
            ])

    def _lsf(self, service, parent):
        # Non-folder files only.
        return self._ls(
            service,
            parent,
            searchTerms=[
                'not mimeType contains "application/vnd.google-apps.folder"'
            ])

    def _copy(self, sa_name, driv, source, dest):
        """Copy a single file (runs on a worker thread).

        A falsy response means the account hit quota: it is marked bad and
        the file re-queued. Releases the thread semaphore when done.
        """
        self._log('Copying file %s into folder %s' % (source, dest))
        resp = self._apicall(driv.files().copy(fileId=source,
                                               body={'parents': [dest]},
                                               supportsAllDrives=True))
        if not resp:
            self._log('Error: Quotad SA')
            self.bad_drives.append(sa_name)
            self.files_to_copy.append((source, dest))
        elif self.share_publicly:
            self._apicall(driv.permissions().create(fileId=resp['id'],
                                                    body={
                                                        'role': 'reader',
                                                        'type': 'anyone'
                                                    },
                                                    supportsAllDrives=True))
        self.threads.release()

    def _rcopy(self, source, dest, folder_name, display_line, width):
        """Recursively mirror `source` into `dest`.

        Diffs files by (md5Checksum, name), applies white/blacklists, copies
        missing files on worker threads (retrying re-queued files until the
        queue drains), then recurses into subfolders unless dont_recurse.
        """
        list_drive = self._create_drive()[1]
        self._log('%s to %s' % (source, dest))
        files_source = self._lsf(list_drive, source)
        files_dest = self._lsf(list_drive, dest)
        folders_source = self._lsd(list_drive, source)
        folders_dest = self._lsd(list_drive, dest)
        files_to_copy = []
        files_source_id = []
        files_dest_id = []
        folder_len = len(folders_source) - 1
        self._log('Found %d files in source.' % len(files_source))
        self._log('Found %d folders in source.' % len(folders_source))
        self._log('Found %d files in dest.' % len(files_dest))
        self._log('Found %d folders in dest.' % len(folders_dest))
        folders_copied = {}
        # Keep id-bearing copies aside, then strip ids so membership tests
        # below compare only (md5Checksum, name).
        for file in files_source:
            files_source_id.append(dict(file))
            file.pop('id')
        for file in files_dest:
            files_dest_id.append(dict(file))
            file.pop('id')
        i = 0
        while len(files_source) > i:
            if files_source[i] not in files_dest:
                files_to_copy.append(files_source_id[i])
            i += 1
        self._log('Checking whitelist and blacklist')
        # Iterate over a snapshot since we remove from files_to_copy.
        for i in list(files_to_copy):
            if self.id_whitelist is not None:
                if i['id'] not in self.id_whitelist:
                    files_to_copy.remove(i)
            if self.id_blacklist is not None:
                if i['id'] in self.id_blacklist:
                    files_to_copy.remove(i)
            if self.name_whitelist is not None:
                if i['name'] not in self.name_whitelist:
                    files_to_copy.remove(i)
            if self.name_blacklist is not None:
                if i['name'] in self.name_blacklist:
                    files_to_copy.remove(i)
        self._log('Added %d files to copy list.' % len(files_to_copy))
        self.files_to_copy = [(i['id'], dest) for i in files_to_copy]
        self._log('Copying files')
        fullname = display_line + folder_name
        # Outside Heroku (no DYNO env var), sanitize the display name for the
        # configured console encoding.
        if 'DYNO' not in os.environ:
            fullname = fullname.encode(self.encoding,
                                       errors='replace').decode(self.encoding)
        if files_to_copy:
            # Worker threads re-queue quota-failed copies into
            # self.files_to_copy; loop until a pass completes with no re-queues.
            while self.files_to_copy:
                files_to_copy = self.files_to_copy
                self.files_to_copy = []
                running_threads = []
                pbar = CounterProgress(f"{fullname}",
                                       max=len(files_to_copy),
                                       encoding=self.encoding)
                pbar.update()
                # copy
                for i in files_to_copy:
                    self.threads.acquire()
                    random_acc, random_drive = self._create_drive()
                    thread = threading.Thread(target=self._copy,
                                              args=(random_acc, random_drive,
                                                    i[0], i[1]))
                    running_threads.append(thread)
                    thread.start()
                    pbar.next()
                if self.file_copy_error:
                    pbar.finish_update_with_error(self.file_copy_error)
                    self.file_copy_error = 0
                else:
                    pbar.finish_update()
                pbar.finish()
                # join all threads
                for i in running_threads:
                    i.join()
                # check for bad drives
                for i in self.bad_drives:
                    if i in self.accounts:
                        self.accounts.remove(i)
                self.bad_drives = []
                # If there is less than 2 SAs, exit
                if len(self.accounts) == 1:
                    raise RuntimeError('Out of SAs.')
            # copy completed
            #print(display_line + folder_name + ' | Synced')
        elif files_source and len(files_source) <= len(files_dest):
            print(fullname + ' | Up to date')
        else:
            print(fullname)
        for i in folders_dest:
            folders_copied[i['name']] = i['id']
        current_folder = 0
        if self.dont_recurse:
            return
        for folder in folders_source:
            # Build the box-drawing prefix for the next tree level; the last
            # folder gets '└', others '├'.
            if current_folder == folder_len:
                next_display_line = display_line.replace(
                    '├' + '─' * width + ' ',
                    '│' + ' ' * width + ' ').replace(
                        '└' + '─' * width + ' ',
                        ' ' + ' ' * width) + '└' + '─' * width + ' '
            else:
                next_display_line = display_line.replace(
                    '├' + '─' * width + ' ',
                    '│' + ' ' * width + ' ').replace(
                        '└' + '─' * width + ' ',
                        ' ' + ' ' * width) + '├' + '─' * width + ' '
            # Reuse an existing destination folder of the same name, else
            # create it.
            if folder['name'] not in folders_copied.keys():
                folder_id = self._apicall(list_drive.files().create(
                    body={
                        'name': folder['name'],
                        'mimeType': 'application/vnd.google-apps.folder',
                        'parents': [dest]
                    },
                    supportsAllDrives=True))['id']
            else:
                folder_id = folders_copied[folder['name']]
            # '%' escaped because the name flows into %-format log strings.
            self._rcopy(folder['id'], folder_id,
                        folder['name'].replace('%', '%%'), next_display_line,
                        width)
            current_folder += 1
        return

    def clone(self):
        """Validate source/destination folders, size the thread semaphore,
        and kick off the recursive copy for every destination.

        Raises ValueError on unreadable source/destination (unless
        skip_bad_dests) or when thread_count exceeds the account count.
        """
        check = self._create_drive()[1]
        try:
            root_dir = check.files().get(
                fileId=self.source,
                supportsAllDrives=True).execute()['name']
        except HttpError:
            raise ValueError('Source folder %s cannot be read or is invalid.'
                             % self.source)
        dest_dict = {i: '' for i in self.dest}
        for key in list(dest_dict.keys()):
            try:
                dest_dir = check.files().get(
                    fileId=key, supportsAllDrives=True).execute()['name']
                dest_dict[key] = dest_dir
            except HttpError:
                if not self.skip_bad_dests:
                    raise ValueError(
                        'Destination folder %s cannot be read or is invalid.'
                        % key)
                else:
                    dest_dict.pop(key)
        print('Using %d Drive Services' % len(self.accounts))
        # Cap concurrency at one thread per service account.
        if self.thread_count is not None and self.thread_count <= len(
                self.accounts):
            self.threads = threading.BoundedSemaphore(self.thread_count)
            print('BoundedSemaphore with %d threads' % self.thread_count)
        elif self.thread_count is None:
            self.threads = threading.BoundedSemaphore(len(self.accounts))
            print('BoundedSemaphore with %d threads' % len(self.accounts))
        else:
            raise ValueError('More threads than there is service accounts.')
        for i, dest_dir in dest_dict.items():
            print('Copying from %s to %s.' % (root_dir, dest_dir))
            self._rcopy(self.source, i, root_dir, '', self.width)
# NOTE(review): this chunk begins mid-expression — the opening of this
# error-code mapping (its name and '{') is outside the visible region.
    'dailyLimitExceeded': True,
    'userRateLimitExceeded': True,
    'rateLimitExceeded': True,
    'sharingRateLimitExceeded': True,
    'appNotAuthorizedToFile': True,
    'insufficientFilePermissions': True,
    'domainPolicy': True,
    'backendError': True,
    'internalError': True,
    'badRequest': False,
    'invalidSharingRequest': False,
    'authError': False,
    'notFound': False
}
patch()  # httplib2shim patch applied at module import


def log(*l):
    """Print each argument when the module-level `debug` flag is truthy."""
    global debug
    if debug:
        for i in l:
            print(i)


def apicall(request, sleep_time=1, max_retries=3):
    # NOTE(review): truncated in this view — only the initialisation is
    # visible; the retry loop presumably continues beyond this chunk.
    global error_codes
    resp = None
    tries = 0
class multifolderclone():
    """Clone a Google Drive folder tree into one or more destinations using a
    round-robin pool of service-account Drive clients."""
    patch()  # httplib2shim patch applied at class-definition time

    # --- configuration defaults (overridable via **options in __init__) ---
    source = ''
    dest = []
    path = 'accounts'          # directory of service-account *.json files
    width = 2                  # tree-drawing width for progress display
    thread_count = None
    skip_bad_dests = False
    drive_to_use = 1
    files_to_copy = []
    threads = None             # BoundedSemaphore, created in clone()
    id_whitelist = None
    id_blacklist = None
    name_whitelist = None
    name_blacklist = None
    bad_drives = []
    google_opts = ['trashed = false']
    max_retries = 3
    sleep_time = 1
    # BUG FIX: these two had no class defaults, so _log() raised
    # AttributeError (self.verbose) and override_thread_check was undefined
    # whenever the corresponding options were not passed.
    verbose = False
    override_thread_check = False
    # Map of Drive API error reasons -> whether the error is retryable.
    error_codes = {
        'dailyLimitExceeded': True,
        'userRateLimitExceeded': True,
        'rateLimitExceeded': True,
        'sharingRateLimitExceeded': True,
        'appNotAuthorizedToFile': True,
        'insufficientFilePermissions': True,
        'domainPolicy': True,
        'backendError': True,
        'internalError': True,
        'badRequest': False,
        'invalidSharingRequest': False,
        'authError': False,
        'notFound': False
    }

    def __init__(self, source, dest, **options):
        """Store the source folder id, destination folder id(s), and any
        recognized keyword options (each coerced to its expected type)."""
        self.source = source
        self.dest = dest
        if isinstance(dest, str):
            self.dest = [dest]
        if options.get('thread_count') is not None:
            self.thread_count = int(options['thread_count'])
        if options.get('skip_bad_dests') is not None:
            self.skip_bad_dests = bool(options['skip_bad_dests'])
        if options.get('path') is not None:
            self.path = str(options['path'])
        if options.get('width') is not None:
            self.width = int(options['width'])
        if options.get('sleep_time') is not None:
            self.sleep_time = int(options['sleep_time'])
        if options.get('max_retries') is not None:
            self.max_retries = int(options['max_retries'])
        if options.get('id_whitelist') is not None:
            self.id_whitelist = list(options['id_whitelist'])
        if options.get('name_whitelist') is not None:
            self.name_whitelist = list(options['name_whitelist'])
        if options.get('id_blacklist') is not None:
            self.id_blacklist = list(options['id_blacklist'])
        if options.get('name_blacklist') is not None:
            self.name_blacklist = list(options['name_blacklist'])
        if options.get('override_thread_check') is not None:
            self.override_thread_check = bool(options['override_thread_check'])
        if options.get('verbose') is not None:
            self.verbose = bool(options['verbose'])
        if options.get('google_opts') is not None:
            # BUG FIX: original read an undefined local ('google_opts =
            # list(google_opts)' -> NameError) and never stored the option.
            self.google_opts = list(options['google_opts'])

    def _log(self, s):
        # Print only when verbose mode is enabled.
        if self.verbose:
            print(s)

    def _apicall(self, request):
        """Execute a Drive API request with retry/backoff.

        Returns the response on success, None when retries are exhausted or
        the error is non-retryable, False on userRateLimitExceeded /
        storageQuotaExceeded; raises RuntimeError when the Shared Drive is
        full.
        """
        resp = None
        tries = 0
        while True:
            tries += 1
            if tries > self.max_retries:
                return None
            try:
                resp = request.execute()
            except HttpError as error:
                # Drive errors carry a JSON payload with a machine-readable
                # "reason"; an unparsable payload is treated as transient.
                try:
                    error_details = json.loads(error.content.decode('utf-8'))
                except json.decoder.JSONDecodeError:
                    time.sleep(self.sleep_time)
                    continue
                reason = error_details['error']['errors'][0]['reason']
                if reason == 'userRateLimitExceeded':
                    return False
                elif reason == 'storageQuotaExceeded':
                    print(
                        'Got storageQuotaExceeded error. You are not using a Shared Drive.'
                    )
                    return False
                elif reason == 'teamDriveFileLimitExceeded':
                    raise RuntimeError(
                        'The Shared Drive is full. No more files can be copied to it.'
                    )
                elif self.error_codes[reason]:
                    # Retryable per the error_codes table.
                    time.sleep(self.sleep_time)
                    continue
                else:
                    return None
            except (socket.error, ProtocolError, TransportError):
                # Network-level hiccups: sleep and retry.
                time.sleep(self.sleep_time)
                continue
            else:
                return resp

    def _ls(self, service, parent, searchTerms=[]):
        # List all children of `parent` matching google_opts + searchTerms,
        # following nextPageToken pagination.
        files = []
        resp = {'nextPageToken': None}
        while 'nextPageToken' in resp:
            resp = self._apicall(service.files().list(
                q=' and '.join(['"%s" in parents' % parent] +
                               self.google_opts + searchTerms),
                fields='files(md5Checksum,id,name),nextPageToken',
                pageSize=1000,
                supportsAllDrives=True,
                includeItemsFromAllDrives=True,
                pageToken=resp['nextPageToken']))
            files += resp['files']
        return files

    def _lsd(self, service, parent):
        # Folders only.
        return self._ls(
            service,
            parent,
            searchTerms=[
                'mimeType contains "application/vnd.google-apps.folder"'
            ])

    def _lsf(self, service, parent):
        # Non-folder files only.
        return self._ls(
            service,
            parent,
            searchTerms=[
                'not mimeType contains "application/vnd.google-apps.folder"'
            ])

    def _copy(self, driv, source, dest):
        """Copy a single file (runs on a worker thread); on quota failure the
        drive is marked bad and the file re-queued. Releases the semaphore."""
        if self._apicall(driv.files().copy(fileId=source,
                                           body={'parents': [dest]},
                                           supportsAllDrives=True)) == False:
            self.bad_drives.append(driv)
            self.files_to_copy.append((source, dest))
        self.threads.release()

    def _rcopy(self, drive, drive_to_use, source, dest, folder_name,
               display_line, width):
        """Recursively mirror `source` into `dest`, rotating through the
        `drive` client pool; returns the (possibly pruned) pool."""
        files_source = self._lsf(drive[0], source)
        files_dest = self._lsf(drive[0], dest)
        folders_source = self._lsd(drive[0], source)
        folders_dest = self._lsd(drive[0], dest)
        files_to_copy = []
        files_source_id = []
        files_dest_id = []
        folder_len = len(folders_source) - 1
        folders_copied = {}
        # Keep id-bearing copies aside, then strip ids so membership tests
        # below compare only (md5Checksum, name).
        for file in files_source:
            files_source_id.append(dict(file))
            file.pop('id')
        for file in files_dest:
            files_dest_id.append(dict(file))
            file.pop('id')
        i = 0
        while len(files_source) > i:
            if files_source[i] not in files_dest:
                files_to_copy.append(files_source_id[i])
            i += 1
        self._log('Added %d files to copy list.' % len(files_to_copy))
        # Apply white/blacklists; iterate over a snapshot since we remove.
        for i in list(files_to_copy):
            if self.id_whitelist is not None:
                if i['id'] not in self.id_whitelist:
                    files_to_copy.remove(i)
            if self.id_blacklist is not None:
                if i['id'] in self.id_blacklist:
                    files_to_copy.remove(i)
            if self.name_whitelist is not None:
                if i['name'] not in self.name_whitelist:
                    files_to_copy.remove(i)
            if self.name_blacklist is not None:
                if i['name'] in self.name_blacklist:
                    files_to_copy.remove(i)
        self.files_to_copy = [(i['id'], dest) for i in files_to_copy]
        self._log('Copying files')
        if len(files_to_copy) > 0:
            # Worker threads re-queue quota-failed copies into
            # self.files_to_copy; loop until a pass completes cleanly.
            while len(self.files_to_copy) > 0:
                files_to_copy = self.files_to_copy
                self.files_to_copy = []
                running_threads = []
                # copy
                for i in files_to_copy:
                    self.threads.acquire()
                    thread = threading.Thread(target=self._copy,
                                              args=(drive[drive_to_use],
                                                    i[0], i[1]))
                    running_threads.append(thread)
                    thread.start()
                    # Round-robin through the pool, skipping index 0 which is
                    # reserved for listing.
                    drive_to_use += 1
                    if drive_to_use > len(drive) - 1:
                        drive_to_use = 1
                # join all threads
                for i in running_threads:
                    i.join()
                # check for bad drives
                for i in self.bad_drives:
                    if i in drive:
                        drive.remove(i)
                self.bad_drives = []
                # If there is less than 2 SAs, exit
                if len(drive) == 1:
                    raise RuntimeError('Out of SAs.')
            # copy completed
            print(display_line + folder_name + ' | Synced')
        elif len(files_source) > 0 and len(files_source) <= len(files_dest):
            print(display_line + folder_name + ' | Up to date')
        else:
            print(display_line + folder_name)
        for i in folders_dest:
            folders_copied[i['name']] = i['id']
        current_folder = 0
        for folder in folders_source:
            # Build the box-drawing prefix for the next tree level; the last
            # folder gets '└', others '├'.
            if current_folder == folder_len:
                next_display_line = display_line.replace(
                    '├' + '─' * width + ' ',
                    '│' + ' ' * width + ' ').replace(
                        '└' + '─' * width + ' ',
                        ' ' + ' ' * width) + '└' + '─' * width + ' '
            else:
                next_display_line = display_line.replace(
                    '├' + '─' * width + ' ',
                    '│' + ' ' * width + ' ').replace(
                        '└' + '─' * width + ' ',
                        ' ' + ' ' * width) + '├' + '─' * width + ' '
            # Reuse an existing destination folder of the same name, else
            # create it.
            if folder['name'] not in folders_copied.keys():
                folder_id = self._apicall(drive[0].files().create(
                    body={
                        'name': folder['name'],
                        'mimeType': 'application/vnd.google-apps.folder',
                        'parents': [dest]
                    },
                    supportsAllDrives=True))['id']
            else:
                folder_id = folders_copied[folder['name']]
            # '%' escaped because the name flows into %-format log strings.
            drive = self._rcopy(drive, drive_to_use, folder['id'], folder_id,
                                folder['name'].replace('%', '%%'),
                                next_display_line, width)
            current_folder += 1
        return drive

    def clone(self):
        """Validate source/destination folders, build one Drive client per
        service account, size the thread semaphore, and start the copy.

        Raises ValueError on too few accounts, unreadable source/destination
        (unless skip_bad_dests), or thread_count exceeding the pool size.
        """
        accounts = glob(self.path + '/*.json')
        if len(accounts) < 2:
            raise ValueError('The path provided (%s) has 1 or no accounts.'
                             % self.path)
        check = build('drive', 'v3',
                      credentials=Credentials.from_service_account_file(
                          accounts[0]))
        try:
            root_dir = check.files().get(
                fileId=self.source,
                supportsAllDrives=True).execute()['name']
        except HttpError:
            raise ValueError('Source folder %s cannot be read or is invalid.'
                             % self.source)
        dest_dict = {i: '' for i in self.dest}
        for key in list(dest_dict.keys()):
            try:
                dest_dir = check.files().get(
                    fileId=key, supportsAllDrives=True).execute()['name']
                dest_dict[key] = dest_dir
            except HttpError:
                # BUG FIX: original tested bare 'skip_bad_dests' (NameError);
                # the flag lives on the instance.
                if not self.skip_bad_dests:
                    raise ValueError(
                        'Destination folder %s cannot be read or is invalid.'
                        % key)
                else:
                    dest_dict.pop(key)
        print('Creating %d Drive Services' % len(accounts))
        drive = []
        for account in accounts:
            credentials = Credentials.from_service_account_file(
                account, scopes=['https://www.googleapis.com/auth/drive'])
            drive.append(build('drive', 'v3', credentials=credentials))
        # Cap concurrency at one thread per service account.
        if self.thread_count is not None and self.thread_count <= len(drive):
            self.threads = threading.BoundedSemaphore(self.thread_count)
            print('BoundedSemaphore with %d threads' % self.thread_count)
        elif self.thread_count is None:
            self.threads = threading.BoundedSemaphore(len(drive))
            print('BoundedSemaphore with %d threads' % len(drive))
        else:
            raise ValueError('More threads than there is service accounts.')
        for i, dest_dir in dest_dict.items():
            print('Copying from %s to %s.' % (root_dir, dest_dir))
            self._rcopy(drive, 1, self.source, i, root_dir, '', self.width)