def _check_format(cls, download_info, user_pk):
    # if we receive a single transfer, it might not be in a list
    if (type(download_info) is int) or (type(download_info) is str):
        download_info = [download_info,]

    # check that we have a list of ints
    try:
        download_info = [int(x) for x in download_info]
    except ValueError as ex:
        raise exceptions.ExceptionWithMessage('''
            The request payload must only contain integers for identifying
            resources to transfer.  Received: %s''' % download_info)

    # check that all of those Resources are owned by the requester.
    # if the requester is an admin, they can do anything.
    # otherwise, if ANY of the resources are invalid, reject everything.
    # This also catches the case where the user gives a primary key that does not exist.
    try:
        requesting_user = get_user_model().objects.get(pk=user_pk)
    except ObjectDoesNotExist as ex:
        raise exceptions.ExceptionWithMessage(ex)

    if not requesting_user.is_staff:
        all_user_resources = Resource.objects.user_resources(requesting_user)
        all_user_resource_pks = [
            x.pk for x in all_user_resources
            if (x.is_active and not x.originated_from_upload)
        ]
        if len(set(download_info).difference(set(all_user_resource_pks))) > 0:
            raise exceptions.ExceptionWithMessage('''
                You requested to transfer a resource that you do not own,
                that is not active, or that cannot be downloaded.
            ''')

    reformatted_info = []
    for pk in download_info:
        d = {}
        d['resource_pk'] = pk
        d['originator'] = user_pk
        d['destination'] = cls.destination
        reformatted_info.append(d)

    if len(reformatted_info) > 0:
        return reformatted_info
    else:
        raise exceptions.ExceptionWithMessage('''
            There were no valid resources to download.
        ''')
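# A minimal, standalone sketch (not part of the application code) of the payload
# normalization performed by _check_format above: a bare int/str is wrapped in a
# list, every element is cast to int, and each primary key becomes a transfer dict.
# The helper name and the 'dropbox' default below are illustrative only;
# ValueError stands in for exceptions.ExceptionWithMessage and the ownership
# checks are omitted.
def _normalize_download_payload_sketch(download_info, user_pk, destination='dropbox'):
    if isinstance(download_info, (int, str)):
        download_info = [download_info]
    try:
        download_info = [int(x) for x in download_info]
    except ValueError:
        raise ValueError('The payload must only contain integers.  Received: %s' % download_info)
    return [
        {'resource_pk': pk, 'originator': user_pk, 'destination': destination}
        for pk in download_info
    ]

# e.g. _normalize_download_payload_sketch('5', user_pk=2) returns
# [{'resource_pk': 5, 'originator': 2, 'destination': 'dropbox'}]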
def _check_keys(cls, data_dict):
    all_required_keys = list(set(Uploader.required_keys + cls.required_keys))
    for key in all_required_keys:
        try:
            data_dict[key]
        except KeyError as ex:
            raise exceptions.ExceptionWithMessage(
                'The request payload did not contain the required key: %s' % key)
def get_uploader(source):
    '''
    Based on the compute environment and the source of the upload,
    choose the appropriate class to use.
    '''
    # This defines a two-level dictionary from which we can choose a class.
    # Additional sub-classes of EnvironmentSpecificUploader need to be added
    # here if they are to be used.  Otherwise, the application will not 'know'
    # about the class.
    class_mapping = {
        settings.GOOGLE: {
            settings.GOOGLE_DRIVE: GoogleDriveUploader,
            settings.DROPBOX: GoogleDropboxUploader,
        },
        settings.AWS: {
            settings.GOOGLE_DRIVE: AWSDriveUploader,
            settings.DROPBOX: AWSDropboxUploader,
        },
    }
    environment = settings.CONFIG_PARAMS['cloud_environment']
    try:
        return class_mapping[environment][source]
    except KeyError as ex:
        raise exceptions.ExceptionWithMessage('''
            You did not specify an uploader implementation for:
                Compute environment: %s
                Upload source: %s
        ''' % (environment, source))
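# A minimal sketch of the two-level dispatch used by get_uploader, with
# hypothetical placeholder keys and classes standing in for the settings
# constants and the EnvironmentSpecificUploader subclasses.
class _GoogleDriveUploaderSketch:
    pass

class _DropboxUploaderSketch:
    pass

_CLASS_MAPPING_SKETCH = {
    'google': {
        'google_drive': _GoogleDriveUploaderSketch,
        'dropbox': _DropboxUploaderSketch,
    },
}

def _get_uploader_sketch(environment, source):
    # unknown environment/source combinations surface as a single KeyError
    try:
        return _CLASS_MAPPING_SKETCH[environment][source]
    except KeyError:
        raise ValueError('No uploader for environment=%s, source=%s' % (environment, source))

# e.g. _get_uploader_sketch('google', 'dropbox') is _DropboxUploaderSketch;
# _get_uploader_sketch('aws', 'dropbox') raises, since 'aws' is not registered here.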
def _validate_ownership(cls, data_dict, requesting_user):
    try:
        # if 'owner' was included in the object, check that the owner PK matches
        # the requester's, unless the request was issued by an admin
        intended_owner = data_dict['owner']
        if requesting_user.pk != intended_owner:
            if requesting_user.is_staff:
                data_dict['originator'] = requesting_user.pk
            else:
                raise exceptions.ExceptionWithMessage('''
                    Cannot assign ownership of an upload to someone
                    other than yourself.''')
        else:
            data_dict['originator'] = data_dict['owner']
    except KeyError as ex:
        data_dict['owner'] = requesting_user.pk
        data_dict['originator'] = requesting_user.pk
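# A standalone sketch of the ownership decision in _validate_ownership above,
# using a SimpleNamespace in place of a Django user and PermissionError in
# place of exceptions.ExceptionWithMessage.  All values are hypothetical.
from types import SimpleNamespace

def _resolve_owner_sketch(data_dict, requesting_user):
    if 'owner' not in data_dict:
        # no owner given: the requester owns and originates the upload
        data_dict['owner'] = requesting_user.pk
        data_dict['originator'] = requesting_user.pk
    elif data_dict['owner'] == requesting_user.pk:
        data_dict['originator'] = data_dict['owner']
    elif requesting_user.is_staff:
        # an admin may assign ownership to someone else, but remains the originator
        data_dict['originator'] = requesting_user.pk
    else:
        raise PermissionError('Cannot assign ownership of an upload to someone else.')
    return data_dict

# e.g. _resolve_owner_sketch({'owner': 7}, SimpleNamespace(pk=2, is_staff=True))
# returns {'owner': 7, 'originator': 2}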
def _check_format(cls, upload_data, uploader_pk):
    try:
        requesting_user = get_user_model().objects.get(pk=uploader_pk)
    except ObjectDoesNotExist as ex:
        raise exceptions.ExceptionWithMessage(ex)

    # for consistent handling, take any single upload request and put it inside a list
    if isinstance(upload_data, dict):
        upload_data = [upload_data,]

    for item in upload_data:
        cls._validate_ownership(item, requesting_user)
        cls._check_keys(item)
        item['user_uuid'] = requesting_user.user_uuid

    return upload_data
def finish_authentication_and_start_download(cls, request):
    if request.method == 'GET':
        parser = httplib2.Http()
        if 'error' in request.GET or 'code' not in request.GET:
            raise exceptions.RequestError('There was an error on the callback')
        if request.GET['state'] != request.session['session_state']:
            raise exceptions.RequestError('There was an error on the callback -- state mismatch')

        current_site = Site.objects.get_current()
        domain = current_site.domain
        code_callback_url = 'https://%s%s' % (domain, settings.CONFIG_PARAMS['drive_callback'])
        params = urllib.parse.urlencode({
            'code': request.GET['code'],
            'redirect_uri': code_callback_url,
            'client_id': settings.CONFIG_PARAMS['drive_client_id'],
            'client_secret': settings.CONFIG_PARAMS['drive_secret'],
            'grant_type': 'authorization_code'
        })
        headers = {'content-type': 'application/x-www-form-urlencoded'}
        resp, content = parser.request(
            settings.CONFIG_PARAMS['drive_token_endpoint'],
            method='POST',
            body=params,
            headers=headers)
        c = json.loads(content.decode('utf-8'))
        try:
            access_token = c['access_token']
        except KeyError as ex:
            raise exceptions.ExceptionWithMessage('''
                The response did not have the "access_token" key, so the OAuth2
                flow did not succeed.  The response body was %s
            ''' % c)

        try:
            download_info = request.session['download_info']
        except KeyError as ex:
            raise exceptions.ExceptionWithMessage(
                'There was no download_info registered with the session')

        # ensure we have enough space to push the file(s):
        credentials = google_credentials_module.Credentials(access_token)
        drive_service = build('drive', 'v3', credentials=credentials)
        about = drive_service.about().get(fields='storageQuota').execute()
        try:
            total_bytes = int(about['storageQuota']['limit'])
            unlimited = False
        except KeyError as ex:
            # per the docs, if the 'limit' field is absent, there is "unlimited" storage
            unlimited = True

        used_bytes = int(about['storageQuota']['usage'])
        if not unlimited:
            space_remaining_in_bytes = total_bytes - used_bytes

        running_total = 0
        at_least_one_transfer = False
        failed_items = []
        passing_items = []
        problem = False

        if not unlimited:
            # iterate through the transfers and keep a running total of the requested
            # bytes.  Note that we do not do any optimization to maximize the number
            # of transfers when the space is not sufficient for all the files.
            for item in download_info:
                size_in_bytes = Resource.objects.get(pk=item['resource_pk']).size
                running_total += size_in_bytes
                if running_total < space_remaining_in_bytes:
                    passing_items.append(item)
                else:
                    problem = True
                    failed_items.append(item)
        else:
            # if unlimited storage, simply pass all the downloads through
            passing_items = download_info

        # attach the access token to each transfer that will proceed
        for item in passing_items:
            item['access_token'] = access_token

        at_least_one_transfer = len(passing_items) > 0

        if not problem:
            # call the async method:
            transfer_tasks.download.delay(
                passing_items, request.session['download_destination'])
            context = {
                'email_enabled': settings.EMAIL_ENABLED,
                'problem': problem,
                'at_least_one_transfer': at_least_one_transfer
            }
            return render(request, 'transfer_app/download_started.html', context)
        else:
            # there was a problem -- we could not fit all the files.
            # Still initiate the transfers that do fit.
            if len(passing_items) > 0:
                transfer_tasks.download.delay(
                    passing_items, request.session['download_destination'])
            warning_list = []
            for item in failed_items:
                resource_name = Resource.objects.get(pk=item['resource_pk']).name
                warning_list.append(
                    'Not enough space in your Google Drive for file %s' % resource_name)
            context = {
                'email_enabled': settings.EMAIL_ENABLED,
                'problem': problem,
                'at_least_one_transfer': at_least_one_transfer,
                'warnings': warning_list
            }
            return render(request, 'transfer_app/download_started.html', context)
    else:
        raise MethodNotAllowed('Method not allowed.')
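# A pure-Python sketch of the running-total quota check used above: transfers
# are accepted in the order given until the cumulative size would exceed the
# remaining space, and the rest are flagged.  No packing optimization is
# attempted, matching the comment in the handler.  The sizes and space value
# in the usage note are hypothetical example numbers.
def _partition_by_quota_sketch(items_with_sizes, space_remaining_in_bytes):
    passing_items, failed_items = [], []
    running_total = 0
    for item, size_in_bytes in items_with_sizes:
        running_total += size_in_bytes
        if running_total < space_remaining_in_bytes:
            passing_items.append(item)
        else:
            failed_items.append(item)
    return passing_items, failed_items

# e.g. _partition_by_quota_sketch([('a', 40), ('b', 70), ('c', 10)], 100)
# returns (['a'], ['b', 'c']) -- 'c' fails even though it alone would have fit.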
def finish_authentication_and_start_download(cls, request):
    if request.method == 'GET':
        parser = httplib2.Http()
        if 'error' in request.GET or 'code' not in request.GET:
            raise exceptions.RequestError('There was an error on the callback')
        if request.GET['state'] != request.session['session_state']:
            raise exceptions.RequestError('There was an error on the callback -- state mismatch')

        current_site = Site.objects.get_current()
        domain = current_site.domain
        code_callback_url = 'https://%s%s' % (domain, settings.CONFIG_PARAMS['dropbox_callback'])
        params = urllib.parse.urlencode({
            'code': request.GET['code'],
            'redirect_uri': code_callback_url,
            'client_id': settings.CONFIG_PARAMS['dropbox_client_id'],
            'client_secret': settings.CONFIG_PARAMS['dropbox_secret'],
            'grant_type': 'authorization_code'
        })
        headers = {'content-type': 'application/x-www-form-urlencoded'}
        resp, content = parser.request(
            settings.CONFIG_PARAMS['dropbox_token_endpoint'],
            method='POST',
            body=params,
            headers=headers)
        c = json.loads(content.decode('utf-8'))
        try:
            access_token = c['access_token']
        except KeyError as ex:
            raise exceptions.ExceptionWithMessage('''
                The response did not have the "access_token" key, so the OAuth2
                flow did not succeed.  The response body was %s
            ''' % c)

        try:
            download_info = request.session['download_info']
        except KeyError as ex:
            raise exceptions.ExceptionWithMessage(
                'There was no download_info registered with the session')

        # need to check that the user has enough space in their Dropbox account
        dbx = dropbox_module.Dropbox(access_token)
        space_usage = dbx.users_get_space_usage()
        if space_usage.allocation.is_team():
            used_in_bytes = space_usage.allocation.get_team().used
            space_allocation_in_bytes = space_usage.allocation.get_team().allocated
            space_remaining_in_bytes = space_allocation_in_bytes - used_in_bytes
        else:
            used_in_bytes = space_usage.used
            space_allocation_in_bytes = space_usage.allocation.get_individual().allocated
            space_remaining_in_bytes = space_allocation_in_bytes - used_in_bytes

        running_total = 0
        at_least_one_transfer = False

        # iterate through the transfers, add the token, and keep a running total.
        # Note that we do not do any optimization to maximize the number of
        # transfers when the space is not sufficient for all the files.
        passing_items = []
        failed_items = []
        problem = False
        for item in download_info:
            size_in_bytes = Resource.objects.get(pk=item['resource_pk']).size
            running_total += size_in_bytes
            if running_total < space_remaining_in_bytes:
                item['access_token'] = access_token
                passing_items.append(item)
            else:
                problem = True
                failed_items.append(item)

        at_least_one_transfer = len(passing_items) > 0

        if not problem:
            # call the async method:
            transfer_tasks.download.delay(
                passing_items, request.session['download_destination'])
            context = {
                'email_enabled': settings.EMAIL_ENABLED,
                'problem': problem,
                'at_least_one_transfer': at_least_one_transfer
            }
            return render(request, 'transfer_app/download_started.html', context)
        else:
            # there was a problem -- we could not fit all the files.
            # Still initiate the transfers that do fit.
            if len(passing_items) > 0:
                transfer_tasks.download.delay(
                    passing_items, request.session['download_destination'])
            warning_list = []
            for item in failed_items:
                resource_name = Resource.objects.get(pk=item['resource_pk']).name
                warning_list.append(
                    'Not enough space in your Dropbox for file %s' % resource_name)
            context = {
                'email_enabled': settings.EMAIL_ENABLED,
                'problem': problem,
                'at_least_one_transfer': at_least_one_transfer,
                'warnings': warning_list
            }
            return render(request, 'transfer_app/download_started.html', context)
    else:
        raise MethodNotAllowed('Method not allowed.')
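# A hedged sketch of the Dropbox quota computation above, factored into a helper
# that takes the result of dropbox.Dropbox(token).users_get_space_usage().  The
# attribute access mirrors the handler: team allocations report their own 'used',
# while individual allocations use the account-level 'used'.  The helper name is
# illustrative, not part of the application code.
import dropbox as dropbox_module

def _dropbox_space_remaining_sketch(space_usage):
    if space_usage.allocation.is_team():
        allocation = space_usage.allocation.get_team()
        return allocation.allocated - allocation.used
    individual = space_usage.allocation.get_individual()
    return individual.allocated - space_usage.used

# usage (requires a valid OAuth2 access token):
#   dbx = dropbox_module.Dropbox(access_token)
#   remaining = _dropbox_space_remaining_sketch(dbx.users_get_space_usage())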