def post(self, request, *args, **kwargs):
    # Parse the submitted data:
    data = request.data
    try:
        json_str = data['data']  # a JSON-formatted string
        data = json.loads(json_str)
        resource_pks = data['resource_pks']
        resource_pks = [x for x in resource_pks if x]  # remove any None that may have gotten through
        download_destination = data['destination']
    except KeyError as ex:
        raise exceptions.RequestError('''
            Missing required information for initiating transfer.
        ''')

    # Here, we first do a spot-check on the data that was passed, BEFORE we invoke any asynchronous
    # methods.  We prepare/massage the necessary data for the download here, and then pass a simple
    # dictionary to the asynchronous method call.  We do this since it is easiest to pass a simple
    # native dictionary to celery.
    user_pk = request.user.pk
    try:
        # Depending on which download destination was requested (and which compute environment
        # we are in), grab the proper class:
        downloader_cls = _downloaders.get_downloader(download_destination)

        # Check that the download data has the required format to work with this downloader implementation:
        download_info, error_messages = downloader_cls.check_format(resource_pks, user_pk)
        if len(error_messages) > 0:
            return Response({'errors': error_messages}, status=409)
        else:
            # Stash the download info, since we will be redirecting through an authentication flow:
            request.session['download_info'] = download_info
            request.session['download_destination'] = download_destination
            return Response({'success': True})
    except exceptions.ExceptionWithMessage as ex:
        raise exceptions.RequestError(ex.message)
    except Exception as ex:
        response = exception_handler(ex, None)
        return response
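# The helper below is a hypothetical client-side sketch (not part of this app) showing the request
# body the download-initiation view above expects: a 'data' key holding a JSON string with
# 'resource_pks' and 'destination'.  The function name, the URL argument, and the use of an
# authenticated requests.Session-like object are assumptions for illustration only.
def example_initiate_download(session, url, resource_pks, destination):
    # Serialize the inner payload, mirroring the json.loads(data['data']) call in the view:
    payload = {
        'data': json.dumps({
            'resource_pks': resource_pks,   # e.g. [12, 13, 27]
            'destination': destination      # must be known to _downloaders.get_downloader
        })
    }
    # A 409 response carries {'errors': [...]} from check_format; success returns {'success': True}
    # and stashes the download info in the session for the OAuth2 callback.
    return session.post(url, data=payload)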
def post(self, request, *args, **kwargs):
    data = request.data
    try:
        upload_source = data['upload_source']  # (dropbox, drive, etc.)
        upload_info = data['upload_info']
        upload_info = json.loads(upload_info)
    except KeyError as ex:
        raise exceptions.RequestError(
            'The request JSON body did not contain the required data (%s).' % ex)

    # Here, we first do a spot-check on the data that was passed, BEFORE we invoke any asynchronous
    # methods.  We prepare/massage the necessary data for the upload here, and then pass a simple
    # dictionary to the asynchronous method call.  We do this since it is easiest to pass a simple
    # native dictionary to celery.
    user_pk = request.user.pk
    try:
        # Depending on which upload method was requested (and which compute environment we are in),
        # grab the proper class:
        uploader_cls = _uploaders.get_uploader(upload_source)

        # Check that the upload data has the required format to work with this uploader implementation:
        upload_info, error_messages = uploader_cls.check_format(upload_info, user_pk)
        if len(error_messages) > 0:
            return Response({'errors': error_messages})
        elif len(upload_info) > 0:
            # Call the async method:
            transfer_tasks.upload.delay(upload_info, upload_source)
            return Response({})
        else:
            # No errors, but also nothing to do...
            return Response({})
    except exceptions.ExceptionWithMessage as ex:
        raise exceptions.RequestError(ex.message)
    except Exception as ex:
        print('Exception: %s' % ex)
        response = exception_handler(ex, None)
        return response
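# A hypothetical client-side sketch (not part of this app) of the body the upload-initiation view
# above expects: 'upload_source' naming the uploader (e.g. 'dropbox' or 'drive') and 'upload_info'
# as a JSON string.  The exact structure of upload_info is dictated by the chosen uploader's
# check_format implementation, so the file_descriptors argument is a placeholder, not a real schema.
def example_initiate_upload(session, url, upload_source, file_descriptors):
    payload = {
        'upload_source': upload_source,
        'upload_info': json.dumps(file_descriptors)  # parsed via json.loads in the view
    }
    # An {'errors': [...]} response means check_format rejected the request; an empty body means
    # the celery upload task was queued (or there was nothing to do).
    return session.post(url, data=payload)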
def create_resource(serializer, user):
    '''
    This function is used to get around making API calls between different endpoints.
    Namely, when a user requests the "upload" endpoint, we have to create Resource objects.
    To keep Resource creation in a central location, we extracted the logic out of the API view
    and put it here.  Then, any API endpoint needing to create one or more Resource instances
    can use this function instead of having to call the endpoint for creating a Resource.

    serializer is an instance of rest_framework.serializers.ModelSerializer
    user is a basic Django User (or subclass)
    '''
    serializer.is_valid(raise_exception=True)

    # If the user is NOT staff, we only let them create a Resource for themselves.
    if not user.is_staff:
        # If the owner specified in the request is the requesting user, then we approve.
        try:
            many = serializer.many
        except AttributeError as ex:
            many = False

        if many:
            owner_status = []
            for item in serializer.validated_data:
                try:
                    properly_owned = item['owner'] == user
                    owner_status.append(properly_owned)
                except KeyError as ex:
                    item['owner'] = user
                    owner_status.append(True)
            if all(owner_status):
                return serializer.save()
            else:
                raise exceptions.RequestError('Tried to create a Resource attributed to someone else.')
        else:
            try:
                if serializer.validated_data['owner'] == user:
                    return serializer.save()
                else:
                    # Here we block any effort to create a Resource for anyone else.
                    # Raise 404 so we do not give anything away.
                    raise Http404
            except KeyError as ex:
                return serializer.save(owner=user)

    # Otherwise (if the user IS staff), we trust them to create Resources for themselves or others.
    else:
        return serializer.save()
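# A minimal sketch of how an API view might delegate to create_resource.  The function name and
# the serializer_class attribute are assumptions for illustration; the point is that a view never
# calls the Resource-creation endpoint over HTTP -- it builds a serializer and hands it, together
# with the requesting user, to the shared helper above.
def example_perform_create(view, request):
    serializer = view.serializer_class(
        data=request.data,
        many=isinstance(request.data, list))
    # create_resource runs is_valid(raise_exception=True) and enforces the ownership rules:
    return create_resource(serializer, request.user)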
def post(self, request, format=None):
    data = request.data
    if 'token' in data:
        b64_enc_token = data['token']
        enc_token = base64.decodebytes(b64_enc_token.encode('ascii'))
        expected_token = settings.CONFIG_PARAMS['token']
        obj = DES.new(settings.CONFIG_PARAMS['enc_key'], DES.MODE_ECB)
        decrypted_token = obj.decrypt(enc_token)
        if decrypted_token == expected_token.encode('ascii'):
            # We can trust the content since it contained the proper token.
            try:
                transfer_pk = data['transfer_pk']
                success = bool(int(data['success']))
            except KeyError as ex:
                raise exceptions.RequestError(
                    'The request did not have the correct formatting.')
            try:
                transfer_obj = Transfer.objects.get(pk=transfer_pk)
                transfer_obj.completed = True
                transfer_obj.success = success
                tz = transfer_obj.start_time.tzinfo
                now = datetime.datetime.now(tz)
                duration = now - transfer_obj.start_time
                transfer_obj.duration = duration
                transfer_obj.finish_time = now
                transfer_obj.save()

                if success:
                    if transfer_obj.download:
                        resource = transfer_obj.resource
                        # Did they use the last download?  If so, set the Resource inactive.
                        if (resource.total_downloads + 1) >= \
                                int(settings.CONFIG_PARAMS['maximum_downloads']):
                            resource.is_active = False
                        resource.total_downloads += 1
                        resource.save()
                    else:  # upload
                        resource = transfer_obj.resource
                        resource.is_active = True
                        resource.save()

                # Now check if all the Transfers belonging to this TransferCoordinator are complete:
                try:
                    tc = transfer_obj.coordinator
                except ObjectDoesNotExist as ex:
                    raise exceptions.RequestError(
                        'TransferCoordinator for Transfer with pk=%s did not exist' % transfer_pk)

                all_transfers = Transfer.objects.filter(coordinator=tc)
                if all([x.completed for x in all_transfers]):
                    tc.completed = True
                    tc.finish_time = datetime.datetime.now()
                    tc.save()
                    all_originators = list(
                        set([x.originator.email for x in all_transfers]))
                    utils.post_completion(tc, all_originators)
                return Response({'message': 'thanks'})
            except ObjectDoesNotExist as ex:
                raise exceptions.RequestError(
                    'Transfer with pk=%s did not exist' % transfer_pk)
        else:
            raise Http404
    else:
        raise Http404
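# A hedged sketch of the worker-side counterpart to the token check above: the callback is trusted
# only if the posted 'token' is the configured token, DES/ECB-encrypted with the shared key and
# base64-encoded.  It mirrors the decrypt/compare logic above and assumes, as that code does, that
# CONFIG_PARAMS['enc_key'] and CONFIG_PARAMS['token'] satisfy DES's 8-byte key/block requirements.
# The function name is an assumption; this is an illustration of the expected encoding, not the
# actual worker code.
def example_build_callback_payload(transfer_pk, success):
    cipher = DES.new(settings.CONFIG_PARAMS['enc_key'], DES.MODE_ECB)
    enc_token = cipher.encrypt(settings.CONFIG_PARAMS['token'].encode('ascii'))
    return {
        'token': base64.encodebytes(enc_token).decode('ascii'),
        'transfer_pk': transfer_pk,
        'success': '1' if success else '0'   # parsed via bool(int(...)) in the view
    }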
def finish_authentication_and_start_download(cls, request):
    if request.method == 'GET':
        parser = httplib2.Http()
        if 'error' in request.GET or 'code' not in request.GET:
            raise exceptions.RequestError('There was an error on the callback')
        if request.GET['state'] != request.session['session_state']:
            raise exceptions.RequestError('There was an error on the callback -- state mismatch')

        # Exchange the authorization code for an access token:
        current_site = Site.objects.get_current()
        domain = current_site.domain
        code_callback_url = 'https://%s%s' % (domain, settings.CONFIG_PARAMS['drive_callback'])
        params = urllib.parse.urlencode({
            'code': request.GET['code'],
            'redirect_uri': code_callback_url,
            'client_id': settings.CONFIG_PARAMS['drive_client_id'],
            'client_secret': settings.CONFIG_PARAMS['drive_secret'],
            'grant_type': 'authorization_code'
        })
        headers = {'content-type': 'application/x-www-form-urlencoded'}
        resp, content = parser.request(
            settings.CONFIG_PARAMS['drive_token_endpoint'],
            method='POST',
            body=params,
            headers=headers)
        c = json.loads(content.decode('utf-8'))
        try:
            access_token = c['access_token']
        except KeyError as ex:
            raise exceptions.ExceptionWithMessage('''
                The response did not have the "access_token" key, so the OAuth2 flow did not
                succeed.  The response body was %s
            ''' % c)

        try:
            download_info = request.session['download_info']
        except KeyError as ex:
            raise exceptions.ExceptionWithMessage(
                'There was no download_info registered with the session')

        # Ensure we have enough space to push the file(s):
        credentials = google_credentials_module.Credentials(access_token)
        drive_service = build('drive', 'v3', credentials=credentials)
        about = drive_service.about().get(fields='storageQuota').execute()
        try:
            total_bytes = int(about['storageQuota']['limit'])
            unlimited = False
        except KeyError as ex:
            # Per the docs, if the 'limit' field is not there, there is "unlimited" storage.
            unlimited = True
        used_bytes = int(about['storageQuota']['usage'])
        if not unlimited:
            space_remaining_in_bytes = total_bytes - used_bytes

        running_total = 0
        at_least_one_transfer = False
        failed_items = []
        passing_items = []
        problem = False

        if not unlimited:
            # Iterate through the transfers and check a running total.  Note that we do not do any
            # optimization to maximize the number of transfers in the case that the space is not
            # sufficient for all files.
            for item in download_info:
                size_in_bytes = Resource.objects.get(pk=item['resource_pk']).size
                running_total += size_in_bytes
                if running_total < space_remaining_in_bytes:
                    passing_items.append(item)
                else:
                    problem = True
                    failed_items.append(item)
        else:
            # If unlimited storage, just 'pass' all the downloads through:
            passing_items = download_info

        # Add the access token to each item that will be transferred:
        for item in passing_items:
            item['access_token'] = access_token
        at_least_one_transfer = len(passing_items) > 0

        if not problem:
            # Call the async method:
            transfer_tasks.download.delay(
                passing_items, request.session['download_destination'])
            context = {
                'email_enabled': settings.EMAIL_ENABLED,
                'problem': problem,
                'at_least_one_transfer': at_least_one_transfer
            }
            return render(request, 'transfer_app/download_started.html', context)
        else:
            # There was a problem -- we could not fit all the files.
            # Still initiate the transfers that do fit:
            if len(passing_items) > 0:
                transfer_tasks.download.delay(
                    passing_items, request.session['download_destination'])
            warning_list = []
            for item in failed_items:
                resource_name = Resource.objects.get(pk=item['resource_pk']).name
                warning_list.append(
                    'Not enough space in your Google Drive for file %s' % resource_name)
            context = {
                'email_enabled': settings.EMAIL_ENABLED,
                'problem': problem,
                'at_least_one_transfer': at_least_one_transfer,
                'warnings': warning_list
            }
            return render(request, 'transfer_app/download_started.html', context)
    else:
        raise MethodNotAllowed('Method not allowed.')
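# The callback above assumes an earlier step sent the user to Google's consent screen and stashed
# a random 'state' value in the session; that step is not shown in this section.  The sketch below
# is an assumed illustration of it, reusing the CONFIG_PARAMS keys the callback reads.  The
# function name, the 'drive_auth_endpoint' key, the requested scope, and the use of the stdlib
# secrets module and django.http.HttpResponseRedirect are all assumptions.
def example_start_drive_authentication(request):
    state = secrets.token_urlsafe(16)
    request.session['session_state'] = state   # checked against request.GET['state'] in the callback
    current_site = Site.objects.get_current()
    code_callback_url = 'https://%s%s' % (current_site.domain, settings.CONFIG_PARAMS['drive_callback'])
    params = urllib.parse.urlencode({
        'response_type': 'code',
        'client_id': settings.CONFIG_PARAMS['drive_client_id'],
        'redirect_uri': code_callback_url,
        'scope': 'https://www.googleapis.com/auth/drive.file',   # assumed scope
        'state': state
    })
    return HttpResponseRedirect('%s?%s' % (settings.CONFIG_PARAMS['drive_auth_endpoint'], params))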
def finish_authentication_and_start_download(cls, request):
    if request.method == 'GET':
        parser = httplib2.Http()
        if 'error' in request.GET or 'code' not in request.GET:
            raise exceptions.RequestError('There was an error on the callback')
        if request.GET['state'] != request.session['session_state']:
            raise exceptions.RequestError('There was an error on the callback -- state mismatch')

        # Exchange the authorization code for an access token:
        current_site = Site.objects.get_current()
        domain = current_site.domain
        code_callback_url = 'https://%s%s' % (domain, settings.CONFIG_PARAMS['dropbox_callback'])
        params = urllib.parse.urlencode({
            'code': request.GET['code'],
            'redirect_uri': code_callback_url,
            'client_id': settings.CONFIG_PARAMS['dropbox_client_id'],
            'client_secret': settings.CONFIG_PARAMS['dropbox_secret'],
            'grant_type': 'authorization_code'
        })
        headers = {'content-type': 'application/x-www-form-urlencoded'}
        resp, content = parser.request(
            settings.CONFIG_PARAMS['dropbox_token_endpoint'],
            method='POST',
            body=params,
            headers=headers)
        c = json.loads(content.decode('utf-8'))
        try:
            access_token = c['access_token']
        except KeyError as ex:
            raise exceptions.ExceptionWithMessage('''
                The response did not have the "access_token" key, so the OAuth2 flow did not
                succeed.  The response body was %s
            ''' % c)

        try:
            download_info = request.session['download_info']
        except KeyError as ex:
            raise exceptions.ExceptionWithMessage(
                'There was no download_info registered with the session')

        # Need to check that the user has enough space in their Dropbox account:
        dbx = dropbox_module.Dropbox(access_token)
        space_usage = dbx.users_get_space_usage()
        if space_usage.allocation.is_team():
            used_in_bytes = space_usage.allocation.get_team().used
            space_allocation_in_bytes = space_usage.allocation.get_team().allocated
        else:
            used_in_bytes = space_usage.used
            space_allocation_in_bytes = space_usage.allocation.get_individual().allocated
        space_remaining_in_bytes = space_allocation_in_bytes - used_in_bytes

        running_total = 0
        at_least_one_transfer = False

        # Iterate through the transfers, add the token, and check a running total.  Note that we do
        # not do any optimization to maximize the number of transfers in the case that the space is
        # not sufficient for all files.
        passing_items = []
        failed_items = []
        problem = False
        for item in download_info:
            size_in_bytes = Resource.objects.get(pk=item['resource_pk']).size
            running_total += size_in_bytes
            if running_total < space_remaining_in_bytes:
                item['access_token'] = access_token
                passing_items.append(item)
            else:
                problem = True
                failed_items.append(item)
        at_least_one_transfer = len(passing_items) > 0

        if not problem:
            # Call the async method:
            transfer_tasks.download.delay(
                passing_items, request.session['download_destination'])
            context = {
                'email_enabled': settings.EMAIL_ENABLED,
                'problem': problem,
                'at_least_one_transfer': at_least_one_transfer
            }
            return render(request, 'transfer_app/download_started.html', context)
        else:
            # There was a problem -- we could not fit all the files.
            # Still initiate the transfers that do fit:
            if len(passing_items) > 0:
                transfer_tasks.download.delay(
                    passing_items, request.session['download_destination'])
            warning_list = []
            for item in failed_items:
                resource_name = Resource.objects.get(pk=item['resource_pk']).name
                warning_list.append(
                    'Not enough space in your Dropbox for file %s' % resource_name)
            context = {
                'email_enabled': settings.EMAIL_ENABLED,
                'problem': problem,
                'at_least_one_transfer': at_least_one_transfer,
                'warnings': warning_list
            }
            return render(request, 'transfer_app/download_started.html', context)
    else:
        raise MethodNotAllowed('Method not allowed.')