Example #1
    def upload_file(self,
                    user,
                    stream,
                    expected_size,
                    filename,
                    force_coll_name=''):
        temp_file = None
        logger.debug('Upload Begin')

        logger.debug('Expected Size: ' + str(expected_size))

        size_rem = user.get_size_remaining()

        logger.debug('User Size Rem: ' + str(size_rem))

        if size_rem < expected_size:
            return {'error': 'out_of_space'}

        if force_coll_name and not user.has_collection(force_coll_name):
            # the anonymous-user path (auto-creating a 'Temporary Collection',
            # see Example #2) was dropped here, so an unknown collection is
            # always an error
            return {'error': 'no_such_collection'}

        # buffer the upload in memory, spilling to disk once it grows past BLOCK_SIZE
        temp_file = SpooledTemporaryFile(max_size=BLOCK_SIZE)

        # wrap the input stream so reads are capped at expected_size and every
        # byte read is also copied into temp_file
        stream = CacheingLimitReader(stream, expected_size, temp_file)

        # HAR uploads are converted to WARC before parsing
        if filename.endswith('.har'):
            stream, expected_size = self.har2warc(filename, stream)
            temp_file.close()
            temp_file = stream

        infos = self.parse_uploaded(stream, expected_size)

        # verify that the data actually read matches the expected size
        total_size = temp_file.tell()
        if total_size != expected_size:
            return {
                'error': 'incomplete_upload',
                'expected': expected_size,
                'actual': total_size
            }

        upload_id, upload_key = self._init_upload_status(user,
                                                         total_size,
                                                         1,
                                                         filename=filename)

        return self.handle_upload(temp_file, upload_id, upload_key, infos,
                                  filename, user, force_coll_name, total_size)
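
Example #1 delegates the Redis bookkeeping to self._init_upload_status(), which is not shown here. Below is a minimal sketch, assuming it simply wraps the inline pipeline visible in Example #2: the upload_key template, the _get_upload_id() helper, and redis_pipeline() are taken from that example, while self.redis (rather than self.manager.redis) and treating the third positional argument as the file count are assumptions.

    def _init_upload_status(self, user, total_size, num_files, filename=None):
        # hypothetical reconstruction mirroring the inline pipeline in Example #2
        upload_id = self._get_upload_id()
        upload_key = self.upload_key.format(user=user, upid=upload_id)

        with redis_pipeline(self.redis) as pi:
            pi.hset(upload_key, 'size', 0)
            pi.hset(upload_key, 'total_size', total_size * 2)
            pi.hset(upload_key, 'filename', filename)
            pi.hset(upload_key, 'total_files', num_files)
            pi.hset(upload_key, 'files', num_files)

        return upload_id, upload_key
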
Example #2
    def upload_file(self):
        stream = None
        temp_file = None
        logger.debug('Upload Begin')

        expected_size = int(request.headers['Content-Length'])

        logger.debug('Expected Size: ' + str(expected_size))

        if not expected_size:
            return {'error_message': 'No File Specified'}

        curr_user = self.manager.get_curr_user()

        if not curr_user:
            #user = self.manager.get_anon_user()
            #force_coll = 'temp'
            #is_anon = True

            return {
                'error_message':
                'Sorry, uploads only available for logged-in users'
            }

        user = curr_user
        force_coll = request.query.getunicode('force-coll', '')
        is_anon = False

        size_rem = self.manager.get_size_remaining(user)

        logger.debug('User Size Rem: ' + str(size_rem))

        if size_rem < expected_size:
            return {
                'error_message': 'Sorry, not enough space to upload this file'
            }

        if force_coll and not self.manager.has_collection(user, force_coll):
            # note: is_anon is always False at this point, because anonymous
            # uploads already returned an error above
            if is_anon:
                self.manager.create_collection(user, force_coll,
                                               'Temporary Collection')

            else:
                status = 'Collection {0} not found'.format(force_coll)
                return {'error_message': status}

        temp_file = SpooledTemporaryFile(max_size=BLOCK_SIZE)

        filename = request.query.getunicode('filename')

        stream = request.environ['wsgi.input']
        stream = CacheingLimitReader(stream, expected_size, temp_file)

        if filename.endswith('.har'):
            stream, expected_size = self.har2warc(filename, stream)
            temp_file.close()
            temp_file = stream

        infos = self.parse_uploaded(stream, expected_size)

        total_size = temp_file.tell()
        if total_size != expected_size:
            return {
                'error_message':
                'size mismatch: expected {0}, got {1}'.format(
                    expected_size, total_size)
            }

        upload_id = self._get_upload_id()

        upload_key = self.upload_key.format(user=user, upid=upload_id)

        # initialize the per-upload progress hash in Redis
        with redis_pipeline(self.manager.redis) as pi:
            pi.hset(upload_key, 'size', 0)
            pi.hset(upload_key, 'total_size', total_size * 2)
            pi.hset(upload_key, 'filename', filename)
            pi.hset(upload_key, 'total_files', 1)
            pi.hset(upload_key, 'files', 1)

        return self.handle_upload(temp_file, upload_id, upload_key, infos,
                                  filename, user, force_coll, total_size)
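
Read together, Example #2 is essentially Example #1 with the HTTP plumbing inlined. Purely as an illustration of how the two line up, the request-specific pieces could be reduced to a thin wrapper that delegates to the signature in Example #1. The method name handle_upload_request is made up; the sketch also assumes the user object returned by get_curr_user() exposes the get_size_remaining()/has_collection() methods used in Example #1 and that both methods live on the same controller. It is a sketch, not code from the source.

    def handle_upload_request(self):
        # hypothetical wrapper: pull everything out of the request, then
        # delegate to the upload_file() signature shown in Example #1
        expected_size = int(request.headers.get('Content-Length', 0))
        if not expected_size:
            return {'error_message': 'No File Specified'}

        user = self.manager.get_curr_user()
        if not user:
            return {'error_message':
                    'Sorry, uploads only available for logged-in users'}

        return self.upload_file(user,
                                request.environ['wsgi.input'],
                                expected_size,
                                request.query.getunicode('filename', ''),
                                force_coll_name=request.query.getunicode('force-coll', ''))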