Example #1
def receive_data_chunk(self, raw_data, start):
        # Re-check the job before each chunk so the upload can stop
        # midway, e.g. when the status becomes REVOKED.
        time.sleep(0.5)
        try:
            job = JobModel.objects.get(job_id=self.request.GET["job_id"])
        except JobModel.DoesNotExist:
            raise StopUpload(connection_reset=True)

        if job.job_status == JobModel.PAUSED:
            pulse_try = 0
            while (job.job_status == JobModel.PAUSED
                   and pulse_try <= settings.PULSE_MAX_TRIES):
                pulse_try += 1
                time.sleep(settings.PAUSE_PULSE)
                job = JobModel.objects.get(job_id=self.request.GET["job_id"])
            # The loop exits with pulse_try > PULSE_MAX_TRIES only on timeout.
            if pulse_try > settings.PULSE_MAX_TRIES:
                job.delete()
                raise StopUpload(connection_reset=True)
                raise StopUpload(connection_reset=True)
        elif job.job_status == JobModel.REVOKED:
            job.delete()
            raise StopUpload(connection_reset=True)

        self.file.write(raw_data)
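A note on wiring: none of these examples show how the handler gets attached to a request. A minimal sketch of per-view registration follows, assuming a hypothetical RevocableUploadHandler class built around the receive_data_chunk above; the csrf_exempt/csrf_protect pairing comes from Django's own "modifying upload handlers on the fly" pattern, because CSRF checking reads the body before the handler list can be changed.

from django.views.decorators.csrf import csrf_exempt, csrf_protect

@csrf_exempt
def upload_view(request):
    # Handlers must be swapped in before request.POST/FILES is touched.
    request.upload_handlers.insert(0, RevocableUploadHandler(request))
    return _upload_view(request)

@csrf_protect
def _upload_view(request):
    # Normal processing; request.FILES is now fed through the handler.
    ...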
Example #2
 def receive_data_chunk(self, raw_data, start):
     """Add the data to the BytesIO file."""
     if self.request.META.get('FILE_TOO_BIG'):
         raise StopUpload(connection_reset=True)
     if self.request.META.get('NO_SPACE_ON_DISK'):
         raise StopUpload(connection_reset=True)
     if self.activated:
         self.file.write(raw_data)
     else:
         if start > settings.MAX_TICKET_ATTACHMENT_SIZE:
             self.request.META['FILE_TOO_BIG'] = True
             raise StopUpload(connection_reset=True)
         return raw_data
Example #3
    def handle_raw_input(self,
                         input_data,
                         META,
                         content_length,
                         boundary,
                         encoding=None):
        self.content_length = content_length
        # FIXME: This doesn't work very well.
        # For right now it is way better to enforce size restriction
        # using apache.  See comment above.
        if app_settings.VALIDATE_UPLOAD_SIZE:
            if self.content_length > app_settings.MAX_UPLOAD_SIZE:
                self.request.META['upload_size_error'] = True
                raise StopUpload(connection_reset=True)

        if 'X-Progress-ID' in self.request.GET:
            self.progress_id = self.request.GET['X-Progress-ID']
        elif 'HTTP_X_PROGRESS_ID' in self.request.META:
            # Headers are exposed in META with an HTTP_ prefix.
            self.progress_id = self.request.META['HTTP_X_PROGRESS_ID']

        if self.progress_id:
            self.cache_key = "%s_%s" % (self.request.META['REMOTE_ADDR'],
                                        self.progress_id)
            cache.set(self.cache_key, {
                'state': 'uploading',
                'size': self.content_length,
                'received': 0
            })
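The cache entry written above implies a companion polling endpoint. A hedged sketch, assuming the same REMOTE_ADDR-based key scheme and a hypothetical view name:

from django.core.cache import cache
from django.http import JsonResponse

def upload_progress(request):
    # Mirror the cache key written by handle_raw_input above.
    progress_id = request.GET.get('X-Progress-ID')
    if not progress_id:
        return JsonResponse({'error': 'X-Progress-ID missing'}, status=400)
    cache_key = "%s_%s" % (request.META['REMOTE_ADDR'], progress_id)
    return JsonResponse(cache.get(cache_key) or {'state': 'unknown'})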
Example #4
    def receive_data_chunk(self, raw_data, start):
        if not self._activated:
            path = self.request.META.get('PATH_INFO', '')
            if (path.startswith('/filebrowser')
                    and path != '/filebrowser/upload/archive'):
                raise StopUpload()
            return raw_data

        try:
            self._file.write(raw_data)
            self._file.flush()
            return None
        except IOError:
            LOG.exception('Error storing upload data in temporary file "%s"',
                          self._file.get_temp_path())
            raise StopUpload()
Example #5
    def new_file(self,
                 field_name,
                 file_name,
                 content_type,
                 content_length,
                 charset=None,
                 content_type_extra=None):

        basename, ext = os.path.splitext(file_name)
        ext = ext.lower().strip('.')
        self.charset = 'utf-8'

        if ext in MOLECULE_EXTENSION_TYPES:
            self.filetype = MOLECULE_EXTENSION_TYPES[ext]
        else:
            self.exception = InvalidMoleculeFileExtension(ext=ext)
            raise StopUpload(connection_reset=False)

        super(TemporaryMoleculeFileUploadHandlerMaxSize,
              self).new_file(field_name, file_name, content_type,
                             content_length, self.charset, content_type_extra)
        raise StopFutureHandlers()
Example #6
    def handle_raw_input(self,
                         input_data,
                         META,
                         content_length,
                         boundary,
                         encoding=None):
        self.content_length = content_length
        if 'X-Progress-ID' in self.request.GET:
            self.progress_id = self.request.GET['X-Progress-ID']
        elif 'HTTP_X_PROGRESS_ID' in self.request.META:
            self.progress_id = self.request.META['HTTP_X_PROGRESS_ID']
        if self.progress_id:
            self.cache_key = "%s_%s" % (self.request.session._session_key,
                                        self.progress_id)

            data = cache.get(self.cache_key)
            if data and 'cancelled' in data:
                self.cancelled = True
                cache.delete(self.cache_key)
                raise StopUpload(connection_reset=True)
            else:
                cache.set(self.cache_key, {
                    'length': self.content_length,
                    'uploaded': 0
                })
Example #7
    def receive_data_chunk(self, raw_data, start):
        """Accumulates the data into memory."""
        length_after_chunk = self._length + len(raw_data)
        if length_after_chunk > settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
            raise StopUpload(connection_reset=True)

        self._file.write(raw_data)
        self._length = length_after_chunk
Example #8
 def receive_data_chunk(self, raw_data, start):
     super().receive_data_chunk(raw_data, start)
     self.partial = min(self.partial + self.chunk_size, self.total)
     if self.partial > settings.FILE_UPLOAD_MAX_TEMP_SIZE:
         raise StopUpload()
     progress = int(100 * (self.partial / self.total) + 0.5)
     if progress > self.progress:
         self.progress = progress
         self.send_consumer('report', self.progress)
Example #9
 def handle_raw_input(self,
                      input_data,
                      META,
                      content_length,
                      boundary,
                      encoding=None):
     # Reject the upload when no length was declared or when it exceeds
     # the limit (MAX_UPLOAD_SIZE is in megabytes).
     if content_length is None or content_length / 1000000.0 > settings.MAX_UPLOAD_SIZE:
         raise StopUpload(connection_reset=True)
Example #10
 def receive_data_chunk(self, raw_data, start):
     free_space_left = get_free_space(self.file.fileno())
     if free_space_left < MINIMUM_FREE_SPACE:
         log.error('not enough free space left. %s < %s',
                   human_size(free_space_left),
                   human_size(MINIMUM_FREE_SPACE))
         raise StopUpload(connection_reset=True)
     super(SpaceEnsuringUploadHandler,
           self).receive_data_chunk(raw_data, start)
Example #11
    def receive_data_chunk(self, raw_data, start):
        self.total_upload += len(raw_data)
        if self.total_upload >= self.QUOTA:
            raise StopUpload(connection_reset=True)

        self.file.write(raw_data)
Example #12
 def new_file(self, *args, **kwargs):
     if self.__acum_file_num >= self.max_files:
         self.exception = TooManyFiles(self.max_files)
         raise StopUpload(connection_reset=True)
     return super(TemporaryFileUploadHandlerMaxSize,
                  self).new_file(*args, **kwargs)
Example #13
 def receive_data_chunk(self, raw_data, start):
     self.__acum_size += len(raw_data)
     if self.__acum_size > self.max_size:
         self.exception = FileTooLarge(self.max_size)
         raise StopUpload(connection_reset=False)
     return super(TemporaryFileUploadHandlerMaxSize,
                  self).receive_data_chunk(raw_data, start)
Example #14
    def file_complete(self, file_size):
        """Return a numpy array containing the accumulated image."""
        if self._length != file_size:
            raise StopUpload(connection_reset=True)

        self._file.seek(0)
        try:
            return np.array(Image.open(self._file))
        except IOError as e:
            raise InvalidImageContent(
                'Given content does not contain a valid image.') from e
Example #15
 def receive_data_chunk(self, raw_data, start):
   if self._is_abfs_upload():
     try:
       LOG.debug("ABFSFileUploadHandler uploading file part with size: %s" % self._part_size)
       self._fs._append(self.target_path, raw_data, params={'position': int(start)})
       return None
     except Exception as e:
       self._fs.remove(self.target_path)
       LOG.exception('Failed to upload file to ABFS at %s: %s' % (self.target_path, e))
       raise StopUpload()
   else:
     return raw_data
Example #16
    def receive_data_chunk(self, raw_data, start):
        if not self._activated:
            return raw_data

        try:
            self._file.write(raw_data)
            self._file.flush()
            return None
        except IOError:
            LOG.exception('Error storing upload data in temporary file "%s"',
                          self._file.get_temp_path())
            raise StopUpload()
Example #17
 def receive_data_chunk(self, raw_data, start):
     if self.cache_key:
         data = cache.get(self.cache_key)
         if data and 'cancelled' in data:
             self.cancelled = True
             cache.delete(self.cache_key)
             raise StopUpload(connection_reset=True)
         elif data and 'uploaded' in data:
             data['uploaded'] += self.chunk_size
             cache.set(self.cache_key, data)
     return raw_data
Example #18
 def receive_data_chunk(self, raw_data, start):
   if self._is_s3_upload():
     try:
       LOG.debug("S3FileUploadHandler uploading file part: %d" % self._part_num)
       fp = self._get_file_part(raw_data)
       self._mp.upload_part_from_file(fp=fp, part_num=self._part_num)
       self._part_num += 1
       return None
     except Exception as e:
       self._mp.cancel_upload()
       LOG.exception('Failed to upload file to S3 at %s: %s' % (self.target_path, e))
       raise StopUpload()
Example #19
    def new_file(self, *args, **kwargs):
        """
        Use the content_length to signal if the file is too large to be handled in memory.
        """
        _, _, content_type, content_length, _ = args
        assert content_type, 'content_type not set.'
        assert content_length, 'content_length not set.'
        assert content_length > 0, 'invalid content_length'
        assert settings.FILE_UPLOAD_MAX_MEMORY_SIZE, 'missing FILE_UPLOAD_MAX_MEMORY_SIZE setting.'
        if content_length > settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
            raise StopUpload(connection_reset=True)

        self._file = BytesIO()
        self._length = 0
        raise StopFutureHandlers()
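For orientation, the hooks these examples override come from Django's FileUploadHandler base class. A minimal sketch of the full lifecycle follows, with a hypothetical class name; the return-value semantics are the documented ones:

from django.core.files.uploadhandler import (
    FileUploadHandler, StopFutureHandlers)

class SketchUploadHandler(FileUploadHandler):
    def new_file(self, *args, **kwargs):
        # Called once per file, before any data arrives; raising
        # StopFutureHandlers claims the file for this handler.
        super().new_file(*args, **kwargs)
        raise StopFutureHandlers()

    def receive_data_chunk(self, raw_data, start):
        # Return None to consume the chunk, or return the data
        # to pass it along to the next handler.
        return None

    def file_complete(self, file_size):
        # Return an UploadedFile to finish, or None to let a
        # later handler produce the file object.
        return None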
Example #20
    def receive_data_chunk(self, raw_data, start):
        if not self._activated:
            return raw_data

        ftp = None
        try:
            chunk = BytesIO(raw_data)
            ftp = class_ftp.ftp_connect_host()

            _start_time = time.time()
            if class_ftp.upload_data(
                    ftp,
                    self._file._ftp_remoteDir,
                    chunk,
                    self._file._ftp_file_name,
                    func_callback=self.append_callback,
                    chunk_size=self._file._ftplib_chunk_size):
                time_taken = time.time() - _start_time
                self._chunk_index += 1
                self._file._test_ftp_upload_time += time_taken

                self._test_lst.append({
                    'index': self._chunk_index,
                    'time_taken': time_taken,
                    'length': len(raw_data),
                    'speed': round(len(raw_data) / time_taken),
                })
            if ftp:
                ftp.close()
        except Exception as e:
            Common.log_exception(e)
            log.exception(e)
            # ftp may be unset if the connection itself failed.
            if ftp:
                ftp.abort()
            if self._cache_key:
                data = cache.get(self._cache_key)
                data['abort'] = 1
                cache.set(self._cache_key, data)
            raise StopUpload(connection_reset=True)

        return None
Example #21
 def handle_raw_input(self,
                      input_data,
                      META,
                      content_length,
                      boundary,
                      encoding=None):
     """
     Use the content_length to signal whether or not this handler should be in use.
     """
     # If the declared content length exceeds the limit, record the
     # error and reset the connection.
     if content_length > self.max_post_size:
         self.exception = RequestBodyTooLarge(self.max_post_size)
         raise StopUpload(connection_reset=True)
Example #22
  def new_file(self, field_name, file_name, *args, **kwargs):
    if self._is_abfs_upload():
      super(ABFSFileUploadHandler, self).new_file(field_name, file_name, *args, **kwargs)

      LOG.info('Using ABFSFileUploadHandler to handle file upload with temp file %s.' % file_name)
      self.target_path = self._fs.join(self.destination, file_name)

      try:
        # Check access permissions before attempting upload
        #self._check_access() #implement later
        LOG.debug("Initiating ABFS upload to target path: %s" % self.target_path)
        self._fs.create(self.target_path)
        self.file = SimpleUploadedFile(name=file_name, content=b'')
        raise StopFutureHandlers()
      except ABFSFileSystemException as e:
        LOG.error("Encountered error in ABFSUploadHandler check_access: %s" % e)
        self.request.META['upload_failed'] = e
        raise StopUpload()
Example #23
    def _validate_file(self):
        filename_re = re.compile(r'filename="(?P<name>[^"]+)"')
        content_type = str(self.request.META.get('CONTENT_TYPE', ""))
        content_length = int(self.request.META.get('CONTENT_LENGTH', 0))
        charset = 'binary'

        m = filename_re.search(self.request.META.get("HTTP_CONTENT_DISPOSITION", ""))

        if content_type not in self.MIME_TYPES:
            raise IncorrectMimeTypeError("Incorrect mime type", connection_reset=True)
        if content_length > self.QUOTA:
            raise StopUpload(connection_reset=True)
        if not m:
            raise FileNameUnspecified("File name not specified", connection_reset=True)

        self.file_name = m.group('name')
        self.content_type = content_type
        self.content_length = content_length
Example #24
    def receive_data_chunk(self, raw_data, start):
        try:
            if self.count == 1:
                session_result = self.dropbox.files_upload_session_start(self.chunk)
                self.cursor = dropbox.files.UploadSessionCursor(session_result.session_id, offset=len(self.chunk))
            elif self.count > 1:
                self.dropbox.files_upload_session_append_v2(self.chunk, self.cursor)
                self.cursor.offset += len(self.chunk)
                if self.cache_key:
                    data = cache.get(self.cache_key)
                    data['uploaded'] += len(self.chunk)
                    cache.set(self.cache_key, data)
            self.chunk = raw_data
            self.count += 1
        except Exception:
            raise StopUpload(connection_reset=True)

        return None
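This handler deliberately lags one chunk behind (self.chunk always holds the newest data) so that a final commit can close the session. A hedged sketch of the matching file_complete, assuming a self.path attribute holding the Dropbox destination; the session-finish and CommitInfo calls are from the official Dropbox SDK:

    def file_complete(self, file_size):
        # Commit the held-back last chunk; self.path is assumed to have
        # been set earlier (e.g. in new_file) with the destination path.
        commit = dropbox.files.CommitInfo(path=self.path)
        self.dropbox.files_upload_session_finish(self.chunk, self.cursor,
                                                 commit)
        return None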
Example #25
  def new_file(self, field_name, file_name, *args, **kwargs):
    if self._is_s3_upload():
      super(S3FileUploadHandler, self).new_file(field_name, file_name, *args, **kwargs)

      LOG.info('Using S3FileUploadHandler to handle file upload.')
      self.target_path = self._fs.join(self.key_name, file_name)

      try:
        # Check access permissions before attempting upload
        self._check_access()
        # Create a multipart upload request
        LOG.debug("Initiating S3 multipart upload to target path: %s" % self.target_path)
        self._mp = self._bucket.initiate_multipart_upload(self.target_path)
        self.file = SimpleUploadedFile(name=file_name, content=b'')
        raise StopFutureHandlers()
      except (S3FileUploadError, S3FileSystemException) as e:
        LOG.error("Encountered error in S3UploadHandler check_access: %s" % e)
        self.request.META['upload_failed'] = e
        raise StopUpload()
Example #26
    def receive_data_chunk(self, raw_data, start):

        if self.filetype == 'sdf':
            encoded_raw_data = raw_data.decode(self.charset)
            encoded_raw_data = end_of_line_normalitzation(encoded_raw_data)
            block = self.__previous_last_line + encoded_raw_data

            if self.__end_mol_found and block.strip() != '':
                self.__invalidtoomols = True
            else:
                idx = block.rfind("\n$$$$\n")
                if idx > 0:
                    self.__end_mol_found = True
                    if len(block) > 6 and block[idx + 6:].strip() == '':
                        self.__invalidtoomols = False
                    else:
                        self.__invalidtoomols = True
                elif block.startswith("$$$$\n"):
                    self.__end_mol_found = True
                    if block[5:].strip() == '':
                        self.__invalidtoomols = False
                    else:
                        self.__invalidtoomols = True

            if self.__invalidtoomols:
                self.exception = MultipleMoleculesinSDF()
                raise StopUpload(connection_reset=False)

            split_block = block.rsplit(sep="\n", maxsplit=1)
            if len(split_block) == 1:
                self.__previous_last_line = block
            else:
                self.__previous_last_line = split_block[1]

        return super(TemporaryMoleculeFileUploadHandlerMaxSize,
                     self).receive_data_chunk(raw_data, start)
Example #27
 def handle_raw_input(self,
                      input_data,
                      META,
                      content_length,
                      boundary,
                      encoding=None):
     self.content_length = content_length
     if 'X-Progress-ID' in self.request.GET:
         self.progress_id = self.request.GET['X-Progress-ID']
     if self.progress_id:
         self.cache_key = "%s_%s" % (self.request.META['REMOTE_ADDR'],
                                     self.progress_id)
         cache.set(self.cache_key, {
             'state': 'uploading',
             'size': self.content_length,
             'received': 0
         })
         logging.debug("UploadProgressCachedHandler: handle_raw_input " +
                       str(datetime.datetime.now()))
     # MAX_UPLOAD_SIZE is given in megabytes.
     if (isinstance(settings.MAX_UPLOAD_SIZE, int)
             and settings.MAX_UPLOAD_SIZE > 0
             and content_length > settings.MAX_UPLOAD_SIZE * 1024 * 1024):
         raise StopUpload(connection_reset=True)
Example #28
 def upload_complete(self):
     if self.cache_key:
         cache.delete(self.cache_key)
     if self.cancelled:
         raise StopUpload(connection_reset=True)
Example #29
 def receive_data_chunk(self, raw_data, start):
     self.kbytes_read += len(raw_data) / 1024
     if self.kbytes_read > self.max_kbytes:
         self.canceled = True
         raise StopUpload(connection_reset=True)
     return raw_data
Example #30
 def receive_data_chunk(self, raw_data, start):
     self.total_upload += len(raw_data)
     if self.total_upload >= self.QUOTA:
         raise StopUpload(connection_reset=True)
     return raw_data
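Finally, instead of per-view insertion, handlers like these can be registered globally. A sketch of the settings entry, with a hypothetical dotted path; the two defaults shown are Django's stock handlers:

FILE_UPLOAD_HANDLERS = [
    "myapp.uploadhandlers.QuotaUploadHandler",  # hypothetical path
    "django.core.files.uploadhandler.MemoryFileUploadHandler",
    "django.core.files.uploadhandler.TemporaryFileUploadHandler",
]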