Example No. 1
    def init(self, settings, session):
        log.debug(deep_print(settings, "Building pipeline using loaded settings:"))

        # FIXME senders setting should be simpler to handle
        sender_settings = list(settings.mail_accounts)
        if settings.dir_dest is not None:
            sender_settings.append(settings.dir_dest)
        if settings.mega_settings is not None:
            sender_settings.append(settings.mega_settings)

        if not (sender_settings or settings.add_fake_sender or settings.slow_sender is not None):
            raise InvalidSettings("No senders were configured")

        fs_settings = FilesystemSettings.Settings(
            sender_settings_list=sender_settings,
            stored_files_settings=settings.stored_files)

        global_quota = Quota(
            quota_limit=settings.limits.max_shared_upload_per_day.in_bytes,
            used_quota=FilesDestinations.get_bytes_uploaded_in_date(session))
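        # (the same Quota instance is handed to both QuotaFilter and Compressor
        # below, so both count against one shared daily upload limit)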

        # The pipeline goes:
        #    read files -> filter -> compress -> [cipher] -> send -> log -> finish
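        # Optional stages are built as "X if <enabled> else None"; the pipeline
        # is assumed to skip None entries in .add()/.add_in_list().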
        rate_limiter = None
        if settings.limits.rate_limits is not None:
            rate_limiter = trickle.TrickleBwShaper(trickle.Settings(settings.limits.rate_limits))

        work_rate_controller = \
            WorkRateController(max_pending_for_processing=settings.performance.max_pending_for_processing)
        work_rate_controller.register(self)
        files_reader = \
            FileReader(path_filter_list=settings.exclude_paths.path_filter_list,
                       work_rate_controller=work_rate_controller)

        if settings.performance.filter_by_path:
            PathFilter().register(self)

        self.pipeline \
            .add(files_reader, disable_on_shutdown=True) \
            .add(FileSizeFilter(file_size_limit_bytes=settings.limits.max_file_size.in_bytes),
                 disable_on_shutdown=True) \
            .add(QuotaFilter(global_quota=global_quota, stop_on_remaining=settings.limits.stop_on_remaining.in_bytes),
                 disable_on_shutdown=True) \
            .add(AlreadyProcessedFilter() if settings.stored_files.should_check_already_sent else None,
                 disable_on_shutdown=True) \
            .add(Compressor(fs_settings=fs_settings, global_quota=global_quota), disable_on_shutdown=True) \
            .add(Cipher() if settings.stored_files.should_encrypt else None, disable_on_shutdown=True) \
            .add(ToImage() if settings.to_image.enabled else None, disable_on_shutdown=True) \
            .add(MarkerTask(mark=Marks.sending_stage), disable_on_shutdown=True) \
            .add(SlowSender(settings=settings.slow_sender) if settings.slow_sender is not None else None,
                 disable_on_shutdown=True) \
            .add_in_list([MailSender(mail_conf=sender_conf) for sender_conf in settings.mail_accounts]
                         if settings.mail_accounts else None) \
            .add(ToDirectorySender(dir_path=settings.dir_dest.path) if settings.dir_dest is not None else None) \
            .add(MegaSender(settings=settings.mega_settings, rate_limiter=rate_limiter)
                 if settings.mega_settings is not None else None) \
            .add(FakeSender() if settings.add_fake_sender else None) \
            .add(SentLog(sent_log=settings.sent_files_log)) \
            .add(Cleaner(delete_temp_files=settings.stored_files.delete_temp_files)) \
            .add(MarkerTask(mark=Marks.end_of_pipeline))
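
The chained .add(...) calls above only work if the builder returns itself from every call and ignores stages passed as None. A minimal sketch of such a builder follows; the real Pipeline class is not shown in the example, so the internal storage and the disable_on_shutdown handling here are assumptions:

class Pipeline:
    """Fluent builder: every add() returns self so stages chain in one expression."""

    def __init__(self):
        self._stages = []

    def add(self, stage, disable_on_shutdown=False):
        # None marks a disabled optional stage; skipping it here keeps the
        # builder chain at the call site free of if/else statements.
        if stage is not None:
            self._stages.append((stage, disable_on_shutdown))
        return self

    def add_in_list(self, stages, disable_on_shutdown=False):
        # Same contract for a whole list of stages (the list itself may be None).
        for stage in stages or []:
            self.add(stage, disable_on_shutdown)
        return self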
Example No. 2
    def __init__(self, sender_settings):
        log = get_logger_for(self)
        self.restrictions = _SenderRestriction(sender_settings)
        self.destinations = deepcopy(sender_settings.destinations)
        with get_session() as session:
            self.bytes_uploaded_today = \
                FilesDestinations.get_bytes_uploaded_in_date(session, self.destinations)
        log.info("According to the logs, %d bytes were already uploaded today for destinations %s",
                 self.bytes_uploaded_today,
                 self.destinations)
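
The counter initialized here is presumably consulted before each upload. A sketch of how a sender could gate on it; the max_upload_per_day attribute and both method names are illustrative assumptions, not part of the example:

    def can_send(self, file_size_bytes):
        # Hypothetical daily-cap check built on the counter above; assumes
        # the restrictions object exposes a per-day byte limit.
        limit = self.restrictions.max_upload_per_day
        return self.bytes_uploaded_today + file_size_bytes <= limit

    def register_sent(self, file_size_bytes):
        # Keep the in-memory counter in sync after a successful upload.
        self.bytes_uploaded_today += file_size_bytes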
Example No. 3
    def _log_in_db(self, block):
        if not self._session_resource:
            self._session_resource = get_session()
        with self._session_resource as session:
            session.autoflush = False  # to avoid IntegrityError raised during testing

            sent_file_info = block.latest_file_info

            # a new container has been saved
            file_container = FilesContainer(
                sha1=sent_file_info.sha1,
                file_name=sent_file_info.basename,
                encryption_key=getattr(block, 'cipher_key', ''),
                container_size=sent_file_info.size
            )
            session.add(file_container)
            # FIXME we need the container id because file_destination is not
            # getting it (the association example from the SQLAlchemy docs did
            # not work here)
            session.flush()  # get the container id

            # associate destinations to the container
            for destination in getattr(block, 'send_destinations', []):
                file_destination = FilesDestinations()
                file_destination.destination = Destination.get_or_add(session, destination)
                # FIXME according to the SQLAlchemy example, this shouldn't be needed
                file_destination.file_containers_id = file_container.id
                verif_data = getattr(block, 'destinations_verif_data', {})
                if destination in verif_data:
                    file_destination.verification_info = verif_data[destination]
                file_container.files_destinations.append(file_destination)

            # save/update each file in the container
            for file_info in block.content_file_infos:
                uploaded_file_fragment_number = 0
                if hasattr(file_info, 'fragment_info'):  # check if it is a fragment
                    uploaded_file_fragment_number = file_info.fragment_info.fragment_num
                    uploaded_file = \
                        self._get_uploaded_file(
                            session=session,
                            file_info=file_info.fragment_info.file_info,
                            fragment_count=file_info.fragment_info.fragments_count)

                    # save a new fragment for the file
                    file_fragment = FileFragment(
                        fragment_sha1=file_info.sha1,
                        fragment_name=file_info.upath,
                        fragment_number=file_info.fragment_info.fragment_num
                    )
                    uploaded_file.fragments.append(file_fragment)
                else:  # not fragmented file
                    uploaded_file = self._get_uploaded_file(session=session, file_info=file_info)

                session.flush()  # make sure uploaded_file has an id before building the association

                file_in_container_assoc = FilesInContainers(
                    uploaded_file_fragment_number=uploaded_file_fragment_number,
                    uploaded_files_id=uploaded_file.id
                )
                file_in_container_assoc.container_file = file_container
                file_container.fragments.append(file_in_container_assoc)

            session.commit()
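
For context, _log_in_db presumes roughly the following SQLAlchemy schema. This is a sketch reconstructed from the attribute names used above; table names, column types, and the declarative style are assumptions:

from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import declarative_base, relationship  # SQLAlchemy 1.4+ style

Base = declarative_base()

class FilesContainer(Base):
    __tablename__ = 'files_containers'
    id = Column(Integer, primary_key=True)
    sha1 = Column(String)
    file_name = Column(String)
    encryption_key = Column(String)
    container_size = Column(Integer)
    # one container is sent to many destinations and holds many files/fragments
    files_destinations = relationship('FilesDestinations')
    fragments = relationship('FilesInContainers', back_populates='container_file')

class Destination(Base):
    __tablename__ = 'destinations'
    id = Column(Integer, primary_key=True)
    name = Column(String, unique=True)

class FilesDestinations(Base):
    __tablename__ = 'files_destinations'
    id = Column(Integer, primary_key=True)
    file_containers_id = Column(Integer, ForeignKey('files_containers.id'))
    destinations_id = Column(Integer, ForeignKey('destinations.id'))
    verification_info = Column(String)
    destination = relationship('Destination')

class FilesInContainers(Base):
    __tablename__ = 'files_in_containers'
    id = Column(Integer, primary_key=True)
    file_containers_id = Column(Integer, ForeignKey('files_containers.id'))
    uploaded_files_id = Column(Integer)  # FK to an UploadedFile model (not shown)
    uploaded_file_fragment_number = Column(Integer)
    container_file = relationship('FilesContainer', back_populates='fragments')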