Example 1
def DAEMONSynchroniseRepositories(controller):

    if not controller.options['pause_repo_sync']:

        services = controller.services_manager.GetServices(HC.REPOSITORIES,
                                                           randomised=True)

        for service in services:

            if HydrusThreading.IsThreadShuttingDown():

                return

            if controller.options['pause_repo_sync']:

                return

            service.SyncRemote()

            service.SyncProcessUpdates(maintenance_mode=HC.MAINTENANCE_IDLE)

            if HydrusThreading.IsThreadShuttingDown():

                return

            time.sleep(1)
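
Every example in this collection uses the same cooperative-shutdown pattern: a long-running daemon polls HydrusThreading.IsThreadShuttingDown() between units of work instead of being killed from outside, so it always stops at a safe point. Here is a minimal, self-contained sketch of that loop using a plain threading.Event as a stand-in for Hydrus's module-level flag (the event and worker names are illustrative, not Hydrus API):

import threading
import time

shutdown_event = threading.Event()  # stands in for HydrusThreading's shutdown flag

def daemon_work(jobs):

    for job in jobs:

        # check the flag between units of work, never in the middle of one
        if shutdown_event.is_set():

            return

        job()  # one bounded unit of work

        time.sleep(1)  # yield politely, as the sync daemon above does

worker = threading.Thread(target=daemon_work, args=([lambda: None] * 3,), daemon=True)
worker.start()

shutdown_event.set()  # request shutdown; the daemon exits at its next check
worker.join()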
Example 2
def DAEMONMaintainTrash( controller ):
    
    if HC.options[ 'trash_max_size' ] is not None:
        
        max_size = HC.options[ 'trash_max_size' ] * 1048576
        
        service_info = controller.Read( 'service_info', CC.TRASH_SERVICE_KEY )
        
        while service_info[ HC.SERVICE_INFO_TOTAL_SIZE ] > max_size:
            
            if HydrusThreading.IsThreadShuttingDown():
                
                return
                
            
            hashes = controller.Read( 'trash_hashes', limit = 10 )
            
            if len( hashes ) == 0:
                
                return
                
            
            content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, hashes )
            
            service_keys_to_content_updates = { CC.TRASH_SERVICE_KEY : [ content_update ] }
            
            controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
            
            service_info = controller.Read( 'service_info', CC.TRASH_SERVICE_KEY )
            
            time.sleep( 2 )
            
        
    
    if HC.options[ 'trash_max_age' ] is not None:
        
        max_age = HC.options[ 'trash_max_age' ] * 3600
        
        hashes = controller.Read( 'trash_hashes', limit = 10, minimum_age = max_age )
        
        while len( hashes ) > 0:
            
            if HydrusThreading.IsThreadShuttingDown():
                
                return
                
            
            content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, hashes )
            
            service_keys_to_content_updates = { CC.TRASH_SERVICE_KEY : [ content_update ] }
            
            controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
            
            hashes = controller.Read( 'trash_hashes', limit = 10, minimum_age = max_age )
            
            time.sleep( 2 )
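
This daemon deletes in small batches (limit = 10) and re-reads the service totals between batches, so a shutdown request is never more than one batch of work away and the database is not locked for long stretches. A hedged sketch of the same bounded-batch eviction loop against a hypothetical store (total_size, oldest and delete are illustrative stand-ins for the controller.Read/WriteSynchronous calls above):

def evict_until_under_quota(store, max_size, batch_size=10, stop_requested=lambda: False):

    while store.total_size() > max_size:

        if stop_requested():

            return  # cooperative shutdown, mirroring IsThreadShuttingDown()

        batch = store.oldest(batch_size)

        if len(batch) == 0:

            return  # nothing left to delete; avoid spinning forever

        store.delete(batch)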
Example 3
    def _CheckCancelTests(self):

        if not self._cancelled.is_set():

            should_cancel = False

            if self._cancel_on_shutdown and HydrusThreading.IsThreadShuttingDown():

                should_cancel = True

            if HG.client_controller.ShouldStopThisWork(self._maintenance_mode,
                                                       self._stop_time):

                should_cancel = True

            if should_cancel:

                self.Cancel()

        if not self._deleted.is_set():

            if self._deletion_time is not None:

                if HydrusData.TimeHasPassed(self._deletion_time):

                    self.Finish()

                    self._deleted.set()
Example 4
        def __enter__(self):

            # let all the readers know that we are bumping up to the front of the queue

            with self.parent.lock:

                self.parent.num_waiting_writers += 1

            while not HydrusThreading.IsThreadShuttingDown():

                with self.parent.lock:

                    # if nothing is reading or writing at the moment, seize the opportunity

                    if self.parent.num_readers == 0 and not self.parent.there_is_an_active_writer:

                        self.parent.there_is_an_active_writer = True

                        return

                # otherwise wait a bit

                self.parent.write_available_event.wait(1)

                self.parent.write_available_event.clear()
Example 5
def DAEMONCheckImportFolders():

    controller = HG.client_controller

    if not controller.options['pause_import_folders_sync']:

        HG.import_folders_running = True

        try:

            import_folder_names = controller.Read(
                'serialisable_names',
                HydrusSerialisable.SERIALISABLE_TYPE_IMPORT_FOLDER)

            for name in import_folder_names:

                import_folder = controller.Read(
                    'serialisable_named',
                    HydrusSerialisable.SERIALISABLE_TYPE_IMPORT_FOLDER, name)

                if controller.options['pause_import_folders_sync'] or HydrusThreading.IsThreadShuttingDown():

                    break

                import_folder.DoWork()

        finally:

            HG.import_folders_running = False
Example 6
        def do_it(hash_ids):

            for group_of_hash_ids in HydrusData.SplitListIntoChunks(
                    hash_ids, 256):

                if HydrusThreading.IsThreadShuttingDown():

                    return

                hash_ids_to_tags_managers = HG.client_controller.Read(
                    'force_refresh_tags_managers', group_of_hash_ids)

                with self._lock:

                    for (hash_id,
                         tags_manager) in hash_ids_to_tags_managers.items():

                        media_result = self._hash_ids_to_media_results.get(
                            hash_id, None)

                        if media_result is not None:

                            media_result.SetTagsManager(tags_manager)

            HG.client_controller.pub('refresh_all_tag_presentation_gui')
Example 7
def DAEMONCheckExportFolders():

    controller = HG.client_controller

    if not controller.new_options.GetBoolean('pause_export_folders_sync'):

        HG.export_folders_running = True

        try:

            export_folder_names = controller.Read(
                'serialisable_names',
                HydrusSerialisable.SERIALISABLE_TYPE_EXPORT_FOLDER)

            for name in export_folder_names:

                export_folder = controller.Read(
                    'serialisable_named',
                    HydrusSerialisable.SERIALISABLE_TYPE_EXPORT_FOLDER, name)

                if controller.new_options.GetBoolean('pause_export_folders_sync') or HydrusThreading.IsThreadShuttingDown():

                    break

                export_folder.DoWork()

        finally:

            HG.export_folders_running = False
Example 8
        def __enter__(self):

            while not HydrusThreading.IsThreadShuttingDown():

                with self.parent.lock:

                    # if there are no writers, we can start reading

                    if self.parent.num_waiting_writers == 0:

                        self.parent.num_readers += 1

                        return

                # otherwise wait a bit

                self.parent.read_available_event.wait(1)

                self.parent.read_available_event.clear()
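
Examples 4 and 8 are the two __enter__ halves of a writer-priority read/write lock: a writer first announces itself by bumping num_waiting_writers, and readers refuse to start while any writer is waiting, so a steady stream of readers cannot starve writes. Neither excerpt shows the parent object or the __exit__ sides, so the following is a hedged reconstruction of the whole pattern: the attribute names follow the excerpts, but the constructor and both __exit__ methods are inferred rather than taken from Hydrus, and the shutdown check is dropped to keep the sketch self-contained.

import threading

class ReadWriteLock(object):

    def __init__(self):

        self.lock = threading.Lock()

        self.num_readers = 0
        self.num_waiting_writers = 0
        self.there_is_an_active_writer = False

        self.read_available_event = threading.Event()
        self.write_available_event = threading.Event()

        self.read = self._ReadContext(self)
        self.write = self._WriteContext(self)

    class _ReadContext(object):

        def __init__(self, parent):

            self.parent = parent

        def __enter__(self):

            while True:

                with self.parent.lock:

                    # a writer holds its num_waiting_writers slot until __exit__,
                    # so this single check also excludes an active writer
                    if self.parent.num_waiting_writers == 0:

                        self.parent.num_readers += 1

                        return

                self.parent.read_available_event.wait(1)
                self.parent.read_available_event.clear()

        def __exit__(self, exc_type, exc_val, exc_tb):

            with self.parent.lock:

                self.parent.num_readers -= 1

                if self.parent.num_readers == 0:

                    # the last reader out wakes a waiting writer
                    self.parent.write_available_event.set()

    class _WriteContext(object):

        def __init__(self, parent):

            self.parent = parent

        def __enter__(self):

            with self.parent.lock:

                self.parent.num_waiting_writers += 1

            while True:

                with self.parent.lock:

                    # if nothing is reading or writing, seize the opportunity
                    if self.parent.num_readers == 0 and not self.parent.there_is_an_active_writer:

                        self.parent.there_is_an_active_writer = True

                        return

                self.parent.write_available_event.wait(1)
                self.parent.write_available_event.clear()

        def __exit__(self, exc_type, exc_val, exc_tb):

            with self.parent.lock:

                self.parent.num_waiting_writers -= 1
                self.parent.there_is_an_active_writer = False

                if self.parent.num_waiting_writers > 0:

                    self.parent.write_available_event.set()

                else:

                    self.parent.read_available_event.set()

Usage pairs the two sides as context managers:

rw_lock = ReadWriteLock()

with rw_lock.read:
    pass  # shared section: many readers at once

with rw_lock.write:
    pass  # exclusive section: one writer, no readers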
Example 9
    def _ImportFiles(self, job_key):

        did_work = False

        time_to_save = HydrusData.GetNow() + 600

        num_files_imported = 0
        presentation_hashes = []
        presentation_hashes_fast = set()

        i = 0

        num_total = len(self._file_seed_cache)
        num_total_unknown = self._file_seed_cache.GetFileSeedCount(
            CC.STATUS_UNKNOWN)
        num_total_done = num_total - num_total_unknown

        while True:

            file_seed = self._file_seed_cache.GetNextFileSeed(
                CC.STATUS_UNKNOWN)

            p1 = HC.options['pause_import_folders_sync'] or self._paused
            p2 = HydrusThreading.IsThreadShuttingDown()
            p3 = job_key.IsCancelled()

            if file_seed is None or p1 or p2 or p3:

                break

            did_work = True

            if HydrusData.TimeHasPassed(time_to_save):

                HG.client_controller.WriteSynchronous('serialisable', self)

                time_to_save = HydrusData.GetNow() + 600

            gauge_num_done = num_total_done + num_files_imported + 1

            job_key.SetVariable(
                'popup_text_1',
                'importing file ' + HydrusData.ConvertValueRangeToPrettyString(
                    gauge_num_done, num_total))
            job_key.SetVariable('popup_gauge_1', (gauge_num_done, num_total))

            path = file_seed.file_seed_data

            file_seed.ImportPath(self._file_seed_cache,
                                 self._file_import_options,
                                 limited_mimes=self._mimes)

            if file_seed.status in CC.SUCCESSFUL_IMPORT_STATES:

                if file_seed.HasHash():

                    hash = file_seed.GetHash()

                    if self._tag_import_options.HasAdditionalTags():

                        media_result = HG.client_controller.Read(
                            'media_result', hash)

                        downloaded_tags = []

                        service_keys_to_content_updates = self._tag_import_options.GetServiceKeysToContentUpdates(
                            file_seed.status, media_result,
                            downloaded_tags)  # additional tags

                        if len(service_keys_to_content_updates) > 0:

                            HG.client_controller.WriteSynchronous(
                                'content_updates',
                                service_keys_to_content_updates)

                    service_keys_to_tags = ClientTags.ServiceKeysToTags()

                    for (tag_service_key, filename_tagging_options) in list(self._tag_service_keys_to_filename_tagging_options.items()):

                        if not HG.client_controller.services_manager.ServiceExists(
                                tag_service_key):

                            continue

                        try:

                            tags = filename_tagging_options.GetTags(
                                tag_service_key, path)

                            if len(tags) > 0:

                                service_keys_to_tags[tag_service_key] = tags

                        except Exception as e:

                            HydrusData.ShowText(
                                'Trying to parse filename tags in the import folder "'
                                + self._name + '" threw an error!')

                            HydrusData.ShowException(e)

                    if len(service_keys_to_tags) > 0:

                        service_keys_to_content_updates = ClientData.ConvertServiceKeysToTagsToServiceKeysToContentUpdates(
                            {hash}, service_keys_to_tags)

                        HG.client_controller.WriteSynchronous(
                            'content_updates', service_keys_to_content_updates)

                num_files_imported += 1

                # hash is only bound when the seed has one, so guard before using it
                if file_seed.HasHash() and hash not in presentation_hashes_fast:

                    if file_seed.ShouldPresent(self._file_import_options):

                        presentation_hashes.append(hash)

                        presentation_hashes_fast.add(hash)

            elif file_seed.status == CC.STATUS_ERROR:

                HydrusData.Print(
                    'A file failed to import from import folder ' +
                    self._name + ':' + path)

            i += 1

            if i % 10 == 0:

                self._ActionPaths()

        if num_files_imported > 0:

            HydrusData.Print('Import folder ' + self._name + ' imported ' +
                             HydrusData.ToHumanInt(num_files_imported) +
                             ' files.')

            if len(presentation_hashes) > 0:

                ClientImporting.PublishPresentationHashes(
                    self._name, presentation_hashes,
                    self._publish_files_to_popup_button,
                    self._publish_files_to_page)

        self._ActionPaths()

        return did_work
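
Two housekeeping habits in this method are worth isolating: a wall-clock checkpoint (time_to_save) that persists the whole object at most every ten minutes, and a modulo counter (i % 10 == 0) that batches the _ActionPaths side work. A compact sketch of the checkpoint half (run_jobs and save_state are illustrative names, not Hydrus API):

import time

SAVE_INTERVAL = 600  # seconds, the same ten-minute cadence as the example

def run_jobs(jobs, save_state):

    time_to_save = time.monotonic() + SAVE_INTERVAL

    for job in jobs:

        if time.monotonic() >= time_to_save:

            save_state()  # a crash now loses at most one interval of progress

            time_to_save = time.monotonic() + SAVE_INTERVAL

        job()

    save_state()  # final save, mirroring the trailing bookkeeping above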
Example 10
    def MainLoop(self):

        while not HydrusThreading.IsThreadShuttingDown():

            time.sleep(0.00001)

            with self._lock:

                do_wait = len(self._waterfall_queue) == 0 and len(
                    self._delayed_regeneration_queue) == 0

            if do_wait:

                self._waterfall_event.wait(1)

                self._waterfall_event.clear()

            start_time = HydrusData.GetNowPrecise()
            stop_time = start_time + 0.005  # a bit of a typical frame

            page_keys_to_rendered_medias = collections.defaultdict(list)

            num_done = 0
            max_at_once = 16

            while not HydrusData.TimeHasPassedPrecise(
                    stop_time) and num_done <= max_at_once:

                with self._lock:

                    if len(self._waterfall_queue) == 0:

                        break

                    result = self._waterfall_queue.pop()

                    if len(self._waterfall_queue) == 0:

                        self._waterfall_queue_empty_event.set()

                    self._waterfall_queue_quick.discard(result)

                (page_key, media) = result

                if media.GetDisplayMedia() is not None:

                    self.GetThumbnail(media)

                    page_keys_to_rendered_medias[page_key].append(media)

                num_done += 1

            if len(page_keys_to_rendered_medias) > 0:

                for (page_key,
                     rendered_medias) in page_keys_to_rendered_medias.items():

                    self._controller.pub('waterfall_thumbnails', page_key,
                                         rendered_medias)

                time.sleep(0.00001)

            # now we will do regen if appropriate

            with self._lock:

                # got more important work or no work to do
                if len(self._waterfall_queue) > 0 or len(self._delayed_regeneration_queue) == 0 or HG.client_controller.CurrentlyPubSubbing():

                    continue

                media_result = self._delayed_regeneration_queue.pop()

                self._delayed_regeneration_queue_quick.discard(media_result)

            if HG.file_report_mode:

                hash = media_result.GetHash()

                HydrusData.ShowText(
                    'Thumbnail {} now regenerating from source.'.format(
                        hash.hex()))

            try:

                self._controller.files_maintenance_manager.RunJobImmediately(
                    [media_result],
                    ClientFiles.REGENERATE_FILE_DATA_JOB_FORCE_THUMBNAIL,
                    pub_job_key=False)

            except HydrusExceptions.FileMissingException:

                pass

            except Exception as e:

                hash = media_result.GetHash()

                summary = 'The thumbnail for file {} was incorrect, but a later attempt to regenerate it or load the new file back failed.'.format(
                    hash.hex())

                self._HandleThumbnailException(e, summary)
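
The inner loop above drains the waterfall queue under a dual limit: a time budget (stop_time = start_time + 0.005, roughly one frame) and an item cap (max_at_once = 16), so a single pass can never monopolise the thread however deep the queue gets. The same budgeted-drain idea in isolation (the queue and handler arguments are illustrative):

import time

def drain_with_budget(queue, handle, budget_seconds=0.005, max_items=16):

    # process items until the time budget or the item cap runs out,
    # whichever comes first; leftovers wait for the next pass
    stop_time = time.monotonic() + budget_seconds

    num_done = 0

    while time.monotonic() < stop_time and num_done < max_items:

        if len(queue) == 0:

            break

        handle(queue.pop())

        num_done += 1

    return num_done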
Example 11
 def _DoExport( self ):
     
     query_hash_ids = HG.client_controller.Read( 'file_query_ids', self._file_search_context )
     
     media_results = []
     
     i = 0
     
     base = 256
     
     while i < len( query_hash_ids ):
         
         if HG.client_controller.new_options.GetBoolean( 'pause_export_folders_sync' ) or HydrusThreading.IsThreadShuttingDown():
             
             return
             
         
         if i == 0: ( last_i, i ) = ( 0, base )
         else: ( last_i, i ) = ( i, i + base )
         
         sub_query_hash_ids = query_hash_ids[ last_i : i ]
         
         more_media_results = HG.client_controller.Read( 'media_results_from_ids', sub_query_hash_ids )
         
         media_results.extend( more_media_results )
         
     
     media_results.sort( key = lambda mr: mr.GetHashId() )
     
     #
     
     terms = ParseExportPhrase( self._phrase )
     
     previous_paths = set()
     
     for ( root, dirnames, filenames ) in os.walk( self._path ):
         
         previous_paths.update( ( os.path.join( root, filename ) for filename in filenames ) )
         
     
     sync_paths = set()
     
     client_files_manager = HG.client_controller.client_files_manager
     
     num_copied = 0
     
     for media_result in media_results:
         
         if HG.client_controller.new_options.GetBoolean( 'pause_export_folders_sync' ) or HydrusThreading.IsThreadShuttingDown():
             
             return
             
         
         hash = media_result.GetHash()
         mime = media_result.GetMime()
         size = media_result.GetSize()
         
         try:
             
             source_path = client_files_manager.GetFilePath( hash, mime )
             
         except HydrusExceptions.FileMissingException:
             
             raise Exception( 'A file to be exported, hash "{}", was missing! You should run file maintenance (under database->maintenance->files) to check the files for the export folder\'s search, and possibly all your files.'.format( hash.hex() ) )
             
         
         filename = GenerateExportFilename( self._path, media_result, terms )
         
         dest_path = os.path.normpath( os.path.join( self._path, filename ) )
         
         if not dest_path.startswith( self._path ):
             
             raise Exception( 'It seems a destination path for export folder "{}" was above the main export directory! The file was "{}" and its destination path was "{}".'.format( self._path, hash.hex(), dest_path ) )
             
         
         dest_path_dir = os.path.dirname( dest_path )
         
         HydrusPaths.MakeSureDirectoryExists( dest_path_dir )
         
         if dest_path not in sync_paths:
             
             copied = HydrusPaths.MirrorFile( source_path, dest_path )
             
             if copied:
                 
                 num_copied += 1
                 
                 HydrusPaths.TryToGiveFileNicePermissionBits( dest_path )
                 
             
         
         sync_paths.add( dest_path )
         
     
     if num_copied > 0:
         
         HydrusData.Print( 'Export folder ' + self._name + ' exported ' + HydrusData.ToHumanInt( num_copied ) + ' files.' )
         
     
     if self._export_type == HC.EXPORT_FOLDER_TYPE_SYNCHRONISE:
         
         deletee_paths = previous_paths.difference( sync_paths )
         
         for deletee_path in deletee_paths:
             
             ClientPaths.DeletePath( deletee_path )
             
         
         deletee_dirs = set()
         
         for ( root, dirnames, filenames ) in os.walk( self._path, topdown = False ):
             
             if root == self._path:
                 
                 continue
                 
             
             no_files = len( filenames ) == 0
             
             useful_dirnames = [ dirname for dirname in dirnames if os.path.join( root, dirname ) not in deletee_dirs ]
             
             no_useful_dirs = len( useful_dirnames ) == 0
             
             if no_useful_dirs and no_files:
                 
                 deletee_dirs.add( root )
                 
             
         
         for deletee_dir in deletee_dirs:
             
             if os.path.exists( deletee_dir ):
                 
                 HydrusPaths.DeletePath( deletee_dir )
                 
             
         
         if len( deletee_paths ) > 0:
             
             HydrusData.Print( 'Export folder {} deleted {} files and {} folders.'.format( self._name, HydrusData.ToHumanInt( len( deletee_paths ) ), HydrusData.ToHumanInt( len( deletee_dirs ) ) ) )
             
         
     
     if self._delete_from_client_after_export:
         
         local_file_service_keys = HG.client_controller.services_manager.GetServiceKeys( ( HC.LOCAL_FILE_DOMAIN, ) )
         
         service_keys_to_deletee_hashes = collections.defaultdict( list )
         
         delete_lock_for_archived_files = HG.client_controller.new_options.GetBoolean( 'delete_lock_for_archived_files' )
         
         for media_result in media_results:
             
             if delete_lock_for_archived_files and not media_result.GetInbox():
                 
                 continue
                 
             
             hash = media_result.GetHash()
             
             deletee_service_keys = media_result.GetLocationsManager().GetCurrent().intersection( local_file_service_keys )
             
             for deletee_service_key in deletee_service_keys:
                 
                 service_keys_to_deletee_hashes[ deletee_service_key ].append( hash )
                 
             
         
         reason = 'Deleted after export to Export Folder "{}".'.format( self._path )
         
         for ( service_key, deletee_hashes ) in service_keys_to_deletee_hashes.items():
             
             chunks_of_hashes = HydrusData.SplitListIntoChunks( deletee_hashes, 64 )
             
             for chunk_of_hashes in chunks_of_hashes:
                 
                 content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, chunk_of_hashes, reason = reason )
                 
                 HG.client_controller.WriteSynchronous( 'content_updates', { service_key : [ content_update ] } )
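
The id-paging loop at the top of this method walks query_hash_ids in blocks of base = 256 with manual last_i/i bookkeeping. The same chunking can be written as a slice-stepping generator, in the spirit of the HydrusData.SplitListIntoChunks helper this function already uses for its delete batches (the sketch below is illustrative, not Hydrus's implementation):

def split_into_chunks(seq, chunk_size):

    # yield successive chunk_size-length slices of seq
    for start in range(0, len(seq), chunk_size):

        yield seq[start:start + chunk_size]

# e.g.: for sub_query_hash_ids in split_into_chunks(query_hash_ids, 256): ...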
Example 12
    def MainLoop(self):

        hashes_still_to_download_in_this_run = set()
        total_hashes_in_this_run = 0
        total_successful_hashes_in_this_run = 0

        while not (HydrusThreading.IsThreadShuttingDown()
                   or self._shutting_down or HG.view_shutdown):

            with self._lock:

                if len(self._pending_hashes) > 0:

                    if total_hashes_in_this_run == 0:

                        job_key = ClientThreading.JobKey(cancellable=True)

                        job_key.SetStatusTitle('downloading')

                        job_key.SetVariable('popup_text_1',
                                            'initialising downloader')

                        job_key_pub_job = self._controller.CallLater(
                            2.0, self._controller.pub, 'message', job_key)

                    num_before = len(hashes_still_to_download_in_this_run)

                    hashes_still_to_download_in_this_run.update(
                        self._pending_hashes)

                    num_after = len(hashes_still_to_download_in_this_run)

                    total_hashes_in_this_run += num_after - num_before

                    self._pending_hashes = set()

            if len(hashes_still_to_download_in_this_run) == 0:

                total_hashes_in_this_run = 0
                total_successful_hashes_in_this_run = 0

                self._new_files_event.wait(5)

                self._new_files_event.clear()

                continue

            if job_key.IsCancelled():

                hashes_still_to_download_in_this_run = set()

                continue

            # random.sample no longer accepts sets (removed in Python 3.11), so convert first
            hash = random.sample(list(hashes_still_to_download_in_this_run), 1)[0]

            hashes_still_to_download_in_this_run.discard(hash)

            total_done = total_hashes_in_this_run - len(
                hashes_still_to_download_in_this_run)

            job_key.SetVariable(
                'popup_text_1',
                'downloading files from remote services: {}'.format(
                    HydrusData.ConvertValueRangeToPrettyString(
                        total_done, total_hashes_in_this_run)))
            job_key.SetVariable('popup_gauge_1',
                                (total_done, total_hashes_in_this_run))

            try:

                errors_occurred = []
                file_successful = False

                media_result = self._controller.Read('media_result', hash)

                service_keys = list(
                    media_result.GetLocationsManager().GetCurrent())

                random.shuffle(service_keys)

                if CC.COMBINED_LOCAL_FILE_SERVICE_KEY in service_keys:

                    total_successful_hashes_in_this_run += 1

                    continue

                for service_key in service_keys:

                    try:

                        service = self._controller.services_manager.GetService(
                            service_key)

                    except:

                        continue

                    try:

                        if service.GetServiceType() == HC.FILE_REPOSITORY:

                            file_repository = service

                            if file_repository.IsFunctional():

                                (os_file_handle,
                                 temp_path) = HydrusTemp.GetTempPath()

                                try:

                                    file_repository.Request(
                                        HC.GET,
                                        'file', {'hash': hash},
                                        temp_path=temp_path)

                                    exclude_deleted = False  # this is the important part here
                                    do_not_check_known_urls_before_importing = False
                                    do_not_check_hashes_before_importing = False
                                    allow_decompression_bombs = True
                                    min_size = None
                                    max_size = None
                                    max_gif_size = None
                                    min_resolution = None
                                    max_resolution = None
                                    automatic_archive = False
                                    associate_primary_urls = True
                                    associate_source_urls = True

                                    file_import_options = FileImportOptions.FileImportOptions()

                                    file_import_options.SetPreImportOptions(
                                        exclude_deleted,
                                        do_not_check_known_urls_before_importing,
                                        do_not_check_hashes_before_importing,
                                        allow_decompression_bombs, min_size,
                                        max_size, max_gif_size, min_resolution,
                                        max_resolution)
                                    file_import_options.SetPostImportOptions(
                                        automatic_archive,
                                        associate_primary_urls,
                                        associate_source_urls)

                                    file_import_job = ClientImportFiles.FileImportJob(
                                        temp_path, file_import_options)

                                    file_import_job.DoWork()

                                    file_successful = True

                                    break

                                finally:

                                    HydrusTemp.CleanUpTempPath(
                                        os_file_handle, temp_path)

                        elif service.GetServiceType() == HC.IPFS:

                            multihashes = HG.client_controller.Read(
                                'service_filenames', service_key, {hash})

                            if len(multihashes) > 0:

                                multihash = multihashes[0]

                                service.ImportFile(multihash, silent=True)

                                file_successful = True

                                break

                    except Exception as e:

                        errors_occurred.append(e)

                if file_successful:

                    total_successful_hashes_in_this_run += 1

                if len(errors_occurred) > 0:

                    if not file_successful:

                        raise errors_occurred[0]

            except Exception as e:

                HydrusData.ShowException(e)

                hashes_still_to_download_in_this_run = set()  # reset; this must stay a set, since len() is called on it in the finally block

            finally:

                if len(hashes_still_to_download_in_this_run) == 0:

                    job_key.DeleteVariable('popup_text_1')
                    job_key.DeleteVariable('popup_gauge_1')

                    if total_successful_hashes_in_this_run > 0:

                        job_key.SetVariable(
                            'popup_text_1',
                            HydrusData.ToHumanInt(
                                total_successful_hashes_in_this_run) +
                            ' files downloaded')

                    job_key_pub_job.Cancel()

                    job_key.Finish()

                    job_key.Delete(1)
Example 13
 def _DoExport( self ):
     
     query_hash_ids = HG.client_controller.Read( 'file_query_ids', self._file_search_context )
     
     media_results = []
     
     i = 0
     
     base = 256
     
     while i < len( query_hash_ids ):
         
         if HC.options[ 'pause_export_folders_sync' ] or HydrusThreading.IsThreadShuttingDown():
             
             return
             
         
         if i == 0: ( last_i, i ) = ( 0, base )
         else: ( last_i, i ) = ( i, i + base )
         
         sub_query_hash_ids = query_hash_ids[ last_i : i ]
         
         more_media_results = HG.client_controller.Read( 'media_results_from_ids', sub_query_hash_ids )
         
         media_results.extend( more_media_results )
         
     
     media_results.sort( key = lambda mr: mr.GetHashId() )
     
     #
     
     terms = ParseExportPhrase( self._phrase )
     
     previous_paths = set()
     
     for ( root, dirnames, filenames ) in os.walk( self._path ):
         
         previous_paths.update( ( os.path.join( root, filename ) for filename in filenames ) )
         
     
     sync_paths = set()
     
     client_files_manager = HG.client_controller.client_files_manager
     
     num_copied = 0
     
     for media_result in media_results:
         
         if HC.options[ 'pause_export_folders_sync' ] or HydrusThreading.IsThreadShuttingDown():
             
             return
             
         
         hash = media_result.GetHash()
         mime = media_result.GetMime()
         size = media_result.GetSize()
         
         source_path = client_files_manager.GetFilePath( hash, mime )
         
         filename = GenerateExportFilename( self._path, media_result, terms )
         
         dest_path = os.path.normpath( os.path.join( self._path, filename ) )
         
         if not dest_path.startswith( self._path ):
             
             raise Exception( 'It seems a destination path for export folder "{}" was above the main export directory! The file was "{}" and its destination path was "{}".'.format( self._path, hash.hex(), dest_path ) )
             
         
         dest_path_dir = os.path.dirname( dest_path )
         
         HydrusPaths.MakeSureDirectoryExists( dest_path_dir )
         
         if dest_path not in sync_paths:
             
             copied = HydrusPaths.MirrorFile( source_path, dest_path )
             
             if copied:
                 
                 num_copied += 1
                 
                 HydrusPaths.MakeFileWritable( dest_path )
                 
             
         
         sync_paths.add( dest_path )
         
     
     if num_copied > 0:
         
         HydrusData.Print( 'Export folder ' + self._name + ' exported ' + HydrusData.ToHumanInt( num_copied ) + ' files.' )
         
     
     if self._export_type == HC.EXPORT_FOLDER_TYPE_SYNCHRONISE:
         
         deletee_paths = previous_paths.difference( sync_paths )
         
         for deletee_path in deletee_paths:
             
             ClientPaths.DeletePath( deletee_path )
             
         
         deletee_dirs = set()
         
         for ( root, dirnames, filenames ) in os.walk( self._path, topdown = False ):
             
             if root == self._path:
                 
                 continue
                 
             
             no_files = len( filenames ) == 0
             
             useful_dirnames = [ dirname for dirname in dirnames if os.path.join( root, dirname ) not in deletee_dirs ]
             
             no_useful_dirs = len( useful_dirnames ) == 0
             
             if no_useful_dirs and no_files:
                 
                 deletee_dirs.add( root )
                 
             
         
         for deletee_dir in deletee_dirs:
             
             if os.path.exists( deletee_dir ):
                 
                 HydrusPaths.DeletePath( deletee_dir )
                 
             
         
         if len( deletee_paths ) > 0:
             
             HydrusData.Print( 'Export folder {} deleted {} files and {} folders.'.format( self._name, HydrusData.ToHumanInt( len( deletee_paths ) ), HydrusData.ToHumanInt( len( deletee_dirs ) ) ) )
             
         
     
     if self._delete_from_client_after_export:
         
         deletee_hashes = { media_result.GetHash() for media_result in media_results }
         
         chunks_of_hashes = HydrusData.SplitListIntoChunks( deletee_hashes, 64 )
         
         reason = 'Deleted after export to Export Folder "{}".'.format( self._path )
         
         content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, chunk_of_hashes, reason = reason ) for chunk_of_hashes in chunks_of_hashes ]
         
         for content_update in content_updates:
             
             HG.client_controller.WriteSynchronous( 'content_updates', { CC.LOCAL_FILE_SERVICE_KEY : [ content_update ] } )