Example #1
def SetEnvTempDir(path):

    if os.path.exists(path) and not os.path.isdir(path):

        raise Exception(
            'The given temp directory, "{}", does not seem to be a directory!'.
            format(path))

    try:

        HydrusPaths.MakeSureDirectoryExists(path)

    except Exception as e:

        raise Exception('Could not create the temp dir: {}'.format(e))

    if not HydrusPaths.DirectoryIsWriteable(path):

        raise Exception(
            'The given temp directory, "{}", does not seem to be writeable-to!'
            .format(path))

    for tmp_name in ('TMPDIR', 'TEMP', 'TMP'):

        if tmp_name in os.environ:

            os.environ[tmp_name] = path

    tempfile.tempdir = path
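
HydrusPaths.MakeSureDirectoryExists itself is not shown on this page. Judging by how every example calls it (always just before writing into the directory), it presumably behaves like os.makedirs with exist_ok=True; a minimal sketch under that assumption:

import os

def MakeSureDirectoryExists(path):

    # roughly `mkdir -p`: create any missing parents, do nothing if the
    # directory already exists
    os.makedirs(path, exist_ok=True)

With a helper like that in place, a call such as SetEnvTempDir('/fast_disk/hydrus_tmp') creates the directory if needed, points the TMPDIR/TEMP/TMP environment variables at it, and sets tempfile.tempdir so later tempfile.* calls land there.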
Example #2
def GetExportPath():
    
    portable_path = HG.client_controller.options[ 'export_path' ]
    
    if portable_path is None:
        
        desired_path = os.path.join( '~', 'hydrus_export' )
        
        path = os.path.expanduser( desired_path )
        
        if path == desired_path:
            
            # could not figure it out, probably crazy user setup atm
            
            return None
            
        
        HydrusPaths.MakeSureDirectoryExists( path )
        
    else:
        
        path = HydrusPaths.ConvertPortablePathToAbsPath( portable_path )
        
    
    return path
Example #3
 def test_import_folders_daemon( self ):
     
     test_dir = HydrusTemp.GetTempDir()
     
     try:
         
         HG.test_controller.SetRead( 'hash_status', ClientImportFiles.FileImportStatus.STATICGetUnknownStatus() )
         
         HydrusPaths.MakeSureDirectoryExists( test_dir )
         
         hydrus_png_path = os.path.join( HC.STATIC_DIR, 'hydrus.png' )
         
         HydrusPaths.MirrorFile( hydrus_png_path, os.path.join( test_dir, '0' ) )
         HydrusPaths.MirrorFile( hydrus_png_path, os.path.join( test_dir, '1' ) ) # previously imported
         HydrusPaths.MirrorFile( hydrus_png_path, os.path.join( test_dir, '2' ) )
         
         with open( os.path.join( test_dir, '3' ), 'wb' ) as f: f.write( b'blarg' ) # broken
         with open( os.path.join( test_dir, '4' ), 'wb' ) as f: f.write( b'blarg' ) # previously failed
         
         #
         
         actions = {}
         
         actions[ CC.STATUS_SUCCESSFUL_AND_NEW ] = CC.IMPORT_FOLDER_DELETE
         actions[ CC.STATUS_SUCCESSFUL_BUT_REDUNDANT ] = CC.IMPORT_FOLDER_DELETE
         actions[ CC.STATUS_DELETED ] = CC.IMPORT_FOLDER_DELETE
         actions[ CC.STATUS_ERROR ] = CC.IMPORT_FOLDER_IGNORE
         
         import_folder = ClientImportLocal.ImportFolder( 'imp', path = test_dir, actions = actions )
         
         HG.test_controller.SetRead( 'serialisable_names', [ 'imp' ] )
         HG.test_controller.SetRead( 'serialisable_named', import_folder )
         
         HG.test_controller.ClearWrites( 'import_file' )
         HG.test_controller.ClearWrites( 'serialisable' )
         
         ClientDaemons.DAEMONCheckImportFolders()
         
         import_file = HG.test_controller.GetWrite( 'import_file' )
         
         self.assertEqual( len( import_file ), 3 )
         
         # I need to expand tests here with the new file system
         
         [ ( ( updated_import_folder, ), empty_dict ) ] = HG.test_controller.GetWrite( 'serialisable' )
         
         self.assertEqual( updated_import_folder, import_folder )
         
         self.assertTrue( not os.path.exists( os.path.join( test_dir, '0' ) ) )
         self.assertTrue( not os.path.exists( os.path.join( test_dir, '1' ) ) )
         self.assertTrue( not os.path.exists( os.path.join( test_dir, '2' ) ) )
         self.assertTrue( os.path.exists( os.path.join( test_dir, '3' ) ) )
         self.assertTrue( os.path.exists( os.path.join( test_dir, '4' ) ) )
         
     finally:
         
         shutil.rmtree( test_dir )
Example #4
def GetExportPath():

    portable_path = HG.client_controller.options['export_path']

    if portable_path is None:

        path = os.path.join(os.path.expanduser('~'), 'hydrus_export')

        HydrusPaths.MakeSureDirectoryExists(path)

    else:

        path = HydrusPaths.ConvertPortablePathToAbsPath(portable_path)

    return path
Example #5
    def _test_repo(self, service):

        service_key = service.GetServiceKey()

        # num_petitions

        num_petitions = [[
            HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_STATUS_PETITIONED, 23
        ], [HC.CONTENT_TYPE_TAG_PARENTS, HC.CONTENT_STATUS_PENDING, 0]]

        HG.test_controller.SetRead('num_petitions', num_petitions)

        response = service.Request(HC.GET, 'num_petitions')

        self.assertEqual(response['num_petitions'], num_petitions)

        # petition

        action = HC.CONTENT_UPDATE_PETITION
        petitioner_account = HydrusNetwork.Account.GenerateUnknownAccount()
        reason = 'it sucks'
        contents = [
            HydrusNetwork.Content(
                HC.CONTENT_TYPE_FILES,
                [HydrusData.GenerateKey() for i in range(10)])
        ]

        petition = HydrusNetwork.Petition(action, petitioner_account, reason,
                                          contents)

        HG.test_controller.SetRead('petition', petition)

        response = service.Request(
            HC.GET, 'petition', {
                'content_type': HC.CONTENT_TYPE_FILES,
                'status': HC.CONTENT_UPDATE_PETITION
            })

        self.assertEqual(response['petition'].GetSerialisableTuple(),
                         petition.GetSerialisableTuple())

        # definitions

        definitions_update = HydrusNetwork.DefinitionsUpdate()

        for i in range(100, 200):

            definitions_update.AddRow(
                (HC.DEFINITIONS_TYPE_TAGS, i, 'series:test ' + str(i)))
            definitions_update.AddRow((HC.DEFINITIONS_TYPE_HASHES, i + 500,
                                       HydrusData.GenerateKey()))

        definitions_update_network_bytes = definitions_update.DumpToNetworkBytes()

        definitions_update_hash = hashlib.sha256(
            definitions_update_network_bytes).digest()

        path = ServerFiles.GetExpectedFilePath(definitions_update_hash)

        HydrusPaths.MakeSureDirectoryExists(os.path.dirname(path))

        with open(path, 'wb') as f:

            f.write(definitions_update_network_bytes)

        response = service.Request(HC.GET, 'update',
                                   {'update_hash': definitions_update_hash})

        try:
            os.remove(path)
        except:
            pass

        self.assertEqual(response, definitions_update_network_bytes)

        # content

        rows = [(random.randint(100, 1000),
                 [random.randint(100, 1000) for i in range(50)])
                for j in range(20)]

        content_update = HydrusNetwork.ContentUpdate()

        for row in rows:

            content_update.AddRow(
                (HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, row))

        content_update_network_bytes = content_update.DumpToNetworkBytes()

        content_update_hash = hashlib.sha256(
            content_update_network_bytes).digest()

        path = ServerFiles.GetExpectedFilePath(content_update_hash)

        # the parent directory may differ from the previous update's, so make
        # sure it exists before writing
        HydrusPaths.MakeSureDirectoryExists(os.path.dirname(path))

        with open(path, 'wb') as f:

            f.write(content_update_network_bytes)

        response = service.Request(HC.GET, 'update',
                                   {'update_hash': content_update_hash})

        try:
            os.remove(path)
        except:
            pass

        self.assertEqual(response, content_update_network_bytes)

        # metadata

        metadata = HydrusNetwork.Metadata()

        metadata.AppendUpdate([definitions_update_hash, content_update_hash],
                              HydrusData.GetNow() - 101000,
                              HydrusData.GetNow() - 1000,
                              HydrusData.GetNow() + 100000)

        service._metadata = metadata

        response = service.Request(HC.GET, 'metadata_slice', {'since': 0})

        self.assertEqual(response['metadata_slice'].GetSerialisableTuple(),
                         metadata.GetSerialisableTuple())

        # post content

        raise NotImplementedError()
Example #6
    def _test_file_repo(self, service):

        # file

        path = ServerFiles.GetExpectedFilePath(self._file_hash)

        HydrusPaths.MakeSureDirectoryExists(os.path.dirname(path))

        with open(path, 'wb') as f:

            f.write(EXAMPLE_FILE)

        response = service.Request(HC.GET, 'file', {'hash': self._file_hash})

        self.assertEqual(response, EXAMPLE_FILE)

        #

        try:
            os.remove(path)
        except:
            pass

        path = os.path.join(HC.STATIC_DIR, 'hydrus.png')

        with open(path, 'rb') as f:

            file_bytes = f.read()

        HG.test_controller.ClearWrites('file')

        service.Request(HC.POST, 'file', {'file': file_bytes})

        written = HG.test_controller.GetWrite('file')

        [(args, kwargs)] = written

        (written_service_key, written_account, written_file_dict) = args

        hash = b'\xadm5\x99\xa6\xc4\x89\xa5u\xeb\x19\xc0&\xfa\xce\x97\xa9\xcdey\xe7G(\xb0\xce\x94\xa6\x01\xd22\xf3\xc3'

        self.assertEqual(written_file_dict['hash'], hash)
        self.assertEqual(written_file_dict['ip'], '127.0.0.1')
        self.assertEqual(written_file_dict['height'], 200)
        self.assertEqual(written_file_dict['width'], 200)
        self.assertEqual(written_file_dict['mime'], 2)
        self.assertEqual(written_file_dict['size'], 5270)

        # ip

        (ip, timestamp) = ('94.45.87.123', HydrusData.GetNow() - 100000)

        HG.test_controller.SetRead('ip', (ip, timestamp))

        response = service.Request(HC.GET, 'ip', {'hash': self._file_hash})

        self.assertEqual(response['ip'], ip)
        self.assertEqual(response['timestamp'], timestamp)

        # account from hash

        subject_content = HydrusNetwork.Content(
            content_type=HC.CONTENT_TYPE_FILES, content_data=hash)

        subject_account_identifier = HydrusNetwork.AccountIdentifier(
            content=subject_content)

        HG.test_controller.SetRead('account', self._account)

        response = service.Request(
            HC.GET, 'other_account',
            {'subject_identifier': subject_account_identifier})

        self.assertEqual(repr(response['account']), repr(self._account))

        # thumbnail

        path = ServerFiles.GetExpectedThumbnailPath(self._file_hash)

        HydrusPaths.MakeSureDirectoryExists(os.path.dirname(path))

        with open(path, 'wb') as f:

            f.write(EXAMPLE_THUMBNAIL)

        response = service.Request(HC.GET, 'thumbnail',
                                   {'hash': self._file_hash})

        self.assertEqual(response, EXAMPLE_THUMBNAIL)

        try:
            os.remove(path)
        except:
            pass
Example #7
    if result.db_dir is None:  # assumed guard; the original snippet begins mid-function

        db_dir = HC.DEFAULT_DB_DIR

        if not HydrusPaths.DirectoryIsWritable(db_dir) or HC.RUNNING_FROM_MACOS_APP:

            db_dir = HC.USERPATH_DB_DIR

    else:

        db_dir = result.db_dir

    db_dir = HydrusPaths.ConvertPortablePathToAbsPath(db_dir, HC.BASE_DIR)

    try:

        HydrusPaths.MakeSureDirectoryExists(db_dir)

    except:

        raise Exception(
            'Could not ensure db path "{}" exists! Check the location is correct and that you have permission to write to it!'
            .format(db_dir))

    if not os.path.isdir(db_dir):

        raise Exception(
            'The given db path "{}" is not a directory!'.format(db_dir))

    if not HydrusPaths.DirectoryIsWritable(db_dir):

        raise Exception(
            # the snippet is truncated here; a message of this shape is assumed
            'The given db path "{}" is not writable-to! Check the location and permissions.'.format(db_dir))
Example #8
 def _DoExport( self ):
     
     query_hash_ids = HG.client_controller.Read( 'file_query_ids', self._file_search_context )
     
     media_results = []
     
     i = 0
     
     base = 256
     
     while i < len( query_hash_ids ):
         
         if HG.client_controller.new_options.GetBoolean( 'pause_export_folders_sync' ) or HydrusThreading.IsThreadShuttingDown():
             
             return
             
         
         if i == 0: ( last_i, i ) = ( 0, base )
         else: ( last_i, i ) = ( i, i + base )
         
         sub_query_hash_ids = query_hash_ids[ last_i : i ]
         
         more_media_results = HG.client_controller.Read( 'media_results_from_ids', sub_query_hash_ids )
         
         media_results.extend( more_media_results )
         
     
     media_results.sort( key = lambda mr: mr.GetHashId() )
     
     #
     
     terms = ParseExportPhrase( self._phrase )
     
     previous_paths = set()
     
     for ( root, dirnames, filenames ) in os.walk( self._path ):
         
         previous_paths.update( ( os.path.join( root, filename ) for filename in filenames ) )
         
     
     sync_paths = set()
     
     client_files_manager = HG.client_controller.client_files_manager
     
     num_copied = 0
     
     for media_result in media_results:
         
         if HG.client_controller.new_options.GetBoolean( 'pause_export_folders_sync' ) or HydrusThreading.IsThreadShuttingDown():
             
             return
             
         
         hash = media_result.GetHash()
         mime = media_result.GetMime()
         size = media_result.GetSize()
         
         try:
             
             source_path = client_files_manager.GetFilePath( hash, mime )
             
         except HydrusExceptions.FileMissingException:
             
              raise Exception( 'A file to be exported, hash "{}", was missing! You should run file maintenance (under database->maintenance->files) to check the files for the export folder\'s search, and possibly all your files.'.format( hash.hex() ) )
             
         
         filename = GenerateExportFilename( self._path, media_result, terms )
         
         dest_path = os.path.normpath( os.path.join( self._path, filename ) )
         
         if not dest_path.startswith( self._path ):
             
             raise Exception( 'It seems a destination path for export folder "{}" was above the main export directory! The file was "{}" and its destination path was "{}".'.format( self._path, hash.hex(), dest_path ) )
             
         
         dest_path_dir = os.path.dirname( dest_path )
         
         HydrusPaths.MakeSureDirectoryExists( dest_path_dir )
         
         if dest_path not in sync_paths:
             
             copied = HydrusPaths.MirrorFile( source_path, dest_path )
             
             if copied:
                 
                 num_copied += 1
                 
                 HydrusPaths.TryToGiveFileNicePermissionBits( dest_path )
                 
             
         
         sync_paths.add( dest_path )
         
     
     if num_copied > 0:
         
         HydrusData.Print( 'Export folder ' + self._name + ' exported ' + HydrusData.ToHumanInt( num_copied ) + ' files.' )
         
     
     if self._export_type == HC.EXPORT_FOLDER_TYPE_SYNCHRONISE:
         
         deletee_paths = previous_paths.difference( sync_paths )
         
         for deletee_path in deletee_paths:
             
             ClientPaths.DeletePath( deletee_path )
             
         
         deletee_dirs = set()
         
         for ( root, dirnames, filenames ) in os.walk( self._path, topdown = False ):
             
             if root == self._path:
                 
                 continue
                 
             
             no_files = len( filenames ) == 0
             
             useful_dirnames = [ dirname for dirname in dirnames if os.path.join( root, dirname ) not in deletee_dirs ]
             
             no_useful_dirs = len( useful_dirnames ) == 0
             
             if no_useful_dirs and no_files:
                 
                 deletee_dirs.add( root )
                 
             
         
         for deletee_dir in deletee_dirs:
             
             if os.path.exists( deletee_dir ):
                 
                 HydrusPaths.DeletePath( deletee_dir )
                 
             
         
         if len( deletee_paths ) > 0:
             
             HydrusData.Print( 'Export folder {} deleted {} files and {} folders.'.format( self._name, HydrusData.ToHumanInt( len( deletee_paths ) ), HydrusData.ToHumanInt( len( deletee_dirs ) ) ) )
             
         
     
     if self._delete_from_client_after_export:
         
         local_file_service_keys = HG.client_controller.services_manager.GetServiceKeys( ( HC.LOCAL_FILE_DOMAIN, ) )
         
         service_keys_to_deletee_hashes = collections.defaultdict( list )
         
         delete_lock_for_archived_files = HG.client_controller.new_options.GetBoolean( 'delete_lock_for_archived_files' )
         
         for media_result in media_results:
             
             if delete_lock_for_archived_files and not media_result.GetInbox():
                 
                 continue
                 
             
             hash = media_result.GetHash()
             
             deletee_service_keys = media_result.GetLocationsManager().GetCurrent().intersection( local_file_service_keys )
             
             for deletee_service_key in deletee_service_keys:
                 
                 service_keys_to_deletee_hashes[ deletee_service_key ].append( hash )
                 
             
         
         reason = 'Deleted after export to Export Folder "{}".'.format( self._path )
         
         for ( service_key, deletee_hashes ) in service_keys_to_deletee_hashes.items():
             
             chunks_of_hashes = HydrusData.SplitListIntoChunks( deletee_hashes, 64 )
             
             for chunk_of_hashes in chunks_of_hashes:
                 
                 content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, chunk_of_hashes, reason = reason )
                 
                 HG.client_controller.WriteSynchronous( 'content_updates', { service_key : [ content_update ] } )
Example #9
    def __init__(self, win, only_run):

        self.app = win
        self.win = win
        self.only_run = only_run

        self.db_dir = tempfile.mkdtemp()

        global DB_DIR

        DB_DIR = self.db_dir

        self._server_files_dir = os.path.join(self.db_dir, 'server_files')
        self._updates_dir = os.path.join(self.db_dir, 'test_updates')

        client_files_default = os.path.join(self.db_dir, 'client_files')

        HydrusPaths.MakeSureDirectoryExists(self._server_files_dir)
        HydrusPaths.MakeSureDirectoryExists(self._updates_dir)
        HydrusPaths.MakeSureDirectoryExists(client_files_default)

        HG.controller = self
        HG.client_controller = self
        HG.server_controller = self
        HG.test_controller = self

        self.db = self
        self.gui = self

        self.frame_splash_status = ClientGUISplash.FrameSplashStatus()

        self._call_to_threads = []

        self._pubsub = HydrusPubSub.HydrusPubSub(self, lambda o: True)

        self.new_options = ClientOptions.ClientOptions()

        HC.options = ClientDefaults.GetClientDefaultOptions()

        self.options = HC.options

        def show_text(text):
            pass

        HydrusData.ShowText = show_text

        self._reads = {}

        self._reads['local_booru_share_keys'] = []
        self._reads['messaging_sessions'] = []
        self._reads['options'] = ClientDefaults.GetClientDefaultOptions()
        self._reads['file_system_predicates'] = []
        self._reads['media_results'] = []

        self._param_reads = {}

        self.example_tag_repo_service_key = HydrusData.GenerateKey()

        services = []

        services.append(
            ClientServices.GenerateService(CC.LOCAL_BOORU_SERVICE_KEY,
                                           HC.LOCAL_BOORU, 'local booru'))
        services.append(
            ClientServices.GenerateService(CC.CLIENT_API_SERVICE_KEY,
                                           HC.CLIENT_API_SERVICE,
                                           'client api'))
        services.append(
            ClientServices.GenerateService(CC.COMBINED_LOCAL_FILE_SERVICE_KEY,
                                           HC.COMBINED_LOCAL_FILE,
                                           'all local files'))
        services.append(
            ClientServices.GenerateService(CC.LOCAL_FILE_SERVICE_KEY,
                                           HC.LOCAL_FILE_DOMAIN, 'my files'))
        services.append(
            ClientServices.GenerateService(CC.TRASH_SERVICE_KEY,
                                           HC.LOCAL_FILE_TRASH_DOMAIN,
                                           'trash'))
        services.append(
            ClientServices.GenerateService(CC.DEFAULT_LOCAL_TAG_SERVICE_KEY,
                                           HC.LOCAL_TAG, 'my tags'))
        services.append(
            ClientServices.GenerateService(self.example_tag_repo_service_key,
                                           HC.TAG_REPOSITORY,
                                           'example tag repo'))
        services.append(
            ClientServices.GenerateService(CC.COMBINED_TAG_SERVICE_KEY,
                                           HC.COMBINED_TAG, 'all known tags'))
        services.append(
            ClientServices.GenerateService(
                LOCAL_RATING_LIKE_SERVICE_KEY, HC.LOCAL_RATING_LIKE,
                'example local rating like service'))
        services.append(
            ClientServices.GenerateService(
                LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.LOCAL_RATING_NUMERICAL,
                'example local rating numerical service'))

        self._reads['services'] = services

        client_files_locations = {}

        for prefix in HydrusData.IterateHexPrefixes():

            for c in ('f', 't'):

                client_files_locations[c + prefix] = client_files_default

        self._reads['client_files_locations'] = client_files_locations

        self._reads['sessions'] = []
        self._reads['tag_parents'] = {}
        self._reads['tag_siblings_all_ideals'] = {}
        self._reads['in_inbox'] = False

        self._writes = collections.defaultdict(list)

        self._managers = {}

        self.column_list_manager = ClientGUIListManager.ColumnListManager()

        self.services_manager = ClientServices.ServicesManager(self)
        self.client_files_manager = ClientFiles.ClientFilesManager(self)

        self.parsing_cache = ClientCaches.ParsingCache()

        bandwidth_manager = ClientNetworkingBandwidth.NetworkBandwidthManager()
        session_manager = ClientNetworkingSessions.NetworkSessionManager()
        domain_manager = ClientNetworkingDomain.NetworkDomainManager()

        ClientDefaults.SetDefaultDomainManagerData(domain_manager)

        login_manager = ClientNetworkingLogin.NetworkLoginManager()

        self.network_engine = ClientNetworking.NetworkEngine(
            self, bandwidth_manager, session_manager, domain_manager,
            login_manager)

        self.CallToThreadLongRunning(self.network_engine.MainLoop)

        self.tag_display_manager = ClientTagsHandling.TagDisplayManager()

        self._managers['undo'] = ClientManagers.UndoManager(self)
        self.server_session_manager = HydrusSessions.HydrusSessionManagerServer()

        self.bitmap_manager = ClientManagers.BitmapManager(self)

        self.local_booru_manager = ClientCaches.LocalBooruCache(self)
        self.client_api_manager = ClientAPI.APIManager()

        self._cookies = {}

        self._job_scheduler = HydrusThreading.JobScheduler(self)

        self._job_scheduler.start()
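
Examples #3, #5 and #6 exercise this stub controller through SetRead, ClearWrites and GetWrite, which are not shown above. From the self._reads dict and the self._writes defaultdict(list) in the constructor, they presumably just serve canned read results and record write calls for later inspection; a rough sketch of that pattern (the method bodies are an assumption, not the real Hydrus test controller):

    def SetRead(self, name, value):

        # canned result that the next Read(name, ...) call will return
        self._reads[name] = value

    def Read(self, name, *args, **kwargs):

        return self._reads[name]

    def ClearWrites(self, name):

        if name in self._writes:

            del self._writes[name]

    def WriteSynchronous(self, name, *args, **kwargs):

        # record the call so a test can assert on it afterwards
        self._writes[name].append((args, kwargs))

    def GetWrite(self, name):

        # hand back everything recorded under this name and reset it
        writes = self._writes[name]

        del self._writes[name]

        return writes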
Example #10
 def _DoExport( self ):
     
     query_hash_ids = HG.client_controller.Read( 'file_query_ids', self._file_search_context )
     
     media_results = []
     
     i = 0
     
     base = 256
     
     while i < len( query_hash_ids ):
         
         if HC.options[ 'pause_export_folders_sync' ] or HydrusThreading.IsThreadShuttingDown():
             
             return
             
         
         if i == 0: ( last_i, i ) = ( 0, base )
         else: ( last_i, i ) = ( i, i + base )
         
         sub_query_hash_ids = query_hash_ids[ last_i : i ]
         
         more_media_results = HG.client_controller.Read( 'media_results_from_ids', sub_query_hash_ids )
         
         media_results.extend( more_media_results )
         
     
     media_results.sort( key = lambda mr: mr.GetHashId() )
     
     #
     
     terms = ParseExportPhrase( self._phrase )
     
     previous_paths = set()
     
     for ( root, dirnames, filenames ) in os.walk( self._path ):
         
         previous_paths.update( ( os.path.join( root, filename ) for filename in filenames ) )
         
     
     sync_paths = set()
     
     client_files_manager = HG.client_controller.client_files_manager
     
     num_copied = 0
     
     for media_result in media_results:
         
         if HC.options[ 'pause_export_folders_sync' ] or HydrusThreading.IsThreadShuttingDown():
             
             return
             
         
         hash = media_result.GetHash()
         mime = media_result.GetMime()
         size = media_result.GetSize()
         
         source_path = client_files_manager.GetFilePath( hash, mime )
         
         filename = GenerateExportFilename( self._path, media_result, terms )
         
         dest_path = os.path.normpath( os.path.join( self._path, filename ) )
         
         if not dest_path.startswith( self._path ):
             
             raise Exception( 'It seems a destination path for export folder "{}" was above the main export directory! The file was "{}" and its destination path was "{}".'.format( self._path, hash.hex(), dest_path ) )
             
         
         dest_path_dir = os.path.dirname( dest_path )
         
         HydrusPaths.MakeSureDirectoryExists( dest_path_dir )
         
         if dest_path not in sync_paths:
             
             copied = HydrusPaths.MirrorFile( source_path, dest_path )
             
             if copied:
                 
                 num_copied += 1
                 
                 HydrusPaths.MakeFileWritable( dest_path )
                 
             
         
         sync_paths.add( dest_path )
         
     
     if num_copied > 0:
         
         HydrusData.Print( 'Export folder ' + self._name + ' exported ' + HydrusData.ToHumanInt( num_copied ) + ' files.' )
         
     
     if self._export_type == HC.EXPORT_FOLDER_TYPE_SYNCHRONISE:
         
         deletee_paths = previous_paths.difference( sync_paths )
         
         for deletee_path in deletee_paths:
             
             ClientPaths.DeletePath( deletee_path )
             
         
         deletee_dirs = set()
         
         for ( root, dirnames, filenames ) in os.walk( self._path, topdown = False ):
             
             if root == self._path:
                 
                 continue
                 
             
             no_files = len( filenames ) == 0
             
             useful_dirnames = [ dirname for dirname in dirnames if os.path.join( root, dirname ) not in deletee_dirs ]
             
             no_useful_dirs = len( useful_dirnames ) == 0
             
             if no_useful_dirs and no_files:
                 
                 deletee_dirs.add( root )
                 
             
         
         for deletee_dir in deletee_dirs:
             
             if os.path.exists( deletee_dir ):
                 
                 HydrusPaths.DeletePath( deletee_dir )
                 
             
         
         if len( deletee_paths ) > 0:
             
             HydrusData.Print( 'Export folder {} deleted {} files and {} folders.'.format( self._name, HydrusData.ToHumanInt( len( deletee_paths ) ), HydrusData.ToHumanInt( len( deletee_dirs ) ) ) )
             
         
     
     if self._delete_from_client_after_export:
         
         deletee_hashes = { media_result.GetHash() for media_result in media_results }
         
         chunks_of_hashes = HydrusData.SplitListIntoChunks( deletee_hashes, 64 )
         
         reason = 'Deleted after export to Export Folder "{}".'.format( self._path )
         
         content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, chunk_of_hashes, reason = reason ) for chunk_of_hashes in chunks_of_hashes ]
         
         for content_update in content_updates:
             
             HG.client_controller.WriteSynchronous( 'content_updates', { CC.LOCAL_FILE_SERVICE_KEY : [ content_update ] } )
Example #11
        def do_it(directory, neighbouring_txt_tag_service_keys,
                  delete_afterwards, export_symlinks, quit_afterwards):

            job_key = ClientThreading.JobKey(cancellable=True)

            job_key.SetStatusTitle('file export')

            HG.client_controller.pub('message', job_key)

            pauser = HydrusData.BigJobPauser()

            for (index, (ordering_index, media, path)) in enumerate(to_do):

                if job_key.IsCancelled():

                    break

                try:

                    x_of_y = HydrusData.ConvertValueRangeToPrettyString(
                        index + 1, num_to_do)

                    job_key.SetVariable('popup_text_1',
                                        'Done {}'.format(x_of_y))
                    job_key.SetVariable('popup_gauge_1',
                                        (index + 1, num_to_do))

                    QP.CallAfter(qt_update_label, x_of_y)

                    hash = media.GetHash()
                    mime = media.GetMime()

                    path = os.path.normpath(path)

                    if not path.startswith(directory):

                        raise Exception(
                            'It seems a destination path was above the main export directory! The file was "{}" and its destination path was "{}".'
                            .format(hash.hex(), path))

                    path_dir = os.path.dirname(path)

                    HydrusPaths.MakeSureDirectoryExists(path_dir)

                    if export_tag_txts:

                        tags_manager = media.GetTagsManager()

                        tags = set()

                        for service_key in neighbouring_txt_tag_service_keys:

                            current_tags = tags_manager.GetCurrent(
                                service_key, ClientTags.TAG_DISPLAY_ACTUAL)

                            tags.update(current_tags)

                        tags = sorted(tags)

                        txt_path = path + '.txt'

                        with open(txt_path, 'w', encoding='utf-8') as f:

                            f.write(os.linesep.join(tags))

                    source_path = client_files_manager.GetFilePath(
                        hash, mime, check_file_exists=False)

                    if export_symlinks:

                        os.symlink(source_path, path)

                    else:

                        HydrusPaths.MirrorFile(source_path, path)

                        HydrusPaths.MakeFileWriteable(path)

                except:

                    QP.CallAfter(
                        QW.QMessageBox.information, self, 'Information',
                        'Encountered a problem while attempting to export file with index '
                        + str(ordering_index + 1) + ':' + os.linesep * 2 +
                        traceback.format_exc())

                    break

                pauser.Pause()

            if not job_key.IsCancelled() and delete_afterwards:

                QP.CallAfter(qt_update_label, 'deleting')

                delete_lock_for_archived_files = HG.client_controller.new_options.GetBoolean(
                    'delete_lock_for_archived_files')

                if delete_lock_for_archived_files:

                    deletee_hashes = {
                        media.GetHash()
                        for (ordering_index, media, path) in to_do
                        if not media.HasArchive()
                    }

                else:

                    deletee_hashes = {
                        media.GetHash()
                        for (ordering_index, media, path) in to_do
                    }

                chunks_of_hashes = HydrusData.SplitListIntoChunks(
                    deletee_hashes, 64)

                reason = 'Deleted after manual export to "{}".'.format(
                    directory)

                content_updates = [
                    HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                             HC.CONTENT_UPDATE_DELETE,
                                             chunk_of_hashes,
                                             reason=reason)
                    for chunk_of_hashes in chunks_of_hashes
                ]

                for content_update in content_updates:

                    HG.client_controller.WriteSynchronous(
                        'content_updates',
                        {CC.LOCAL_FILE_SERVICE_KEY: [content_update]})

            job_key.DeleteVariable('popup_gauge_1')
            job_key.SetVariable('popup_text_1', 'Done!')

            job_key.Finish()

            job_key.Delete(5)

            QP.CallAfter(qt_update_label, 'done!')

            time.sleep(1)

            QP.CallAfter(qt_update_label, 'export')

            QP.CallAfter(qt_done, quit_afterwards)
Example #12
    def _DoExport(self, quit_afterwards=False):

        delete_afterwards = self._delete_files_after_export.isChecked()
        export_symlinks = self._export_symlinks.isChecked() and not delete_afterwards

        if quit_afterwards:

            message = 'Export as shown?'

            if delete_afterwards:

                message += os.linesep * 2
                message += 'THE FILES WILL BE DELETED FROM THE CLIENT AFTERWARDS'

            result = ClientGUIDialogsQuick.GetYesNo(self, message)

            if result != QW.QDialog.Accepted:

                self.parentWidget().close()

                return

        elif delete_afterwards:

            message = 'THE FILES WILL BE DELETED FROM THE CLIENT AFTERWARDS'

            result = ClientGUIDialogsQuick.GetYesNo(self, message)

            if result != QW.QDialog.Accepted:

                return

        self._RefreshPaths()

        export_tag_txts = self._export_tag_txts.isChecked()

        if self._export_tag_txts.isChecked():

            neighbouring_txt_tag_service_keys = self._neighbouring_txt_tag_service_keys

        else:

            neighbouring_txt_tag_service_keys = []

        directory = self._directory_picker.GetPath()

        HydrusPaths.MakeSureDirectoryExists(directory)

        pattern = self._pattern.text()

        HG.client_controller.new_options.SetString('export_phrase', pattern)

        try:

            terms = ClientExporting.ParseExportPhrase(pattern)

        except Exception as e:

            QW.QMessageBox.critical(self, 'Error', str(e))

            return

        client_files_manager = HG.client_controller.client_files_manager

        self._export.setEnabled(False)

        to_do = self._paths.GetData()

        to_do = [(ordering_index, media, self._GetPath(media))
                 for (ordering_index, media) in to_do]

        num_to_do = len(to_do)

        def qt_update_label(text):

            if not QP.isValid(self) or not QP.isValid(
                    self._export) or not self._export:

                return

            self._export.setText(text)

        def qt_done(quit_afterwards):

            if not QP.isValid(self) or not QP.isValid(
                    self._export) or not self._export:

                return

            self._export.setEnabled(True)

            if quit_afterwards:

                QP.CallAfter(self.parentWidget().close)

        def do_it(directory, neighbouring_txt_tag_service_keys,
                  delete_afterwards, export_symlinks, quit_afterwards):

            job_key = ClientThreading.JobKey(cancellable=True)

            job_key.SetStatusTitle('file export')

            HG.client_controller.pub('message', job_key)

            pauser = HydrusData.BigJobPauser()

            for (index, (ordering_index, media, path)) in enumerate(to_do):

                if job_key.IsCancelled():

                    break

                try:

                    x_of_y = HydrusData.ConvertValueRangeToPrettyString(
                        index + 1, num_to_do)

                    job_key.SetVariable('popup_text_1',
                                        'Done {}'.format(x_of_y))
                    job_key.SetVariable('popup_gauge_1',
                                        (index + 1, num_to_do))

                    QP.CallAfter(qt_update_label, x_of_y)

                    hash = media.GetHash()
                    mime = media.GetMime()

                    path = os.path.normpath(path)

                    if not path.startswith(directory):

                        raise Exception(
                            'It seems a destination path was above the main export directory! The file was "{}" and its destination path was "{}".'
                            .format(hash.hex(), path))

                    path_dir = os.path.dirname(path)

                    HydrusPaths.MakeSureDirectoryExists(path_dir)

                    if export_tag_txts:

                        tags_manager = media.GetTagsManager()

                        tags = set()

                        for service_key in neighbouring_txt_tag_service_keys:

                            current_tags = tags_manager.GetCurrent(
                                service_key, ClientTags.TAG_DISPLAY_ACTUAL)

                            tags.update(current_tags)

                        tags = sorted(tags)

                        txt_path = path + '.txt'

                        with open(txt_path, 'w', encoding='utf-8') as f:

                            f.write(os.linesep.join(tags))

                    source_path = client_files_manager.GetFilePath(
                        hash, mime, check_file_exists=False)

                    if export_symlinks:

                        os.symlink(source_path, path)

                    else:

                        HydrusPaths.MirrorFile(source_path, path)

                        HydrusPaths.MakeFileWriteable(path)

                except:

                    QP.CallAfter(
                        QW.QMessageBox.information, self, 'Information',
                        'Encountered a problem while attempting to export file with index '
                        + str(ordering_index + 1) + ':' + os.linesep * 2 +
                        traceback.format_exc())

                    break

                pauser.Pause()

            if not job_key.IsCancelled() and delete_afterwards:

                QP.CallAfter(qt_update_label, 'deleting')

                delete_lock_for_archived_files = HG.client_controller.new_options.GetBoolean(
                    'delete_lock_for_archived_files')

                if delete_lock_for_archived_files:

                    deletee_hashes = {
                        media.GetHash()
                        for (ordering_index, media, path) in to_do
                        if not media.HasArchive()
                    }

                else:

                    deletee_hashes = {
                        media.GetHash()
                        for (ordering_index, media, path) in to_do
                    }

                chunks_of_hashes = HydrusData.SplitListIntoChunks(
                    deletee_hashes, 64)

                reason = 'Deleted after manual export to "{}".'.format(
                    directory)

                content_updates = [
                    HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                             HC.CONTENT_UPDATE_DELETE,
                                             chunk_of_hashes,
                                             reason=reason)
                    for chunk_of_hashes in chunks_of_hashes
                ]

                for content_update in content_updates:

                    HG.client_controller.WriteSynchronous(
                        'content_updates',
                        {CC.LOCAL_FILE_SERVICE_KEY: [content_update]})

            job_key.DeleteVariable('popup_gauge_1')
            job_key.SetVariable('popup_text_1', 'Done!')

            job_key.Finish()

            job_key.Delete(5)

            QP.CallAfter(qt_update_label, 'done!')

            time.sleep(1)

            QP.CallAfter(qt_update_label, 'export')

            QP.CallAfter(qt_done, quit_afterwards)

        HG.client_controller.CallToThread(do_it, directory,
                                          neighbouring_txt_tag_service_keys,
                                          delete_afterwards, export_symlinks,
                                          quit_afterwards)
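
Examples #11-#13 all follow the same threading discipline: the export loop runs off the Qt thread via HG.client_controller.CallToThread, and every UI touch (the button label, message boxes, closing the dialog) is marshalled back to the main thread with QP.CallAfter. A stripped-down sketch of that pattern, with hypothetical names (do_work, update_label, process, items) standing in for the real callables:

def do_work(update_label, items):

    # heavy work happens on the worker thread
    for (i, item) in enumerate(items):

        process(item)  # hypothetical per-item work

        # never touch Qt widgets from here; schedule the update on the
        # main thread instead
        QP.CallAfter(update_label, 'done {}'.format(i + 1))

    QP.CallAfter(update_label, 'export')

# fire and forget from the GUI thread; the dialog stays responsive
HG.client_controller.CallToThread(do_work, update_label, items)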
Example #13
        def do_it(directory, neighbouring_txt_tag_service_keys,
                  delete_afterwards, export_symlinks, quit_afterwards):

            pauser = HydrusData.BigJobPauser()

            for (index, (ordering_index, media)) in enumerate(to_do):

                try:

                    QP.CallAfter(
                        qt_update_label,
                        HydrusData.ConvertValueRangeToPrettyString(
                            index + 1, num_to_do))

                    hash = media.GetHash()
                    mime = media.GetMime()

                    path = self._GetPath(media)

                    path = os.path.normpath(path)

                    if not path.startswith(directory):

                        raise Exception(
                            'It seems a destination path was above the main export directory! The file was "{}" and its destination path was "{}".'
                            .format(hash.hex(), path))

                    path_dir = os.path.dirname(path)

                    HydrusPaths.MakeSureDirectoryExists(path_dir)

                    if export_tag_txts:

                        tags_manager = media.GetTagsManager()

                        tags = set()

                        for service_key in neighbouring_txt_tag_service_keys:

                            current_tags = tags_manager.GetCurrent(
                                service_key,
                                ClientTags.TAG_DISPLAY_SIBLINGS_AND_PARENTS)

                            tags.update(current_tags)

                        tags = sorted(tags)

                        txt_path = path + '.txt'

                        with open(txt_path, 'w', encoding='utf-8') as f:

                            f.write(os.linesep.join(tags))

                    source_path = client_files_manager.GetFilePath(
                        hash, mime, check_file_exists=False)

                    if export_symlinks:

                        os.symlink(source_path, path)

                    else:

                        HydrusPaths.MirrorFile(source_path, path)

                        HydrusPaths.MakeFileWritable(path)

                except:

                    QP.CallAfter(
                        QW.QMessageBox.information, self, 'Information',
                        'Encountered a problem while attempting to export file with index '
                        + str(ordering_index + 1) + ':' + os.linesep * 2 +
                        traceback.format_exc())

                    break

                pauser.Pause()

            if delete_afterwards:

                QP.CallAfter(qt_update_label, 'deleting')

                deletee_hashes = {
                    media.GetHash()
                    for (ordering_index, media) in to_do
                }

                chunks_of_hashes = HydrusData.SplitListIntoChunks(
                    deletee_hashes, 64)

                reason = 'Deleted after manual export to "{}".'.format(
                    directory)

                content_updates = [
                    HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                             HC.CONTENT_UPDATE_DELETE,
                                             chunk_of_hashes,
                                             reason=reason)
                    for chunk_of_hashes in chunks_of_hashes
                ]

                for content_update in content_updates:

                    HG.client_controller.WriteSynchronous(
                        'content_updates',
                        {CC.LOCAL_FILE_SERVICE_KEY: [content_update]})

            QP.CallAfter(qt_update_label, 'done!')

            time.sleep(1)

            QP.CallAfter(qt_update_label, 'export')

            QP.CallAfter(qt_done, quit_afterwards)