def DAEMONMaintainTrash( controller ):
    
    # Background daemon: keeps the trash service within the user's configured
    # size and age limits by deleting trashed files in small batches.
    # Returns early if the worker thread is shutting down or (for the size
    # pass) the trash runs out of files.
    
    if HC.options[ 'trash_max_size' ] is not None:
        
        # option value * 1048576 -> bytes; presumably the option is stored in MB — TODO confirm
        max_size = HC.options[ 'trash_max_size' ] * 1048576
        
        service_info = controller.Read( 'service_info', CC.TRASH_SERVICE_KEY )
        
        # delete batches of 10 until total trash size is within the limit
        while service_info[ HC.SERVICE_INFO_TOTAL_SIZE ] > max_size:
            
            if HydrusThreading.IsThreadShuttingDown():
                
                return
                
            
            hashes = controller.Read( 'trash_hashes', limit = 10 )
            
            if len( hashes ) == 0:
                
                return
                
            
            content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, hashes )
            
            service_keys_to_content_updates = { CC.TRASH_SERVICE_KEY : [ content_update ] }
            
            # be polite: wait for the model before issuing the write
            controller.WaitUntilModelFree()
            
            controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
            
            # re-read the size so the loop condition sees the deletion's effect
            service_info = controller.Read( 'service_info', CC.TRASH_SERVICE_KEY )
            
            time.sleep( 2 )
            
        
    
    if HC.options[ 'trash_max_age' ] is not None:
        
        # option value * 3600 -> seconds; presumably the option is stored in hours — TODO confirm
        max_age = HC.options[ 'trash_max_age' ] * 3600
        
        hashes = controller.Read( 'trash_hashes', limit = 10, minimum_age = max_age )
        
        # delete batches of 10 until no sufficiently old trashed files remain
        while len( hashes ) > 0:
            
            if HydrusThreading.IsThreadShuttingDown():
                
                return
                
            
            content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, hashes )
            
            service_keys_to_content_updates = { CC.TRASH_SERVICE_KEY : [ content_update ] }
            
            controller.WaitUntilModelFree()
            
            controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
            
            hashes = controller.Read( 'trash_hashes', limit = 10, minimum_age = max_age )
            
            time.sleep( 2 )
            
        
    
def test_dict_to_content_updates( self ):
    
    # Checks that ConvertServiceKeysToTagsToServiceKeysToContentUpdates maps
    # local-tag-service tags to ADD updates and other (remote) services to
    # PEND updates, for single- and multi-tag inputs.
    
    hash = HydrusData.GenerateKey()
    
    hashes = { hash }
    
    local_key = CC.LOCAL_TAG_SERVICE_KEY
    remote_key = HydrusData.GenerateKey()
    
    # local service -> ADD
    service_keys_to_tags = { local_key : { 'a' } }
    
    content_updates = { local_key : [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'a', hashes ) ) ] }
    
    self.assertEqual( ClientData.ConvertServiceKeysToTagsToServiceKeysToContentUpdates( { hash }, service_keys_to_tags ), content_updates )
    
    # remote service -> PEND
    service_keys_to_tags = { remote_key : { 'c' } }
    
    content_updates = { remote_key : [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_PEND, ( 'c', hashes ) ) ] }
    
    self.assertEqual( ClientData.ConvertServiceKeysToTagsToServiceKeysToContentUpdates( { hash }, service_keys_to_tags ), content_updates )
    
    # both services at once, multiple (and namespaced) tags each
    service_keys_to_tags = { local_key : [ 'a', 'character:b' ], remote_key : [ 'c', 'series:d' ] }
    
    content_updates = {}
    
    content_updates[ local_key ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'a', hashes ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'character:b', hashes ) ) ]
    content_updates[ remote_key ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_PEND, ( 'c', hashes ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_PEND, ( 'series:d', hashes ) ) ]
    
    # sanity check that ContentUpdate supports value equality before relying on it below
    self.assertEqual( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_PEND, 'c' ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_PEND, 'c' ) )
    
    self.assertEqual( ClientData.ConvertServiceKeysToTagsToServiceKeysToContentUpdates( { hash }, service_keys_to_tags ), content_updates )
    
def EditIPFSNotes(self):
    
    # For each selected IPFS share, prompt the user for a note and write the
    # updated directory row back to the service, then refresh the display.
    
    selected_rows = self._ipfs_shares.GetSelectedClientData()
    
    for (multihash, num_files, total_size, note) in selected_rows:
        
        with ClientGUIDialogs.DialogTextEntry(self, 'Set a note for ' + multihash + '.') as dlg:
            
            if dlg.ShowModal() != wx.ID_OK:
                
                continue
                
            
            hashes = self._controller.Read('service_directory', self._service_key, multihash)
            
            note = dlg.GetValue()
            
            update = HydrusData.ContentUpdate(HC.CONTENT_TYPE_DIRECTORIES, HC.CONTENT_UPDATE_ADD, (hashes, multihash, note))
            
            HydrusGlobals.client_controller.WriteSynchronous('content_updates', {self._service_key: [update]})
            
        
    
    self._DisplayService()
    
def GetContentUpdates(self):
    
    # Build rating content updates for every service whose rating control
    # differs from its recorded original state. Returns a dict of
    # service_key -> tuple of ContentUpdate.
    
    updates = {}
    
    # all hashes across the media being edited
    all_hashes = set(itertools.chain.from_iterable(media.GetHashes() for media in self._media))
    
    for (service_key, control) in self._service_keys_to_controls.items():
        
        new_state = control.GetRatingState()
        
        if new_state == self._service_keys_to_original_ratings_states[service_key]:
            
            continue  # unchanged: no update for this service
            
        
        if new_state == ClientRatings.LIKE:
            
            rating = 1
            
        elif new_state == ClientRatings.DISLIKE:
            
            rating = 0
            
        else:
            
            rating = None  # rating removed
            
        
        update = HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, (rating, all_hashes))
        
        updates[service_key] = (update, )
        
    
    return updates
    
def EventDeleteLocalDeleted(self, event):
    
    # Ask for confirmation, then clear this service's record of previously
    # deleted files (this affects 'exclude previously deleted files' import
    # tests) and refresh the display.
    
    paragraphs = [
        'This will clear the client\'s memory of which files it has locally deleted, which affects \'exclude previously deleted files\' import tests.',
        'It will freeze the gui while it works.',
        'If you do not know what this does, click \'forget it\'.'
    ]
    
    message = (os.linesep * 2).join(paragraphs)
    
    with ClientGUIDialogs.DialogYesNo(self, message, yes_label='do it', no_label='forget it') as dlg_add:
        
        if dlg_add.ShowModal() == wx.ID_YES:
            
            content_update = HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ADVANCED, ('delete_deleted', None))
            
            HydrusGlobals.client_controller.Write('content_updates', {self._service_key: [content_update]})
            
            self._DisplayService()
            
        
    
def _SetSelected(self, status_to_set):
    
    # Apply status_to_set to the selected file seeds. When resetting to
    # 'unknown', offer to erase deletion records first so the seeds can
    # import again instead of failing the 'previously deleted' test.
    
    selected_seeds = self._list_ctrl.GetData(only_selected=True)
    
    if status_to_set == CC.STATUS_UNKNOWN:
        
        clearable_seeds = [seed for seed in selected_seeds if seed.IsDeleted() and seed.HasHash()]
        
        if len(clearable_seeds) > 0:
            
            message = 'One or more of these files did not import due to being previously deleted. They will likely fail again unless you erase those deletion records. Would you like to do this now?'
            
            with ClientGUIDialogs.DialogYesNo(self, message) as dlg:
                
                if dlg.ShowModal() == wx.ID_YES:
                    
                    deletee_hashes = {seed.GetHash() for seed in clearable_seeds}
                    
                    # wipe the deletion record, then undelete anything still in the trash
                    erase_record_update = HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ADVANCED, ('delete_deleted', deletee_hashes))
                    undelete_update = HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_UNDELETE, deletee_hashes)
                    
                    updates = {CC.COMBINED_LOCAL_FILE_SERVICE_KEY: [erase_record_update, undelete_update]}
                    
                    HG.client_controller.WriteSynchronous('content_updates', updates)
                    
                
            
        
    
    for seed in selected_seeds:
        
        seed.SetStatus(status_to_set)
        
    
    self._file_seed_cache.NotifyFileSeedsUpdated(selected_seeds)
    
def do_it():
    
    # Delete everything currently in the trash, then refresh the display on
    # the gui thread.
    
    trash_hashes = self._controller.Read('trash_hashes')
    
    delete_update = HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, trash_hashes)
    
    self._controller.WriteSynchronous('content_updates', {CC.TRASH_SERVICE_KEY: [delete_update]})
    
    wx.CallAfter(self._DisplayService)
    
def ConvertServiceKeysToTagsToServiceKeysToContentUpdates(hashes, service_keys_to_tags):
    
    # Map each tag service to a list of mapping ContentUpdates for the given
    # hashes: ADD for the local tag service, PEND for anything else.
    
    result = {}
    
    for (service_key, tags) in service_keys_to_tags.items():
        
        action = HC.CONTENT_UPDATE_ADD if service_key == CC.LOCAL_TAG_SERVICE_KEY else HC.CONTENT_UPDATE_PEND
        
        content_updates = []
        
        for tag in tags:
            
            content_updates.append(HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, action, (tag, hashes)))
            
        
        result[service_key] = content_updates
        
    
    return result
    
def test_undo(self):
    
    # Exercises the UndoManager: adds content-update commands, walks
    # undo/redo, and verifies both the menu strings and the inverted updates
    # the manager writes to the test controller.
    
    hash_1 = HydrusData.GenerateKey()
    hash_2 = HydrusData.GenerateKey()
    hash_3 = HydrusData.GenerateKey()
    
    command_1 = {
        CC.COMBINED_LOCAL_FILE_SERVICE_KEY: [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ARCHIVE, {hash_1})
        ]
    }
    command_2 = {
        CC.COMBINED_LOCAL_FILE_SERVICE_KEY: [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_INBOX, {hash_2})
        ]
    }
    command_3 = {
        CC.COMBINED_LOCAL_FILE_SERVICE_KEY: [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ARCHIVE, {hash_1, hash_3})
        ]
    }
    
    # the inverses the manager is expected to write when undoing
    command_1_inverted = {
        CC.COMBINED_LOCAL_FILE_SERVICE_KEY: [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_INBOX, {hash_1})
        ]
    }
    command_2_inverted = {
        CC.COMBINED_LOCAL_FILE_SERVICE_KEY: [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ARCHIVE, {hash_2})
        ]
    }
    
    undo_manager = ClientCaches.UndoManager(HG.client_controller)
    
    #
    
    undo_manager.AddCommand('content_updates', command_1)
    
    self.assertEqual((u'undo archive 1 files', None), undo_manager.GetUndoRedoStrings())
    
    undo_manager.AddCommand('content_updates', command_2)
    
    self.assertEqual((u'undo inbox 1 files', None), undo_manager.GetUndoRedoStrings())
    
    undo_manager.Undo()
    
    self.assertEqual((u'undo archive 1 files', u'redo inbox 1 files'), undo_manager.GetUndoRedoStrings())
    # the undo should have written command_2's inverse
    self.assertEqual(HG.test_controller.GetWrite('content_updates'), [((command_2_inverted, ), {})])
    
    undo_manager.Redo()
    
    # the redo re-applies command_2 as-is
    self.assertEqual(HG.test_controller.GetWrite('content_updates'), [((command_2, ), {})])
    self.assertEqual((u'undo inbox 1 files', None), undo_manager.GetUndoRedoStrings())
    
    undo_manager.Undo()
    
    self.assertEqual(HG.test_controller.GetWrite('content_updates'), [((command_2_inverted, ), {})])
    
    undo_manager.Undo()
    
    self.assertEqual(HG.test_controller.GetWrite('content_updates'), [((command_1_inverted, ), {})])
    self.assertEqual((None, u'redo archive 1 files'), undo_manager.GetUndoRedoStrings())
    
    # adding a new command while fully undone replaces the redo stack
    undo_manager.AddCommand('content_updates', command_3)
    
    self.assertEqual((u'undo archive 2 files', None), undo_manager.GetUndoRedoStrings())
    
def do_it(neighbouring_txt_tag_service_keys, delete_afterwards, quit_afterwards):
    
    # Export worker (closure): copies each queued media file to its computed
    # export path, optionally writing a sidecar .txt of tags, then optionally
    # deletes the exported files from the client.
    # NOTE(review): relies on to_do, num_to_do, export_tag_txts,
    # client_files_manager, wx_update_label and wx_done from the enclosing
    # scope — confirm against the enclosing method.
    
    for (index, (ordering_index, media)) in enumerate(to_do):
        
        try:
            
            # progress label, e.g. '3/10', updated on the gui thread
            wx.CallAfter(
                wx_update_label,
                HydrusData.ConvertValueRangeToPrettyString(index + 1, num_to_do))
            
            hash = media.GetHash()
            mime = media.GetMime()
            
            path = self._GetPath(media)
            
            path_dir = os.path.dirname(path)
            
            HydrusPaths.MakeSureDirectoryExists(path_dir)
            
            if export_tag_txts:
                
                # gather this file's current tags across the chosen services,
                # collapsed through siblings and filtered through censorship
                tags_manager = media.GetTagsManager()
                
                tags = set()
                
                siblings_manager = HG.controller.GetManager('tag_siblings')
                tag_censorship_manager = HG.client_controller.GetManager('tag_censorship')
                
                for service_key in neighbouring_txt_tag_service_keys:
                    
                    current_tags = tags_manager.GetCurrent(service_key)
                    
                    current_tags = siblings_manager.CollapseTags(service_key, current_tags)
                    current_tags = tag_censorship_manager.FilterTags(service_key, current_tags)
                    
                    tags.update(current_tags)
                    
                
                tags = list(tags)
                
                tags.sort()
                
                # sidecar file next to the exported media
                txt_path = path + '.txt'
                
                with open(txt_path, 'wb') as f:
                    
                    f.write(HydrusData.ToByteString(os.linesep.join(tags)))
                    
                
            
            source_path = client_files_manager.GetFilePath(hash, mime, check_file_exists=False)
            
            HydrusPaths.MirrorFile(source_path, path)
            
            # best-effort: make the exported copy user-writable
            try:
                
                os.chmod(path, stat.S_IWRITE | stat.S_IREAD)
                
            except:
                
                pass
                
            
        except:
            
            wx.CallAfter(
                wx.MessageBox,
                'Encountered a problem while attempting to export file with index ' + str(ordering_index + 1) + ':' + os.linesep * 2 + traceback.format_exc())
            
            break  # abort the rest of the export on first failure
            
        
    
    if delete_afterwards:
        
        wx.CallAfter(wx_update_label, 'deleting')
        
        deletee_hashes = {media.GetHash() for (ordering_index, media) in to_do}
        
        # delete in chunks of 64 to keep each write small
        chunks_of_hashes = HydrusData.SplitListIntoChunks(deletee_hashes, 64)
        
        content_updates = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, chunk_of_hashes)
            for chunk_of_hashes in chunks_of_hashes
        ]
        
        for content_update in content_updates:
            
            HG.client_controller.WriteSynchronous('content_updates', {CC.LOCAL_FILE_SERVICE_KEY: [content_update]})
            
        
    
    wx.CallAfter(wx_update_label, 'done!')
    
    time.sleep(1)
    
    wx.CallAfter(wx_update_label, 'export')
    
    wx.CallAfter(wx_done, quit_afterwards)
    
def test_SERIALISABLE_TYPE_DUPLICATE_ACTION_OPTIONS(self):
    
    # Round-trips three DuplicateActionOptions configurations (move, copy,
    # two-way merge) through serialisation, then checks the content updates
    # ProcessPairIntoContentUpdates produces for a variety of media pairs.
    
    def test(obj, dupe_obj):
        
        # serialisation round-trip comparison callback
        self.assertEqual(obj.ToTuple(), dupe_obj.ToTuple())
        
    
    # move tags/ratings from the second file to the first, sync archive status
    duplicate_action_options_delete_and_move = ClientDuplicates.DuplicateActionOptions(
        [(CC.LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE, ClientTags.TagFilter())],
        [(TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE),
         (TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE)], True)
    
    # copy tags/ratings from the second file to the first, no archive sync
    duplicate_action_options_copy = ClientDuplicates.DuplicateActionOptions(
        [(CC.LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY, ClientTags.TagFilter())],
        [(TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY),
         (TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY)], False)
    
    # two-way merge of tags/ratings, no archive sync
    duplicate_action_options_merge = ClientDuplicates.DuplicateActionOptions(
        [(CC.LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE, ClientTags.TagFilter())],
        [(TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE),
         (TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE)], False)
    
    # shared file metadata for the fake media objects below
    inbox = True
    size = 40960
    mime = HC.IMAGE_JPEG
    width = 640
    height = 480
    duration = None
    num_frames = None
    num_words = None
    
    local_locations_manager = ClientMedia.LocationsManager(
        {CC.LOCAL_FILE_SERVICE_KEY, CC.COMBINED_LOCAL_FILE_SERVICE_KEY}, set(), set(), set(), inbox)
    trash_locations_manager = ClientMedia.LocationsManager(
        {CC.TRASH_SERVICE_KEY, CC.COMBINED_LOCAL_FILE_SERVICE_KEY}, set(), set(), set(), inbox)
    deleted_locations_manager = ClientMedia.LocationsManager(
        set(), {CC.COMBINED_LOCAL_FILE_SERVICE_KEY}, set(), set(), inbox)
    
    # duplicate to generate proper dicts
    
    one_tags_manager = ClientMedia.TagsManager({
        CC.LOCAL_TAG_SERVICE_KEY: {HC.CONTENT_STATUS_CURRENT: {'one'}}
    }).Duplicate()
    two_tags_manager = ClientMedia.TagsManager({
        CC.LOCAL_TAG_SERVICE_KEY: {HC.CONTENT_STATUS_CURRENT: {'two'}}
    }).Duplicate()
    substantial_tags_manager = ClientMedia.TagsManager({
        CC.LOCAL_TAG_SERVICE_KEY: {HC.CONTENT_STATUS_CURRENT: {'test tag', 'series:namespaced test tag'}}
    }).Duplicate()
    empty_tags_manager = ClientMedia.TagsManager({}).Duplicate()
    
    one_ratings_manager = ClientRatings.RatingsManager({
        TC.LOCAL_RATING_LIKE_SERVICE_KEY: 1.0,
        TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY: 0.8
    })
    two_ratings_manager = ClientRatings.RatingsManager({
        TC.LOCAL_RATING_LIKE_SERVICE_KEY: 0.0,
        TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY: 0.6
    })
    substantial_ratings_manager = ClientRatings.RatingsManager({
        TC.LOCAL_RATING_LIKE_SERVICE_KEY: 1.0,
        TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY: 0.8
    })
    empty_ratings_manager = ClientRatings.RatingsManager({})
    
    # local file with tags and ratings
    
    local_hash_has_values = HydrusData.GenerateKey()
    
    file_info_manager = ClientMedia.FileInfoManager(
        local_hash_has_values, size, mime, width, height, duration, num_frames, num_words)
    
    media_result = ClientMedia.MediaResult(file_info_manager, substantial_tags_manager, local_locations_manager, substantial_ratings_manager)
    
    local_media_has_values = ClientMedia.MediaSingleton(media_result)
    
    # a second local file with the same tags and ratings
    
    other_local_hash_has_values = HydrusData.GenerateKey()
    
    file_info_manager = ClientMedia.FileInfoManager(
        other_local_hash_has_values, size, mime, width, height, duration, num_frames, num_words)
    
    media_result = ClientMedia.MediaResult(file_info_manager, substantial_tags_manager, local_locations_manager, substantial_ratings_manager)
    
    other_local_media_has_values = ClientMedia.MediaSingleton(media_result)
    
    # local file with no tags or ratings
    
    local_hash_empty = HydrusData.GenerateKey()
    
    file_info_manager = ClientMedia.FileInfoManager(
        local_hash_empty, size, mime, width, height, duration, num_frames, num_words)
    
    media_result = ClientMedia.MediaResult(file_info_manager, empty_tags_manager, local_locations_manager, empty_ratings_manager)
    
    local_media_empty = ClientMedia.MediaSingleton(media_result)
    
    # trashed file with no tags or ratings
    
    trashed_hash_empty = HydrusData.GenerateKey()
    
    file_info_manager = ClientMedia.FileInfoManager(
        trashed_hash_empty, size, mime, width, height, duration, num_frames, num_words)
    
    media_result = ClientMedia.MediaResult(file_info_manager, empty_tags_manager, trash_locations_manager, empty_ratings_manager)
    
    trashed_media_empty = ClientMedia.MediaSingleton(media_result)
    
    # fully deleted file with no tags or ratings
    
    deleted_hash_empty = HydrusData.GenerateKey()
    
    file_info_manager = ClientMedia.FileInfoManager(
        deleted_hash_empty, size, mime, width, height, duration, num_frames, num_words)
    
    media_result = ClientMedia.MediaResult(file_info_manager, empty_tags_manager, deleted_locations_manager, empty_ratings_manager)
    
    deleted_media_empty = ClientMedia.MediaSingleton(media_result)
    
    # file tagged 'one'
    
    one_hash = HydrusData.GenerateKey()
    
    file_info_manager = ClientMedia.FileInfoManager(
        one_hash, size, mime, width, height, duration, num_frames, num_words)
    
    media_result = ClientMedia.MediaResult(file_info_manager, one_tags_manager, local_locations_manager, one_ratings_manager)
    
    one_media = ClientMedia.MediaSingleton(media_result)
    
    # file tagged 'two'
    
    two_hash = HydrusData.GenerateKey()
    
    file_info_manager = ClientMedia.FileInfoManager(
        two_hash, size, mime, width, height, duration, num_frames, num_words)
    
    media_result = ClientMedia.MediaResult(file_info_manager, two_tags_manager, local_locations_manager, two_ratings_manager)
    
    two_media = ClientMedia.MediaSingleton(media_result)
    
    # serialisation round-trips
    
    self._dump_and_load_and_test(duplicate_action_options_delete_and_move, test)
    self._dump_and_load_and_test(duplicate_action_options_copy, test)
    self._dump_and_load_and_test(duplicate_action_options_merge, test)
    
    #
    
    def assertSCUEqual(one, two):
        
        # compare service_keys_to_content_updates dicts order-insensitively
        self.assertEqual(
            TC.ConvertServiceKeysToContentUpdatesToComparable(one),
            TC.ConvertServiceKeysToContentUpdatesToComparable(two))
        
    
    # delete-and-move: empty second file just gets deleted from local files
    
    result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates(
        local_media_has_values, local_media_empty)
    
    scu = {}
    
    scu[CC.LOCAL_FILE_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, {local_hash_empty})
    ]
    
    assertSCUEqual(result[0], scu)
    
    # delete-and-move: trashed second file is deleted from the trash instead
    
    result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates(
        local_media_has_values, trashed_media_empty)
    
    scu = {}
    
    scu[CC.TRASH_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, {trashed_hash_empty})
    ]
    
    assertSCUEqual(result[0], scu)
    
    # delete-and-move: already-deleted second file produces no updates
    
    result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates(
        local_media_has_values, deleted_media_empty)
    
    self.assertEqual(result, [])
    
    # delete-and-move: both files have the same values, so the move only
    # strips the second file's tags/ratings and then deletes it
    
    result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates(
        local_media_has_values, other_local_media_has_values)
    
    scu = {}
    
    scu[CC.LOCAL_TAG_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ('test tag', {other_local_hash_has_values})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ('series:namespaced test tag', {other_local_hash_has_values}))
    ]
    scu[TC.LOCAL_RATING_LIKE_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, (None, {other_local_hash_has_values}))
    ]
    scu[TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, (None, {other_local_hash_has_values}))
    ]
    
    assertSCUEqual(result[0], scu)
    
    scu = {}
    
    scu[CC.LOCAL_FILE_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, {other_local_hash_has_values})
    ]
    
    assertSCUEqual(result[1], scu)
    
    # delete-and-move: empty first file receives the second's tags/ratings,
    # second is stripped and deleted
    
    result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates(
        local_media_empty, other_local_media_has_values)
    
    scu = {}
    
    scu[CC.LOCAL_TAG_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ('test tag', {local_hash_empty})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ('series:namespaced test tag', {local_hash_empty})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ('test tag', {other_local_hash_has_values})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ('series:namespaced test tag', {other_local_hash_has_values}))
    ]
    scu[TC.LOCAL_RATING_LIKE_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, (1.0, {local_hash_empty})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, (None, {other_local_hash_has_values}))
    ]
    scu[TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, (0.8, {local_hash_empty})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, (None, {other_local_hash_has_values}))
    ]
    
    assertSCUEqual(result[0], scu)
    
    scu = {}
    
    scu[CC.LOCAL_FILE_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, {other_local_hash_has_values})
    ]
    
    assertSCUEqual(result[1], scu)
    
    #
    # copy: nothing to copy when the second file is empty
    
    result = duplicate_action_options_copy.ProcessPairIntoContentUpdates(
        local_media_has_values, local_media_empty)
    
    self.assertEqual(result, [])
    
    # copy: empty first file receives the second's tags/ratings, no deletes
    
    result = duplicate_action_options_copy.ProcessPairIntoContentUpdates(
        local_media_empty, other_local_media_has_values)
    
    scu = {}
    
    scu[CC.LOCAL_TAG_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ('test tag', {local_hash_empty})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ('series:namespaced test tag', {local_hash_empty}))
    ]
    scu[TC.LOCAL_RATING_LIKE_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, (1.0, {local_hash_empty}))
    ]
    scu[TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, (0.8, {local_hash_empty}))
    ]
    
    assertSCUEqual(result[0], scu)
    
    #
    # merge: values flow to whichever side lacks them
    
    result = duplicate_action_options_merge.ProcessPairIntoContentUpdates(
        local_media_has_values, local_media_empty)
    
    scu = {}
    
    scu[CC.LOCAL_TAG_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ('test tag', {local_hash_empty})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ('series:namespaced test tag', {local_hash_empty}))
    ]
    scu[TC.LOCAL_RATING_LIKE_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, (1.0, {local_hash_empty}))
    ]
    scu[TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, (0.8, {local_hash_empty}))
    ]
    
    assertSCUEqual(result[0], scu)
    
    # merge: same expectation with the empty file first
    
    result = duplicate_action_options_merge.ProcessPairIntoContentUpdates(
        local_media_empty, other_local_media_has_values)
    
    scu = {}
    
    scu[CC.LOCAL_TAG_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ('test tag', {local_hash_empty})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ('series:namespaced test tag', {local_hash_empty}))
    ]
    scu[TC.LOCAL_RATING_LIKE_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, (1.0, {local_hash_empty}))
    ]
    scu[TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, (0.8, {local_hash_empty}))
    ]
    
    assertSCUEqual(result[0], scu)
    
    # merge: disjoint tags flow both ways
    
    result = duplicate_action_options_merge.ProcessPairIntoContentUpdates(
        one_media, two_media)
    
    scu = {}
    
    scu[CC.LOCAL_TAG_SERVICE_KEY] = [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ('one', {two_hash})),
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ('two', {one_hash}))
    ]
    
    assertSCUEqual(result[0], scu)
    
def ProcessPairIntoContentUpdates( self, first_media, second_media ):
    
    # Given a duplicate pair (first = the better file), produce the
    # service_keys_to_content_updates dict that applies this object's
    # configured tag/rating merge actions, archive sync, url sync, and
    # optional file deletion.
    
    service_keys_to_content_updates = collections.defaultdict( list )
    
    first_hashes = first_media.GetHashes()
    second_hashes = second_media.GetHashes()
    
    #
    
    services_manager = HG.client_controller.services_manager
    
    for ( service_key, action, tag_filter ) in self._tag_service_actions:
        
        content_updates = []
        
        try:
            
            service = services_manager.GetService( service_key )
            
        except HydrusExceptions.DataMissing:
            
            # the service no longer exists; skip its action
            continue
            
        
        service_type = service.GetServiceType()
        
        # local services apply immediately; repositories pend for upload
        if service_type == HC.LOCAL_TAG:
            
            add_content_action = HC.CONTENT_UPDATE_ADD
            
        elif service_type == HC.TAG_REPOSITORY:
            
            add_content_action = HC.CONTENT_UPDATE_PEND
            
        
        # NOTE(review): add_content_action is unbound for any other service
        # type — presumably tag actions are only ever configured for tag
        # services; confirm upstream validation.
        
        first_current_tags = first_media.GetTagsManager().GetCurrent( service_key )
        second_current_tags = second_media.GetTagsManager().GetCurrent( service_key )
        
        first_current_tags = tag_filter.Filter( first_current_tags )
        second_current_tags = tag_filter.Filter( second_current_tags )
        
        if action == HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE:
            
            # each side gains whatever the other has
            first_needs = second_current_tags.difference( first_current_tags )
            second_needs = first_current_tags.difference( second_current_tags )
            
            content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, add_content_action, ( tag, first_hashes ) ) for tag in first_needs ) )
            content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, add_content_action, ( tag, second_hashes ) ) for tag in second_needs ) )
            
        elif action == HC.CONTENT_MERGE_ACTION_COPY:
            
            # first file gains the second's tags; second is untouched
            first_needs = second_current_tags.difference( first_current_tags )
            
            content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, add_content_action, ( tag, first_hashes ) ) for tag in first_needs ) )
            
        elif service_type == HC.LOCAL_TAG and action == HC.CONTENT_MERGE_ACTION_MOVE:
            
            # move only makes sense locally: copy to first, then delete from second
            first_needs = second_current_tags.difference( first_current_tags )
            
            content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, add_content_action, ( tag, first_hashes ) ) for tag in first_needs ) )
            content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( tag, second_hashes ) ) for tag in second_current_tags ) )
            
        
        if len( content_updates ) > 0:
            
            service_keys_to_content_updates[ service_key ].extend( content_updates )
            
        
    
    for ( service_key, action ) in self._rating_service_actions:
        
        content_updates = []
        
        try:
            
            service = services_manager.GetService( service_key )
            
        except HydrusExceptions.DataMissing:
            
            continue
            
        
        first_current_value = first_media.GetRatingsManager().GetRating( service_key )
        second_current_value = second_media.GetRatingsManager().GetRating( service_key )
        
        if action == HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE:
            
            if first_current_value == second_current_value:
                
                continue
                
            
            # only fill in a missing rating; never overwrite an existing one
            if first_current_value is None and second_current_value is not None:
                
                content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( second_current_value, first_hashes ) ) )
                
            elif first_current_value is not None and second_current_value is None:
                
                content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( first_current_value, second_hashes ) ) )
                
            
        elif action == HC.CONTENT_MERGE_ACTION_COPY:
            
            if first_current_value == second_current_value:
                
                continue
                
            
            # one-way: first gains the second's rating if it has none
            if first_current_value is None and second_current_value is not None:
                
                content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( second_current_value, first_hashes ) ) )
                
            
        elif action == HC.CONTENT_MERGE_ACTION_MOVE:
            
            if second_current_value is not None:
                
                if first_current_value is None:
                    
                    content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( second_current_value, first_hashes ) ) )
                    
                
                # clearing a rating is expressed as an ADD of None
                content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, second_hashes ) ) )
                
            
        
        if len( content_updates ) > 0:
            
            service_keys_to_content_updates[ service_key ].extend( content_updates )
            
        
    
    #
    
    if self._sync_archive:
        
        # if exactly one side is archived, archive the other too
        if first_media.HasInbox() and second_media.HasArchive():
            
            content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ARCHIVE, first_hashes )
            
            service_keys_to_content_updates[ CC.COMBINED_LOCAL_FILE_SERVICE_KEY ].append( content_update )
            
        elif first_media.HasArchive() and second_media.HasInbox():
            
            content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ARCHIVE, second_hashes )
            
            service_keys_to_content_updates[ CC.COMBINED_LOCAL_FILE_SERVICE_KEY ].append( content_update )
            
        
    
    #
    
    if self._sync_urls_action is not None:
        
        first_urls = set( first_media.GetLocationsManager().GetURLs() )
        second_urls = set( second_media.GetLocationsManager().GetURLs() )
        
        content_updates = []
        
        if self._sync_urls_action == HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE:
            
            first_needs = second_urls.difference( first_urls )
            second_needs = first_urls.difference( second_urls )
            
            if len( first_needs ) > 0:
                
                content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_URLS, HC.CONTENT_UPDATE_ADD, ( first_needs, first_hashes ) ) )
                
            
            if len( second_needs ) > 0:
                
                content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_URLS, HC.CONTENT_UPDATE_ADD, ( second_needs, second_hashes ) ) )
                
            
        elif self._sync_urls_action == HC.CONTENT_MERGE_ACTION_COPY:
            
            first_needs = second_urls.difference( first_urls )
            
            if len( first_needs ) > 0:
                
                content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_URLS, HC.CONTENT_UPDATE_ADD, ( first_needs, first_hashes ) ) )
                
            
        
        if len( content_updates ) > 0:
            
            service_keys_to_content_updates[ CC.COMBINED_LOCAL_FILE_SERVICE_KEY ].extend( content_updates )
            
        
    
    #
    
    deletee_media = []
    
    if self._delete_second_file or self._delete_both_files:
        
        if self._delete_both_files:
            
            deletee_media.append( first_media )
            
        
        deletee_media.append( second_media )
        
    
    for media in deletee_media:
        
        current_locations = media.GetLocationsManager().GetCurrent()
        
        # delete from local files if present, else from the trash, else nowhere
        if CC.LOCAL_FILE_SERVICE_KEY in current_locations:
            
            deletee_service_key = CC.LOCAL_FILE_SERVICE_KEY
            
        elif CC.TRASH_SERVICE_KEY in current_locations:
            
            deletee_service_key = CC.TRASH_SERVICE_KEY
            
        else:
            
            deletee_service_key = None
            
        
        if deletee_service_key is not None:
            
            content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, media.GetHashes() )
            
            service_keys_to_content_updates[ deletee_service_key ].append( content_update )
            
        
    
    #
    
    return service_keys_to_content_updates
    
def DoWork(self):
    
    # Runs one pass of this export folder when its period has elapsed:
    # queries matching files, copies them into the folder (in pages of 256
    # media results), optionally deletes no-longer-matching files
    # (synchronise mode) and/or the originals from the client, then records
    # the check time and saves itself. Any exception pauses all export
    # folders and surfaces the error to the user.
    
    try:
        
        if HydrusData.TimeHasPassed(self._last_checked + self._period):
            
            folder_path = HydrusData.ToUnicode(self._path)
            
            if folder_path != '' and os.path.exists(folder_path) and os.path.isdir(folder_path):
                
                query_hash_ids = HG.client_controller.Read('file_query_ids', self._file_search_context)
                
                media_results = []
                
                i = 0
                
                base = 256  # page size for media-result fetches
                
                while i < len(query_hash_ids):
                    
                    # bail out promptly on pause or shutdown
                    if HC.options['pause_export_folders_sync'] or HydrusThreading.IsThreadShuttingDown():
                        
                        return
                        
                    
                    if i == 0:
                        
                        (last_i, i) = (0, base)
                        
                    else:
                        
                        (last_i, i) = (i, i + base)
                        
                    
                    sub_query_hash_ids = query_hash_ids[last_i:i]
                    
                    more_media_results = HG.client_controller.Read('media_results_from_ids', sub_query_hash_ids)
                    
                    media_results.extend(more_media_results)
                    
                
                #
                
                terms = ParseExportPhrase(self._phrase)
                
                previous_filenames = set(os.listdir(folder_path))
                
                sync_filenames = set()
                
                client_files_manager = HG.client_controller.client_files_manager
                
                num_copied = 0
                
                for media_result in media_results:
                    
                    if HC.options['pause_export_folders_sync'] or HydrusThreading.IsThreadShuttingDown():
                        
                        return
                        
                    
                    hash = media_result.GetHash()
                    mime = media_result.GetMime()
                    size = media_result.GetSize()
                    
                    source_path = client_files_manager.GetFilePath(hash, mime)
                    
                    filename = GenerateExportFilename(folder_path, media_result, terms)
                    
                    dest_path = os.path.join(folder_path, filename)
                    
                    dest_path_dir = os.path.dirname(dest_path)
                    
                    HydrusPaths.MakeSureDirectoryExists(dest_path_dir)
                    
                    # only copy the first file that maps to a given filename
                    if filename not in sync_filenames:
                        
                        copied = HydrusPaths.MirrorFile(source_path, dest_path)
                        
                        if copied:
                            
                            num_copied += 1
                            
                            # best-effort: make the exported copy user-writable
                            try:
                                
                                os.chmod(dest_path, stat.S_IWRITE | stat.S_IREAD)
                                
                            except:
                                
                                pass
                                
                            
                        
                    
                    sync_filenames.add(filename)
                    
                
                if num_copied > 0:
                    
                    HydrusData.Print('Export folder ' + self._name + ' exported ' + HydrusData.ToHumanInt(num_copied) + ' files.')
                    
                
                if self._export_type == HC.EXPORT_FOLDER_TYPE_SYNCHRONISE:
                    
                    # remove anything in the folder that this pass did not export
                    deletee_filenames = previous_filenames.difference(sync_filenames)
                    
                    for deletee_filename in deletee_filenames:
                        
                        deletee_path = os.path.join(folder_path, deletee_filename)
                        
                        ClientPaths.DeletePath(deletee_path)
                        
                    
                    if len(deletee_filenames) > 0:
                        
                        HydrusData.Print('Export folder ' + self._name + ' deleted ' + HydrusData.ToHumanInt(len(deletee_filenames)) + ' files.')
                        
                    
                
                if self._delete_from_client_after_export:
                    
                    deletee_hashes = {media_result.GetHash() for media_result in media_results}
                    
                    # delete in chunks of 64 to keep each write small
                    chunks_of_hashes = HydrusData.SplitListIntoChunks(deletee_hashes, 64)
                    
                    content_updates = [
                        HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, chunk_of_hashes)
                        for chunk_of_hashes in chunks_of_hashes
                    ]
                    
                    for content_update in content_updates:
                        
                        HG.client_controller.WriteSynchronous('content_updates', {CC.LOCAL_FILE_SERVICE_KEY: [content_update]})
                        
                    
                
            
        
    except Exception as e:
        
        # any failure pauses all export folders so the problem cannot repeat silently
        HG.client_controller.options['pause_export_folders_sync'] = True
        
        HydrusData.ShowText(
            'The export folder "' + self._name + '" encountered an error! The error will follow! All export folders have now been paused. Please check the folder\'s settings and maybe report to hydrus dev if the error is complicated!'
        )
        
        HydrusData.ShowException(e)
        
    
    # record the check time and persist, even after an error
    self._last_checked = HydrusData.GetNow()
    
    HG.client_controller.WriteSynchronous('serialisable', self)
    