def THREADFetchUpdateProgress(self):
    """Worker-thread task: read repository sync progress and push it to the UI.

    Reads ( download_value, processing_value, value_range ) for this panel's
    service from the client db, formats the two progress strings, and
    marshals everything back to the main thread via wx.CallAfter.
    """
    # Fix: the third element was previously bound to the name 'range',
    # shadowing the builtin.
    (download_value, processing_value, value_range) = HydrusGlobals.client_controller.Read('repository_progress', self._service.GetServiceKey())
    download_text = 'downloaded ' + HydrusData.ConvertValueRangeToPrettyString(download_value, value_range)
    processing_text = 'processed ' + HydrusData.ConvertValueRangeToPrettyString(processing_value, value_range)
    # never touch wx widgets from a worker thread -- hand off to the main loop
    wx.CallAfter(self._UpdateFromThread, download_text, download_value, processing_text, processing_value, value_range)
def GetBandwidthStringsAndGaugeTuples(self, bandwidth_tracker, threshold=600):
    """Build display rows for this rule set against a bandwidth tracker.

    bandwidth_tracker: object exposing GetUsage( bandwidth_type, time_delta ).
    threshold: rules whose window is this many seconds or shorter are hidden.

    Returns a list of ( description_string, ( usage, max_allowed ) ) tuples,
    ordered by window length (a None window, meaning 'this month', sorts
    first under py2's None-before-int ordering).
    """
    with self._lock:
        rows = []
        # Fix: the sort key was a Python-2-only tuple-parameter-unpacking
        # def ('def key((bandwidth_type, time_delta, max_allowed)): ...'),
        # a construct removed in Python 3 (PEP 3113). Indexing the rule
        # tuple behaves identically and is forward-compatible.
        rules_sorted = sorted(self._rules, key=lambda rule: rule[1])
        for (bandwidth_type, time_delta, max_allowed) in rules_sorted:
            # skip short-window rules and zero-allowance rules
            # (presumably max_allowed == 0 is a 'nothing allowed' sentinel -- confirm)
            time_is_less_than_threshold = time_delta is not None and time_delta <= threshold
            if time_is_less_than_threshold or max_allowed == 0:
                continue
            usage = bandwidth_tracker.GetUsage(bandwidth_type, time_delta)
            s = 'used '
            if bandwidth_type == HC.BANDWIDTH_TYPE_DATA:
                s += HydrusData.ConvertValueRangeToBytes(usage, max_allowed)
            elif bandwidth_type == HC.BANDWIDTH_TYPE_REQUESTS:
                s += HydrusData.ConvertValueRangeToPrettyString(usage, max_allowed) + ' requests'
            if time_delta is None:
                # None window means the monthly allowance
                s += ' this month'
            else:
                s += ' in the past ' + HydrusData.TimeDeltaToPrettyTimeDelta(time_delta)
            rows.append((s, (usage, max_allowed)))
        return rows
def _Update(self):
    """Refresh the summary text, progress text, and gauge from the seed cache."""
    cache = self._seed_cache
    if cache is None:
        # no cache yet: blank everything and grey out the review button
        self._import_summary_st.SetLabelText('')
        self._progress_st.SetLabelText('')
        self._progress_gauge.SetRange(1)
        self._progress_gauge.SetValue(0)
        if self._seed_cache_button.IsEnabled():
            self._seed_cache_button.Disable()
        return
    (import_summary, (num_done, num_to_do)) = cache.GetStatus()
    self._import_summary_st.SetLabelText(import_summary)
    if num_to_do == 0:
        progress_text = ''
    else:
        progress_text = HydrusData.ConvertValueRangeToPrettyString(num_done, num_to_do)
    self._progress_st.SetLabelText(progress_text)
    self._progress_gauge.SetRange(num_to_do)
    self._progress_gauge.SetValue(num_done)
    if not self._seed_cache_button.IsEnabled():
        self._seed_cache_button.Enable()
def do_it(neighbouring_txt_tag_service_keys, delete_afterwards, quit_afterwards):
    # Worker-thread body for the export job. Relies on closure variables
    # from the enclosing scope: to_do (ordered (ordering_index, media)
    # pairs), num_to_do, export_tag_txts, client_files_manager, self,
    # wx_update_label and wx_done -- TODO confirm against the enclosing method.
    for (index, (ordering_index, media)) in enumerate(to_do):
        try:
            # progress label, e.g. '3 of 10' -- marshalled to the wx main thread
            wx.CallAfter(wx_update_label, HydrusData.ConvertValueRangeToPrettyString(index + 1, num_to_do))
            hash = media.GetHash()
            mime = media.GetMime()
            path = self._GetPath(media)
            path_dir = os.path.dirname(path)
            HydrusPaths.MakeSureDirectoryExists(path_dir)
            if export_tag_txts:
                # write a sidecar .txt of the media's tags, after sibling
                # collapse and censorship filtering, for each chosen service
                tags_manager = media.GetTagsManager()
                tags = set()
                siblings_manager = HG.controller.GetManager('tag_siblings')
                tag_censorship_manager = HG.client_controller.GetManager('tag_censorship')
                for service_key in neighbouring_txt_tag_service_keys:
                    current_tags = tags_manager.GetCurrent(service_key)
                    current_tags = siblings_manager.CollapseTags(service_key, current_tags)
                    current_tags = tag_censorship_manager.FilterTags(service_key, current_tags)
                    tags.update(current_tags)
                tags = list(tags)
                tags.sort()
                txt_path = path + '.txt'
                with open(txt_path, 'wb') as f:
                    f.write(HydrusData.ToByteString(os.linesep.join(tags)))
            source_path = client_files_manager.GetFilePath(hash, mime, check_file_exists=False)
            HydrusPaths.MirrorFile(source_path, path)
            try:
                # best-effort: make the exported copy user-writable; some
                # filesystems won't allow this, so failures are ignored
                os.chmod(path, stat.S_IWRITE | stat.S_IREAD)
            except:
                pass
        except:
            # report the first failure to the user and stop exporting
            wx.CallAfter(wx.MessageBox, 'Encountered a problem while attempting to export file with index ' + str(ordering_index + 1) + ':' + os.linesep * 2 + traceback.format_exc())
            break
    if delete_afterwards:
        # delete the originals from the local file service in chunks of 64
        wx.CallAfter(wx_update_label, 'deleting')
        deletee_hashes = {media.GetHash() for (ordering_index, media) in to_do}
        chunks_of_hashes = HydrusData.SplitListIntoChunks(deletee_hashes, 64)
        content_updates = [HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, chunk_of_hashes) for chunk_of_hashes in chunks_of_hashes]
        for content_update in content_updates:
            HG.client_controller.WriteSynchronous('content_updates', {CC.LOCAL_FILE_SERVICE_KEY: [content_update]})
    wx.CallAfter(wx_update_label, 'done!')
    # let 'done!' be visible for a moment before resetting the button label
    time.sleep(1)
    wx.CallAfter(wx_update_label, 'export')
    wx.CallAfter(wx_done, quit_afterwards)
def _ImportFiles(self, job_key):
    """Import every unknown file seed in this import folder's cache.

    Runs until the cache has no more unknown seeds or the job is paused,
    cancelled, or the thread is shutting down. Applies tag import options
    and filename-derived tags, publishes presented hashes, and actions
    (moves/deletes) source paths periodically. Returns True if any seed
    was attempted.
    """
    did_work = False
    # persist our own state to the db at most every ten minutes
    time_to_save = HydrusData.GetNow() + 600
    num_files_imported = 0
    presentation_hashes = []
    presentation_hashes_fast = set()  # set mirror of the list for O(1) dedupe
    i = 0
    num_total = len(self._file_seed_cache)
    num_total_unknown = self._file_seed_cache.GetFileSeedCount(CC.STATUS_UNKNOWN)
    num_total_done = num_total - num_total_unknown
    while True:
        file_seed = self._file_seed_cache.GetNextFileSeed(CC.STATUS_UNKNOWN)
        # stop conditions: nothing left, global/local pause, shutdown, cancel
        p1 = HC.options['pause_import_folders_sync'] or self._paused
        p2 = HydrusThreading.IsThreadShuttingDown()
        p3 = job_key.IsCancelled()
        if file_seed is None or p1 or p2 or p3:
            break
        did_work = True
        if HydrusData.TimeHasPassed(time_to_save):
            HG.client_controller.WriteSynchronous('serialisable', self)
            time_to_save = HydrusData.GetNow() + 600
        gauge_num_done = num_total_done + num_files_imported + 1
        job_key.SetVariable('popup_text_1', 'importing file ' + HydrusData.ConvertValueRangeToPrettyString(gauge_num_done, num_total))
        job_key.SetVariable('popup_gauge_1', (gauge_num_done, num_total))
        # for an import folder, the seed data is the file's path on disk
        path = file_seed.file_seed_data
        file_seed.ImportPath(self._file_seed_cache, self._file_import_options, limited_mimes=self._mimes)
        if file_seed.status in CC.SUCCESSFUL_IMPORT_STATES:
            if file_seed.HasHash():
                hash = file_seed.GetHash()
                if self._tag_import_options.HasAdditionalTags():
                    in_inbox = HG.client_controller.Read('in_inbox', hash)
                    downloaded_tags = []
                    service_keys_to_content_updates = self._tag_import_options.GetServiceKeysToContentUpdates(file_seed.status, in_inbox, hash, downloaded_tags)  # additional tags
                    if len(service_keys_to_content_updates) > 0:
                        HG.client_controller.WriteSynchronous('content_updates', service_keys_to_content_updates)
                # parse tags out of the filename per each service's options
                service_keys_to_tags = {}
                for (tag_service_key, filename_tagging_options) in self._tag_service_keys_to_filename_tagging_options.items():
                    if not HG.client_controller.services_manager.ServiceExists(tag_service_key):
                        continue
                    try:
                        tags = filename_tagging_options.GetTags(tag_service_key, path)
                        if len(tags) > 0:
                            service_keys_to_tags[tag_service_key] = tags
                    except Exception as e:
                        # a bad parse shouldn't kill the whole folder sync
                        HydrusData.ShowText('Trying to parse filename tags in the import folder "' + self._name + '" threw an error!')
                        HydrusData.ShowException(e)
                if len(service_keys_to_tags) > 0:
                    service_keys_to_content_updates = ClientData.ConvertServiceKeysToTagsToServiceKeysToContentUpdates({hash}, service_keys_to_tags)
                    HG.client_controller.WriteSynchronous('content_updates', service_keys_to_content_updates)
                num_files_imported += 1
                if hash not in presentation_hashes_fast:
                    if file_seed.ShouldPresent(self._file_import_options):
                        presentation_hashes.append(hash)
                        presentation_hashes_fast.add(hash)
        elif file_seed.status == CC.STATUS_ERROR:
            HydrusData.Print('A file failed to import from import folder ' + self._name + ':' + path)
        i += 1
        # action (move/delete) handled source paths every ten seeds
        if i % 10 == 0:
            self._ActionPaths()
    if num_files_imported > 0:
        HydrusData.Print('Import folder ' + self._name + ' imported ' + HydrusData.ToHumanInt(num_files_imported) + ' files.')
        if len(presentation_hashes) > 0:
            ClientImporting.PublishPresentationHashes(self._name, presentation_hashes, self._publish_files_to_popup_button, self._publish_files_to_page)
    self._ActionPaths()
    return did_work
def GetUnicode(self, with_count=True, sibling_service_key=None, render_for_user=False):
    """Render this predicate as a human-readable unicode label.

    with_count: append current/pending count suffixes like ' (12)' / ' (+3)'.
    sibling_service_key: for tag predicates, also show the sibling the tag
        will display as, looked up against this service.
    render_for_user: passed through to ClientTags.RenderTag.

    Returns the label string. NOTE(review): if self._predicate_type is in
    HC.SYSTEM_PREDICATES but matches none of the branches below, 'base' is
    referenced before assignment -- presumably all system types are covered.
    """
    count_text = u''
    if with_count:
        # current count, shown as 'n' or 'n-m' when only a range is known
        if self._min_current_count > 0:
            number_text = HydrusData.ConvertIntToPrettyString(self._min_current_count)
            if self._max_current_count is not None:
                number_text += u'-' + HydrusData.ConvertIntToPrettyString(self._max_current_count)
            count_text += u' (' + number_text + u')'
        # pending count, shown with a '+'
        if self._min_pending_count > 0:
            number_text = HydrusData.ConvertIntToPrettyString(self._min_pending_count)
            if self._max_pending_count is not None:
                number_text += u'-' + HydrusData.ConvertIntToPrettyString(self._max_pending_count)
            count_text += u' (+' + number_text + u')'
    if self._predicate_type in HC.SYSTEM_PREDICATES:
        # system predicates: build 'base', then wrap as a system: tag below
        if self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_EVERYTHING:
            base = u'everything'
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_INBOX:
            base = u'inbox'
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_ARCHIVE:
            base = u'archive'
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_UNTAGGED:
            base = u'untagged'
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_LOCAL:
            base = u'local'
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_NOT_LOCAL:
            base = u'not local'
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_DIMENSIONS:
            base = u'dimensions'
        elif self._predicate_type in (HC.PREDICATE_TYPE_SYSTEM_NUM_TAGS, HC.PREDICATE_TYPE_SYSTEM_WIDTH, HC.PREDICATE_TYPE_SYSTEM_HEIGHT, HC.PREDICATE_TYPE_SYSTEM_NUM_WORDS):
            # the simple '<name> <op> <int>' family
            if self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_NUM_TAGS:
                base = u'number of tags'
            elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_WIDTH:
                base = u'width'
            elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_HEIGHT:
                base = u'height'
            elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_NUM_WORDS:
                base = u'number of words'
            if self._value is not None:
                (operator, value) = self._value
                base += u' ' + operator + u' ' + HydrusData.ConvertIntToPrettyString(value)
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_DURATION:
            base = u'duration'
            if self._value is not None:
                (operator, value) = self._value
                base += u' ' + operator + u' ' + HydrusData.ConvertMillisecondsToPrettyTime(value)
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_RATIO:
            base = u'ratio'
            if self._value is not None:
                (operator, ratio_width, ratio_height) = self._value
                base += u' ' + operator + u' ' + str(ratio_width) + u':' + str(ratio_height)
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_SIZE:
            base = u'size'
            if self._value is not None:
                (operator, size, unit) = self._value
                base += u' ' + operator + u' ' + str(size) + HydrusData.ConvertIntToUnit(unit)
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_LIMIT:
            base = u'limit'
            if self._value is not None:
                value = self._value
                base += u' is ' + HydrusData.ConvertIntToPrettyString(value)
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_AGE:
            base = u'age'
            if self._value is not None:
                (operator, years, months, days, hours) = self._value
                base += u' ' + operator + u' ' + str(years) + u'y' + str(months) + u'm' + str(days) + u'd' + str(hours) + u'h'
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_NUM_PIXELS:
            base = u'num_pixels'
            if self._value is not None:
                (operator, num_pixels, unit) = self._value
                base += u' ' + operator + u' ' + str(num_pixels) + ' ' + HydrusData.ConvertIntToPixels(unit)
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_HASH:
            base = u'hash'
            if self._value is not None:
                # note: replaces base entirely rather than appending
                (hash, hash_type) = self._value
                base = hash_type + ' hash is ' + hash.encode('hex')
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_MIME:
            base = u'mime'
            if self._value is not None:
                mimes = self._value
                # collapse well-known mime groups to a single word
                if set(mimes) == set(HC.SEARCHABLE_MIMES):
                    mime_text = 'anything'
                elif set(mimes) == set(HC.SEARCHABLE_MIMES).intersection(set(HC.APPLICATIONS)):
                    mime_text = 'application'
                elif set(mimes) == set(HC.SEARCHABLE_MIMES).intersection(set(HC.AUDIO)):
                    mime_text = 'audio'
                elif set(mimes) == set(HC.SEARCHABLE_MIMES).intersection(set(HC.IMAGES)):
                    mime_text = 'image'
                elif set(mimes) == set(HC.SEARCHABLE_MIMES).intersection(set(HC.VIDEO)):
                    mime_text = 'video'
                else:
                    mime_text = ', '.join([HC.mime_string_lookup[mime] for mime in mimes])
                base += u' is ' + mime_text
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_RATING:
            base = u'rating'
            if self._value is not None:
                (operator, value, service_key) = self._value
                service = HG.client_controller.services_manager.GetService(service_key)
                service_type = service.GetServiceType()
                pretty_value = HydrusData.ToUnicode(value)
                if service_type == HC.LOCAL_RATING_LIKE:
                    if value == 0:
                        pretty_value = 'dislike'
                    elif value == 1:
                        pretty_value = 'like'
                elif service_type == HC.LOCAL_RATING_NUMERICAL:
                    if isinstance(value, float):
                        # convert the 0.0-1.0 rating back into 'x/y stars'
                        allow_zero = service.AllowZero()
                        num_stars = service.GetNumStars()
                        if allow_zero:
                            star_range = num_stars
                        else:
                            star_range = num_stars - 1
                        pretty_x = int(round(value * star_range))
                        pretty_y = num_stars
                        if not allow_zero:
                            pretty_x += 1
                        pretty_value = HydrusData.ConvertValueRangeToPrettyString(pretty_x, pretty_y)
                base += u' for ' + service.GetName() + u' ' + operator + u' ' + pretty_value
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_SIMILAR_TO:
            base = u'similar to'
            if self._value is not None:
                (hash, max_hamming) = self._value
                base += u' ' + hash.encode('hex') + u' using max hamming of ' + str(max_hamming)
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_FILE_SERVICE:
            if self._value is None:
                base = 'file service'
            else:
                (operator, current_or_pending, service_key) = self._value
                if operator == True:
                    base = u'is'
                else:
                    base = u'is not'
                if current_or_pending == HC.CONTENT_STATUS_PENDING:
                    base += u' pending to '
                else:
                    base += u' currently in '
                service = HG.client_controller.services_manager.GetService(service_key)
                base += service.GetName()
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_TAG_AS_NUMBER:
            if self._value is None:
                base = 'tag as number'
            else:
                (namespace, operator, num) = self._value
                if namespace == '':
                    n_text = 'tag'
                else:
                    n_text = namespace
                # NOTE(review): o_text is unbound if operator is anything
                # other than the three handled values -- presumably the ui
                # only produces these three; confirm
                if operator == u'\u2248':
                    o_text = ' about '
                elif operator == '<':
                    o_text = ' less than '
                elif operator == '>':
                    o_text = ' more than '
                base = n_text + o_text + HydrusData.ConvertIntToPrettyString(num)
        elif self._predicate_type == HC.PREDICATE_TYPE_SYSTEM_DUPLICATE_RELATIONSHIPS:
            base = 'num duplicate relationships'
            if self._value is not None:
                (operator, num_relationships, dupe_type) = self._value
                if operator == u'\u2248':
                    o_text = ' about '
                elif operator == '<':
                    o_text = ' less than '
                elif operator == '>':
                    o_text = ' more than '
                elif operator == '=':
                    o_text = ' '
                base += u' - has' + o_text + HydrusData.ConvertIntToPrettyString(num_relationships) + u' ' + HC.duplicate_type_string_lookup[dupe_type]
        # all system predicates render as a 'system:' namespaced tag
        base = HydrusTags.CombineTag('system', base)
        base = ClientTags.RenderTag(base, render_for_user)
        base += count_text
    elif self._predicate_type == HC.PREDICATE_TYPE_TAG:
        tag = self._value
        # exclusion predicates are prefixed with '-'
        if not self._inclusive:
            base = u'-'
        else:
            base = u''
        base += ClientTags.RenderTag(tag, render_for_user)
        base += count_text
        if sibling_service_key is not None:
            # show what the tag will actually display as, if siblinged
            siblings_manager = HG.client_controller.GetManager('tag_siblings')
            sibling = siblings_manager.GetSibling(sibling_service_key, tag)
            if sibling is not None:
                sibling = ClientTags.RenderTag(sibling, render_for_user)
                base += u' (will display as ' + sibling + ')'
    elif self._predicate_type == HC.PREDICATE_TYPE_PARENT:
        # parents render indented beneath their child
        base = ' '
        tag = self._value
        base += ClientTags.RenderTag(tag, render_for_user)
        base += count_text
    elif self._predicate_type == HC.PREDICATE_TYPE_NAMESPACE:
        namespace = self._value
        if not self._inclusive:
            base = u'-'
        else:
            base = u''
        anything_tag = HydrusTags.CombineTag(namespace, '*anything*')
        anything_tag = ClientTags.RenderTag(anything_tag, render_for_user)
        base += anything_tag
    elif self._predicate_type == HC.PREDICATE_TYPE_WILDCARD:
        wildcard = self._value
        if not self._inclusive:
            base = u'-'
        else:
            base = u''
        base += wildcard
    return base
def do_it():
    # Worker-thread body for an immediate repository sync. Uses self and
    # self._service/_controller/_service_key from the enclosing scope.
    # Downloads the full content update package, commits it chunk by chunk
    # with progress/speed reporting, then syncs thumbnails.
    job_key = ClientThreading.JobKey(pausable=True, cancellable=True)
    job_key.SetVariable('popup_title', self._service.GetName() + ': immediate sync')
    job_key.SetVariable('popup_text_1', 'downloading')
    # publish the popup so the user can watch/pause/cancel
    self._controller.pub('message', job_key)
    content_update_package = self._service.Request(HC.GET, 'immediate_content_update_package')
    c_u_p_num_rows = content_update_package.GetNumRows()
    c_u_p_total_weight_processed = 0
    update_speed_string = ''
    content_update_index_string = 'content row ' + HydrusData.ConvertValueRangeToPrettyString(c_u_p_total_weight_processed, c_u_p_num_rows) + ': '
    job_key.SetVariable('popup_text_1', content_update_index_string + 'committing' + update_speed_string)
    job_key.SetVariable('popup_gauge_1', (c_u_p_total_weight_processed, c_u_p_num_rows))
    for (content_updates, weight) in content_update_package.IterateContentUpdateChunks():
        # honour pause; on cancel, discard the popup and bail out
        (i_paused, should_quit) = job_key.WaitIfNeeded()
        if should_quit:
            job_key.Delete()
            return
        content_update_index_string = 'content row ' + HydrusData.ConvertValueRangeToPrettyString(c_u_p_total_weight_processed, c_u_p_num_rows) + ': '
        job_key.SetVariable('popup_text_1', content_update_index_string + 'committing' + update_speed_string)
        job_key.SetVariable('popup_gauge_1', (c_u_p_total_weight_processed, c_u_p_num_rows))
        # time the db commit so we can report rows/s on the next iteration
        precise_timestamp = HydrusData.GetNowPrecise()
        self._controller.WriteSynchronous('content_updates', {self._service_key: content_updates})
        it_took = HydrusData.GetNowPrecise() - precise_timestamp
        rows_s = weight / it_took
        update_speed_string = ' at ' + HydrusData.ConvertIntToPrettyString(rows_s) + ' rows/s'
        c_u_p_total_weight_processed += weight
    job_key.DeleteVariable('popup_gauge_1')
    self._service.SyncThumbnails(job_key)
    job_key.SetVariable('popup_text_1', 'done! ' + HydrusData.ConvertIntToPrettyString(c_u_p_num_rows) + ' rows added.')
    job_key.Finish()
def THREADDownloadURLs(job_key, urls, title):
    """Worker-thread task: download and import a list of raw file urls.

    job_key: popup job used for progress reporting and pause/cancel.
    urls: iterable of url strings to fetch.
    title: popup title.

    Tallies successful / redundant / deleted / failed outcomes, then posts
    a summary line and the presented hashes to the popup.
    """
    job_key.SetVariable('popup_title', title)
    job_key.SetVariable('popup_text_1', 'initialising')
    num_successful = 0
    num_redundant = 0
    num_deleted = 0
    num_failed = 0
    presentation_hashes = []
    presentation_hashes_fast = set()  # set mirror of the list for O(1) dedupe
    file_import_options = HG.client_controller.new_options.GetDefaultFileImportOptions('loud')
    def network_job_factory(*args, **kwargs):
        # these are user-initiated downloads, so skip bandwidth rules
        network_job = ClientNetworkingJobs.NetworkJob(*args, **kwargs)
        network_job.OverrideBandwidth()
        return network_job
    network_job_presentation_context_factory = GenerateMultiplePopupNetworkJobPresentationContextFactory(job_key)
    for (i, url) in enumerate(urls):
        # honour pause; stop entirely on cancel
        (i_paused, should_quit) = job_key.WaitIfNeeded()
        if should_quit:
            break
        job_key.SetVariable('popup_text_1', HydrusData.ConvertValueRangeToPrettyString(i + 1, len(urls)))
        job_key.SetVariable('popup_gauge_1', (i + 1, len(urls)))
        file_seed = ClientImportFileSeeds.FileSeed(ClientImportFileSeeds.FILE_SEED_TYPE_URL, url)
        try:
            file_seed.DownloadAndImportRawFile(url, file_import_options, network_job_factory, network_job_presentation_context_factory)
            status = file_seed.status
            if status in CC.SUCCESSFUL_IMPORT_STATES:
                if status == CC.STATUS_SUCCESSFUL_AND_NEW:
                    num_successful += 1
                elif status == CC.STATUS_SUCCESSFUL_BUT_REDUNDANT:
                    num_redundant += 1
                if file_seed.HasHash():
                    hash = file_seed.GetHash()
                    if hash not in presentation_hashes_fast:
                        presentation_hashes.append(hash)
                        presentation_hashes_fast.add(hash)
            elif status == CC.STATUS_DELETED:
                num_deleted += 1
        except Exception as e:
            # a single bad url shouldn't kill the batch: count, log, move on
            num_failed += 1
            HydrusData.Print(url + ' failed to import!')
            HydrusData.PrintException(e)
    job_key.DeleteVariable('popup_network_job')
    # build the 'x successful, y already in db, ...' summary line
    text_components = []
    if num_successful > 0:
        text_components.append(HydrusData.ToHumanInt(num_successful) + ' successful')
    if num_redundant > 0:
        text_components.append(HydrusData.ToHumanInt(num_redundant) + ' already in db')
    if num_deleted > 0:
        text_components.append(HydrusData.ToHumanInt(num_deleted) + ' deleted')
    if num_failed > 0:
        text_components.append(HydrusData.ToHumanInt(num_failed) + ' failed (errors written to log)')
    job_key.SetVariable('popup_text_1', ', '.join(text_components))
    if len(presentation_hashes) > 0:
        job_key.SetVariable('popup_files', (presentation_hashes, 'downloads'))
    job_key.DeleteVariable('popup_gauge_1')
    job_key.Finish()