Example no. 1 (score: 0)
    def GetValue(self):
        """Collect the current values of the four checker sub-controls and
        bundle them into a ClientData.CheckerOptions instance."""

        # Read each sub-widget once, in the order CheckerOptions expects.
        files_per_check = self._intended_files_per_check.GetValue()
        faster_floor = self._never_faster_than.GetValue()
        slower_ceiling = self._never_slower_than.GetValue()
        death_velocity = self._death_file_velocity.GetValue()

        return ClientData.CheckerOptions(
            files_per_check, faster_floor, slower_ceiling, death_velocity)
Example no. 2 (score: 0)
 def test_SERIALISABLE_TYPE_SUBSCRIPTION( self ):
     """Round-trip a Subscription through serialisation, both freshly
     constructed and fully populated, checking every field survives."""
     
     def test( obj, dupe_obj ):
         
         # Field-by-field comparison of the original against the
         # serialise/deserialise duplicate.
         self.assertEqual( obj.GetName(), dupe_obj.GetName() )
         
         self.assertEqual( obj._gallery_identifier, dupe_obj._gallery_identifier )
         self.assertEqual( obj._gallery_stream_identifiers, dupe_obj._gallery_stream_identifiers )
         self.assertEqual( len( obj._queries ), len( dupe_obj._queries ) )
         self.assertEqual( obj._get_tags_if_url_known_and_file_redundant, dupe_obj._get_tags_if_url_known_and_file_redundant )
         self.assertEqual( obj._initial_file_limit, dupe_obj._initial_file_limit )
         self.assertEqual( obj._periodic_file_limit, dupe_obj._periodic_file_limit )
         self.assertEqual( obj._paused, dupe_obj._paused )
         
         # The import-options objects are compared via their serialisable
         # tuples — presumably they define no __eq__; TODO confirm.
         self.assertEqual( obj._file_import_options.GetSerialisableTuple(), dupe_obj._file_import_options.GetSerialisableTuple() )
         self.assertEqual( obj._tag_import_options.GetSerialisableTuple(), dupe_obj._tag_import_options.GetSerialisableTuple() )
         
         self.assertEqual( obj._no_work_until, dupe_obj._no_work_until )
         
     
     # A default-constructed subscription must survive the round trip.
     sub = ClientImporting.Subscription( 'test sub' )
     
     self._dump_and_load_and_test( sub, test )
     
     # Now populate every tuple field with non-default values.
     gallery_identifier = ClientDownloading.GalleryIdentifier( HC.SITE_TYPE_BOORU, 'gelbooru' )
     gallery_stream_identifiers = ClientDownloading.GetGalleryStreamIdentifiers( gallery_identifier )
     queries = [ ClientImporting.SubscriptionQuery( 'test query' ), ClientImporting.SubscriptionQuery( 'test query 2' ) ]
     checker_options = ClientData.CheckerOptions()
     get_tags_if_url_known_and_file_redundant = True
     initial_file_limit = 100
     periodic_file_limit = 50
     paused = False
     
     file_import_options = ClientImporting.FileImportOptions( automatic_archive = False, exclude_deleted = True, min_size = 8 * 1024, min_resolution = [ 25, 25 ] )
     tag_import_options = ClientImporting.TagImportOptions( service_keys_to_namespaces = { HydrusData.GenerateKey() : { 'series', '' } }, service_keys_to_explicit_tags = { HydrusData.GenerateKey() : { 'test explicit tag', 'and another' } } )
     
     # 20 days in the past, i.e. the no-work timer has long expired.
     no_work_until = HydrusData.GetNow() - 86400 * 20
     
     sub.SetTuple( gallery_identifier, gallery_stream_identifiers, queries, checker_options, get_tags_if_url_known_and_file_redundant, initial_file_limit, periodic_file_limit, paused, file_import_options, tag_import_options, no_work_until )
     
     # Spot-check the accessors reflect what SetTuple stored.
     self.assertEqual( sub.GetGalleryIdentifier(), gallery_identifier )
     self.assertEqual( sub.GetTagImportOptions(), tag_import_options )
     self.assertEqual( sub.GetQueries(), queries )
     
     # PauseResume must toggle the paused flag both ways.
     self.assertEqual( sub._paused, False )
     sub.PauseResume()
     self.assertEqual( sub._paused, True )
     sub.PauseResume()
     self.assertEqual( sub._paused, False )
     
     # The populated subscription must also survive the round trip.
     self._dump_and_load_and_test( sub, test )
Example no. 3 (score: 0)
 def test_checker_options( self ):
     """Exercise CheckerOptions death detection, velocity pretty-printing and
     next-check timing against seed caches with varied file-arrival patterns."""
     
     # Four configurations differing only in intended files-per-check, plus a
     # 'callous' one whose death velocity window is one minute instead of a day.
     regular_checker_options = ClientData.CheckerOptions( intended_files_per_check = 5, never_faster_than = 30, never_slower_than = 86400, death_file_velocity = ( 1, 86400 ) )
     fast_checker_options = ClientData.CheckerOptions( intended_files_per_check = 2, never_faster_than = 30, never_slower_than = 86400, death_file_velocity = ( 1, 86400 ) )
     slow_checker_options = ClientData.CheckerOptions( intended_files_per_check = 10, never_faster_than = 30, never_slower_than = 86400, death_file_velocity = ( 1, 86400 ) )
     callous_checker_options = ClientData.CheckerOptions( intended_files_per_check = 5, never_faster_than = 30, never_slower_than = 86400, death_file_velocity = ( 1, 60 ) )
     
     empty_seed_cache = ClientImporting.SeedCache()
     
     seed_cache = ClientImporting.SeedCache()
     
     last_check_time = 10000000
     
     one_day_before = last_check_time - 86400
     
     # 50 seeds just outside the one-day window...
     # NOTE(review): str.encode( 'hex' ) is Python 2 only; Python 3 would need
     # binascii.hexlify or bytes.hex().
     for i in range( 50 ):
         
         seed = os.urandom( 16 ).encode( 'hex' )
         
         seed_cache.AddSeeds( ( seed, ) )
         
         seed_cache.UpdateSeedSourceTime( seed, one_day_before - 10 )
         
     
     # ...and 50 inside it, giving 50 files/day at the last check.
     for i in range( 50 ):
         
         seed = os.urandom( 16 ).encode( 'hex' )
         
         seed_cache.AddSeeds( ( seed, ) )
         
         seed_cache.UpdateSeedSourceTime( seed, last_check_time - 600 )
         
     
     # A cache with one file either side of the one-day boundary: 1 file/day.
     bare_seed_cache = ClientImporting.SeedCache()
     
     bare_seed_cache.AddSeeds( ( 'early', ) )
     bare_seed_cache.AddSeeds( ( 'in_time_delta', ) )
     
     bare_seed_cache.UpdateSeedSourceTime( 'early', one_day_before - 10 )
     bare_seed_cache.UpdateSeedSourceTime( 'in_time_delta', one_day_before + 10 )
     
     # A cache with a file every ten seconds for a day: 8640 files/day.
     busy_seed_cache = ClientImporting.SeedCache()
     
     busy_seed_cache.AddSeeds( ( 'early', ) )
     
     busy_seed_cache.UpdateSeedSourceTime( 'early', one_day_before - 10 )
     
     for i in range( 8640 ):
         
         seed = os.urandom( 16 ).encode( 'hex' )
         
         busy_seed_cache.AddSeeds( ( seed, ) )
         
         busy_seed_cache.UpdateSeedSourceTime( seed, one_day_before + ( ( i + 1 ) * 10 ) - 1 )
         
     
     # A cache whose files all arrived ten minutes before the last check,
     # simulating a freshly created thread.
     new_thread_seed_cache = ClientImporting.SeedCache()
     
     for i in range( 10 ):
         
         seed = os.urandom( 16 ).encode( 'hex' )
         
         new_thread_seed_cache.AddSeeds( ( seed, ) )
         
         new_thread_seed_cache.UpdateSeedSourceTime( seed, last_check_time - 600 )
         
     
     # empty
     # should say ok if last_check_time is 0, so it can initialise
     # otherwise sperg out safely
     
     self.assertFalse( regular_checker_options.IsDead( empty_seed_cache, 0 ) )
     
     self.assertEqual( regular_checker_options.GetPrettyCurrentVelocity( empty_seed_cache, 0 ), 'no files yet' )
     
     self.assertEqual( regular_checker_options.GetNextCheckTime( empty_seed_cache, 0 ), 0 )
     
     self.assertTrue( regular_checker_options.IsDead( empty_seed_cache, last_check_time ) )
     
     self.assertEqual( regular_checker_options.GetPrettyCurrentVelocity( empty_seed_cache, last_check_time ), 'no files, unable to determine velocity' )
     
     # regular
     # current velocity should be 50 files per day for the day ones and 0 files per min for the callous minute one
     
     self.assertFalse( regular_checker_options.IsDead( seed_cache, last_check_time ) )
     self.assertFalse( fast_checker_options.IsDead( seed_cache, last_check_time ) )
     self.assertFalse( slow_checker_options.IsDead( seed_cache, last_check_time ) )
     self.assertTrue( callous_checker_options.IsDead( seed_cache, last_check_time ) )
     
     self.assertEqual( regular_checker_options.GetPrettyCurrentVelocity( seed_cache, last_check_time ), u'at last check, found 50 files in previous 1 day' )
     self.assertEqual( fast_checker_options.GetPrettyCurrentVelocity( seed_cache, last_check_time ), u'at last check, found 50 files in previous 1 day' )
     self.assertEqual( slow_checker_options.GetPrettyCurrentVelocity( seed_cache, last_check_time ), u'at last check, found 50 files in previous 1 day' )
     self.assertEqual( callous_checker_options.GetPrettyCurrentVelocity( seed_cache, last_check_time ), u'at last check, found 0 files in previous 1 minute' )
     
     # Next-check delay scales with intended_files_per_check / velocity:
     # e.g. 5 files wanted / 50 per day -> 86400 / 10 = 8640s.
     self.assertEqual( regular_checker_options.GetNextCheckTime( seed_cache, last_check_time ), last_check_time + 8640 )
     self.assertEqual( fast_checker_options.GetNextCheckTime( seed_cache, last_check_time ), last_check_time + 3456 )
     self.assertEqual( slow_checker_options.GetNextCheckTime( seed_cache, last_check_time ), last_check_time + 17280 )
     
     # bare
     # 1 files per day
     
     self.assertFalse( regular_checker_options.IsDead( bare_seed_cache, last_check_time ) )
     self.assertTrue( callous_checker_options.IsDead( bare_seed_cache, last_check_time ) )
     
     self.assertEqual( regular_checker_options.GetPrettyCurrentVelocity( bare_seed_cache, last_check_time ), u'at last check, found 1 files in previous 1 day' )
     
     # At 1 file/day every option is clamped by never_slower_than (86400).
     self.assertEqual( regular_checker_options.GetNextCheckTime( bare_seed_cache, last_check_time ), last_check_time + 86400 )
     self.assertEqual( fast_checker_options.GetNextCheckTime( bare_seed_cache, last_check_time ), last_check_time + 86400 )
     self.assertEqual( slow_checker_options.GetNextCheckTime( bare_seed_cache, last_check_time ), last_check_time + 86400 )
     
     # busy
     # 8640 files per day, 6 files per minute
     
     self.assertFalse( regular_checker_options.IsDead( busy_seed_cache, last_check_time ) )
     self.assertFalse( fast_checker_options.IsDead( busy_seed_cache, last_check_time ) )
     self.assertFalse( slow_checker_options.IsDead( busy_seed_cache, last_check_time ) )
     self.assertFalse( callous_checker_options.IsDead( busy_seed_cache, last_check_time ) )
     
     self.assertEqual( regular_checker_options.GetPrettyCurrentVelocity( busy_seed_cache, last_check_time ), u'at last check, found 8,640 files in previous 1 day' )
     self.assertEqual( callous_checker_options.GetPrettyCurrentVelocity( busy_seed_cache, last_check_time ), u'at last check, found 6 files in previous 1 minute' )
     
     # Fast caches are clamped at the low end by never_faster_than (30) for
     # the fast option; callous (1-minute window) lands on 50 like regular.
     self.assertEqual( regular_checker_options.GetNextCheckTime( busy_seed_cache, last_check_time ), last_check_time + 50 )
     self.assertEqual( fast_checker_options.GetNextCheckTime( busy_seed_cache, last_check_time ), last_check_time + 30 )
     self.assertEqual( slow_checker_options.GetNextCheckTime( busy_seed_cache, last_check_time ), last_check_time + 100 )
     self.assertEqual( callous_checker_options.GetNextCheckTime( busy_seed_cache, last_check_time ), last_check_time + 50 )
     
     # new thread
     # only had files from ten mins ago, so timings are different
     
     self.assertFalse( regular_checker_options.IsDead( new_thread_seed_cache, last_check_time ) )
     self.assertFalse( fast_checker_options.IsDead( new_thread_seed_cache, last_check_time ) )
     self.assertFalse( slow_checker_options.IsDead( new_thread_seed_cache, last_check_time ) )
     self.assertTrue( callous_checker_options.IsDead( new_thread_seed_cache, last_check_time ) )
     
     self.assertEqual( regular_checker_options.GetPrettyCurrentVelocity( new_thread_seed_cache, last_check_time ), u'at last check, found 10 files in previous 10 minutes' )
     self.assertEqual( fast_checker_options.GetPrettyCurrentVelocity( new_thread_seed_cache, last_check_time ), u'at last check, found 10 files in previous 10 minutes' )
     self.assertEqual( slow_checker_options.GetPrettyCurrentVelocity( new_thread_seed_cache, last_check_time ), u'at last check, found 10 files in previous 10 minutes' )
     self.assertEqual( callous_checker_options.GetPrettyCurrentVelocity( new_thread_seed_cache, last_check_time ), u'at last check, found 0 files in previous 1 minute' )
     
     # these would be 360, 120, 600, but the 'don't check faster the time since last file post' bumps this up
     self.assertEqual( regular_checker_options.GetNextCheckTime( new_thread_seed_cache, last_check_time ), last_check_time + 600 )
     self.assertEqual( fast_checker_options.GetNextCheckTime( new_thread_seed_cache, last_check_time ), last_check_time + 600 )
     self.assertEqual( slow_checker_options.GetNextCheckTime( new_thread_seed_cache, last_check_time ), last_check_time + 600 )