def test_env_int_not_required_and_not_set():
    """
    When the variable is absent from the environment and not marked as
    required, ``env_int`` should raise a ValueError.
    """
    # Guard against leakage from other tests polluting the environment.
    assert os.environ.get("TEST_INT_ENV_VARIABLE") is None

    with pytest.raises(ValueError):
        env_int("TEST_INT_ENV_VARIABLE")
def test_env_int_when_missing_and_required_is_error():
    """
    A required variable that is absent from the environment should make
    ``env_int`` raise a KeyError.
    """
    # Guard against leakage from other tests polluting the environment.
    assert os.environ.get("TEST_INT_ENV_VARIABLE") is None

    with pytest.raises(KeyError):
        env_int("TEST_INT_ENV_VARIABLE", required=True)
def test_env_int_when_missing_and_required_is_error():
    """
    Requesting a missing variable with ``required=True`` raises a KeyError.
    """
    # The variable must not be present before exercising env_int.
    assert os.environ.get('TEST_INT_ENV_VARIABLE') is None

    with pytest.raises(KeyError):
        env_int('TEST_INT_ENV_VARIABLE', required=True)
def test_env_int_not_required_and_not_set():
    """
    A variable that is both unset and not required makes ``env_int``
    raise a ValueError rather than silently returning something.
    """
    # The variable must not be present before exercising env_int.
    assert os.environ.get('TEST_INT_ENV_VARIABLE') is None

    with pytest.raises(ValueError):
        env_int('TEST_INT_ENV_VARIABLE')
def test_env_int_not_required_with_no_default(monkeypatch, env_value, expected):
    """
    A variable that is present in the environment gets parsed to an int.
    """
    monkeypatch.setenv("TEST_INT_ENV_VARIABLE", env_value)

    result = env_int("TEST_INT_ENV_VARIABLE")

    assert result == expected
def test_env_int_when_missing_and_default_provided(default, expected):
    """
    A missing variable falls back to the supplied ``default`` value.
    """
    # Guard against leakage from other tests polluting the environment.
    assert os.environ.get("TEST_INT_ENV_VARIABLE") is None

    result = env_int("TEST_INT_ENV_VARIABLE", default=default)

    assert result == expected
def test_env_int_when_missing_and_default_provided(default, expected):
    """
    When the variable is unset, the provided default is returned.
    """
    # The variable must not be present before exercising env_int.
    assert os.environ.get('TEST_INT_ENV_VARIABLE') is None

    assert env_int('TEST_INT_ENV_VARIABLE', default=default) == expected
def test_env_int_not_required_with_no_default(monkeypatch, env_value, expected):
    """
    A present environment variable is parsed into an int.
    """
    monkeypatch.setenv('TEST_INT_ENV_VARIABLE', env_value)

    assert env_int('TEST_INT_ENV_VARIABLE') == expected
def BruteForceBestTileMatcher(source_image):
    """
    Build a tile-matching callable for *source_image*.

    Returns ``find_tile_matches`` partially applied with the stock data
    backend, an exclusion tracker, the configured early-exit threshold,
    and the diff-based similarity comparator.
    """
    match_kwargs = {
        # Tracks which stock tiles have already been consumed.
        'exclusions': StockTileExclusions(source_image),
        # Instantiate the configured backend that yields the stock data.
        'stock_data': get_stock_data_backend()(),
        # If set, a match at or past this threshold ends iteration early;
        # the default of 0 comes from the environment override.
        'match_threshold': excavator.env_int(
            'BRUTE_FORCE_GOOD_ENOUGH_THRESHOLD',
            default=0,
        ),
        'compare_fn': measure_diff_similarity,
    }
    return functools.partial(find_tile_matches, **match_kwargs)
def get_stock_data_backend(backend_class=None):
    """
    Resolve and import the stock data backend class.

    Falls back to the dotted path configured in
    ``settings.MOSAIC_STOCK_DATA_BACKEND`` when no explicit path is given.
    """
    path = backend_class if backend_class is not None else settings.MOSAIC_STOCK_DATA_BACKEND
    return import_string(path)


class BaseStockDataBackend(object):
    """
    Base class which defines the interface a stock data backend must
    implement.
    """
    def __iter__(self):
        raise NotImplementedError("Subclasses must implement an iter method")


# Rows pulled per chunk when loading stock data; overridable via env var.
DB_CHUNK_SIZE = excavator.env_int('STOCK_DATA_CHUNK_SIZE', default=2000)


class BaseInMemoryDataBackend(BaseStockDataBackend):
    """
    Fully loads the stock data into memory.
    """
    def __init__(self, chunk_size=None, stock_data=None):
        # Fall back to the module-level, env-configured chunk size.
        self.chunk_size = DB_CHUNK_SIZE if chunk_size is None else chunk_size
        # Normalize None to a fresh list rather than using a mutable default.
        self._stock_data = [] if stock_data is None else stock_data
CACHES = { 'default': django_cache_url.config(), } if 'MEMCACHIER_SERVERS' in os.environ: CACHES['default'].setdefault('BINARY', True) CACHES['default'].setdefault('TIMEOUT', 500) CACHES['default'].setdefault('OPTIONS', {}) CACHES['default']['OPTIONS'].setdefault('tcp_nodelay', True) CACHES['default']['OPTIONS'].setdefault('ketama', True) # Honor the 'X-Forwarded-Proto' header for request.is_secure() SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') # `django.contrib.sites` settings SITE_ID = excavator.env_int('DJANGO_SITE_ID', default=1) # Event ID # - Designates the ID of the `active` event. CURRENT_EVENT_ID = excavator.env_int("CURRENT_EVENT_ID", default=0) or None # django-rest-framework REST_FRAMEWORK = { 'DEFAULT_PERMISSION_CLASSES': ( 'rest_framework.permissions.IsAuthenticated', ), 'DEFAULT_AUTHENTICATION_CLASSES': ( 'rest_framework.authentication.SessionAuthentication', ), 'DEFAULT_RENDERER_CLASSES': ( 'drf_ujson.renderers.UJSONRenderer',
def test_that_required_and_default_are_mutually_exclusive():
    """
    Passing both ``required=True`` and a ``default`` is a programming
    error and should raise a ValueError.
    """
    with pytest.raises(ValueError):
        env_int("TEST_INT_ENV_VARIABLE", required=True, default=1)
in stock_tile_qs )) tile_match_data = find_best_group_tiles( tuple(( (tile_id, tile_data_lookup[tile_id]) for tile_id in tile_ids )), group_stock_data, exclusions, compare_fn=compare_fn, ) tile_match_data_array.append(tile_match_data) return zip(*zip(*itertools.chain.from_iterable(tile_match_data_array))) K_MEANS_GENERATION_ID = excavator.env_int('K_MEANS_GENERATION_ID', default=62) def get_group_data(): logger.info("Loading Group Data") with Timer() as timer: generation = Generation.objects.get(pk=K_MEANS_GENERATION_ID) group_data = tuple(( (pk, cast_image_data_to_numpy_array(center)) for pk, center in generation.groups.order_by( 'pk', ).values_list('pk', 'center') )) logger.info( "Took %s to load group data", timer.elapsed,
def test_that_required_and_default_are_mutually_exclusive():
    """
    Supplying ``required=True`` together with a ``default`` raises
    a ValueError since the two options contradict each other.
    """
    with pytest.raises(ValueError):
        env_int('TEST_INT_ENV_VARIABLE', required=True, default=1)
# Database
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
    'default': dj_database_url.parse(
        excavator.env_string('DATABASE_URL', required=True),
    ),
}
# Wrap each request in a transaction unless explicitly overridden.
DATABASES['default'].setdefault('ATOMIC_REQUESTS', True)

# Cache
if excavator.env_bool('REDIS_CACHE_ENABLED'):
    CACHES = {
        'default': {
            'BACKEND': excavator.env_string(
                'REDIS_CACHE_BACKEND',
                default='redis_cache.RedisCache',
            ),
            'LOCATION': excavator.env_string('REDIS_CACHE_LOCATION'),
            'OPTIONS': {
                'DB': excavator.env_int('REDIS_CACHE_DB', default=1),
                'PASSWORD': excavator.env_string('REDIS_CACHE_PASSWORD'),
                'PARSER_CLASS': 'redis.connection.HiredisParser',
                'CONNECTION_POOL_CLASS': 'redis.BlockingConnectionPool',
                'CONNECTION_POOL_CLASS_KWARGS': {
                    'max_connections': 50,
                    'timeout': 20,
                }
            },
        },
    }
else:
    # No redis configured: fall back to whatever CACHE_URL describes.
    CACHES = {
        'default': django_cache_url.config(),
    }
def create_source_image_tiles(source_image_pk): source_image = NormalizedSourceImage.objects.get(pk=source_image_pk) with Timer() as timer: source_image.create_tiles() logger.info( "Took %s to create source image tiles for NormalizedSourceImage: %s", timer.elapsed, source_image_pk, ) # go ahead and trigger tile matching queue_source_image_tiles_for_matching() MATCH_BATCH_SIZE = excavator.env_int('MOSAIC_BATCH_SIZE', default=20) @periodic_task(crontab(minute='*')) def queue_source_image_tiles_for_matching(): if not SourceImageTile.objects.unmatched().exists(): return if SourceImageTile.objects.processing().exists(): return logger.info("Queueing tiles for matching") for _ in range(MATCH_BATCH_SIZE): match_souce_image_tiles() @db_task()