def test_resolved(aws_ip_range_data, test_ip_range_cache, test_aws_ip, app):
    ipresolver = IPResolver(app)
    ipresolver.amazon_ranges = test_ip_range_cache["all_amazon"]
    ipresolver.sync_token = test_ip_range_cache["sync_token"]

    assert ipresolver.resolve_ip(test_aws_ip) == ResolvedLocation(
        provider="aws", service=None, sync_token=123456789, country_iso_code=None
    )
    assert ipresolver.resolve_ip("10.0.0.2") == ResolvedLocation(
        provider="aws", service=None, sync_token=123456789, country_iso_code=None
    )
    assert ipresolver.resolve_ip("6.0.0.2") == ResolvedLocation(
        provider="aws", service=None, sync_token=123456789, country_iso_code=u"US"
    )
    assert ipresolver.resolve_ip("1.2.3.4") == ResolvedLocation(
        provider="internet", service=u"US", sync_token=123456789, country_iso_code=u"US"
    )
    assert ipresolver.resolve_ip("127.0.0.1") == ResolvedLocation(
        provider="internet", service=None, sync_token=123456789, country_iso_code=None
    )
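# The assertions above compare against the imported ResolvedLocation type. As a point of
# reference only, it behaves like a small value object with the four fields used here --
# a hedged sketch (not the project's actual definition, which is imported rather than
# redefined in this module):
#
#     from collections import namedtuple
#
#     ResolvedLocation = namedtuple(
#         "ResolvedLocation", ["provider", "service", "sync_token", "country_iso_code"]
#     )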
def test_direct_download(
    test_aws_ip,
    test_empty_ip_range_cache,
    test_ip_range_cache,
    aws_ip_range_data,
    ipranges_populated,
    app,
):
    ipresolver = IPResolver(app)
    if ipranges_populated:
        ipresolver.sync_token = (
            test_ip_range_cache["sync_token"]
            if ipranges_populated
            else test_empty_ip_range_cache["sync_token"]
        )
        ipresolver.amazon_ranges = (
            test_ip_range_cache["all_amazon"]
            if ipranges_populated
            else test_empty_ip_range_cache["all_amazon"]
        )

    context = StorageContext("nyc", None, config_provider, ipresolver)

    # Create a test bucket and put some test content.
    boto3.client("s3").create_bucket(Bucket=_TEST_BUCKET)

    engine = CloudFrontedS3Storage(
        context,
        "cloudfrontdomain",
        "keyid",
        "test/data/test.pem",
        "some/path",
        _TEST_BUCKET,
        _TEST_USER,
        _TEST_PASSWORD,
    )
    engine.put_content(_TEST_PATH, _TEST_CONTENT)
    assert engine.exists(_TEST_PATH)

    # Request a direct download URL for a request from a known AWS IP, and ensure we are
    # returned an S3 URL.
    assert "s3.amazonaws.com" in engine.get_direct_download_url(_TEST_PATH, test_aws_ip)

    if ipranges_populated:
        # Request a direct download URL for a request from a non-AWS IP, and ensure we are
        # returned a CloudFront URL.
        assert "cloudfrontdomain" in engine.get_direct_download_url(_TEST_PATH, "1.2.3.4")
    else:
        # Request a direct download URL for a request from a non-AWS IP, but since IP Ranges
        # isn't populated, we still get back an S3 URL.
        assert "s3.amazonaws.com" in engine.get_direct_download_url(_TEST_PATH, "1.2.3.4")
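# Both direct-download tests here create a bucket and object through boto3, so they assume
# an S3 stand-in is active for the test run (the suite's actual fixture/decorator wiring is
# not shown in this excerpt). A hedged sketch of one common approach, using moto:
#
#     from moto import mock_s3  # "mock_aws" in moto >= 5.x
#
#     @mock_s3
#     def test_direct_download(...):
#         ...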
def test_empty_ip_range_cache(empty_range_data):
    sync_token = empty_range_data["syncToken"]
    all_amazon = IPResolver._parse_amazon_ranges(empty_range_data)
    fake_cache = {
        "sync_token": sync_token,
        # The parsed ranges must be included: the download tests read
        # test_empty_ip_range_cache["all_amazon"] when ipranges_populated is False.
        "all_amazon": all_amazon,
    }
    return fake_cache
def test_ip_range_cache(aws_ip_range_data):
    sync_token = aws_ip_range_data["syncToken"]
    all_amazon = IPResolver._parse_amazon_ranges(aws_ip_range_data)
    fake_cache = {
        "sync_token": sync_token,
        "all_amazon": all_amazon,
    }
    return fake_cache
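# For orientation: the two cache fixtures above only assume that the range data follows the
# public ip-ranges.json layout published by AWS. A minimal, hedged example of that shape
# (illustrative values only; the real fixture data is defined elsewhere in the suite):
_EXAMPLE_IP_RANGE_DATA = {
    "syncToken": 123456789,
    "prefixes": [
        {"ip_prefix": "10.0.0.0/8", "region": "GLOBAL", "service": "AMAZON"},
        {"ip_prefix": "6.0.0.0/8", "region": "us-east-1", "service": "AMAZON"},
    ],
}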
def test_direct_download_no_ip(test_aws_ip, aws_ip_range_data, ipranges_populated, app):
    ipresolver = IPResolver(app)
    context = StorageContext("nyc", None, config_provider, ipresolver)

    # Create a test bucket and put some test content.
    boto3.client("s3").create_bucket(Bucket=_TEST_BUCKET)

    engine = CloudFrontedS3Storage(
        context,
        "cloudfrontdomain",
        "keyid",
        "test/data/test.pem",
        "some/path",
        _TEST_BUCKET,
        _TEST_USER,
        _TEST_PASSWORD,
    )
    engine.put_content(_TEST_PATH, _TEST_CONTENT)
    assert engine.exists(_TEST_PATH)

    # Without a client IP, the engine cannot consult the IP resolver, so it falls back to an
    # S3 URL.
    assert "s3.amazonaws.com" in engine.get_direct_download_url(_TEST_PATH)
app.url_map.converters["regex"] = RegexConverter
app.url_map.converters["repopath"] = RepositoryPathConverter
app.url_map.converters["apirepopath"] = APIRepositoryPathConverter

Principal(app, use_sessions=False)

tf = app.config["DB_TRANSACTION_FACTORY"]

model_cache = get_model_cache(app.config)
avatar = Avatar(app)
login_manager = LoginManager(app)
mail = Mail(app)
prometheus = PrometheusPlugin(app)
chunk_cleanup_queue = WorkQueue(app.config["CHUNK_CLEANUP_QUEUE_NAME"], tf)
instance_keys = InstanceKeys(app)
ip_resolver = IPResolver(app)
storage = Storage(app, chunk_cleanup_queue, instance_keys, config_provider, ip_resolver)
userfiles = Userfiles(app, storage)
log_archive = LogArchive(app, storage)
analytics = Analytics(app)
user_analytics = UserAnalytics(app)
billing = Billing(app)
sentry = Sentry(app)
build_logs = BuildLogs(app)
authentication = UserAuthentication(app, config_provider, OVERRIDE_CONFIG_DIRECTORY)
userevents = UserEventsBuilderModule(app)
superusers = SuperUserManager(app)
signer = Signer(app, config_provider)
label_validator = LabelValidator(app)
build_canceller = BuildCanceller(app)