Example #1
0
    def from_config(cls, conf, credential_provider):
        """Build an instance from a validated credential provider.

        Always passes the temporary credentials (access key, secret, session
        token, expiration) to the constructor; when a *conf* section is given,
        the connection settings (region, host, proxy, calling format, TLS)
        are read from it as well, otherwise only the default region is used.
        """
        credential_provider.validate()
        credentials = credential_provider.get_credentials()

        # Credential kwargs are common to both branches.
        kwargs = {
            'aws_access_key_id': credentials.get('AccessKeyId'),
            'aws_secret_access_key': credentials.get('SecretAccessKey'),
            'aws_security_token': credentials.get('SessionToken'),
            'expiration': credentials.get('Expiration'),
        }

        if conf:
            kwargs.update(
                region=aws_conf.get_region(conf=conf),
                host=conf.HOST.get(),
                proxy_address=conf.PROXY_ADDRESS.get(),
                proxy_port=conf.PROXY_PORT.get(),
                proxy_user=conf.PROXY_USER.get(),
                proxy_pass=conf.PROXY_PASS.get(),
                calling_format=conf.CALLING_FORMAT.get(),
                is_secure=conf.IS_SECURE.get(),
            )
        else:
            # No section configured: fall back to the globally-derived region.
            kwargs['region'] = aws_conf.get_region()

        return cls(**kwargs)
Example #2
0
def test_core_site():
    """core_site should expose the RAZ S3 settings from core-site.xml.

    Writes a core-site.xml into a temp dir, points the 'default' HDFS
    cluster's HADOOP_CONF_DIR at it, then checks the parsed RAZ API url,
    RAZ cluster name, default S3 endpoint, remote home storage and region.
    """
    hadoop_home = tempfile.mkdtemp()
    finish = []

    try:
        xml = """<?xml version="1.0"?>
<configuration>
  <property>
    <name>fs.s3a.custom.signers</name>
    <value>RazS3SignerPlugin:org.apache.ranger.raz.hook.s3.RazS3SignerPlugin:org.apache.ranger.raz.hook.s3.RazS3SignerPluginInitializer</value>
  </property>
  <property>
    <name>fs.s3a.s3.signing-algorithm</name>
    <value>RazS3SignerPlugin</value>
  </property>
  <property>
    <name>fs.s3a.delegation.token.binding</name>
    <value>org.apache.ranger.raz.hook.s3.RazDelegationTokenBinding</value>
  </property>
  <property>
    <name>fs.s3a.ext.raz.rest.host.url</name>
    <value>https://prakashdh67-master10.prakashr.xcu2-8y8x.dev.cldr.work:6082/</value>
  </property>
  <property>
    <name>fs.s3a.ext.raz.s3.access.cluster.name</name>
    <value>prakashdh67</value>
  </property>
  <property>
    <name>fs.s3a.bucket.prakashmowdev1.endpoint</name>
    <value>s3.us-west-2.amazonaws.com</value>
  </property>
</configuration>
    """
        # Close the handle deterministically: the original leaked an
        # unclosed open_file(...) handle, so the write was not guaranteed
        # to be flushed before core_site re-read the file.
        fh = open_file(os.path.join(hadoop_home, 'core-site.xml'), 'w')
        try:
            fh.write(xml)
        finally:
            fh.close()

        finish = (conf.HDFS_CLUSTERS.set_for_testing({
            'default': {}
        }), conf.HDFS_CLUSTERS['default'].HADOOP_CONF_DIR.set_for_testing(
            hadoop_home))
        core_site.reset()  # force a re-parse against the temp conf dir

        assert_equal(
            core_site.get_raz_api_url(),
            'https://prakashdh67-master10.prakashr.xcu2-8y8x.dev.cldr.work:6082/'
        )
        assert_equal(core_site.get_raz_cluster_name(), 'prakashdh67')
        assert_equal(core_site.get_raz_default_endpoint(), {
            'host': 's3.us-west-2.amazonaws.com',
            'bucket': 'prakashmowdev1'
        })

        assert_equal(get_remote_home_storage(), 's3a://prakashmowdev1')
        assert_equal(get_region(), 'us-west-2')
    finally:
        # Restore patched config and drop the cached core-site parse.
        core_site.reset()
        for f in finish:
            f()
Example #3
0
def test_core_site():
    """core_site should expose RAZ S3 *and* ADLS settings from core-site.xml.

    Writes a core-site.xml carrying both fs.s3a.* and fs.azure.* RAZ
    properties plus an abfs:// fs.defaultFS into a temp HADOOP_CONF_DIR,
    then checks the values core_site parses out of it.
    """
    hadoop_home = tempfile.mkdtemp()
    finish = []

    try:
        xml = """<?xml version="1.0"?>
<configuration>
  <property>
    <name>fs.s3a.custom.signers</name>
    <value>RazS3SignerPlugin:org.apache.ranger.raz.hook.s3.RazS3SignerPlugin:org.apache.ranger.raz.hook.s3.RazS3SignerPluginInitializer</value>
  </property>
  <property>
    <name>fs.s3a.s3.signing-algorithm</name>
    <value>RazS3SignerPlugin</value>
  </property>
  <property>
    <name>fs.s3a.delegation.token.binding</name>
    <value>org.apache.ranger.raz.hook.s3.RazDelegationTokenBinding</value>
  </property>
  <property>
    <name>fs.s3a.ext.raz.rest.host.url</name>
    <value>https://gehue-adls-master:6082/</value>
  </property>
  <property>
    <name>fs.s3a.ext.raz.s3.access.cluster.name</name>
    <value>gehue-adls</value>
  </property>
  <property>
    <name>fs.s3a.bucket.gethue-dev.endpoint</name>
    <value>s3.us-west-2.amazonaws.com</value>
  </property>
  <property>    
    <name>fs.azure.ext.raz.rest.host.url</name>    
    <value>https://gehue-adls-master:6082/</value>  
  </property> 
  <property>
    <name>fs.azure.ext.raz.adls.access.cluster.name</name>
    <value>gehue-adls</value>
  </property>
  <property>
    <name>fs.defaultFS</name>
    <value>abfs://[email protected]/hue-adls</value>
  </property> 
</configuration>
    """
        # Close the handle deterministically: the original leaked an
        # unclosed open_file(...) handle, so the write was not guaranteed
        # to be flushed before core_site re-read the file.
        fh = open_file(os.path.join(hadoop_home, 'core-site.xml'), 'w')
        try:
            fh.write(xml)
        finally:
            fh.close()

        finish = (conf.HDFS_CLUSTERS.set_for_testing({
            'default': {}
        }), conf.HDFS_CLUSTERS['default'].HADOOP_CONF_DIR.set_for_testing(
            hadoop_home))
        core_site.reset()  # force a re-parse against the temp conf dir

        # NOTE(review): the original assertions here were mangled by a
        # credential-scrubbing pass ('https://*****:*****@gethuedevstorage...')
        # which fused the API-url check with a fs.defaultFS check. The
        # expected values below are reconstructed from the XML written above;
        # confirm get_default_fs() is the correct accessor for fs.defaultFS.
        assert_equal(core_site.get_raz_api_url(), 'https://gehue-adls-master:6082/')
        assert_equal(core_site.get_raz_cluster_name(), 'gehue-adls')
        assert_equal(core_site.get_default_fs(),
                     'abfs://[email protected]/hue-adls')

        assert_equal(get_remote_home_storage(), 's3a://gethue-dev')
        assert_equal(get_region(), 'us-west-2')
    finally:
        # Restore patched config and drop the cached core-site parse.
        core_site.reset()
        for f in finish:
            f()