Example No. 1
    def __init__(self, spec):
        super(AwsKopsCluster, self).__init__(spec)
        self.name += '.k8s.local'
        # Bucket name is made unique per PKB run.
        self.config_bucket = 'kops-%s-%s' % (FLAGS.run_uri, str(uuid.uuid4()))
        self.region = util.GetRegionFromZone(self.zone)
        self.s3_service = s3.S3Service()
        self.s3_service.PrepareService(self.region)
Example No. 2
    def __init__(self, dpb_service_spec):
        super(UnmanagedDpbService, self).__init__(dpb_service_spec)
        # Dictionary to hold the cluster VMs.
        self.vms = {}
        self.cloud = dpb_service_spec.worker_group.cloud
        if not self.dpb_service_zone:
            raise errors.Setup.InvalidSetupError(
                'dpb_service_zone must be provided for provisioning.')
        if self.cloud == 'GCP':
            self.region = gcp_util.GetRegionFromZone(FLAGS.dpb_service_zone)
            self.storage_service = gcs.GoogleCloudStorageService()
            self.persistent_fs_prefix = 'gs://'
        elif self.cloud == 'AWS':
            self.region = aws_util.GetRegionFromZone(FLAGS.dpb_service_zone)
            self.storage_service = s3.S3Service()
            self.persistent_fs_prefix = 's3://'
        else:
            self.region = None
            self.storage_service = None
            self.persistent_fs_prefix = None
            self.manage_bucket = False
            logging.warning(
                'Cloud provider %s does not support object storage. '
                'Some benchmarks will not work.', self.cloud)

        if self.storage_service:
            self.storage_service.PrepareService(location=self.region)

        # set in _Create of derived classes
        self.leader = None
Example No. 3
    def __init__(self, dpb_service_spec):
        super(UnmanagedDpbService, self).__init__(dpb_service_spec)
        # Dictionary to hold the cluster VMs.
        self.vms = {}
        self.cloud = dpb_service_spec.worker_group.cloud
        if not self.dpb_service_zone:
            raise errors.Setup.InvalidSetupError(
                'dpb_service_zone must be provided for provisioning.')
        if self.cloud == 'GCP':
            self.region = gcp_util.GetRegionFromZone(FLAGS.dpb_service_zone)
            self.storage_service = gcs.GoogleCloudStorageService()
            self.persistent_fs_prefix = 'gs://'
        elif self.cloud == 'AWS':
            self.region = aws_util.GetRegionFromZone(FLAGS.dpb_service_zone)
            self.storage_service = s3.S3Service()
            self.persistent_fs_prefix = 's3://'
        else:
            raise errors.Config.InvalidValue(
                f'Unsupported Cloud provider {self.cloud}')

        if self.storage_service:
            self.storage_service.PrepareService(location=self.region)

        # set in _Create of derived classes
        self.leader = None
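Note: Example No. 3 is otherwise identical to Example No. 2, but it fails fast by raising errors.Config.InvalidValue for an unsupported cloud, rather than continuing with no storage service and only a logged warning.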
Example No. 4
    def setUp(self):
        super(S3Test, self).setUp()
        flag_values = {'timeout_minutes': 0, 'persistent_timeout_minutes': 0}
        p = mock.patch.object(s3, 'FLAGS')
        flags_mock = p.start()
        flags_mock.configure_mock(**flag_values)
        self.mock_command = mock.patch.object(vm_util, 'IssueCommand').start()
        self.mock_retryable_command = mock.patch.object(
            vm_util, 'IssueRetryableCommand').start()
        self.s3_service = s3.S3Service()
        self.s3_service.PrepareService(None)  # will use s3.DEFAULT_AWS_REGION
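With the mocks from Example No. 4 in place, a test can exercise S3Service without touching AWS. The method below is a hypothetical addition for illustration only; it assumes nothing beyond MakeBucket shelling out through one of the two mocked command helpers.

    def testMakeBucketIssuesCommand(self):
        # Hypothetical test building on the setUp above.
        self.s3_service.MakeBucket('pkb-test-bucket')
        # Both vm_util command helpers are mocked in setUp, so assert
        # that at least one of them was used.
        self.assertTrue(self.mock_command.called or
                        self.mock_retryable_command.called)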
Example No. 5
    def __init__(self, dpb_service_spec):
        super().__init__(dpb_service_spec)
        self.dpb_service_type = self.SERVICE_TYPE
        self.project = None
        self.cmd_prefix = list(util.AWS_PREFIX)
        if not self.dpb_service_zone:
            raise errors.Setup.InvalidSetupError(
                'dpb_service_zone must be provided for provisioning.')
        self.region = util.GetRegionFromZone(self.dpb_service_zone)
        self.storage_service = s3.S3Service()
        self.storage_service.PrepareService(location=self.region)
        self.persistent_fs_prefix = 's3://'
        self.role = FLAGS.aws_glue_job_role
        self._cluster_create_time = None
        self._job_counter = 0
Example No. 6
    def __init__(self, edw_service_spec):
        super(Athena, self).__init__(edw_service_spec)
        self.region = util.GetRegionFromZone(FLAGS.zones[0])
        self.output_bucket = '-'.join(
            [FLAGS.athena_output_location_prefix, self.region, FLAGS.run_uri])
        self.client_interface = GetAthenaClientInterface(
            self.cluster_identifier, self.output_bucket, self.region)
        self.s3_service = s3.S3Service()
        self.s3_service.PrepareService(self.region)
        self.s3_service.MakeBucket(self.output_bucket)
        if FLAGS.provision_athena:
            self.data_bucket = 'pkb' + self.cluster_identifier.replace('_', '')
            self.tables = (TPC_H_TABLES if FLAGS.edw_tpc_dsb_type == 'tpc_h'
                           else TPC_DS_TABLES)
            self.athena_db_create_time = 0
            self.athena_table_create_time = 0
Example No. 7
    def __init__(self, dpb_service_spec):
        super(AwsDpbEmr, self).__init__(dpb_service_spec)
        self.dpb_service_type = AwsDpbEmr.SERVICE_TYPE
        self.project = None
        self.cmd_prefix = list(util.AWS_PREFIX)
        if self.dpb_service_zone:
            self.region = util.GetRegionFromZone(self.dpb_service_zone)
        else:
            raise errors.Setup.InvalidSetupError(
                'dpb_service_zone must be provided for provisioning.')
        self.cmd_prefix += ['--region', self.region]
        self.network = aws_network.AwsNetwork.GetNetworkFromNetworkSpec(
            aws_network.AwsNetworkSpec(zone=self.dpb_service_zone))
        self.storage_service = s3.S3Service()
        self.storage_service.PrepareService(self.region)
        self.bucket_to_delete = None
        self.dpb_version = FLAGS.dpb_emr_release_label
Example No. 8
    def __init__(self, dpb_service_spec):
        super().__init__(dpb_service_spec)
        self.dpb_service_type = self.SERVICE_TYPE
        # Set DPB version as Spark version for metadata
        self.dpb_version = 'spark_' + FLAGS.spark_version

        benchmark_spec = context.GetThreadBenchmarkSpec()
        self.k8s_cluster = benchmark_spec.container_cluster
        assert self.k8s_cluster
        assert self.k8s_cluster.CLUSTER_TYPE == container_service.KUBERNETES
        self.cloud = self.k8s_cluster.CLOUD
        self.container_registry = benchmark_spec.container_registry
        assert self.container_registry

        self.spark_drivers = []

        # TODO(pclay): Support overriding image?
        # Corresponds with data/docker/spark directory
        self.image = 'spark'

        if self.cloud == 'GCP':
            self.region = gcp_util.GetRegionFromZone(self.k8s_cluster.zone)
            self.storage_service = gcs.GoogleCloudStorageService()
            self.persistent_fs_prefix = 'gs://'
        elif self.cloud == 'AWS':
            self.region = self.k8s_cluster.region
            self.storage_service = s3.S3Service()
            self.persistent_fs_prefix = 's3://'
        else:
            raise errors.Config.InvalidValue(
                f'Unsupported Cloud provider {self.cloud}')

        self.storage_service.PrepareService(location=self.region)

        # TODO(pclay): support
        assert not FLAGS.dpb_cluster_properties

        if self.k8s_cluster.num_nodes < 2:
            raise errors.Config.InvalidValue(
                f'Cluster type {KUBERNETES_SPARK_CLUSTER} requires at least 2 '
                f'nodes. Found {self.k8s_cluster.num_nodes}.')
Example No. 9
    def __init__(self):
        self.storage_service = s3.S3Service()
        self.storage_service.PrepareService(
            util.GetRegionFromZone(FLAGS.dpb_service_zone))
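Taken together, the examples share one lifecycle: resolve a region from a zone, instantiate s3.S3Service, call PrepareService with that region, and optionally create a uniquely named bucket. Below is a minimal sketch of that pattern; the DeleteBucket and CleanupService calls at the end are assumptions modeled on PerfKitBenchmarker's object storage interface, and the function and bucket names are illustrative only.

    import uuid

    from perfkitbenchmarker.providers.aws import s3
    from perfkitbenchmarker.providers.aws import util


    def run_with_s3_bucket(zone):
        # Resolve the region exactly as the examples above do.
        region = util.GetRegionFromZone(zone)
        storage_service = s3.S3Service()
        storage_service.PrepareService(region)
        # Unique bucket name per run, as in Example No. 1.
        bucket = 'pkb-%s' % uuid.uuid4()
        storage_service.MakeBucket(bucket)
        try:
            pass  # ... stage data and run the benchmark here ...
        finally:
            # Assumed cleanup calls (see lead-in); the examples above do
            # their teardown elsewhere.
            storage_service.DeleteBucket(bucket)
            storage_service.CleanupService()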