Code example #1
File: main.py  Project: kholmatov/python-sdk
def main():
    logging.basicConfig(level=logging.INFO)
    arguments = parse_args()
    interceptor = yandexcloud.RetryInterceptor(
        max_retry_count=5, retriable_codes=[grpc.StatusCode.UNAVAILABLE])
    if arguments.token:
        sdk = yandexcloud.SDK(interceptor=interceptor, token=arguments.token)
    else:
        with open(arguments.sa_json_path) as infile:
            sdk = yandexcloud.SDK(interceptor=interceptor,
                                  service_account_key=json.load(infile))

    fill_missing_arguments(sdk, arguments)

    instance_id = None
    try:
        operation = create_instance(sdk, arguments.folder_id, arguments.zone,
                                    arguments.name, arguments.subnet_id)
        operation_result = sdk.wait_operation_and_get_result(
            operation,
            response_type=Instance,
            meta_type=CreateInstanceMetadata,
        )

        instance_id = operation_result.response.id

    finally:
        if instance_id:
            logging.info('Deleting instance {}'.format(instance_id))
            operation = delete_instance(sdk, instance_id)
            sdk.wait_operation_and_get_result(
                operation,
                meta_type=DeleteInstanceMetadata,
            )
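The create_instance and delete_instance helpers are defined elsewhere in that file. As a minimal sketch, assuming the Compute protos bundled with the SDK, delete_instance only needs the instance service client:

from yandex.cloud.compute.v1.instance_service_pb2 import DeleteInstanceRequest
from yandex.cloud.compute.v1.instance_service_pb2_grpc import InstanceServiceStub


def delete_instance(sdk, instance_id):
    # Returns an Operation; the caller waits on it with wait_operation_and_get_result.
    instance_service = sdk.client(InstanceServiceStub)
    return instance_service.Delete(DeleteInstanceRequest(instance_id=instance_id))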
Code example #2
File: main.py  Project: yandex-cloud/python-sdk
def main():
    logging.basicConfig(level=logging.INFO)
    arguments = parse_cmd()
    if arguments.token:
        sdk = yandexcloud.SDK(token=arguments.token, user_agent=USER_AGENT)
    else:
        with open(arguments.sa_json_path) as infile:
            sdk = yandexcloud.SDK(service_account_key=json.load(infile),
                                  user_agent=USER_AGENT)

    fill_missing_arguments(sdk, arguments)

    resources = common_pb.Resources(
        resource_preset_id='s2.micro',
        disk_type_id='network-ssd',
    )
    cluster_id = None
    try:
        operation_result = create_cluster(
            sdk, create_cluster_request(arguments, resources=resources))
        cluster_id = operation_result.response.id
        change_cluster_description(sdk, cluster_id)
        add_subcluster(sdk, cluster_id, arguments, resources=resources)

        run_hive_job(sdk, cluster_id=cluster_id)
        run_mapreduce_job(sdk,
                          cluster_id=cluster_id,
                          bucket=arguments.s3_bucket)
        run_spark_job(sdk, cluster_id=cluster_id, bucket=arguments.s3_bucket)
        run_pyspark_job(sdk, cluster_id=cluster_id, bucket=arguments.s3_bucket)
    finally:
        if cluster_id is not None:
            delete_cluster(sdk, cluster_id)
Code example #3
File: yandex.py  Project: ysktir/airflow-1
 def __init__(
     self,
     # Connection id is deprecated. Use yandex_conn_id instead
     connection_id: Optional[str] = None,
     yandex_conn_id: Optional[str] = None,
     default_folder_id: Union[dict, bool, None] = None,
     default_public_ssh_key: Optional[str] = None,
 ) -> None:
     super().__init__()
     if connection_id:
         warnings.warn(
             "Using `connection_id` is deprecated. Please use `yandex_conn_id` parameter.",
             DeprecationWarning,
             stacklevel=2,
         )
     self.connection_id = yandex_conn_id or connection_id or self.default_conn_name
     self.connection = self.get_connection(self.connection_id)
     self.extras = self.connection.extra_dejson
     credentials = self._get_credentials()
     self.sdk = yandexcloud.SDK(**credentials)
     self.default_folder_id = default_folder_id or self._get_field(
         'folder_id', False)
     self.default_public_ssh_key = default_public_ssh_key or self._get_field(
         'public_ssh_key', False)
     self.client = self.sdk.client
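The _get_field calls above read optional settings from the connection's extra JSON. A hypothetical implementation (the key prefix is an assumption, not taken from the original hook) might look like:

def _get_field(self, field_name, default=None):
    # Assumption: keys in the connection extras are stored with a provider prefix.
    full_field_name = f'extra__yandexcloud__{field_name}'
    if full_field_name in self.extras:
        return self.extras[full_field_name]
    return default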
Code example #4
File: main.py  Project: kholmatov/python-sdk
def main(product_id, sku_id, quantity, timestamp=None, uuid=None):
    # NOTE: the IAM token is obtained automatically from the VM metadata service

    interceptor = yandexcloud.RetryInterceptor(
        max_retry_count=5, retriable_codes=[grpc.StatusCode.UNAVAILABLE])
    sdk = yandexcloud.SDK(interceptor=interceptor)
    service = sdk.client(ImageProductUsageServiceStub)
    request = build_product_usage_write_request(product_id, sku_id, quantity,
                                                timestamp, uuid)

    # Step 0. Ensure the consumer has all permissions to use the product (validate_only=True)

    request.validate_only = True
    response = service.Write(request)

    if len(response.accepted) == 0:
        raise ValueError(
            'Unable to provide the service to customer. Reason: %s' %
            str(response.rejected))

    # Step 1. Provide your service to the customer

    business_logic(product_id, sku_id)

    # Step 2. Write the product usage to the Yandex.Cloud API (validate_only=False)

    request.validate_only = False
    response = service.Write(request)

    if len(response.accepted) == 0:
        raise ValueError('Unable to write the product usage. Reason: %s' %
                         str(response.rejected))

    return response
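build_product_usage_write_request is not shown here. One detail it presumably handles is converting the optional timestamp argument into a protobuf Timestamp; a minimal sketch of that conversion (the helper name is illustrative):

from datetime import datetime, timezone

from google.protobuf.timestamp_pb2 import Timestamp


def to_pb_timestamp(moment=None):
    # Default to the current UTC time when no explicit timestamp is supplied.
    ts = Timestamp()
    ts.FromDatetime(moment or datetime.now(timezone.utc))
    return ts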
Code example #5
File: main.py  Project: kbespalov/python-sdk
def main():
    arguments = parse_cmd()
    sdk = yandexcloud.SDK(token=arguments.token)

    fill_missing_flags(sdk, arguments)

    resources = common_pb.Resources(
        resource_preset_id='s2.micro',
        disk_size=15 * (1024**3),
        disk_type_id='network-ssd',
    )
    req = create_cluster_request(arguments, resources=resources)
    cluster_id = None
    try:
        cluster = create_cluster(sdk, req)
        cluster_id = cluster.id
        change_cluster_description(sdk, cluster_id)
        add_subcluster(sdk, cluster_id, arguments, resources=resources)

        run_hive_job(sdk, cluster_id=cluster_id)
        run_mapreduce_job(sdk,
                          cluster_id=cluster_id,
                          bucket=arguments.s3_bucket)
        run_spark_job(sdk, cluster_id=cluster_id, bucket=arguments.s3_bucket)
        run_pyspark_job(sdk, cluster_id=cluster_id, bucket=arguments.s3_bucket)
    finally:
        if cluster_id is not None:
            delete_cluster(sdk, cluster_id)
Code example #6
File: main.py  Project: JleMyP/yc-bot-tg
def get_func(func_id: str) -> Tuple[Function, Optional[str]]:
    sdk = yandexcloud.SDK()
    client = sdk.client(FunctionServiceStub)
    try:
        return client.Get(GetFunctionRequest(function_id=func_id)), None
    except grpc.RpcError as e:
        if hasattr(e, 'details'):
            return None, e.details()
        return None, json.dumps(e.args)
Code example #7
File: main.py  Project: JleMyP/yc-bot-tg
def get_cluster(cluster_id: str) -> Tuple[Cluster, Optional[str]]:
    sdk = yandexcloud.SDK()
    client = sdk.client(ClusterServiceStub)
    try:
        return client.Get(GetClusterRequest(cluster_id=cluster_id)), None
    except grpc.RpcError as e:
        if hasattr(e, 'details'):
            return None, e.details()
        return None, json.dumps(e.args)
Code example #8
File: main.py  Project: JleMyP/yc-bot-tg
def stop_cluster(cluster_id: str) -> Optional[str]:
    sdk = yandexcloud.SDK()
    client = sdk.client(ClusterServiceStub)
    try:
        client.Stop(StopClusterRequest(cluster_id=cluster_id))
    except grpc.RpcError as e:
        if hasattr(e, 'details'):
            return e.details()
        return json.dumps(e.args)
Code example #9
File: main.py  Project: JleMyP/yc-bot-tg
def restart_vm(vm_id: str) -> Optional[str]:
    sdk = yandexcloud.SDK()
    client = sdk.client(InstanceServiceStub)
    try:
        client.Restart(RestartInstanceRequest(instance_id=vm_id))
    except grpc.RpcError as e:
        if hasattr(e, 'details'):
            return e.details()
        return json.dumps(e.args)
Code example #10
File: main.py  Project: JleMyP/yc-bot-tg
def get_vm(vm_id: str) -> Tuple[Instance, Optional[str]]:
    sdk = yandexcloud.SDK()
    client = sdk.client(InstanceServiceStub)
    try:
        return client.Get(GetInstanceRequest(instance_id=vm_id)), None
    except grpc.RpcError as e:
        if hasattr(e, 'details'):
            return None, e.details()
        return None, json.dumps(e.args)
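Code examples #6 through #10 repeat the same grpc.RpcError handling. A small shared helper (illustrative only, not part of the yc-bot-tg project) would remove the duplication:

import json
from typing import Callable, Optional, Tuple, TypeVar

import grpc

T = TypeVar('T')


def call_yc(call: Callable[[], T]) -> Tuple[Optional[T], Optional[str]]:
    # Run a gRPC call and return a (response, error_message) pair.
    try:
        return call(), None
    except grpc.RpcError as e:
        if hasattr(e, 'details'):
            return None, e.details()
        return None, json.dumps(e.args)


# Usage, mirroring get_vm above:
# vm, error = call_yc(lambda: client.Get(GetInstanceRequest(instance_id=vm_id)))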
Code example #11
File: yc_inventory.py  Project: ssi91/yc_inventory
def generate_inventory(conf):
    sdk = yandexcloud.SDK(service_account_key=conf.service_account)

    c = sdk.client(InstanceServiceStub)

    l = c.List(ListInstancesRequest(
        folder_id=conf['folderId']
    ))

    tags = [k for k in conf['tags'].keys()]

    tag_hosts_map = {
        tag: [
            i.network_interfaces[0].primary_v4_address.one_to_one_nat.address for i in find_by_labels(
                l.instances, {'tags': [tag]}
            )
        ] for tag in conf['tags'].keys()
    }

    def set_host_as_value(var_hosts):
        if isinstance(var_hosts, list):
            return [set_host_as_value(host) for host in var_hosts]
        if any(var_hosts == tag_name for tag_name in tags):
            return tag_hosts_map[var_hosts]
        if var_hosts.find('[') != -1 and var_hosts.find(']') == len(var_hosts) - 1:
            tag_name, index = var_hosts.split('[')
            index = int(index[:-1])
            if len(tag_hosts_map[tag_name]) <= index:
                # FIXME: it seems like an error in the config
                return []
            return tag_hosts_map[tag_name][index]

    def extract_var(var):
        if 'value' in var:
            return var['value']
        return set_host_as_value(var['hosts'])

    # extract vars
    tag_host_vars_map = {}
    for tag in tags:
        if 'vars' not in conf['tags'][tag] or conf['tags'][tag]['vars'] is None:
            continue
        tag_host_vars_map[tag] = {
            var_name: extract_var(conf['tags'][tag]['vars'][var_name]) for var_name in conf['tags'][tag]['vars']
        }

    result_inventory = {}
    for tag, hosts in tag_hosts_map.items():
        hosts_name = tag
        if 'hostsName' in conf['tags'][tag]:
            hosts_name = conf['tags'][tag]['hostsName']
        result_inventory[hosts_name] = {
            'hosts': hosts
        }
        if tag in tag_host_vars_map:
            result_inventory[hosts_name]['vars'] = tag_host_vars_map[tag]
    return result_inventory
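For reference, the function returns a plain dict keyed by hosts group name. With a config that defines a 'web' tag it would look roughly like this (addresses and vars are made up):

sample_inventory = {
    'web': {
        'hosts': ['203.0.113.10', '203.0.113.11'],
        'vars': {'ansible_user': 'ubuntu'},
    },
}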
Code example #12
File: yandex.py  Project: folly3/airflow-1
 def __init__(self, connection_id=None, default_folder_id=None, default_public_ssh_key=None):
     super().__init__()
     self.connection_id = connection_id or 'yandexcloud_default'
     self.connection = self.get_connection(self.connection_id)
     self.extras = self.connection.extra_dejson
     credentials = self._get_credentials()
     self.sdk = yandexcloud.SDK(**credentials)
     self.default_folder_id = default_folder_id or self._get_field('folder_id', False)
     self.default_public_ssh_key = default_public_ssh_key or self._get_field('public_ssh_key', False)
     self.client = self.sdk.client
Code example #13
File: main.py  Project: yandex-cloud/python-sdk
def main():
    logging.basicConfig(level=logging.INFO)
    arguments = parse_cmd()
    if arguments.token:
        sdk = yandexcloud.SDK(token=arguments.token)
    else:
        with open(arguments.sa_json_path) as infile:
            sdk = yandexcloud.SDK(service_account_key=json.load(infile))

    fill_missing_arguments(sdk, arguments)

    cluster_id = None
    try:
        operation_result = create_cluster(sdk,
                                          create_cluster_request(arguments))
        cluster_id = operation_result.response.id
        change_cluster_description(sdk, cluster_id)
        add_cluster_host(sdk, cluster_id, arguments)
    finally:
        if cluster_id is not None:
            delete_cluster(sdk, cluster_id)
Code example #14
File: main.py  Project: JleMyP/yc-bot-tg
def close_func(func_id: str) -> Optional[str]:
    sdk = yandexcloud.SDK()
    client = sdk.client(FunctionServiceStub)
    try:
        client.SetAccessBindings(
            SetAccessBindingsRequest(
                resource_id=func_id,
                access_bindings=[],
            ))
    except grpc.RpcError as e:
        if hasattr(e, 'details'):
            return e.details()
        return json.dumps(e.args)
Code example #15
    def init_sdk(self):
        '''Init Yandex.Cloud SDK with provided auth method'''
        interceptor = yandexcloud.RetryInterceptor(
            max_retry_count=self.get_option('api_retry_count'),
            retriable_codes=[grpc.StatusCode.UNAVAILABLE])
        auth_kind = self.get_option('auth_kind')
        if auth_kind == 'serviceaccountfile':
            sa_file_path = self.get_option('service_account_file')
            sa_file_contents = self.get_option('service_account_contents')
            if bool(sa_file_path) == bool(sa_file_contents):
                raise AnsibleError(
                    'Exactly one of "service_account_file" and "service_account_contents" must be set '
                    'when auth_kind is set to "serviceaccountfile"')
            if sa_file_path:
                try:
                    with open(sa_file_path, 'r') as f:
                        sa_file_contents = f.read()
                except Exception as e:
                    raise AnsibleError(
                        'Error reading Service Account data from file: "{}": {}'
                        .format(sa_file_path, to_native(e)))
            try:
                sa = json.loads(sa_file_contents)
            except Exception as e:
                raise AnsibleError(
                    'Error reading Service Account data from JSON: {}'.format(
                        to_native(e)))
            self.sdk = yandexcloud.SDK(interceptor=interceptor,
                                       service_account_key=sa)

        elif auth_kind == 'oauth':
            oauth_token = self.get_option('oauth_token')
            if not oauth_token:
                raise AnsibleError('oauth_token should be set')
            self.sdk = yandexcloud.SDK(interceptor=interceptor,
                                       token=oauth_token)
        else:
            raise AnsibleError(
                'Unknown value for auth_kind: {}'.format(auth_kind))
Code example #16
File: main.py  Project: JleMyP/yc-bot-tg
def handle_funcs(message):
    sdk = yandexcloud.SDK()
    functions = sdk.client(FunctionServiceStub)
    resp = functions.List(ListFunctionsRequest(folder_id=FOLDER))
    mu = InlineKeyboardMarkup()
    for func in resp.functions:
        resp_b = functions.ListAccessBindings(
            ListAccessBindingsRequest(resource_id=func.id))
        status_emoji = FUNCTION_STATUS_EMOJI[bool(resp_b.access_bindings)]
        mu.add(
            InlineKeyboardButton(f'{status_emoji} {func.name}',
                                 callback_data=f'func:{func.id}'), )
    bot.send_message(message.chat.id, 'выбери функцию', reply_markup=mu)
Code example #17
File: main.py  Project: pombredanne/python-sdk-1
def main(token, folder_id, zone, name, subnet_id):
    interceptor = yandexcloud.RetryInterceptor(max_retry_count=5, retriable_codes=[grpc.StatusCode.UNAVAILABLE])
    sdk = yandexcloud.SDK(interceptor=interceptor, token=token)
    operation = create_instance(sdk, folder_id, zone, name, subnet_id)
    meta = CreateInstanceMetadata()
    operation.metadata.Unpack(meta)
    print('Creating instance {}'.format(meta.instance_id))
    operation = wait_for_operation(sdk, operation)

    instance = Instance()
    operation.response.Unpack(instance)

    print('Deleting instance {}'.format(instance.id))
    operation = delete_instance(sdk, instance.id)
    wait_for_operation(sdk, operation)
Code example #18
def main():
    flags = parse_cmd()
    sdk = yandexcloud.SDK(token=flags.token)

    fill_missing_flags(sdk, flags)

    req = create_cluster_request(flags)
    cluster = None
    try:
        cluster = create_cluster(sdk, req)
        change_cluster_description(sdk, cluster)
        add_cluster_host(sdk, cluster, flags)
    finally:
        if cluster is not None:
            delete_cluster(sdk, cluster)
Code example #19
File: main.py  Project: JleMyP/yc-bot-tg
def handle_vms(message):
    sdk = yandexcloud.SDK()
    compute = sdk.client(InstanceServiceStub)
    resp = compute.List(ListInstancesRequest(folder_id=FOLDER))
    mu = InlineKeyboardMarkup()
    text = "выбери ВМ"
    for vm in resp.instances:
        status_name = Instance.Status.Name(vm.status)
        status_emoji = INSTANCE_STATUS_EMOJI[vm.status]
        text += f'\n{status_emoji} {vm.name} {status_name}'
        if vm.status in (Instance.RUNNING, Instance.PROVISIONING, Instance.STARTING) \
                and vm.network_interfaces:
            net = vm.network_interfaces[0].primary_v4_address
            if net.one_to_one_nat:
                text += f' {net.one_to_one_nat.address}'
        mu.add(InlineKeyboardButton(vm.name, callback_data=f'vm:{vm.id}'), )
    bot.send_message(message.chat.id, text, reply_markup=mu)
Code example #20
File: main.py  Project: JleMyP/yc-bot-tg
def handle_dbs(message):
    sdk = yandexcloud.SDK()
    clusters = sdk.client(ClusterServiceStub)
    resp = clusters.List(ListClustersRequest(folder_id=FOLDER))
    mu = InlineKeyboardMarkup()
    text = "выбери кластер"
    for cluster in resp.clusters:
        status_name = Cluster.Status.Name(cluster.status)
        status_emoji = CLUSTER_STATUS_EMOJI[cluster.status]
        text += f'\n{status_emoji} {cluster.name} {status_name}'
        # TODO: connection string
        # get the list of hosts and find the master
        # get the list of databases
        mu.add(
            InlineKeyboardButton(cluster.name,
                                 callback_data=f'cluster:{cluster.id}'), )
    bot.send_message(message.chat.id, text, reply_markup=mu)
Code example #21
File: main.py  Project: JleMyP/yc-bot-tg
def open_func(func_id: str) -> Optional[str]:
    sdk = yandexcloud.SDK()
    client = sdk.client(FunctionServiceStub)
    try:
        client.SetAccessBindings(
            SetAccessBindingsRequest(
                resource_id=func_id,
                access_bindings=[
                    AccessBinding(
                        role_id='serverless.functions.invoker',
                        subject=Subject(
                            id='allUsers',
                            type='system',
                        ),
                    ),
                ],
            ))
    except grpc.RpcError as e:
        if hasattr(e, 'details'):
            return e.details()
        return json.dumps(e.args)
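open_func and close_func (code example #14) combine naturally into a toggle. A hypothetical combination that reuses only calls already shown in these examples (import paths follow the SDK's generated protos):

from yandex.cloud.access.access_pb2 import ListAccessBindingsRequest
from yandex.cloud.serverless.functions.v1.function_service_pb2_grpc import FunctionServiceStub


def toggle_func(func_id: str) -> Optional[str]:
    # Hypothetical helper: remove public access if any bindings exist,
    # otherwise make the function publicly invokable.
    sdk = yandexcloud.SDK()
    client = sdk.client(FunctionServiceStub)
    bindings = client.ListAccessBindings(
        ListAccessBindingsRequest(resource_id=func_id))
    if bindings.access_bindings:
        return close_func(func_id)
    return open_func(func_id)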
Code example #22
def get_sdk():
    token = find_token()
    return yandexcloud.SDK(token=token)
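find_token is defined elsewhere. A minimal, hypothetical version that reads an OAuth token from the environment (the YC_TOKEN variable name is an assumption):

import os


def find_token():
    # Hypothetical: take the OAuth token from an environment variable.
    token = os.environ.get('YC_TOKEN')
    if not token:
        raise RuntimeError('YC_TOKEN environment variable is not set')
    return token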
Code example #23
def sdk(sa_key):
    return yandexcloud.SDK(service_account_key=sa_key)
Code example #24
# NOTE: imports inferred from the names used below; the original file may differ.
import json
import logging
import os
from io import StringIO
from time import sleep

import paramiko
import yandexcloud
from telegram.ext import Updater
from yandex.cloud.compute.v1.instance_service_pb2_grpc import InstanceServiceStub

logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.INFO)

logger = logging.getLogger(__name__)
telegram_token = os.environ['TELEGRAM_TOKEN']
telegram_id = int(os.environ['TELEGRAM_USER_ID'])
instance_id = os.environ['CLOUD_INSTANNCE_ID']
updater = Updater(token=telegram_token, use_context=True)
dispatcher = updater.dispatcher

instance_service = yandexcloud.SDK().client(InstanceServiceStub)


def _telegram_send_json(chat_id, message):
    return {
        'statusCode': 200,
        'headers': {'Content-Type': 'application/json'},
        'body': json.dumps({
            'method': 'sendMessage',
            'chat_id': chat_id,
            'text': message,
        }),
    }
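The rest of this bot is cut off above. A hypothetical command handler in the same style, using the Start RPC of the Compute instance service (the handler name and wiring are assumptions):

from yandex.cloud.compute.v1.instance_service_pb2 import StartInstanceRequest


def start_instance(update, context):
    # Only react to the whitelisted Telegram user.
    if update.effective_user.id != telegram_id:
        return
    instance_service.Start(StartInstanceRequest(instance_id=instance_id))
    context.bot.send_message(chat_id=update.effective_chat.id,
                             text='Start requested for {}'.format(instance_id))

# Wiring sketch: dispatcher.add_handler(CommandHandler('start_vm', start_instance))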
Code example #25
    def _init_client(self):
        sdk = yandexcloud.SDK(token=str(self.get_option('yacloud_token')))

        self.instance_service = sdk.client(InstanceServiceStub)
        self.folder_service = sdk.client(FolderServiceStub)
        self.cloud_service = sdk.client(CloudServiceStub)
Code example #26
def main():
    logging.basicConfig(level=logging.INFO)
    arguments = parse_cmd()
    if arguments.token:
        sdk = yandexcloud.SDK(token=arguments.token, user_agent=USER_AGENT)
    else:
        with open(arguments.sa_json_path) as infile:
            sdk = yandexcloud.SDK(service_account_key=json.load(infile),
                                  user_agent=USER_AGENT)
    fill_missing_arguments(sdk, arguments)

    dataproc = sdk.wrappers.Dataproc(
        default_folder_id=arguments.folder_id,
        default_public_ssh_key=arguments.ssh_public_key,
    )
    bucket_for_logs_output = arguments.s3_bucket
    services = (
        'HDFS',
        'YARN',
        'MAPREDUCE',
        'HIVE',
        'SPARK',
    )
    try:
        dataproc.create_cluster(
            masternode_resource_preset='s2.micro',
            datanode_count=2,
            datanode_resource_preset='s2.micro',
            subnet_id=arguments.subnet_id,
            s3_bucket=bucket_for_logs_output,
            service_account_id=arguments.service_account_id,
            zone=arguments.zone,
            services=services,
            log_group_id=arguments.log_group_id,
        )

        dataproc.update_cluster_description('New cluster description')

        dataproc.create_subcluster(
            subcluster_type='compute',
            name='compute',
            hosts_count=1,
            resource_preset='s2.micro',
            max_hosts_count=2,
            cpu_utilization_target=66,
            preemptible=True,
        )

        dataproc.create_mapreduce_job(
            main_class='org.apache.hadoop.streaming.HadoopStreaming',
            file_uris=[
                's3a://data-proc-public/jobs/sources/mapreduce-001/mapper.py',
                's3a://data-proc-public/jobs/sources/mapreduce-001/reducer.py'
            ],
            args=[
                '-mapper', 'mapper.py', '-reducer', 'reducer.py',
                '-numReduceTasks', '1', '-input',
                's3a://data-proc-public/jobs/sources/data/cities500.txt.bz2',
                '-output', 's3a://{bucket}/dataproc/job/results/{uuid}'.format(
                    bucket=bucket_for_logs_output, uuid=uuid.uuid4())
            ],
            properties={
                'yarn.app.mapreduce.am.resource.mb': '2048',
                'yarn.app.mapreduce.am.command-opts': '-Xmx2048m',
                'mapreduce.job.maps': '6',
            },
        )

        dataproc.create_hive_job(
            query_file_uri=
            's3a://data-proc-public/jobs/sources/hive-001/main.sql',
            script_variables={
                'CITIES_URI':
                's3a://data-proc-public/jobs/sources/hive-001/cities/',
                'COUNTRY_CODE': 'RU',
            })

        dataproc.create_spark_job(
            name=
            'Spark job: Find total urban population in distribution by country',
            main_jar_file_uri=
            's3a://data-proc-public/jobs/sources/java/dataproc-examples-1.0.jar',
            main_class='ru.yandex.cloud.dataproc.examples.PopulationSparkJob',
            file_uris=[
                's3a://data-proc-public/jobs/sources/data/config.json',
            ],
            archive_uris=[
                's3a://data-proc-public/jobs/sources/data/country-codes.csv.zip',
            ],
            jar_file_uris=[
                's3a://data-proc-public/jobs/sources/java/icu4j-61.1.jar',
                's3a://data-proc-public/jobs/sources/java/commons-lang-2.6.jar',
                's3a://data-proc-public/jobs/sources/java/opencsv-4.1.jar',
                's3a://data-proc-public/jobs/sources/java/json-20190722.jar'
            ],
            args=[
                's3a://data-proc-public/jobs/sources/data/cities500.txt.bz2',
                's3a://{bucket}/dataproc/job/results/${{JOB_ID}}'.format(
                    bucket=bucket_for_logs_output),
            ],
            properties={
                'spark.submit.deployMode': 'cluster',
            },
            packages=['org.slf4j:slf4j-simple:1.7.30'],
            repositories=['https://repo1.maven.org/maven2'],
            exclude_packages=['com.amazonaws:amazon-kinesis-client'],
        )

        dataproc.create_pyspark_job(
            main_python_file_uri=
            's3a://data-proc-public/jobs/sources/pyspark-001/main.py',
            python_file_uris=[
                's3a://data-proc-public/jobs/sources/pyspark-001/geonames.py',
            ],
            file_uris=[
                's3a://data-proc-public/jobs/sources/data/config.json',
            ],
            archive_uris=[
                's3a://data-proc-public/jobs/sources/data/country-codes.csv.zip',
            ],
            args=[
                's3a://data-proc-public/jobs/sources/data/cities500.txt.bz2',
                's3a://{bucket}/jobs/results/${{JOB_ID}}'.format(
                    bucket=bucket_for_logs_output),
            ],
            jar_file_uris=[
                's3a://data-proc-public/jobs/sources/java/dataproc-examples-1.0.jar',
                's3a://data-proc-public/jobs/sources/java/icu4j-61.1.jar',
                's3a://data-proc-public/jobs/sources/java/commons-lang-2.6.jar',
            ],
            properties={
                'spark.submit.deployMode': 'cluster',
            },
            packages=['org.slf4j:slf4j-simple:1.7.30'],
            repositories=['https://repo1.maven.org/maven2'],
            exclude_packages=['com.amazonaws:amazon-kinesis-client'],
        )

    except OperationError:
        logging.exception('Operation error:')

    finally:
        if dataproc.cluster_id is not None:
            dataproc.delete_cluster()
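parse_cmd and USER_AGENT are defined elsewhere in the example. A hedged sketch of the argument parser implied by the attributes used above (flag names and the zone default are assumptions):

import argparse


def parse_cmd():
    parser = argparse.ArgumentParser(description='Yandex.Cloud Data Proc example')
    parser.add_argument('--token', help='OAuth token')
    parser.add_argument('--sa-json-path', help='Path to the service account key file')
    parser.add_argument('--folder-id')
    parser.add_argument('--zone', default='ru-central1-b')
    parser.add_argument('--subnet-id')
    parser.add_argument('--service-account-id')
    parser.add_argument('--ssh-public-key')
    parser.add_argument('--s3-bucket')
    parser.add_argument('--log-group-id')
    return parser.parse_args()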