Code Example #1
from databricks_cli.sdk.api_client import ApiClient

def _get_api_client(config, command_name=""):
    # Verify TLS certificates unless the profile's insecure flag is set.
    verify = config.insecure is None
    if config.is_valid_with_token:
        return ApiClient(host=config.host, token=config.token, verify=verify,
                         command_name=command_name)
    return ApiClient(user=config.username, password=config.password,
                     host=config.host, verify=verify, command_name=command_name)
Code Example #2
File: config.py Project: phi-dbq/databricks-cli
from databricks_cli.configure.provider import DatabricksConfig
from databricks_cli.sdk.api_client import ApiClient

def _get_api_client():
    conf = DatabricksConfig.fetch_from_fs()
    if conf.is_valid_with_token:
        return ApiClient(host=conf.host, token=conf.token)
    return ApiClient(user=conf.username,
                     password=conf.password,
                     host=conf.host)
Code Example #3
def __init__(self, user, token, workspaceUrl):
    self.dbcli_apiclient = ApiClient(user,
                                     password=token,
                                     host=workspaceUrl,
                                     verify=True,
                                     command_name='Python Client')
    self.dbfs_api_client = DbfsApi(self.dbcli_apiclient)
Code Example #4
from databricks_cli.sdk.api_client import ApiClient
from databricks_cli.dbfs.api import DbfsApi
from databricks_cli.dbfs.dbfs_path import DbfsPath

class DatabricksAPIClient(object):
    def __init__(self, user, token, workspaceUrl):
        self.dbcli_apiclient = ApiClient(user,
                                         password=token,
                                         host=workspaceUrl,
                                         verify=True,
                                         command_name='Python Client')
        self.dbfs_api_client = DbfsApi(self.dbcli_apiclient)

    # List init script directory
    def _list_init_script_dir(self, srcPath="dbfs:/databricks/init"):
        print("Starting to list the legacy global init scripts folder")
        files = self.dbfs_api_client.list_files(dbfs_path=DbfsPath(srcPath))
        file_list = [f.dbfs_path.absolute_path for f in files]
        return file_list

    # Copy global init script to local
    def _cp_legacy_gis_to_local(self,
                                srcPath="dbfs:/databricks/init",
                                destPath="./dbx_gis_v1"):
        print("Starting to copy the legacy global init scripts to path {}".
              format(destPath))
        self.dbfs_api_client.cp(recursive=True,
                                overwrite=True,
                                src=srcPath,
                                dst=destPath)
        print("Copied the legacy global init scripts to path {}".format(
            destPath))

    def _copy_test_file(self):
        self.dbfs_api_client.cp(recursive=False,
                                overwrite=True,
                                src="./dbx_test_src/random.sh",
                                dst="dbfs:/databricks/init")
        print("copied test file")

    def _remove_test_file(self):
        self.dbfs_api_client.delete(
            dbfs_path=DbfsPath("dbfs:/databricks/init/random.sh"),
            recursive=False)
        print("removed test file")

    # Upload the init script as a global init script v2
    # By default disabled & placed at the last location in the order of execution
    def _upload_init_script_as_gis_v2(self, script_name,
                                      base64_encoded_content):
        request_data = {"name": script_name, "script": base64_encoded_content}
        self.dbcli_apiclient.perform_query(method='POST',
                                           path='/global-init-scripts',
                                           data=request_data)
        print("Script uploaded as GIS v2 - {}".format(script_name))
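A hedged end-to-end sketch of migrating one legacy script with this class; the workspace URL, token, and script name below are placeholders, and passing "token" as the basic-auth user with a personal access token as the password follows the usual Databricks convention:

import base64

client = DatabricksAPIClient(user="token",  # "token" + PAT-as-password convention
                             token="dapi-XXXX",  # placeholder personal access token
                             workspaceUrl="https://myworkspace.cloud.databricks.com")
client._cp_legacy_gis_to_local(destPath="./dbx_gis_v1")
with open("./dbx_gis_v1/set-env.sh", "rb") as f:  # hypothetical script name
    encoded = base64.b64encode(f.read()).decode("ascii")
client._upload_init_script_as_gis_v2("set-env.sh", encoded)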
Code Example #5
    def __init__(self, **kwargs):
        if "host" in kwargs:
            if not kwargs["host"].startswith("https://"):
                kwargs["host"] = "https://" + kwargs["host"]

        self.client = ApiClient(**kwargs)

        for _, camel_name, service in _get_services():
            setattr(self, camel_name, service(self.client))
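If the enclosing class is the DatabricksAPI wrapper from the databricks-api package (an assumption; the snippet only shows __init__), every service is attached under the camel-case name produced by _get_services(), so usage might look like:

db = DatabricksAPI(host="myworkspace.cloud.databricks.com",  # https:// added automatically
                   token="dapi-XXXX")  # placeholder token
clusters = db.cluster.list_clusters()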
Code Example #6
def __init__(self, logger, **kwargs):
    """
    :param **kwargs: arbitrary keyword arguments, passed straight to ApiClient;
        keys should only include: token, host
    :type **kwargs: dict
    """
    self.api_client = ApiClient(**kwargs)
    self.cluster_client = ClusterApi(self.api_client)
    self.libraries_client = LibrariesApi(self.api_client)
    self.logger = logger
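A usage sketch, assuming the enclosing class is named ClusterManager (hypothetical; the snippet does not show the class name) and placeholder credentials:

import logging

manager = ClusterManager(logging.getLogger(__name__),
                         host="https://myworkspace.cloud.databricks.com",  # placeholder
                         token="dapi-XXXX")  # placeholder
running = manager.cluster_client.list_clusters()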
Code Example #7
File: cli.py Project: amineds/databricks-terraformer
def export_cli(dry_run, tag, delete, git_ssh_url, api_client: ApiClient, hcl,
               pattern_matches):
    block_key_map = {}
    ignore_attribute_key = {}
    required_attributes_key = {"instance_profile_arn"}

    if hcl:
        _data = {}
        headers = None
        profiles = api_client.perform_query(
            'GET', '/instance-profiles/list', data=_data,
            headers=headers)["instance_profiles"]
        log.info(profiles)

        with GitExportHandler(git_ssh_url,
                              "instance_profiles",
                              delete_not_found=delete,
                              dry_run=dry_run,
                              tag=tag) as gh:
            for profile in profiles:
                if not pattern_matches(profile["instance_profile_arn"]):
                    log.debug(
                        f"{profile['instance_profile_arn']} did not match pattern function {pattern_matches}"
                    )
                    continue
                log.debug(
                    f"{profile['instance_profile_arn']} matched the pattern function {pattern_matches}"
                )
                profile_resource_data = prep_json(block_key_map,
                                                  ignore_attribute_key,
                                                  profile,
                                                  required_attributes_key)

                base_name = normalize_identifier(
                    profile["instance_profile_arn"])
                name = "databricks_instance_profile"
                identifier = f"databricks_instance_profile-{base_name}"

                # Force validation. If we import it, we might as well be able to use it.
                profile_resource_data["skip_validation"] = False
                instance_profile_hcl = create_resource_from_dict(
                    name, identifier, profile_resource_data, False)

                file_name_identifier = f"{identifier}.tf"
                gh.add_file(file_name_identifier, instance_profile_hcl)
                log.debug(instance_profile_hcl)
Code Example #8
import os

from databricks_cli.configure.provider import get_config_for_profile
from databricks_cli.sdk.api_client import ApiClient

def tgt_api_client(load_env):
    target_profile = os.environ.get("AZURE_TARGET_WORKSPACE")
    config = get_config_for_profile(target_profile)
    return ApiClient(host=config.host, token=config.token)
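This reads like a pytest fixture, with load_env presumably another fixture that loads the AZURE_TARGET_WORKSPACE variable. A hypothetical test built on top of it:

def test_target_workspace_reachable(tgt_api_client):
    # Any cheap authenticated call works as a smoke test; listing the
    # workspace root via the generic query method is one option.
    result = tgt_api_client.perform_query('GET', '/workspace/list',
                                          data={'path': '/'})
    assert isinstance(result, dict)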
Code Example #9
File: databricks.py Project: schipholgroup/takeoff
def api_client(self, config: dict) -> ApiClient:
    credential_kwargs = super()._transform_key_to_credential_kwargs(
        config["azure"]["keyvault_keys"][current_filename(__file__)])
    return ApiClient(**credential_kwargs)
Code Example #10
from databricks_cli.sdk.api_client import ApiClient

def _get_api_client(config):
    if config.is_valid_with_token:
        return ApiClient(host=config.host, token=config.token)
    return ApiClient(user=config.username, password=config.password,
                     host=config.host)
Code Example #11
def __init__(self, common_params):
    dbcli_apiclient = ApiClient(common_params["api_user"],
                                password=common_params["api_password"],
                                host='https://accounts.cloud.databricks.com',
                                verify=True,
                                command_name='Python Dev')
    self.accounts_api_client = AccountsApi(dbcli_apiclient)
Code Example #12
from databricks_cli.sdk.api_client import ApiClient
from databricks_cli.sdk.service import ClusterService

def terminate_cluster(profile, cluster_id, host_url, host_token):
    dbks_api = ApiClient(host=host_url, token=host_token)
    terminate_status = ClusterService(dbks_api).delete_cluster(cluster_id)
    return terminate_status
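A hypothetical invocation; note that the profile argument is unused by the function body as written, and the cluster id, URL, and token are placeholders:

import os

status = terminate_cluster(profile=None,
                           cluster_id="0123-456789-abcde000",
                           host_url="https://myworkspace.cloud.databricks.com",
                           host_token=os.environ["DATABRICKS_TOKEN"])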