def test_assemble_basic_path():
    """assemble_path should join plain POSIX segments with single slashes."""
    home_directory = "/Users/SampleUser/"
    target_file = "profiles.yml"
    result = utils.assemble_path(home_directory, ".dbt", target_file)
    assert result == "/Users/SampleUser/.dbt/profiles.yml"
def __init__(
    self,
    file_name: str = DEFAULT_DBTEA_CONFIG,
    config_dir: str = PROFILES_DIR,
    config_data: dict = None,
    replace_config_if_exists: bool = True,
    dbt_project: Optional[str] = None,
    looker_project: Optional[str] = None,
    looker_config_path: Optional[str] = None,
    looker_config_section: Optional[str] = None,
    looker_replace_config_if_exists: Optional[bool] = True,
):
    """Initialize the dbtea config, loading or creating its backing YAML file.

    :param file_name: name of the dbtea config file.
    :param config_dir: directory holding the config file.
    :param config_data: initial config contents used when no file exists
        (or when replacing); defaults to an empty dict.
    :param replace_config_if_exists: if True, overwrite any existing config
        file with ``config_data`` instead of reading it.
    :param dbt_project: optional dbt project name.
    :param looker_project: optional Looker project name; when set, a Looker
        ini config is also managed.
    :param looker_config_path: path to the Looker ini file; defaults to
        ~/.dbt/looker.ini.
    :param looker_config_section: ini section name; defaults to "looker".
    :param looker_replace_config_if_exists: if True, rewrite the Looker ini
        even when it already exists.
    """
    self.file_name = file_name
    self.config_dir = config_dir
    self.dbt_project = dbt_project
    self.looker_project = looker_project
    self.looker_config_path = looker_config_path or utils.assemble_path(
        utils.get_home_dir(), ".dbt", "looker.ini"
    )
    self.looker_config_section = looker_config_section or "looker"
    # Only build a ConfigParser when a Looker project is actually configured.
    self.looker_config = ConfigParser() if looker_project else None
    if self._config_data_exists() and not replace_config_if_exists:
        self.config_data = utils.parse_yaml_file(self.config_name_and_path)
    else:
        self.config_data = config_data or {}
    # BUG FIX: the original condition was
    #   looker_project and not exists or looker_replace_config_if_exists
    # and since `and` binds tighter than `or`, the default
    # looker_replace_config_if_exists=True caused write_looker_config() to run
    # even with no looker_project. Parenthesize so the Looker config is only
    # written when a Looker project is configured.
    if looker_project and (
        not self._config_file_exists(self.looker_config_path)
        or looker_replace_config_if_exists
    ):
        self.write_looker_config()
    self.write_data_to_file(replace_if_exists=replace_config_if_exists)
def write_data_to_file(self, local_lookml_project_path: str) -> None:
    """Write this object's LookML string to its file inside the given local project.

    :param local_lookml_project_path: root directory of the local LookML project;
        the file path is this root joined with ``self.lookml_file_name_and_path``.

    BUG FIX: the file was previously opened in the default read-only mode and
    then written to, which raises io.UnsupportedOperation; open with "w".
    """
    output_path = utils.assemble_path(
        local_lookml_project_path, self.lookml_file_name_and_path
    )
    with open(output_path, "w") as lookml_file:
        lookml_file.write(self.lookml_string)
def test_assemble_windows_path():
    """assemble_path should normalize Windows backslash segments to forward slashes."""
    base_dir = r"C:\Program Files (x86)\Python"
    nested_dir = r"root\tests"
    script_name = "sample.py"
    result = utils.assemble_path(base_dir, nested_dir, "unit", script_name)
    assert result == "C:/Program Files (x86)/Python/root/tests/unit/sample.py"
def to_lookml(
    data: dict,
    output_to: str = "stdout",
    output_file: str = None,
    lookml_file_type: str = None,
    output_directory: str = None,
) -> Optional[str]:
    """Serialize *data* to LookML, returning the string or writing it to a file.

    When ``output_to`` is "stdout" the serialized LookML is returned; otherwise
    it is written to ``output_directory``/``output_file`` with a ``.lkml``
    suffix (prefixed by ``lookml_file_type`` when given) and None is returned.
    """
    if output_to == "stdout":
        return lkml.dump(data)
    suffix = "." + lookml_file_type + ".lkml" if lookml_file_type else ".lkml"
    destination = utils.assemble_path(output_directory, output_file + suffix)
    with open(destination, "w") as output_stream:
        output_stream.write(lkml.dump(data))
def read_data_from_file(self, local_lookml_project_path: str) -> dict:
    """Parse a LookML file into a dictionary with keys for each of its primary properties and a list of values."""
    logger.info(
        "Parsing data from local LookML file {}".format(self.lookml_file_name_and_path)
    )
    source_path = utils.assemble_path(
        local_lookml_project_path, self.lookml_file_name_and_path
    )
    with open(source_path, "r") as lookml_file:
        return lkml.load(lookml_file)
def from_local_file(cls, looker_project_root: str, file_relative_path: str):
    """Construct an instance by parsing a LookML file from the local filesystem.

    :param looker_project_root: root directory of the local Looker project.
    :param file_relative_path: path of the LookML file relative to that root.
    :return: a new instance populated with the parsed LookML data.
    """
    full_project_path = utils.assemble_path(looker_project_root, file_relative_path)
    path_suffixes = Path(full_project_path).suffixes
    # FIX: Path.suffixes entries keep their leading dot (e.g. ".view"); strip
    # it so the file type matches the dotless values used elsewhere in the
    # codebase ("view", "model") and the dotless "generic" fallback.
    lookml_file_type = (
        path_suffixes[0].lstrip(".") if len(path_suffixes) > 1 else "generic"
    )
    directory = Path(full_project_path).parent
    file_name = Path(full_project_path).stem
    with open(full_project_path, "r") as local_file_stream:
        return cls(
            file_name,
            lookml_file_type,
            directory_path=directory,
            lookml_data=lkml.load(local_file_stream),
        )
def _parse_artifact(self, artifact_file: str):
    """Load and parse a dbt artifact JSON file from the project's target path."""
    if artifact_file not in ARTIFACT_DATA_FILES.values():
        logger.warning(
            "You have specified an artifact file which is not in the list of known dbt artifacts"
        )
    artifact_path = utils.assemble_path(
        self.project_root, self.target_path, artifact_file
    )
    # Guard-clause form: parse when present, otherwise raise a descriptive error.
    if utils.file_exists(artifact_path):
        return utils.parse_json_file(artifact_path)
    raise DbteaException(
        name="artifact-file-missing",
        title="Artifact file {} is missing".format(artifact_file),
        detail="There is no artifact {} at path {}. You may not have yet generated this artifact and "
        "need to run models, source freshness or docs generation".format(
            artifact_file, artifact_path
        ),
    )
def create_pull_request(
    organization_name: str,
    repository_name: str,
    git_token: str,
    head_branch: str,
    base_branch: str = "main",
    title: str = "dbtea updates",
    description: str = "dbtea metadata refresh",
):
    """Creates the pull request for the head_branch against the base_branch"""
    pulls_endpoint = utils.assemble_path(
        GITHUB_API_URL, "repos", organization_name, repository_name, "pulls"
    )
    request_headers = {
        "Authorization": "token {}".format(git_token),
        "Content-Type": "application/json",
    }
    request_body = json.dumps(
        {
            "title": title,
            "body": description,
            "head": head_branch,
            "base": base_branch,
        }
    )
    response = requests.post(pulls_endpoint, headers=request_headers, data=request_body)
    # GitHub returns 4xx/5xx for auth, permission and validation failures.
    if response.status_code >= 400:
        raise GitException(
            name="pull-request-create-fail",
            provider="github",
            title="Error Creating GitHub Pull Request via API",
            status=response.status_code,
            detail=response.json().get("errors"),
            response=response,
        )
    logger.info(
        "Created pull request for branch {} at URL: {}".format(
            head_branch, response.json().get("html_url")
        )
    )
def run_dbt_deps(self, require_codegen: bool = False, *args, **kwargs) -> None:
    """Run `dbt deps` command to install dbt project dependencies; the `codegen` package must be included.

    :param require_codegen: when True, verify packages.yml exists and lists the
        'fishtown-analytics/codegen' package before running.
    :raises FileNotFoundError: if codegen is required but packages.yml is missing.
    :raises ValueError: if codegen is required but not listed in packages.yml.

    BUG FIX: a missing packages.yml previously raised FileExistsError, which
    signals the opposite condition; FileNotFoundError is the correct exception.
    """
    project_packages_file = utils.assemble_path(self.project_root, "packages.yml")
    if require_codegen:
        if not utils.file_exists(project_packages_file):
            raise FileNotFoundError(
                "You must have a packages.yml file specified in your project"
            )
        package_data = utils.parse_yaml_file(project_packages_file)
        # "packages" holds a list of package entries, so default to [] not {}.
        package_list = [
            entry.get("package") for entry in package_data.get("packages", [])
        ]
        if "fishtown-analytics/codegen" not in package_list:
            raise ValueError(
                "You have not brought the codegen dbt package into your project! You must include the "
                "package 'fishtown-analytics/codegen' in your `packages.yml` file to codegen in bulk."
            )
    logger.info("Fetching dbt project package dependencies...")
    result = self._dbt_cli_runner(DBT_DEPS, *args, **kwargs)
    logger.info(result)
def __init__(self, project_id: str, config_file_path: Optional[str] = None, config_section: str = "looker"):
    """Initialize a Looker SDK client bound to the given Looker project.

    :param project_id: Looker project identifier.
    :param config_file_path: optional path to a looker.ini config file;
        falls back to ~/.dbt/looker.ini.
    :param config_section: ini section to read credentials from.
    """
    default_config_path = utils.assemble_path(utils.get_home_dir(), ".dbt", "looker.ini")
    config_path = config_file_path if config_file_path else default_config_path
    super().__init__(id=project_id)
    self.api_client = looker_sdk.init31(config_file=config_path, section=config_section)
    self._project = self.api_client.project(project_id)
def create_lookml_model(
    model_name: str,
    output_to: str = "stdout",
    connection: str = None,
    label: str = None,
    includes: list = None,
    explores: List[dict] = None,
    access_grants: List[dict] = None,
    tests: List[dict] = None,
    datagroups: List[dict] = None,
    map_layers: List[dict] = None,
    named_value_formats: List[dict] = None,
    fiscal_month_offset: int = None,
    persist_for: str = None,
    persist_with: str = None,
    week_start_day: str = None,
    case_sensitive: bool = True,
    output_directory: str = None,
) -> Optional[str]:
    """Assemble a LookML model and emit it to stdout or a .model.lkml file.

    :param model_name: name of the model; also used as the output file stem.
    :param output_to: one of OUTPUT_TO_OPTIONS ("stdout" returns the LookML
        string; "file" writes <output_directory>/<model_name>.model.lkml).
    :param output_directory: required when output_to == "file".
    :return: the serialized LookML string when writing to stdout, else None.
    :raises DbteaException: on an invalid output_to option, or when writing
        to a file without an output_directory.

    All other parameters are optional model properties; each is included in
    the model only when provided.
    """
    logger.info("Creating LookML Model: {}".format(model_name))

    # Validate output options before doing any assembly work.
    if output_to not in OUTPUT_TO_OPTIONS:
        raise DbteaException(
            name="invalid-lookml-model-properties",
            title="Invalid LookML Model Properties",
            detail="You must choose a valid output_to option from the following: {}".format(
                OUTPUT_TO_OPTIONS
            ),
        )
    if output_to == "file" and not output_directory:
        raise DbteaException(
            name="missing-output-directory",
            title="No Model Output Directory Specified",
            detail="You must include an output_directory param if outputting model to a file",
        )

    assembled_model_dict = {}

    # Optional model-level settings, added only when provided.
    if connection:
        assembled_model_dict["connection"] = connection
    if label:
        assembled_model_dict["label"] = label
    if includes:
        assembled_model_dict["includes"] = includes
    if persist_for:
        assembled_model_dict["persist_for"] = persist_for
    if persist_with:
        assembled_model_dict["persist_with"] = persist_with
    # FIX: compare against None so an explicit offset of 0 is still emitted
    # (the original truthiness test silently dropped a zero offset).
    if fiscal_month_offset is not None:
        assembled_model_dict["fiscal_month_offset"] = fiscal_month_offset
    if week_start_day:
        assembled_model_dict["week_start_day"] = week_start_day
    if not case_sensitive:
        assembled_model_dict["case_sensitive"] = "no"

    # Body of the model.
    if datagroups:
        assembled_model_dict["datagroups"] = datagroups
    if access_grants:
        assembled_model_dict["access_grants"] = access_grants
    if explores:
        assembled_model_dict["explores"] = explores
    if named_value_formats:
        assembled_model_dict["named_value_formats"] = named_value_formats
    if map_layers:
        assembled_model_dict["map_layers"] = map_layers
    if tests:
        assembled_model_dict["tests"] = tests

    if output_to == "stdout":
        return lkml.dump(assembled_model_dict)
    model_file_name = utils.assemble_path(output_directory, model_name + ".model.lkml")
    with open(model_file_name, "w") as output_stream:
        output_stream.write(lkml.dump(assembled_model_dict))
def lookml_file_name_and_path(self) -> str:
    """Return this LookML file's path: its directory joined with its file name."""
    directory, name = self.directory_path, self.lookml_file_name
    return utils.assemble_path(directory, name)
def config_name_and_path(self) -> str:
    """Return the full path of the config file (config directory + file name)."""
    directory, name = self.config_dir, self.file_name
    return utils.assemble_path(directory, name)
def target_path(self):
    """Return the dbt target directory: the project's configured 'target-path', defaulting to 'target'."""
    configured_target = self.project_dict.get("target-path", "target")
    return utils.assemble_path(self.project_root, configured_target)
def log_path(self):
    """Return the dbt log directory: the project's configured 'log-path', defaulting to 'logs'."""
    configured_logs = self.project_dict.get("log-path", "logs")
    return utils.assemble_path(self.project_root, configured_logs)