def same_seeds(first: ParsedNode, second: ParsedNode) -> bool:
    """Return True if two seed nodes have matching checksums.

    For seeds, we check the hashes. If the hashes are different types,
    no match. If the hashes are both the same 'path' type, log a warning
    and assume they are the same.
    """
    result = first.checksum == second.checksum
    # if the current checksum is a path (the seed was too large to hash),
    # we want to log a warning describing what we do (or don't) know.
    if first.checksum.name == 'path':
        msg: str
        if second.checksum.name != 'path':
            msg = (f'Found a seed ({first.package_name}.{first.name}) '
                   f'>{MAXIMUM_SEED_SIZE_NAME} in size. The previous file was '
                   f'<={MAXIMUM_SEED_SIZE_NAME}, so it has changed')
        elif result:
            msg = (
                f'Found a seed ({first.package_name}.{first.name}) '
                f'>{MAXIMUM_SEED_SIZE_NAME} in size at the same path, dbt '
                f'cannot tell if it has changed: assuming they are the same')
        else:
            # both checksums are 'path' type but they differ. (The original
            # code had a fourth `else` branch after an explicit
            # `elif not result:`; it was unreachable — `result` is either
            # truthy or falsy — and has been removed.)
            msg = (
                f'Found a seed ({first.package_name}.{first.name}) '
                f'>{MAXIMUM_SEED_SIZE_NAME} in size. The previous file was in '
                f'a different location, assuming it has changed')
        warn_or_error(msg, node=first)
    return result
def catch_as_completed(
    futures  # typing: List[Future[agate.Table]]
) -> Tuple[agate.Table, List[Exception]]:
    """Collect the results of catalog-generation futures as they complete.

    Returns a tuple of (merged agate.Table built from every future that
    succeeded, list of the Exceptions raised by futures that failed).

    KeyboardInterrupt and any other BaseException are re-raised
    immediately so ctrl+c still aborts the run; ordinary Exceptions are
    logged as warnings and collected instead.
    """
    # NOTE: removed a commented-out dead line that pre-built an empty
    # agate.Table here; merge_tables() handles the accumulation.
    tables: List[agate.Table] = []
    exceptions: List[Exception] = []
    for future in as_completed(futures):
        exc = future.exception()
        # we want to re-raise on ctrl+c and BaseException
        if exc is None:
            catalog = future.result()
            tables.append(catalog)
        elif (
            isinstance(exc, KeyboardInterrupt) or
            not isinstance(exc, Exception)
        ):
            raise exc
        else:
            warn_or_error(
                f'Encountered an error while generating catalog: {str(exc)}'
            )
            # exc is not None, derives from Exception, and isn't ctrl+c
            exceptions.append(exc)
    return merge_tables(tables), exceptions
def auth_args(self):
    """Collect the optional authentication args for the connector.

    Only settings the profile actually provides are included; the
    connector itself handles the actual arg validation.
    """
    args = {}
    if self.password:
        args['password'] = self.password
    if self.authenticator:
        args['authenticator'] = self.authenticator
        if self.authenticator == 'oauth':
            token = self.token
            has_id = bool(self.oauth_client_id)
            has_secret = bool(self.oauth_client_secret)
            if has_id and has_secret:
                # a client ID/secret pair means the stored token is a
                # refresh token, not an access token: exchange it
                token = self._get_access_token()
            elif has_id:
                warn_or_error(
                    'Invalid profile: got an oauth_client_id, but not an '
                    'oauth_client_secret!')
            elif has_secret:
                warn_or_error(
                    'Invalid profile: got an oauth_client_secret, but not '
                    'an oauth_client_id!')
            args['token'] = token
    args['private_key'] = self._get_private_key()
    return args
def __post_init__(self):
    """Warn if oauth-only settings are present without authenticator: oauth."""
    if self.authenticator == 'oauth':
        return
    if self.oauth_client_secret or self.oauth_client_id or self.token:
        # the user probably forgot to set 'authenticator' like I keep doing
        warn_or_error(
            'Authenticator is not set to oauth, but an oauth-only '
            'parameter is set! Did you mean to set authenticator: oauth?')
def warn_for_unused_resource_config_paths(self, resource_fqns, disabled):
    """Warn about configured resource paths that matched no resource."""
    unused = self.get_unused_resource_config_paths(resource_fqns, disabled)
    if not unused:
        return
    bullet_lines = ['- {}'.format('.'.join(path)) for path in unused]
    msg = UNUSED_RESOURCE_CONFIGURATION_PATH_MESSAGE.format(
        len(unused), '\n'.join(bullet_lines))
    warn_or_error(msg, log_fmt=printer.yellow('{}'))
def _fetch_metadata(self, project) -> ProjectPackageMetadata:
    """Check out the git package and load its project metadata.

    Warns when the package floats on 'master' instead of a pinned
    revision, since unpinned packages can change without notice.
    """
    checkout_path = self._checkout()
    on_master = self.revision == 'master'
    if on_master and self.warn_unpinned:
        warn_or_error(
            'The git package "{}" is not pinned.\n\tThis can introduce '
            'breaking changes into your project without warning!\n\nSee {}'
            .format(self.git, PIN_PACKAGE_URL),
            log_fmt=printer.yellow('WARNING: {}'))
    project_obj = Project.from_project_root(checkout_path, {})
    return ProjectPackageMetadata.from_project(project_obj)
def warn_for_unused_resource_config_paths(self, resource_fqns, disabled):
    """Emit a warning listing resource config paths that matched nothing."""
    unused = self.get_unused_resource_config_paths(resource_fqns, disabled)
    if not unused:
        return
    formatted = '\n'.join('- ' + '.'.join(u) for u in unused)
    msg = UNUSED_RESOURCE_CONFIGURATION_PATH_MESSAGE.format(
        len(unused), formatted
    )
    warn_or_error(msg, log_fmt=printer.yellow('{}'))
def _fetch_metadata(self, project, renderer) -> ProjectPackageMetadata:
    """Check out the git package and load its project metadata.

    Emits a warning when the checked-out revision is not pinned (and
    warnings are enabled), since unpinned git packages can introduce
    breaking changes without notice.
    """
    path = self._checkout()
    # hoisted: unpinned_msg() was previously called twice (once in the
    # condition and again in the message)
    unpinned_reason = self.unpinned_msg()
    if unpinned_reason and self.warn_unpinned:
        warn_or_error(
            'The git package "{}" \n\tis {}.\n\tThis can introduce '
            'breaking changes into your project without warning!\n\nSee {}'
            .format(self.git, unpinned_reason, PIN_PACKAGE_URL),
            log_fmt=ui.yellow('WARNING: {}'))
    loaded = Project.from_project_root(path, renderer)
    return ProjectPackageMetadata.from_project(loaded)
def warn_for_unused_resource_config_paths(
    self,
    resource_fqns: Mapping[str, PathSet],
    disabled: PathSet,
) -> None:
    """Warn about resource config paths that matched no enabled resource."""
    unused = self.get_unused_resource_config_paths(resource_fqns, disabled)
    if not unused:
        return
    listing = '\n'.join('- {}'.format('.'.join(entry)) for entry in unused)
    msg = UNUSED_RESOURCE_CONFIGURATION_PATH_MESSAGE.format(
        len(unused), listing)
    warn_or_error(msg, log_fmt=warning_tag('{}'))
def patch_macros(self) -> None:
    """Apply stored macro patches; warn about patches with no macro.

    Patches are popped from self.macro_patches as they are applied, so
    anything left afterwards documents a macro that does not exist.
    """
    for macro in self.macros.values():
        patch = self.macro_patches.pop((macro.package_name, macro.name), None)
        if patch:
            macro.patch(patch)
    # iterating an empty dict is a no-op, so no emptiness guard is needed
    for patch in self.macro_patches.values():
        warn_or_error(
            f'WARNING: Found documentation for macro "{patch.name}" '
            f'which was not found')
def invalid_source_fail_unless_test(node, target_name, target_table_name,
                                    disabled):
    """Handle a missing/disabled source: tests warn, other nodes fail."""
    if node.resource_type != NodeType.Test:
        # non-test nodes treat a missing source target as a hard failure
        source_target_not_found(node, target_name, target_table_name,
                                disabled=disabled)
        return
    msg = get_source_not_found_or_disabled_msg(node, target_name,
                                               target_table_name, disabled)
    if disabled:
        # a deliberately disabled source only merits a debug line
        logger.debug(warning_tag(msg))
    else:
        warn_or_error(msg, log_fmt=warning_tag('{}'))
def patch_macros(
    self, patches: MutableMapping[MacroKey, ParsedMacroPatch]
) -> None:
    """Apply each patch to its macro; warn about patches with no macro.

    Patches are popped from ``patches`` as they are applied, so whatever
    remains afterwards documents a macro that does not exist.
    """
    for macro in self.macros.values():
        patch = patches.pop((macro.package_name, macro.name), None)
        if patch:
            macro.patch(patch)
    # iterating an empty mapping is a no-op, so no emptiness guard needed
    for patch in patches.values():
        warn_or_error(
            f'WARNING: Found documentation for macro "{patch.name}" '
            f'which was not found')
def get_paths(self):
    """Return a searcher over the project's .yml schema files.

    Also warns if any .yaml files are present, because a future dbt
    version will begin parsing those as well.
    """
    # TODO: In order to support this, make FilesystemSearcher accept a list
    # of file patterns. eg: ['.yml', '.yaml']
    source_paths = self.project.all_source_paths
    yaml_count = len(list(
        FilesystemSearcher(self.project, source_paths, '.yaml')))
    if yaml_count:
        warn_or_error(
            'A future version of dbt will parse files with both'
            ' .yml and .yaml file extensions. dbt found'
            f' {yaml_count} files with .yaml extensions in'
            ' your dbt project. To avoid errors when upgrading'
            ' to a future release, either remove these files from'
            ' your dbt project, or change their extensions.')
    return FilesystemSearcher(self.project, source_paths, '.yml')
def warn_unused(self) -> None:
    """Warn about source patches (or individual patch tables) never used."""
    unused_tables: Dict[SourceKey, Optional[Set[str]]] = {}
    for patch in self.results.source_patches.values():
        key = (patch.overrides, patch.name)
        if key not in self.patches_used:
            # the entire patch went unused; None marks "whole patch"
            unused_tables[key] = None
            continue
        if patch.tables is None:
            continue
        patched_names = {table.name for table in patch.tables}
        leftover = patched_names - self.patches_used[key]
        # don't add an entry when every patched table was used
        if leftover:
            # because patches are required to be unique, we can safely
            # write without looking
            unused_tables[key] = leftover
    if unused_tables:
        warn_or_error(self.get_unused_msg(unused_tables),
                      log_fmt=ui.warning_tag('{}'))
def alert_non_existence(raw_spec, nodes):
    """Warn when a selection criterion matched no nodes at all."""
    if not nodes:
        warn_or_error(
            f"The selection criterion '{str(raw_spec)}' does not match"
            f" any nodes")
def alert_non_existence(raw_spec, nodes):
    """Warn that a selector matching nothing will simply be ignored."""
    if len(nodes) > 0:
        return
    warn_or_error(
        f"The selector '{str(raw_spec)}' does not match any nodes and will"
        f" be ignored"
    )