def setup(self):
    """Build a fresh token/rule registry for the lighting naming tests."""
    tokens.reset_tokens()
    tokens.add_token('whatAffects')
    tokens.add_token_number('digits')
    tokens.add_token(
        'category',
        natural='natural', practical='practical',
        dramatic='dramatic', volumetric='volumetric',
        default='natural',
    )
    tokens.add_token(
        'function',
        key='key', fill='fill', ambient='ambient', bounce='bounce',
        rim='rim', custom='custom', kick='kick',
        default='custom',
    )
    tokens.add_token('type', lighting='LGT', animation='ANI', default='lighting')
    rules.reset_rules()
    rules.add_rule('lights', '{category}_{function}_{whatAffects}_{digits}_{type}')
def test_parsing_without_separators(self):
    """A pattern with no separators between tokens cannot be parsed."""
    rules.reset_rules()
    rules.add_rule('lights', '{category}{function}{whatAffects}{digits}{type}')
    parsed = n.parse('dramatic_bounce_chars_001_LGT')
    assert parsed is None
def test_parse_anchoring_start(self):
    """An ANCHOR_START rule resolves the free token at the name's start."""
    rules.reset_rules()
    rules.add_rule('anchoring', '{awesometoken}_crazy_hardcoded_value', rules.Rule.ANCHOR_START)
    result = n.parse('hello_crazy_hardcoded_value')
    assert result == {'awesometoken': 'hello'}
def test_parse_anchoring_end(self):
    """An ANCHOR_END rule resolves the free token at the name's end."""
    rules.reset_rules()
    rules.add_rule('anchoring', 'crazy_hardcoded_value_{awesometoken}', rules.Rule.ANCHOR_END)
    result = n.parse('crazy_hardcoded_value_bye')
    assert result == {'awesometoken': 'bye'}
def test_solve_anchoring_start(self):
    """Solving an ANCHOR_START rule rebuilds the full name from a keyword arg."""
    rules.reset_rules()
    rules.add_rule('anchoring', '{awesometoken}_crazy_hardcoded_value', rules.Rule.ANCHOR_START)
    assert n.solve(awesometoken='hello') == 'hello_crazy_hardcoded_value'
def test_solve_anchoring_end(self):
    """Solving an ANCHOR_END rule accepts the token value positionally."""
    rules.reset_rules()
    rules.add_rule('anchoring', 'crazy_hardcoded_value_{awesometoken}', rules.Rule.ANCHOR_END)
    assert n.solve('bye') == 'crazy_hardcoded_value_bye'
def test_parse_anchoring_both(self):
    """With ANCHOR_BOTH, a repeated token parses into numbered keys."""
    rules.reset_rules()
    rules.add_rule(
        'anchoring',
        '{awesometoken}_crazy_hardcoded_value_{awesometoken}',
        rules.Rule.ANCHOR_BOTH,
    )
    result = n.parse('hello_crazy_hardcoded_value_bye')
    assert result == {'awesometoken1': 'hello', 'awesometoken2': 'bye'}
def test_solve_anchoring_both(self):
    """With ANCHOR_BOTH, numbered keyword args fill each repeated token."""
    rules.reset_rules()
    rules.add_rule(
        'anchoring',
        '{awesometoken}_crazy_hardcoded_value_{awesometoken}',
        rules.Rule.ANCHOR_BOTH,
    )
    assert n.solve(awesometoken1='hello', awesometoken2='bye') == 'hello_crazy_hardcoded_value_bye'
def test_save_load_rule(self):
    """A rule saved to disk can be loaded back after the registry is reset."""
    rules.add_rule('test', '{category}_{function}_{whatAffects}_{digits}_{type}')
    tempdir = tempfile.mkdtemp()
    rules.save_rule('test', tempdir)
    rules.reset_rules()
    filepath = os.path.join(tempdir, 'test.rule')
    rules.load_rule(filepath)
    assert rules.has_rule('test') is True
def test_parsing_with_separators(self):
    """Every token is recovered from a fully underscore-separated name."""
    rules.reset_rules()
    rules.add_rule('lights', '{category}_{function}_{whatAffects}_{digits}_{type}')
    parsed = n.parse('dramatic_bounce_chars_001_LGT')
    expected = {
        'category': 'dramatic',
        'function': 'bounce',
        'whatAffects': 'chars',
        'digits': 1,
        'type': 'lighting',
    }
    for key, value in expected.items():
        assert parsed[key] == value
def setup(self):
    """Prepare side/region tokens and the repeated-field filename rule."""
    tokens.reset_tokens()
    rules.reset_rules()
    tokens.add_token('side', center='C', left='L', right='R', default='center')
    tokens.add_token(
        'region',
        orbital='ORBI', parotidmasseter='PAROT', mental='MENT',
        frontal='FRONT', zygomatic='ZYGO', retromandibularfossa='RETMAND',
    )
    rules.add_rule('filename', '{side}-{region}_{side}-{region}_{side}-{region}')
def test_save_load_session(self):
    """A full session (tokens, rules, active rule) round-trips through disk."""
    tokens.add_token('whatAffects')
    tokens.add_token_number('digits')
    tokens.add_token(
        'category',
        natural='natural', practical='practical',
        dramatic='dramatic', volumetric='volumetric',
        default='natural',
    )
    tokens.add_token(
        'function',
        key='key', fill='fill', ambient='ambient', bounce='bounce',
        rim='rim', custom='custom', kick='kick',
        default='custom',
    )
    tokens.add_token('type', lighting='LGT', animation='ANI', default='lighting')
    rules.add_rule('lights', '{category}.{function}.{whatAffects}.{digits}.{type}')
    rules.add_rule('test', '{category}_{function}')
    rules.set_active_rule('lights')

    repo = tempfile.mkdtemp()
    assert n.save_session(repo) is True

    # Wipe everything in memory, then restore it from the repo on disk.
    rules.reset_rules()
    tokens.reset_tokens()
    n.load_session(repo)

    for token_name in ('whatAffects', 'digits', 'category', 'function', 'type'):
        assert tokens.has_token(token_name) is True
    for rule_name in ('lights', 'test'):
        assert rules.has_rule(rule_name) is True
    assert rules.get_active_rule().name == 'lights'
def load_session(repo=None):
    """Load rules, tokens and config from a repository, and create
    Python objects in memory to work with them.

    Args:
        repo (str, optional): Absolute path to a repository. Defaults to None,
            in which case the configured repo from ``get_repo()`` is used.

    Returns:
        bool: True if the loading session operation was successful, False if
            the repo directory or its naming.conf file does not exist.
    """
    repo = repo or get_repo()
    if not os.path.exists(repo):
        logger.warning("Given repo directory does not exist: {}".format(repo))
        return False
    namingconf = os.path.join(repo, "naming.conf")
    if not os.path.exists(namingconf):
        logger.warning("Repo is not valid. naming.conf not found {}".format(namingconf))
        return False
    # Start from a clean in-memory state before loading from disk.
    rules.reset_rules()
    tokens.reset_tokens()
    # Load every *.token and *.rule file found anywhere under the repo.
    for dirpath, _dirnames, filenames in os.walk(repo):
        for filename in filenames:
            filepath = os.path.join(dirpath, filename)
            if filename.endswith(".token"):
                logger.debug("Loading token: {}".format(filepath))
                tokens.load_token(filepath)
            elif filename.endswith(".rule"):
                logger.debug("Loading rule: {}".format(filepath))
                rules.load_rule(filepath)
    # Restore the active rule. The early return above already guaranteed
    # naming.conf exists, so no second existence check is needed.
    logger.debug("Loading active rule: {}".format(namingconf))
    with open(namingconf) as fp:
        config = json.load(fp)
    rules.set_active_rule(config.get('set_active_rule'))
    return True
def setup(self):
    """Start each test from a clean slate: no tokens, no rules."""
    tokens.reset_tokens()
    rules.reset_rules()
def test_reset_rules(self):
    """Resetting the rule registry reports success."""
    assert rules.reset_rules() is True
def setup(self):
    """Clear all registered rules before each test in this suite."""
    rules.reset_rules()