def miniLanguage(key, text, position, parameters):
    try:
        code, parameters = Utilities.importModule('Inputs', key,
                                                  'Parse').Parse.parse(
                                                      text, parameters)
        result = etree.tostring(code)
        return result
    except:
        Utilities.logging.error("Failed to parse mini-language " + key)
def test_isDefined(self):
    self.assertTrue(Utilities.isDefined({'a': {'b': {'c': 'd'}}}, '/a'))
    self.assertTrue(Utilities.isDefined({'a': {'b': {'c': 'd'}}}, '/a/b'))
    self.assertTrue(Utilities.isDefined({'a': {'b': {'c': 'd'}}}, '/a/b/c'))
    self.assertFalse(Utilities.isDefined({'a': {'b': {'c': 'd'}}}, '/a/b/c/d'))
    self.assertFalse(Utilities.isDefined({'a': {'b': {'c': 'd'}}}, 'a/z/c'))
def prepareGroups(parameters):
    # group all language keywords by package group and module
    groups = {}
    for keyword, value in parameters['language'].iteritems():
        # the 'package' entry has the form 'Group:Module'
        group, module = tuple(value['package'].split(':'))
        if not Utilities.isDefined(groups, '/' + group + '/' + module):
            dpath.util.new(groups, '/' + group + '/' + module, [])
        groups[group][module].append(keyword)
    return groups
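# Illustration (hypothetical data, not part of the codebase): prepareGroups
# rearranges the flat per-keyword 'package' entries into a nested
# group/module dictionary, e.g.
#
#   parameters = {'language': {'print': {'package': 'Base:IO'},
#                              'add':   {'package': 'Base:Math'}}}
#   prepareGroups(parameters)
#   # -> {'Base': {'IO': ['print'], 'Math': ['add']}}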
def createCustomGrammar(parameters, input):
    # @BUG if first element of the custom vector is '' it does not work

    # initialise variables
    keywords = {}

    # load language definitions for all modules
    for transform in parameters['manifesto']['Transformers'].keys():
        keywords.update(
            Utilities.importModule('Transformers', transform,
                                   'Language').Language.language)

    grammar = ''
    operators = []

    for key, value in keywords.iteritems():
        if 'input' in value.keys():
            if input in value['input'].keys():
                if 'custom' in value['input'][input].keys():
                    custom = value['input'][input]['custom']

                    # one expression per slot, with the custom text in between
                    expressions = [
                        'expression:e' + str(i)
                        for i in range(len(custom) - 1)
                    ]
                    infixes = [
                        ' ws \'' + custom[i] + '\' ws '
                        for i in range(1, len(custom) - 1)
                    ]
                    fullexpression = [
                        x for t in zip(expressions, infixes) for x in t
                    ]
                    fullexpression.append(expressions[-1])

                    # optional opening and closing text
                    if custom[0] != '':
                        fullexpression.insert(0, '\'' + custom[0] + '\' ws ')
                    if custom[-1] != '':
                        fullexpression.append(' ws \'' + custom[-1] + '\'')

                    grammar += (key + ' = ' + ''.join(fullexpression) +
                                ' -> xml(\'' + key + '\', ' + '+'.join([
                                    'e' + str(i)
                                    for i in range(len(custom) - 1)
                                ]) + ', self.input.position, dictionary)\n')

                    operators.append(key)

    grammar += 'customElements = ' + ' | '.join(operators) + '\n'

    return grammar
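# Sketch of the generated grammar (the keyword 'vector' and the custom vector
# ['(', ',', ')'] are hypothetical, for illustration only): the loop above
# would emit the rule
#
#   vector = '(' ws expression:e0 ws ',' ws expression:e1 ws ')' -> xml('vector', e0+e1, self.input.position, dictionary)
#
# and list 'vector' as one of the alternatives of customElements.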
def parse(text, parameters):
    # @BUG If the command line flag --language is used may need to recompute dictionary

    # load cached language and grammar or create from scratch if needed
    grammar, dictionary = Utilities.cache(
        'RoL-language-grammar', lambda: prepareGrammarEngine(parameters))

    if parameters['Inputs']['RoL']['debug']['grammar']:
        print(grammar)

    # @NOTE could not pickle the language itself. Is there a way to solve this?
    # create the grammar
    language = parsley.makeGrammar(
        grammar, {
            'xml': xml,
            'xmlInfix': xmlInfix,
            'dictionary': dictionary,
            'miniLanguage': lambda x, y, z: miniLanguage(x, y, z, parameters)
        })

    try:
        # parse the text against the grammar
        parsed_xml_text = ''.join(language(text).main())
    except parsley.ParseError as error:
        Utilities.logErrors(Utilities.formatParsleyErrorMessage(error),
                            parameters)
        sys.exit(1)

    try:
        # create XML object from xml string
        parsed_xml = etree.fromstring(parsed_xml_text)
    except etree.XMLSyntaxError as error:
        Utilities.logErrors(Utilities.formatLxmlErrorMessage(error),
                            parameters)
        sys.exit(1)

    # If the node has parameters, then add them to the global parameters dictionary
    parameters = Utilities.mergeDictionaries(
        nodeParametersToDictionary(parsed_xml), parameters)

    # apply semantic changes
    parsed_xml = semanticTransformations(parsed_xml)

    return parsed_xml, parameters
def output(xml, parameters):
    # get node name
    node_name_underscore = Utilities.underscore(parameters['node']['name'])

    # make a copy of the xml tree
    xml_copy = copy.deepcopy(xml)

    # delete all attributes
    for element in xml_copy.iter():
        element.attrib.clear()

    # save the tree into a file
    with open(parameters['globals']['deploy'] + '/' + node_name_underscore +
              '.xml', 'w') as xml_file:
        xml_file.write('<?xml version="1.0"?>\n' +
                       etree.tostring(xml_copy, pretty_print=True))

    Utilities.logging.debug('Wrote file ' + parameters['globals']['deploy'] +
                            '/' + node_name_underscore + '.xml...')

    return 0
def test_templateEngine(self):
    # create test folders
    Utilities.createFolder('/tmp/RoL/templates/_foldername_')
    Utilities.createFolder('/tmp/RoL/deploy')

    # create a temporary file to copy
    with open('/tmp/RoL/templates/_foldername_/copy.txt',
              'w') as template_file:
        template_file.write('A file to copy')

    # create a temporary template
    with open('/tmp/RoL/templates/_foldername_/_test_.txt.template',
              'w') as template_file:
        template_file.write('A template for a {{code|xpath("/node")|text}}')

    # some sample code
    code = etree.fromstring('<node>hello</node>')
    parameters = {}
    filepatterns = {'foldername': 'testfolder', 'test': 'hello'}
    templates_path = '/tmp/RoL/templates'
    deploy_path = '/tmp/RoL/deploy'

    # run template engine
    Utilities.templateEngine(code, parameters, filepatterns, templates_path,
                             deploy_path)

    # check if files exist
    result1 = os.path.isfile('/tmp/RoL/deploy/testfolder/hello.txt')
    result2 = os.path.isfile('/tmp/RoL/deploy/testfolder/copy.txt')

    # check if the content of the files is correct
    # (default to failure in case the files were not created)
    result3 = False
    result4 = False
    if result1:
        with open('/tmp/RoL/deploy/testfolder/hello.txt',
                  'r') as template_file:
            text = template_file.read()
        result3 = (text == 'A template for a hello')
    if result2:
        with open('/tmp/RoL/deploy/testfolder/copy.txt',
                  'r') as template_file:
            text = template_file.read()
        result4 = (text == 'A file to copy')

    self.assertTrue(result1)
    self.assertTrue(result2)
    self.assertTrue(result3)
    self.assertTrue(result4)
def test_smartTitle(self):
    self.assertEqual(Utilities.smartTitle('this test is OK and RoL'),
                     'This Test Is OK And RoL')
def test_mergeDictionaries(self):
    self.assertEqual(
        Utilities.mergeDictionaries({'a': {'b': '1'}}, {'a': {'b': '1'}}),
        {'a': {'b': '1'}})
    self.assertEqual(
        Utilities.mergeDictionaries({'a': '1'}, {'b': '2'}),
        {'a': '1', 'b': '2'})
    self.assertEqual(
        Utilities.mergeDictionaries({'a': '1', 'b': '2'},
                                    {'a': '2', 'c': '3'}),
        {'a': '1', 'b': '2', 'c': '3'})
    self.assertEqual(
        Utilities.mergeDictionaries({'a': {'b': '1', 'c': '3'}, 'b': '2'},
                                    {'a': {'b': '2', 'f': '5'}, 'z': '9'}),
        {'a': {'b': '1', 'f': '5', 'c': '3'}, 'z': '9', 'b': '2'})
    self.assertEqual(
        Utilities.mergeDictionaries(
            {'a': {'b': {'x': '1', 'w': '6'}, 'c': '3'}, 'b': '2'},
            {'a': {'b': {'x': '2', 'y': '7'}, 'f': '5'}, 'z': '9'}),
        {'a': {'b': {'x': '1', 'w': '6', 'y': '7'}, 'f': '5', 'c': '3'},
         'z': '9', 'b': '2'})
def createInfixGrammar(parameters, input):
    # initialise variables
    keywords = {}

    # load language definitions for all modules
    for transform in parameters['manifesto']['Transformers'].keys():
        keywords.update(
            Utilities.importModule('Transformers', transform,
                                   'Language').Language.language)

    infix_grammar = ''
    infix_operators = []
    orders = {}

    for key, value in keywords.iteritems():
        if 'input' in value.keys():
            if input in value['input'].keys():
                if 'infix' in value['input'][input].keys():
                    infix_operators.append({
                        'key': key,
                        'infix': value['input'][input]['infix'],
                        'order': value['input'][input]['infixOrder']
                    })
                    if value['input'][input]['infixOrder'] not in orders.keys():
                        orders[value['input'][input]['infixOrder']] = []
                    orders[value['input'][input]['infixOrder']].append(key)

    sorted_orders = sorted(orders)

    # definition of parenthesis
    infix_grammar += 'parenthesis = \'(\' ws infixElements:e ws \')\' -> e\n'

    # create language infix clauses
    for infix in infix_operators:
        if isinstance(infix['infix'], list):
            symbol = '( \'' + '\' | \''.join(infix['infix']) + '\' )'
        else:
            symbol = '\'' + infix['infix'] + '\''
        infix_grammar += infix['key'] + ' = ' + symbol + ' ws level' + str(
            infix['order']) + ':n -> (\'' + infix['key'] + '\', n )\n'

    infix_grammar += 'level' + str(
        sorted_orders[-1]
    ) + ' = language | prefixElements | customElements | preinpostfixElements | function | type | variable | parenthesis\n'

    # create language level groups
    for key, value in orders.iteritems():
        if len(value) > 1:
            symbol = '( ' + ' | '.join(value) + ' )'
        else:
            symbol = value[0]
        infix_grammar += 'level' + str(key) + 'keys = ws ' + symbol + '\n'

    infix_grammar += 'infixElements = level' + str(
        sorted_orders[0]) + ':left level' + str(
            sorted_orders[0]
        ) + 'keys*:right -> xmlInfix(left,right, self.input.position)\n'

    for i in range(1, len(sorted_orders)):
        infix_grammar += 'level' + str(
            sorted_orders[i - 1]) + ' = level' + str(
                sorted_orders[i]) + ':left level' + str(
                    sorted_orders[i]
                ) + 'keys*:right -> xmlInfix(left,right, self.input.position)\n'

    return infix_grammar
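# Sketch of the generated infix grammar (hypothetical operator, for
# illustration only): a single keyword 'plus' with infix '+' and infixOrder 10
# would produce
#
#   parenthesis = '(' ws infixElements:e ws ')' -> e
#   plus = '+' ws level10:n -> ('plus', n )
#   level10 = language | prefixElements | customElements | preinpostfixElements | function | type | variable | parenthesis
#   level10keys = ws plus
#   infixElements = level10:left level10keys*:right -> xmlInfix(left,right, self.input.position)
#
# When several infixOrder values are present, additional levelN/levelNkeys
# rules chain one precedence level to the next through xmlInfix.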
def test_isKeyDefined(self):
    self.assertTrue(Utilities.isKeyDefined('test', {'test': 'ok'}))
    self.assertFalse(Utilities.isKeyDefined('ok', {'test': 'ok'}))
def test_cache(self):
    Utilities.removeCache()
    self.assertEqual(Utilities.cache('test-cache', cache_some_string),
                     'hello')
    self.assertEqual(Utilities.cache('test-cache', cache_some_string),
                     'hello')
def test_importModule(self):
    self.assertEqual(
        Utilities.importModule(
            'Outputs', 'RosCpp',
            'Parameters').Parameters.manifesto['packageShortName'], 'roscpp')
def test_ensureList(self):
    self.assertEqual(Utilities.ensureList([1, 2, 3]), [1, 2, 3])
    self.assertEqual(Utilities.ensureList(1), [1])
def test_text(self):
    xml1 = etree.fromstring('<string>some text</string>')
    xml2 = etree.fromstring('<xml><string>some text</string></xml>')
    self.assertEqual(Utilities.text(xml1), 'some text')
    self.assertEqual(Utilities.text(xml2), '')
def test_ProcessArguments(self):
    Utilities.createFolder('/tmp/RoL/')
    Utilities.createFolder(os.path.expanduser('~') + '/.rol/')

    # create a user-wide parameter file
    global_parameters_file = os.path.expanduser('~') + '/.rol/parameters.yaml'
    if os.path.isfile(global_parameters_file):
        # file exists: make a backup
        global_parameters_file_exists = True
        os.rename(global_parameters_file, global_parameters_file + '.backup')
    else:
        global_parameters_file_exists = False
    with open(global_parameters_file, 'w') as parameter_file:
        parameter_file.write(
            'testing:\n parameterA: 1\n repeatedParameter: 1')

    # create a local parameter file
    with open('/tmp/RoL/.rol.parameters.yaml', 'w') as parameter_file:
        parameter_file.write(
            'testing:\n parameterB: 2\n repeatedParameter: 2')

    # create extra parameter file 1
    with open('/tmp/RoL/test1.yaml', 'w') as parameter_file:
        parameter_file.write(
            'testing:\n parameterC: 3\n repeatedParameter: 3')

    # create extra parameter file 2
    with open('/tmp/RoL/test2.yaml', 'w') as parameter_file:
        parameter_file.write(
            'testing:\n parameterD: 4\n repeatedParameter: 4')

    # create RoL file
    with open('/tmp/RoL/test.rol', 'w') as template_file:
        template_file.write('print(\'hello\')')

    # set command line parameters
    command_line_parameters = [
        'rol', '/tmp/RoL/test.rol', '/tmp/RoL/test1.yaml',
        '/tmp/RoL/test2.yaml', '-o', 'RoLXML'
    ]

    parameters = {
        'debug': {},
        'Information': {},
        'Transformers': {},
        'Outputs': {},
        'Inputs': {},
        'manifesto': {
            'Inputs': {
                'RoL': {
                    'fileFormat': 'rol',
                    'packageName': 'Robotics Language',
                    'packageShortName': 'RoL'
                }
            },
            'Transformers': {},
            'Outputs': {}
        },
        'command_line_flags': {
            'globals:output': {
                'choices': ['RoLXML'],
                'description': 'Outputs',
                'flag': 'o',
                'longFlag': 'output',
                'numberArguments': '*'
            },
        },
        'globals': {
            'output': ''
        }
    }

    # run the command line parser
    filename, filetype, outputs, parameters = CommandLine.ProcessArguments(
        command_line_parameters, parameters)

    # clean up
    if global_parameters_file_exists:
        # delete test file
        os.remove(global_parameters_file)
        # put back the original file
        os.rename(global_parameters_file + '.backup', global_parameters_file)

    # check filename
    self.assertEqual(filename, '/tmp/RoL/test.rol')

    # check filetype
    self.assertEqual(filetype, 'rol')

    # check list of outputs
    self.assertEqual(outputs, ['RoLXML'])

    # check parameters
    self.assertEqual(parameters['testing']['parameterA'], 1)
    self.assertEqual(parameters['testing']['parameterB'], 2)
    self.assertEqual(parameters['testing']['parameterC'], 3)
    self.assertEqual(parameters['testing']['parameterD'], 4)
    self.assertEqual(parameters['testing']['repeatedParameter'], 4)
def test_camelCase(self):
    self.assertEqual(Utilities.camelCase('/this_test is.OK and RoL'),
                     'ThisTestIsOKAndRoL')
def test_fullCaps(self):
    self.assertEqual(Utilities.fullCaps('/Here Is_A.Phrase'), 'HEREISAPHRASE')
def test_underscoreFullCaps(self):
    self.assertEqual(Utilities.underscoreFullCaps('/Here Is A.Phrase'),
                     '_HERE_IS_A_PHRASE')
def test_underscore(self):
    self.assertEqual(Utilities.underscore('/Here Is A.Phrase'),
                     '_here_is_a_phrase')
def test_lowerSpaceToDash(self):
    self.assertEqual(Utilities.lowerSpaceToDash('Here Is A Phrase'),
                     'here-is-a-phrase')
def test_lowerNoSpace(self):
    self.assertEqual(Utilities.lowerNoSpace('Here Is A Phrase'),
                     'hereisaphrase')