def main(argv):
    """Split a Bluemix conversation-service intents .json file into one .csv file per intent."""
    parser = argparse.ArgumentParser(description='Decompose Bluemix conversation service intents in .json format to intent files in .csv format', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # positional arguments
    parser.add_argument('intents', help='file with intents in .json format')
    parser.add_argument('intentsDir', help='directory with intents files')
    # optional arguments
    parser.add_argument('-ni', '--common_intents_nameCheck', action='append', nargs=2, help="regex and replacement for intent name check, e.g. '-' '_' for to replace hyphens for underscores or '$special' '\\L' for lowercase")
    parser.add_argument('-s', '--soft', required=False, help='soft name policy - change intents and entities names without error.', action='store_true', default="")
    parser.add_argument('-v', '--verbose', required=False, help='verbosity', action='store_true')
    parser.add_argument('--log', type=str.upper, default=None, choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    # only configure logging when run as a script, not when imported
    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    NAME_POLICY = 'soft' if args.soft else 'hard'

    with openFile(args.intents, 'r') as intentsFile:
        intentsJSON = json.load(intentsFile)

    # one output .csv per intent; one normalized (stripped, lowercased) example per row
    for intentJSON in intentsJSON:
        sentences = [exampleJSON["text"].strip().lower() for exampleJSON in intentJSON["examples"]]
        targetName = toIntentName(NAME_POLICY, args.common_intents_nameCheck, intentJSON["intent"]) + ".csv"
        with openFile(os.path.join(args.intentsDir, targetName), "w") as intentFile:
            intentFile.writelines(sentence + "\n" for sentence in sentences)

    logger.verbose("Intents from file '%s' were successfully extracted\n", args.intents)
def saveDialogDataToFileSystem(dialogData, handler, config):
    """Persist converted dialog data to disk.

    Writes one .xml file per dialog domain, plus (optionally) one .csv file per
    intent and one per entity, into the folders named by the config attributes
    'common_generated_dialogs', 'common_generated_intents' and
    'common_generated_entities'. Sections whose config attribute is missing are
    skipped silently.

    Args:
        dialogData: project object exposing getAllDomains()/getAllIntents()/getAllEntities().
        handler: project object that converts dialog data to XML (convertDialogData, printXml).
        config: configuration object queried via hasattr/getattr.
    """
    # Create directory for dialogs (if it does not exist already)
    if hasattr(config, 'common_generated_dialogs') and not os.path.exists(
            getattr(config, 'common_generated_dialogs')):
        os.makedirs(getattr(config, 'common_generated_dialogs'))
        logger.info('Created new directory ' + getattr(config, 'common_generated_dialogs'))

    # Generate one xml file per dialog domain (a domain corresponds to one
    # original xls workbook, i.e. all of its sheets).
    domains = dialogData.getAllDomains()
    for domain_name in domains:  # For all domains
        filename = getattr(
            config, 'common_generated_dialogs') + '/' + domain_name + '.xml'
        with openFile(filename, 'w', encoding='utf8') as dialogFile:
            # process all nodes of the domain
            xmlData = handler.convertDialogData(dialogData, domains[domain_name])
            dialogFile.write(handler.printXml(xmlData))

    # generate intents if 'common_generated_intents' folder is specified
    if hasattr(config, 'common_generated_intents'):
        generatedIntents = getattr(config, 'common_generated_intents')
        # config value may be a list (take its first item) or a plain string
        generatedIntentsFolder = generatedIntents[0] if isinstance(
            generatedIntents, list) else generatedIntents
        # Create directory for intents (if it does not exist already)
        if not os.path.exists(generatedIntentsFolder):
            os.makedirs(generatedIntentsFolder)
            logger.info('Created new directory ' + generatedIntentsFolder)
        # One file per intent; intents without examples are skipped
        for intent, intentData in dialogData.getAllIntents().items():
            if len(intentData.getExamples()) > 0:
                # strip the leading '#' marker from the intent name, if present
                intent_name = intent[1:] if intent.startswith(u'#') else intent
                with openFile(
                        os.path.join(generatedIntentsFolder, intent_name + '.csv'),
                        'w') as intentFile:
                    for example in intentData.getExamples():
                        intentFile.write(example + '\n')

    # generate entities if 'common_generated_entities' folder is specified
    if hasattr(config, 'common_generated_entities'):
        generatedEntities = getattr(config, 'common_generated_entities')
        # config value may be a list (take its first item) or a plain string
        generatedEntitiesFolder = generatedEntities[0] if isinstance(
            generatedEntities, list) else generatedEntities
        # Create directory for entities (if it does not exist already)
        if not os.path.exists(generatedEntitiesFolder):
            os.makedirs(generatedEntitiesFolder)
            logger.info('Created new directory ' + generatedEntitiesFolder)
        # One file per entity, one value per line
        for entity_name, entityData in dialogData.getAllEntities().items():
            with openFile(
                    os.path.join(generatedEntitiesFolder, entity_name + '.csv'),
                    'w') as entityFile:
                for entityList in entityData.getValues():
                    entityFile.write(entityList + '\n')
def saveConfiguration(self, configFileName):
    """Serialize this object's attributes to an INI-style config file.

    Attribute names are expected to be '<section><delimiter><option>'
    (split on Cfg.sectionDelimiter); attributes without a section prefix
    are skipped with a warning. Lists are joined with commas and booleans
    stringified before writing. IOError on write is logged, not raised.

    Args:
        configFileName: path of the config file to write.
    """
    outputConfig = configparser.ConfigParser()
    for optionUniqueName in self.__dict__:
        namesList = optionUniqueName.split(Cfg.sectionDelimiter)
        # find section and option names
        if len(namesList) > 1:
            section = namesList[0]
            # an option name may itself contain the delimiter: re-join the rest
            option = Cfg.sectionDelimiter.join(namesList[1:])
        else:
            logger.warning(
                "Missing section name in parameter name '%s', skipping.",
                optionUniqueName)
            continue
        # create non existing sections
        if not outputConfig.has_section(section):
            outputConfig.add_section(section)
        # convert types to string (configparser stores strings only)
        if isinstance(getattr(self, optionUniqueName), list):
            outputConfig.set(section, option, ','.join(getattr(self, optionUniqueName)))
        elif isinstance(getattr(self, optionUniqueName), bool):
            outputConfig.set(section, option, str(getattr(self, optionUniqueName)))
        else:
            outputConfig.set(section, option, getattr(self, optionUniqueName))
    try:
        with openFile(configFileName, 'w') as configFile:
            outputConfig.write(configFile)
    except IOError:
        # best-effort save: report and continue
        logger.error('Cannot save config file %s', configFileName)
def getEntities(entityDir, entitiesNameCheck, NAME_POLICY):
    """Retrieves entity value to entity name mapping from the directory with entity lists.

    Each file in entityDir is one entity; its (checked) basename becomes the
    entity name. Lines hold semicolon-separated values; '#' starts a comment.

    Args:
        entityDir: directory containing one value-list file per entity.
        entitiesNameCheck: regex/replacement pairs forwarded to toEntityName.
        NAME_POLICY: 'soft' or 'hard' name policy for toEntityName.

    Returns:
        dict mapping each lowercase entity value to its entity name
        (later files win on duplicate values).
    """
    entities = {}
    for entityFileName in os.listdir(entityDir):
        entityName = toEntityName(NAME_POLICY, entitiesNameCheck,
                                  os.path.splitext(entityFileName)[0])
        with openFile(os.path.join(entityDir, entityFileName), "r") as entityFile:
            for line in entityFile:
                # remove comments
                line = line.split('#')[0]
                line = line.rstrip().lower()
                # BUG FIX: blank or comment-only lines used to register '' as
                # an entity value (''.split(';') == ['']); skip them instead
                if not line:
                    continue
                for entity in line.split(';'):
                    if entity:
                        entities[entity] = entityName
    return entities
def main(argv):
    """Decompose a workspace .json into separate intents/entities/dialog/counterexamples files."""
    parser = argparse.ArgumentParser(
        description=
        'Decompose Bluemix conversation service workspace in .json format to intents json, entities json and dialog json',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # positional arguments
    parser.add_argument('workspace', help='workspace in .json format')
    # optional arguments
    parser.add_argument(
        '-i', '--intents', required=False,
        help='file with intents in .json format (not extracted if not specified)')
    parser.add_argument(
        '-e', '--entities', required=False,
        help='file with entities in .json format (not extracted if not specified)')
    parser.add_argument(
        '-d', '--dialog', required=False,
        help='file with dialog in .json format (not extracted if not specified)')
    parser.add_argument(
        '-c', '--counterexamples', required=False,
        help='file with counterexamples in .json format (not extracted if not specified)')
    parser.add_argument('-v', '--verbose', required=False, help='verbosity', action='store_true')
    parser.add_argument('--log', type=str.upper, default=None, choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    # BUG FIX: removed `workspace_file = json.loads(openFile(args.workspace).read())`
    # - it leaked an open file handle and its result was never used; the file is
    # parsed (once) right below.
    with openFile(args.workspace, 'r') as workspaceFile:
        workspaceJSON = json.load(workspaceFile)

    if args.intents:
        with openFile(args.intents, 'w') as intentsFile:
            intentsFile.write(
                json.dumps(workspaceJSON['intents'], indent=4, ensure_ascii=False))
    if args.entities:
        with openFile(args.entities, 'w') as entitiesFile:
            entitiesFile.write(
                json.dumps(workspaceJSON['entities'], indent=4, ensure_ascii=False))
    if args.dialog:
        with openFile(args.dialog, 'w') as dialogFile:
            dialogFile.write(
                json.dumps(workspaceJSON['dialog_nodes'], indent=4, ensure_ascii=False))
    if args.counterexamples:
        # counterexamples are wrapped as a single pseudo-intent named IRRELEVANT
        with openFile(args.counterexamples, 'w') as counterexamplesFile:
            counterexamplesJSON = []
            counterexampleIntentJSON = {}
            counterexampleIntentJSON['intent'] = "IRRELEVANT"
            counterexampleIntentJSON['examples'] = workspaceJSON['counterexamples']
            counterexamplesJSON.append(counterexampleIntentJSON)
            counterexamplesFile.write(
                json.dumps(counterexamplesJSON, indent=4, ensure_ascii=False))

    logger.verbose("Workspace %s was successfully decomposed", args.workspace)
def main(argv):
    """Concatenate intents/entities/dialog/counterexamples jsons into one WCS workspace json."""
    parser = argparse.ArgumentParser(description='Concatenate intents, entities and dialogue jsons to Watson Conversation Service workspace .json format', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-c', '--common_configFilePaths', help='configuaration file', action='append')
    parser.add_argument('-oc', '--common_output_config', help='output configuration file')
    parser.add_argument('-of', '--common_outputs_directory', required=False, help='directory where the otputs will be stored (outputs is default)')
    parser.add_argument('-oi', '--common_outputs_intents', required=False, help='json file with intents')
    parser.add_argument('-oe', '--common_outputs_entities', required=False, help='json file with entities')
    parser.add_argument('-od', '--common_outputs_dialogs', required=False, help='json file with dialogs')
    parser.add_argument('-ox', '--common_outputs_counterexamples', required=False, help='json file with counterexamples')
    parser.add_argument('-ow', '--common_outputs_workspace', required=False, help='json file with workspace')
    parser.add_argument('-wn', '--conversation_workspace_name', required=False, help='name of this workspace')
    parser.add_argument('-wl', '--conversation_language', required=False, help='language of generated workspace')
    parser.add_argument('-wd', '--conversation_description', required=False, help='description')
    parser.add_argument('-v', '--verbose', required=False, help='verbosity', action='store_true')
    parser.add_argument('--log', type=str.upper, default=None, choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    config = Cfg(args)
    logger.info('STARTING: ' + os.path.basename(__file__))

    # workspace metadata, with defaults for anything not configured
    workspace = {}
    if hasattr(config, 'conversation_workspace_name'):
        workspace['name'] = getattr(config, 'conversation_workspace_name')
    else:
        workspace['name'] = 'default_workspace_name'
    if hasattr(config, 'conversation_language'):
        workspace['language'] = getattr(config, 'conversation_language')
    else:
        workspace['language'] = 'en'
    if hasattr(config, 'conversation_description'):
        workspace['description'] = getattr(config, 'conversation_description')
    else:
        workspace['description'] = ''

    if not hasattr(config, 'common_outputs_directory'):
        logger.info('outputs_directory is not defined!')
        exit(1)

    # process intents
    intentsJSON = {}
    if hasattr(config, 'common_outputs_intents'):
        with openFile(os.path.join(getattr(config, 'common_outputs_directory'), getattr(config, 'common_outputs_intents')), 'r', encoding='utf8') as intentsFile:
            intentsJSON = json.load(intentsFile)
        workspace['intents'] = intentsJSON
    else:
        logger.info('output_intents not specified, omitting intents.')

    # process entities
    entitiesJSON = {}
    if hasattr(config, 'common_outputs_entities'):
        with openFile(os.path.join(getattr(config, 'common_outputs_directory'), getattr(config, 'common_outputs_entities')), 'r', encoding='utf8') as entitiesFile:
            entitiesJSON = json.load(entitiesFile)
        workspace['entities'] = entitiesJSON
    else:
        logger.info('output_entities not specified, omitting entities.')

    # process dialog
    dialogJSON = {}
    if hasattr(config, 'common_outputs_dialogs'):
        with openFile(os.path.join(getattr(config, 'common_outputs_directory'), getattr(config, 'common_outputs_dialogs')), 'r', encoding='utf8') as dialogFile:
            dialogJSON = json.load(dialogFile)
        workspace['dialog_nodes'] = dialogJSON
    else:
        logger.info('outputs_dialogs not specified, omitting dialog.')

    # process counterexamples
    intentExamplesJSON = {}  # counterexamples in "intent format"
    counterexamplesJSON = []  # simple list of counterexamples ("text": "example sentence")
    if hasattr(config, 'common_outputs_counterexamples'):
        with openFile(os.path.join(getattr(config, 'common_outputs_directory'), getattr(config, 'common_outputs_counterexamples')), 'r', encoding='utf8') as counterexamplesFile:
            intentExamplesJSON = json.load(counterexamplesFile)
        # flatten the per-intent example lists into one list
        for intentExampleJSON in intentExamplesJSON:
            counterexamplesJSON.extend(intentExampleJSON['examples'])
        workspace['counterexamples'] = counterexamplesJSON
    else:
        logger.info('outputs_counterexamples not specified, omitting counterexamples.')

    if hasattr(config, 'common_outputs_workspace'):
        with openFile(os.path.join(getattr(config, 'common_outputs_directory'), getattr(config, 'common_outputs_workspace')), 'w', encoding='utf8') as outputFile:
            outputFile.write(json.dumps(workspace, indent=4, ensure_ascii=False))
    else:
        logger.info('output_workspace not specified, generating to console.')
        # BUG FIX: the log message promised console output but nothing was
        # printed; emit the workspace to stdout (matches the sibling
        # intents-to-json script's behavior).
        print(json.dumps(workspace, indent=4, ensure_ascii=False))

    logger.info('FINISHING: ' + os.path.basename(__file__))
def main(argv):
    """Replace %%CODE placeholders in dialog XML text tags with sentences from a resource file."""
    parser = argparse.ArgumentParser(
        description=
        'Replaces codes in text tags with sentences specified in the resource file.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # positional arguments
    parser.add_argument('dialog', help='dialog nodes in xml format.')
    parser.add_argument(
        'resource',
        help=
        'file with translations from codes to sentences (JSON format - https://console-regional.stage1.ng.bluemix.net/docs/services/GlobalizationPipeline/bundles.html#globalizationpipeline_workingwithbundles)'
    )
    # optional arguments
    parser.add_argument(
        '-o', '--output', required=False,
        help='dialog nodes in xml format with all texts replaced by codes.')
    parser.add_argument(
        '-t', '--tagsXPath', required=False, nargs='+',
        default=['//text[not(values)]', '//values'],
        help='Additional XPath of tags whose code should be replaced by text.')
    parser.add_argument('-i', '--inplace', required=False,
                        help='replace input dialog by output.', action='store_true')
    parser.add_argument(
        '-s', '--soft', required=False,
        help='soft name policy - change intents and entities names without error.',
        action='store_true', default="")
    parser.add_argument('-v', '--verbose', required=False, help='verbosity', action='store_true')
    parser.add_argument('--log', type=str.upper, default=None, choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    NAME_POLICY = 'soft' if args.soft else 'hard'

    # load dialog from XML
    dialogXML = LET.parse(args.dialog)

    # find all tags with codes to replace
    tagsToReplace = []
    for tagXPath in args.tagsXPath:
        tagsToReplace.extend(dialogXML.xpath(tagXPath))

    # LOAD RESOURCE FILE (TRANSLATIONS)
    with openFile(args.resource, 'r') as resourceFile:
        translations = json.load(resourceFile)

    # REPLACE ALL CODES WITH TEXTS
    for tagToReplace in tagsToReplace:
        if tagToReplace.text is None:
            continue
        logger.verbose("%s: code '%s'", tagToReplace.tag, tagToReplace.text)
        textParts = tagToReplace.text.split()
        for textPart in textParts:
            if not textPart.startswith('%%'):
                continue  # it is not a code
            code = toCode(NAME_POLICY, textPart[2:])
            # if this tag code is not in translations dictionary -> error
            if code not in translations:
                logger.error("code '%s' not in resource file!", code)
            else:
                # replace code (introduced with double %% and followed by white
                # character or by the end) with its translation.
                # BUG FIX: re.escape() so regex metacharacters in the code cannot
                # corrupt the pattern, and a callable replacement so backslashes
                # or group references in the translation are inserted literally.
                newText = re.sub(r"%%" + re.escape(code) + r"(?=\s|$)",
                                 lambda match: translations[code],
                                 tagToReplace.text)
                tagToReplace.text = newText
                logger.verbose("-> translated as %s", tagToReplace.text)

    # OUTPUT NEW DIALOG
    if args.output is not None:
        with openFile(args.output, 'w') as outputFile:
            outputFile.write(
                LET.tostring(dialogXML, pretty_print=True, encoding='utf8'))
    elif args.inplace:
        with openFile(args.dialog, 'w') as outputFile:
            outputFile.write(
                LET.tostring(dialogXML, pretty_print=True, encoding='utf8'))
    else:
        sys.stdout.write(
            LET.tostring(dialogXML, pretty_print=True, encoding='utf8'))

    logger.verbose('Codes were successfully replaced with texts.')
def main(argv):
    """Convert a conversation-service dialog .json into a dialog .xml file (or stdout)."""
    parser = argparse.ArgumentParser(
        description=
        'Decompose Bluemix conversation service dialog in .json format to dialog files in .xml format',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # positional arguments
    parser.add_argument(
        'dialog', nargs='?', type=argparse.FileType('r'), default=sys.stdin,
        help=
        'file with dialog in .json format, if not specified, dialog is read from standard input'
    )
    # optional arguments
    parser.add_argument(
        '-d', '--dialogDir', required=False,
        help=
        'directory with dialog files. If not specified, output is printed to standard output'
    )
    parser.add_argument('-v', '--verbose', required=False, help='verbosity', action='store_true')
    parser.add_argument('--log', type=str.upper, default=None, choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    # module-level flag read by the conversion helpers
    global STDOUT
    STDOUT = not args.dialogDir

    # XML namespaces (module-level, used when building the output tree)
    global XSI_NAMESPACE
    global XSI
    global NSMAP
    XSI_NAMESPACE = "http://www.w3.org/2001/XMLSchema-instance"
    XSI = "{%s}" % XSI_NAMESPACE
    NSMAP = {"xsi": XSI_NAMESPACE}

    # load dialogs JSON
    # BUG FIX: dropped the 'encoding' keyword - it was ignored on Python 3 and
    # removed in Python 3.9, where json.load(..., encoding=...) raises TypeError.
    dialogsJSON = json.load(args.dialog)

    # convert dialogs
    dialogsXML = convertDialog(dialogsJSON)

    # return dialog XML
    if args.dialogDir:
        # print to file
        dialogFileName = os.path.join(args.dialogDir, "dialog.xml")
        with openFile(dialogFileName, "w") as dialogFile:
            dialogFile.write(
                LET.tostring(dialogsXML, pretty_print=True, encoding='unicode'))
    else:
        # print to standard output
        print(LET.tostring(dialogsXML, pretty_print=True, encoding='unicode'))
def main(argv):
    """Merge per-intent csv files into one NLU .tsv file, optionally tagging entities
    and emitting an intent list and a domain-to-intents map."""
    parser = argparse.ArgumentParser(description='Converts intents files to one file in NLU tsv format', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # positional arguments
    parser.add_argument('intentsDir', help='directory with intents files - all of them will be included in output file')
    parser.add_argument('output', help='file with output intents in NLU data .tsv format')
    # optional arguments
    parser.add_argument('-e', '--entityDir', required=False, help='directory with lists of entities in csv files (file names = entity names), used to tag those entities in output')
    parser.add_argument('-l', '--list', required=False, help='file with list of all intents (if it should be generated)')
    parser.add_argument('-m', '--map', required=False, help='file with domain to intents map (if it should be generated)')
    parser.add_argument('-p', '--prefix', required=False, help='prefix for all generated intents (if it should be added)')
    # CONSISTENCY FIX: '\\L' (literal backslash-L) as in the sibling scripts, not '\L'
    parser.add_argument('-ni', '--common_intents_nameCheck', action='append', nargs=2, help="regex and replacement for intent name check, e.g. '-' '_' for to replace hyphens for underscores or '$special' '\\L' for lowercase")
    parser.add_argument('-ne', '--common_entities_nameCheck', action='append', nargs=2, help="regex and replacement for entity name check, e.g. '-' '_' for to replace hyphens for underscores or '$special' '\\L' for lowercase")
    parser.add_argument('-s', '--soft', required=False, help='soft name policy - change intents and entities names without error.', action='store_true', default="")
    parser.add_argument('-v', '--verbose', required=False, help='verbosity', action='store_true', default="")
    parser.add_argument('--log', type=str.upper, default=None, choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    NAME_POLICY = 'soft' if args.soft else 'hard'
    PREFIX = toIntentName(NAME_POLICY, args.common_intents_nameCheck, args.prefix)

    if args.entityDir:
        entities = getEntities(args.entityDir, args.common_entities_nameCheck, NAME_POLICY)

    with openFile(args.output, 'w') as outputFile:
        # process intents
        intentNames = []
        for intentFileName in os.listdir(args.intentsDir):
            intentName = toIntentName(NAME_POLICY, args.common_intents_nameCheck, PREFIX, os.path.splitext(intentFileName)[0])
            if intentName not in intentNames:
                intentNames.append(intentName)
            # CONSISTENCY FIX: use the project's openFile (uniform encoding
            # handling) instead of the builtin open used everywhere else here
            with openFile(os.path.join(args.intentsDir, intentFileName), "r") as intentFile:
                for line in intentFile.readlines():
                    # remove comments
                    line = line.split('#')[0]
                    if args.entityDir:
                        line = tagEntities(line, entities)
                    if line:
                        outputFile.write("1\t" + intentName + "\t" + line)
    logger.verbose("Intents file '%s' was successfully created", args.output)

    if args.list:
        with openFile(args.list, 'w') as intentsListFile:
            for intentName in intentNames:
                intentsListFile.write(intentName + "\n")
        logger.verbose("Intents list '%s' was successfully created", args.list)

    if args.map:
        # domain name = part of the intent name before the first underscore
        domIntMap = {}
        for intentName in intentNames:
            intentSplit = intentName.split("_", 1)
            domainPart = intentSplit[0]
            intentPart = intentSplit[1]
            if domainPart in domIntMap:
                domIntMap[domainPart] = domIntMap[domainPart] + ";" + intentPart
            else:
                domIntMap[domainPart] = ";" + intentPart
        with openFile(args.map, 'w') as intentsMapFile:
            for domainPart in domIntMap.keys():
                intentsMapFile.write(domainPart + domIntMap[domainPart] + "\n")
        # BUG FIX: this message reported args.output instead of args.map
        logger.verbose("Domain-intent map '%s' was successfully created", args.map)
def main(argv):
    """Compare expected vs. received dialog-flow JSON lines and emit a junit-style XML report.

    Both input files hold one JSON object per line; lines are grouped into
    dialogs by their 'dialog_id'. Each line becomes a <testcase>, each dialog a
    <testsuite>, the whole run a <testsuites> written to args.output. Exits with
    status 1 when the received file has fewer lines than the expected file.
    """
    parser = argparse.ArgumentParser(
        description=
        'Compares all dialog flows from given files and generate xml report',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # positional arguments
    parser.add_argument(
        'expectedFileName',
        help=
        'file with expected JSONs (One at each line at key \'output_message\')'
    )
    parser.add_argument('receivedFileName', help='file with received JSONs')
    # optional arguments
    parser.add_argument('-o', '--output', required=False,
                        help='name of generated xml file', default='test.junit.xml')
    parser.add_argument('-v', '--verbose', required=False, help='verbosity',
                        action='store_true')
    parser.add_argument('--log', type=str.upper, default=None,
                        choices=list(logging._levelToName.values()))
    parser.add_argument('-e', '--exception_if_fail', required=False,
                        help='script throws exception if any test fails',
                        action='store_true')
    args = parser.parse_args(argv)
    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)
    # test name = expected file's basename without its extension
    testName = re.sub(r"\.[^\.]*$", "", os.path.basename(args.expectedFileName))
    # expected JSON
    with openFile(args.expectedFileName, "r") as expectedJsonFile:
        # received JSON
        with openFile(args.receivedFileName, "r") as receivedJsonFile:
            # init whole test
            nDialogs = 0
            nDialogsFailed = 0
            firstFailedLine = None
            timeStart = time.time()
            # XML (whole test)
            outputXml = LET.Element('testsuites')
            # print (whole test)
            logger.info(
                '--------------------------------------------------------------------------------'
            )
            logger.info('-- TEST: ' + testName)
            logger.info(
                '--------------------------------------------------------------------------------'
            )
            # XML (new dialouge)
            dialogXml = LET.Element('testsuite')
            outputXml.append(dialogXml)
            expectedJsonLine = expectedJsonFile.readline()
            receivedJsonLine = receivedJsonFile.readline()
            line = 0
            dialogId = 0
            nTestsinDialog = 0
            nFailuresInDialog = 0
            timeDialogStart = time.time()
            # for every line
            while expectedJsonLine:
                line += 1
                if not receivedJsonLine:  # no more received line
                    logger.error('Missing output JSON in file %s, line %d',
                                 args.receivedFileName, line)
                    sys.exit(1)
                expectedData = json.loads(expectedJsonLine)
                expectedJson = expectedData['output_message']
                receivedJson = json.loads(receivedJsonLine)
                # a change of dialog_id (or the very first line) starts a new dialog
                if (dialogId == 0 or dialogId != expectedData['dialog_id']):
                    if nDialogs > 0:
                        # end previous dialog: log its summary and close its XML element
                        logger.info(
                            '--------------------------------------------------------------------------------'
                        )
                        if nFailuresInDialog:  # at least one failure in this dialog
                            logger.info(
                                '-- TEST RESULT: FAILED, TOTAL FAILURES: %d, LINE OF THE FIRST FAILURE: %d',
                                nFailuresInDialog, firstFailedLine)
                            nDialogsFailed += 1
                        else:
                            logger.info('-- TEST RESULT: OK')
                        logger.info(
                            '--------------------------------------------------------------------------------'
                        )
                        # XML previous dialog
                        dialogXml.attrib['name'] = 'dialog ' + str(dialogId)
                        dialogXml.attrib['tests'] = str(nTestsinDialog)
                        dialogXml.attrib['failures'] = str(nFailuresInDialog)
                        dialogXml.attrib['time'] = str(time.time() - timeDialogStart)
                        # XML (new dialouge)
                        dialogXml = LET.Element('testsuite')
                        outputXml.append(dialogXml)
                    # init new dialog
                    nDialogs += 1
                    nTestsinDialog = 0
                    nFailuresInDialog = 0
                    timeDialogStart = time.time()
                    dialogId = expectedData['dialog_id']
                nTestsinDialog += 1
                timeLineStart = time.time()
                # NOTE(review): checkMessagesTime stays 0 here, so the per-line
                # TIME logged below is always 0.00 - looks like a leftover; confirm.
                checkMessagesTime = 0
                failureData = {'expectedElement': "", 'receivedElement': ""}
                # XML
                lineXml = LET.Element('testcase')
                dialogXml.append(lineXml)
                lineXml.attrib['name'] = 'line ' + str(line)
                # NOTE(review): 'time' is set before areSame() runs, so it does
                # not include the comparison time - confirm intent.
                lineXml.attrib['time'] = str(time.time() - timeLineStart)
                if not areSame(expectedJson, receivedJson, failureData, "root"):
                    # line failure
                    lineXml.append(createLineFailureXML(failureData))
                    nFailuresInDialog += 1  # in this file
                    if firstFailedLine is None:
                        firstFailedLine = line
                    logger.info('EXPECTED OUTPUT: ' +
                                json.dumps(expectedJson, indent=4, ensure_ascii=False))
                    logger.info('RECEIVED OUTPUT: ' +
                                json.dumps(receivedJson, indent=4, ensure_ascii=False))
                    resultText = 'FAILED'
                else:
                    resultText = 'OK'
                logger.info('  LINE: %d, RESULT: %s, TIME: %.2f sec', line,
                            resultText, checkMessagesTime)
                expectedJsonLine = expectedJsonFile.readline()
                receivedJsonLine = receivedJsonFile.readline()
            # end for each line
            # end previous dialog (the last one has no following id change to close it)
            logger.info(
                '--------------------------------------------------------------------------------'
            )
            if nFailuresInDialog:  # at least one failure in this dialog
                logger.info(
                    '-- TEST RESULT: FAILED, TOTAL FAILURES: %d, LINE OF THE FIRST FAILURE: %d',
                    nFailuresInDialog, firstFailedLine)
                nDialogsFailed += 1
            else:
                logger.info('-- TEST RESULT: OK')
            logger.info(
                '--------------------------------------------------------------------------------'
            )
            # XML previous dialog
            dialogXml.attrib['name'] = 'dialog ' + str(dialogId)
            dialogXml.attrib['tests'] = str(nTestsinDialog)
            dialogXml.attrib['failures'] = str(nFailuresInDialog)
            dialogXml.attrib['time'] = str(time.time() - timeDialogStart)
            if receivedJsonLine:
                # extra received lines are reported but do not fail the run
                logger.error('More than expected lines in file %s, line %d',
                             args.receivedFileName, line)
    # close files
    # NOTE(review): this separator is one character shorter than the others in
    # the original - preserved as-is.
    logger.info(
        '-------------------------------------------------------------------------------'
    )
    logger.info(
        '--------------------------------------------------------------------------------'
    )
    if nDialogsFailed:
        logger.info(
            '-- SUMMARY - DIALOUGES: %s, RESULT: FAILED, FAILED DIALOGS: %d',
            nDialogs, nDialogsFailed)
    else:
        logger.info('-- SUMMARY - DIALOUGES: %s, RESULT: OK', nDialogs)
    logger.info(
        '--------------------------------------------------------------------------------'
    )
    outputXml.attrib['name'] = testName
    outputXml.attrib['tests'] = str(nDialogs)
    outputXml.attrib['failures'] = str(nDialogsFailed)
    outputXml.attrib['timestamp'] = '{0:%Y-%b-%d %H:%M:%S}'.format(
        datetime.datetime.now())
    outputXml.attrib['time'] = str(time.time() - timeStart)
    with openFile(args.output, "w") as outputFile:
        outputFile.write(
            LET.tostring(outputXml, pretty_print=True, encoding='unicode'))
    # as last step of our script, we raise an exception in case user required
    # such behavior and any test failure was detected
    if args.exception_if_fail and nDialogsFailed:
        raise NameError('FailedTestDetected')
def main(argv):
    """Convert directories of intent csv files into a single WCS intents .json file."""
    parser = argparse.ArgumentParser(
        description=
        'Converts intent csv files to .json format of Watson Conversation Service',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-c', '--common_configFilePaths', help='configuaration file', action='append')
    parser.add_argument('-oc', '--common_output_config', help='output configuration file')
    parser.add_argument(
        '-ii', '--common_intents',
        help=
        'directory with intent csv files to be processed (all of them will be included in output json)',
        action='append')
    # -gi is functionsally equivalent to -ii
    parser.add_argument(
        '-gi', '--common_generated_intents',
        help=
        'directory with generated intent csv files to be processed (all of them will be included in output json)',
        action='append')
    parser.add_argument(
        '-od', '--common_outputs_directory', required=False,
        help='directory where the otputs will be stored (outputs is default)')
    parser.add_argument('-oi', '--common_outputs_intents', help='file with output json with all the intents')
    parser.add_argument(
        '-ni', '--common_intents_nameCheck', action='append', nargs=2,
        help=
        "regex and replacement for intent name check, e.g. '-' '_' for to replace hyphens for underscores or '$special' '\\L' for lowercase"
    )
    parser.add_argument(
        '-s', '--soft', required=False,
        help='soft name policy - change intents and entities names without error.',
        action='store_true', default="")
    parser.add_argument('-v', '--verbose', required=False, help='verbosity', action='store_true')
    parser.add_argument('--log', type=str.upper, default=None, choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    config = Cfg(args)
    NAME_POLICY = 'soft' if args.soft else 'hard'
    logger.info('STARTING: ' + os.path.basename(__file__))

    if not hasattr(config, 'common_intents'):
        logger.info('intents parameter is not defined.')
    if not hasattr(config, 'common_generated_intents'):
        logger.info('generated_intents parameter is not defined, ignoring')
    if not hasattr(config, 'common_outputs_intents'):
        logger.info(
            'Outputs_intents parameter is not defined, output will be generated to console.'
        )

    intents = []
    # BUG FIX: the original logged the missing-parameter message above but then
    # crashed with AttributeError here when 'common_intents' was undefined;
    # default to an empty path list instead.
    pathList = list(getattr(config, 'common_intents', []))
    if hasattr(config, 'common_generated_intents'):
        pathList = pathList + getattr(config, 'common_generated_intents')
    filesAtPath = getFilesAtPath(pathList)

    # one intent per file; intent name derives from the (checked) file basename
    for intentFileName in sorted(filesAtPath):
        intentName = toIntentName(
            NAME_POLICY, args.common_intents_nameCheck,
            os.path.splitext(os.path.basename(intentFileName))[0])
        with openFile(intentFileName, 'r', encoding='utf8') as intentFile:
            intent = {}
            intent['intent'] = intentName
            examples = []
            for line in intentFile:
                # remove comments
                line = line.split('#')[0]
                line = line.rstrip().lower()
                if line:
                    example = processExample(line, intentName, examples)
                    # adding to the list
                    if example:
                        examples.append(example)
            intent['examples'] = examples
            intents.append(intent)

    if hasattr(config, 'common_outputs_directory') and hasattr(
            config, 'common_outputs_intents'):
        if not os.path.exists(getattr(config, 'common_outputs_directory')):
            os.makedirs(getattr(config, 'common_outputs_directory'))
            logger.info('Created new output directory ' +
                        getattr(config, 'common_outputs_directory'))
        with codecs.open(os.path.join(
                getattr(config, 'common_outputs_directory'),
                getattr(config, 'common_outputs_intents')),
                         'w', encoding='utf8') as outputFile:
            outputFile.write(json.dumps(intents, indent=4, ensure_ascii=False))
    else:
        print(json.dumps(intents, indent=4, ensure_ascii=False))

    logger.info('FINISHING: ' + os.path.basename(__file__))
def main(argv):
    """Build domain-entity and intent-entity maps (and an entity list) from NLU tsv data."""
    parser = argparse.ArgumentParser(
        description=
        'convert NLU tsv files into domain-entity and intent-entity mappings.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # positional arguments
    parser.add_argument(
        'entitiesDir',
        help=
        'directory with entities files - all of them will be included in output list if specified'
    )
    # optional arguments
    parser.add_argument(
        '-is', '--sentences',
        help=
        '.tsv file in NLU format with tagged entities in example sentences in third column and intent names in second column'
    )
    parser.add_argument(
        '-l', '--list', required=False,
        help='output file with list of all entities (if it should be generated)')
    parser.add_argument(
        '-d', '--domEnt', required=False,
        help=
        'output file with domain-entity mapping (if it should be generated)')
    parser.add_argument(
        '-i', '--intEnt', required=False,
        help=
        'output file with intent-entity mapping (if it should be generated)')
    # CONSISTENCY FIX: '\\L' (literal backslash-L) as in the sibling scripts, not '\L'
    parser.add_argument(
        '-ni', '--common_intents_nameCheck', action='append', nargs=2,
        help=
        "regex and replacement for intent name check, e.g. '-' '_' for to replace hyphens for underscores or '$special' '\\L' for lowercase"
    )
    parser.add_argument(
        '-ne', '--common_entities_nameCheck', action='append', nargs=2,
        help=
        "regex and replacement for entity name check, e.g. '-' '_' for to replace hyphens for underscores or '$special' '\\L' for lowercase"
    )
    parser.add_argument(
        '-s', '--soft', required=False,
        help=
        'soft name policy - change intents and entities names without error.',
        action='store_true', default="")
    parser.add_argument('-v', '--verbose', required=False, help='verbosity', action='store_true')
    parser.add_argument('--log', type=str.upper, default=None, choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    NAME_POLICY = 'soft' if args.soft else 'hard'

    domEntMap = defaultdict(dict)
    intEntMap = defaultdict(dict)

    if args.sentences:
        with openFile(args.sentences, "r") as sentencesFile:
            for line in sentencesFile.readlines():
                line = line.rstrip()
                if not line:
                    continue
                intentName = toIntentName(NAME_POLICY,
                                          args.common_intents_nameCheck,
                                          line.split("\t")[1])
                intentText = line.split("\t")[2]
                # domain = part of the intent name before the first underscore
                intentSplit = intentName.split("_", 1)
                domainPart = intentSplit[0]
                intentPart = intentSplit[1]
                # collect every <entity>value</entity> tagged in the sentence
                # (raw string; the original non-raw '<\/' relied on an invalid escape)
                for entity in re.findall(r'<([^>]+)>[^<]+</[^>]+>', intentText):
                    domEntMap[domainPart][entity] = 1
                    intEntMap[intentPart][entity] = 1

    if args.domEnt:
        with openFile(args.domEnt, 'w') as domEntFile:
            for domain in sorted(domEntMap.keys()):
                entities = "NONE;"
                for entity in sorted(domEntMap[domain].keys()):
                    entities += entity + ";"
                domEntFile.write(domain + ";" + entities + "\n")
        logger.debug("Domain-entity map '%s' was successfully created", args.domEnt)

    # BUG FIX: this block was gated on args.domEnt (copy-paste), so the
    # intent-entity map was written/skipped based on the wrong option and
    # crashed when only -d was given (openFile(None)).
    if args.intEnt:
        with openFile(args.intEnt, 'w') as intEntFile:
            for intent in sorted(intEntMap.keys()):
                entities = "NONE;"
                for entity in sorted(intEntMap[intent].keys()):
                    entities += entity + ";"
                intEntFile.write(intent + ";" + entities + "\n")
        # BUG FIX: message reported args.domEnt instead of args.intEnt
        logger.debug("Intent-entity map '%s' was successfully created", args.intEnt)

    if args.list:
        with openFile(args.list, 'w') as listFile:
            # process entities: unique checked names from the entity file basenames
            entityNames = []
            for entityFileName in os.listdir(args.entitiesDir):
                entityName = toEntityName(NAME_POLICY,
                                          args.common_entities_nameCheck,
                                          os.path.splitext(entityFileName)[0])
                if entityName not in entityNames:
                    entityNames.append(entityName)
            for entityName in entityNames:
                listFile.write(entityName + ";\n")
        logger.debug("Entities list '%s' was successfully created", args.list)
def main(argv):
    """Test all dialog flows from the input file against a deployed workspace.

    Waits until the target workspace has finished training, then posts each
    input line (a JSON object with keys 'input_message' and 'dialog_id') to
    the workspace message endpoint and writes each response as one JSON line
    to the output file.  When consecutive lines share the same dialog_id,
    the context returned by the previous turn is carried over.
    """
    parser = argparse.ArgumentParser(
        description='Tests all dialog flows from given file and save received responses to output file',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # positional arguments
    parser.add_argument(
        'inputFileName',
        help='file with test jsons to be sent to conversation service. (One at each line at key \'input\'.)')
    parser.add_argument(
        'outputFileName',
        help='file where to store received data from conversation service. (One response at each line.)')
    # optional arguments
    parser.add_argument('-c', '--common_configFilePaths',
                        help='configuration file', action='append')
    parser.add_argument('-v', '--verbose', required=False,
                        help='verbosity', action='store_true')
    parser.add_argument('--log', type=str.upper, default=None,
                        choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    config = Cfg(args)
    workspacesUrl = getRequiredParameter(config, 'conversation_url')
    version = getRequiredParameter(config, 'conversation_version')
    username = getRequiredParameter(config, 'conversation_username')
    password = getRequiredParameter(config, 'conversation_password')

    workspaces = filterWorkspaces(
        config, getWorkspaces(workspacesUrl, version, username, password))
    if len(workspaces) > 1:
        # more than one workspace with the same name -> ambiguous target
        logger.error('There are more than one workspace with this name, do not know which one to test.')
        exit(1)
    elif len(workspaces) == 1:
        workspaceId = workspaces[0]['workspace_id']
    else:
        logger.error('There is no workspace with this name, cannot test it.')
        exit(1)

    # wait until workspace is done with training
    checkWorkspaceTime = 0
    requestUrl = workspacesUrl + '/' + workspaceId + '?version=' + version
    while True:
        logger.verbose("requestUrl: %s", requestUrl)
        response = requests.get(requestUrl, auth=(username, password))
        if response.status_code == 200:
            responseJson = response.json()
            if errorsInResponse(responseJson):
                sys.exit(1)
            logger.verbose("response: %s", responseJson)
            status = responseJson['status']
            logger.info('WCS WORKSPACE STATUS: %s', status)
            if status == 'Available':
                break
            else:
                # sleep some time and check the status again
                if checkWorkspaceTime > CHECK_WORKSPACE_TIME_MAX:
                    # BUGFIX: the message previously reported
                    # CHECK_MESSAGES_TIME_MAX although the comparison above
                    # uses CHECK_WORKSPACE_TIME_MAX.
                    logger.error(
                        'Workspace have not become available before timeout, timeout: %d, response: %s',
                        CHECK_WORKSPACE_TIME_MAX,
                        json.dumps(responseJson, indent=4, sort_keys=True,
                                   ensure_ascii=False).encode('utf8'))
                    sys.exit(1)
                time.sleep(CHECK_WORKSPACE_TIME_DELAY)
                checkWorkspaceTime = checkWorkspaceTime + CHECK_WORKSPACE_TIME_DELAY
        elif response.status_code == 400:
            logger.error('WA not available.')
            sys.exit(1)
        else:
            logger.error('Unknown status code:%s.', response.status_code)
            # BUGFIX: this branch previously fell through and re-polled the
            # service in a tight loop with no delay; fail fast like the
            # 400 branch (and like the unknown-status branch below) does.
            sys.exit(1)

    # run tests
    url = workspacesUrl + '/' + workspaceId + '/message?version=' + version
    receivedOutputJson = []
    try:
        with openFile(args.inputFileName, "r") as inputFile:
            try:
                with openFile(args.outputFileName, "w") as outputFile:
                    first = True
                    dialogId = ""
                    # for every input line
                    for inputLine in inputFile:
                        loadedJson = json.loads(inputLine)
                        inputJson = loadedJson['input_message']  # input json for tests
                        if dialogId and dialogId == loadedJson['dialog_id']:
                            # same dialog as the previous turn -> reuse its context
                            if receivedOutputJson and 'context' in receivedOutputJson \
                                    and receivedOutputJson['context']:
                                inputJson['context'] = receivedOutputJson['context']  # use context from last dialog turn
                        dialogId = loadedJson['dialog_id']
                        logger.verbose("url: %s", url)
                        response = requests.post(
                            url,
                            auth=(username, password),
                            headers={'Content-Type': 'application/json'},
                            data=json.dumps(inputJson, indent=4,
                                            ensure_ascii=False).encode('utf8'))
                        if response.status_code == 200:
                            receivedOutputJson = response.json()
                            if not first:
                                outputFile.write("\n")
                            outputFile.write(
                                json.dumps(receivedOutputJson, ensure_ascii=False))
                            first = False
                        elif response.status_code == 400:
                            logger.error('Error while testing.')
                            errorsInResponse(response.json())
                            sys.exit(1)
                        else:
                            logger.error('Unknown status code:%s.', response.status_code)
                            sys.exit(1)
            except IOError:
                logger.error('Cannot open test output file %s', args.outputFileName)
                sys.exit(1)
    except IOError:
        logger.error('Cannot open test input file %s', args.inputFileName)
        sys.exit(1)

    logger.info('FINISHING: ' + os.path.basename(__file__))
def main(argv):
    """Replace texts of selected dialog XML tags with generated codes.

    Parses the dialog XML, replaces the text of every tag matched by the
    given XPaths with a code '%%<PREFIX><n>', and writes a JSON resource
    file mapping each code back to its original sentence.  Existing
    resource files can be joined (reused) or appended to.
    """
    parser = argparse.ArgumentParser(
        description='Replaces sentences in text tags with codes and creates resource file with translations from codes to sentences.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # positional arguments
    parser.add_argument('dialog', help='dialog nodes in xml format.')
    parser.add_argument(
        'resource',
        help='file with generated translations from codes to sentences (JSON format - https://console-regional.stage1.ng.bluemix.net/docs/services/GlobalizationPipeline/bundles.html#globalizationpipeline_workingwithbundles)')
    # optional arguments
    parser.add_argument(
        '-o', '--output', required=False,
        help='dialog nodes in xml format with all texts replaced by codes.')
    parser.add_argument(
        '-p', '--prefix', required=False, default='TXT',
        help='the prefix for generated codes (alphanumeric uppercase only).')
    parser.add_argument(
        '-t', '--tagsXPath', required=False, nargs='+',
        default=['//text[not(values)]', '//values'],
        help='XPath of tags whose text should be replaced by code.')
    parser.add_argument(
        '-a', '--append', required=False,
        help='append translations to the existing resource file as new ones. (Duplicate codes will be overwritten by new ones.)',
        action='store_true')
    parser.add_argument(
        '-j', '--join', required=False,
        help='use translations from the existing resource file and append new ones.',
        action='store_true')
    parser.add_argument('-i', '--inplace', required=False,
                        help='replace input dialog by output.',
                        action='store_true')
    parser.add_argument(
        '-s', '--soft', required=False,
        help='soft name policy - change intents and entities names without error.',
        action='store_true', default="")
    parser.add_argument('-v', '--verbose', required=False,
                        help='verbosity', action='store_true')
    parser.add_argument('--log', type=str.upper, default=None,
                        choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    NAME_POLICY = 'soft' if args.soft else 'hard'
    PREFIX = toCode(NAME_POLICY, args.prefix)

    # load dialog from XML
    # TODO might need UTF-8
    dialogsXML = LET.parse(args.dialog)

    # find all tags with texts to replace
    tagsToReplace = []
    for tagXPath in args.tagsXPath:
        tagsToReplace.extend(dialogsXML.xpath(tagXPath))

    # LOAD EXISTING RESOURCE FILE (TRANSLATIONS)
    if args.join:
        with openFile(args.resource, 'r') as resourceFile:
            translations = json.load(resourceFile)
    else:
        translations = {}

    counter = 0
    # REPLACE ALL TEXTS WITH CODES
    for tagToReplace in tagsToReplace:
        text = tagToReplace.text
        logger.verbose("%s: %s", tagToReplace.tag, tagToReplace.text)
        # if this tag text is not in the translations dictionary yet
        # (it has no code), create a new code for it and add it
        if text not in translations.values():
            translations[toCode(NAME_POLICY, PREFIX + str(counter))] = text
            counter += 1
        # replace tag text by its code
        # BUGFIX: 'translations.keys()[translations.values().index(text)]'
        # raises TypeError on Python 3 because dict views are not
        # indexable; scan the items for the first code mapped to this
        # text instead (same first-match semantics).
        code = next(c for c, t in translations.items() if t == text)
        tagToReplace.text = '%%' + code
        logger.verbose("-> encoded as %s", code)

    # OUTPUT NEW DIALOG
    # NOTE(review): LET.tostring(..., encoding='utf8') returns bytes;
    # assumes openFile/stdout accept bytes here — confirm against openFile.
    if args.output is not None:
        with openFile(args.output, 'w') as outputFile:
            outputFile.write(
                LET.tostring(dialogsXML, pretty_print=True, encoding='utf8'))
    elif args.inplace:
        with openFile(args.dialog, 'w') as outputFile:
            outputFile.write(
                LET.tostring(dialogsXML, pretty_print=True, encoding='utf8'))
    else:
        sys.stdout.write(
            LET.tostring(dialogsXML, pretty_print=True, encoding='utf8'))

    # EXTEND RESOURCE FILE
    if args.append:
        with openFile(args.resource, 'r') as resourceFile:
            resourceJSON = json.load(resourceFile)
        # add new translations to existing ones
        # (Duplicate codes will be overwritten by new ones.)
        resourceJSON.update(translations)
        translations = resourceJSON

    # CREATE RESOURCE FILE
    with openFile(args.resource, 'w') as resourceFile:
        resourceFile.write(
            json.dumps(translations, indent=4, ensure_ascii=False))

    logger.verbose('Texts were successfully replaced with codes.')
def __init__(self, args: argparse.Namespace) -> None:
    """Collect configuration parameters from config files and CLI args.

    Every option of every section in each file listed in
    ``args.common_configFilePaths`` is stored on ``self`` under the unique
    attribute name produced by ``self.toOptionName(section, option)``.
    Comma-separated values — and any option listed in ``frameworkAppend`` —
    are stored as lists; other values are stored as plain strings.
    Command-line arguments are applied last and therefore take precedence
    over values read from the config files.
    """
    logger.info('cfg.__init__')
    self.config = {}
    # Sections (names can not contain '_')
    commonSection = 'common'
    conversationSection = 'conversation'
    cloudfunctionsSection = 'cloudfunctions'
    workspaceSection = 'workspace'
    weatherSection = 'weatherservice'
    replaceSection = 'replace'
    versionSection = 'version'
    contextSection = 'context'
    # List of attributes of framework section to be appended rather then
    # ovewrriden (if the same parameter is defined in more config files)
    frameworkAppend = [
        'xls', 'intents', 'entities', 'dialogs', 'functions',
        'generated_intents', 'generated_entities', 'generated_dialog'
    ]
    if args.common_configFilePaths:
        # go over all the config files and collect all parameters
        for common_configFilePath in args.common_configFilePaths:
            try:
                logger.info("Processing config file:" + common_configFilePath)
                configPart = configparser.ConfigParser()
                with openFile(common_configFilePath) as configFile:
                    configPart.read_file(configFile)
                # Collect all attributes from all sections
                for section in configPart.sections():
                    options = configPart.options(section)
                    for option in options:
                        optionUniqueName = self.toOptionName(section, option)
                        # value can be list (comma-separated in the file)
                        newValueList = configPart.get(section, option).split(',')
                        if (len(newValueList) < 2) and not (
                                option in frameworkAppend
                        ):  # only single value not in framework append
                            newValue = newValueList[0]
                        else:  # multiple values
                            newValue = newValueList
                        if hasattr(self, optionUniqueName):
                            # a later config file (or repeated option) defines
                            # an attribute that already exists
                            warning = "WARNING: '" + optionUniqueName + " already exists. "
                            if (section == commonSection) and (
                                    option in frameworkAppend):  # appended
                                # NOTE(review): despite the "Appending" log
                                # message, this setattr replaces the old value
                                # rather than concatenating the two lists —
                                # confirm this is intended.
                                logger.debug(
                                    warning + "Appending '[" + ' '.join(newValue) +
                                    "]' to [" +
                                    ' '.join(getattr(self, optionUniqueName)) + "]")
                                setattr(self, optionUniqueName, newValue)
                            else:  # replace
                                oldValue = getattr(self, optionUniqueName)
                                # NOTE(review): assumes oldValue is a string —
                                # string concatenation here would raise if a
                                # list value is later replaced by a scalar.
                                logger.debug(warning + "Replacing '" + oldValue +
                                             "' by '[" + ' '.join(newValue) + "]'")
                                setattr(self, optionUniqueName, newValue)
                        else:
                            setattr(self, optionUniqueName, newValue)
            except IOError:
                logger.error('Cannot load config file %s', common_configFilePath)
                sys.exit(1)
    # Set command line parameters
    # command line parameters are having precedence, therefore they are set the last
    for arg in vars(args):
        if hasattr(args, arg) and getattr(
                args, arg):  # attribute is present and not empty
            if hasattr(self, arg):
                logger.warning(
                    "Overwriting config file parameter '%s' with value '%s' from command line argumets.",
                    arg, getattr(args, arg))
            setattr(self, arg, getattr(args, arg))
def main(argv):
    """Decompose a workspace entities JSON file into one .csv file per entity.

    System entities ('sys-*') are collected into a single shared
    'system_entities.csv'; every other entity gets its own csv file with
    one row per value (value followed by its synonyms, or '~value'
    followed by its patterns for pattern entities).
    """
    parser = argparse.ArgumentParser(
        description='Decompose Bluemix conversation service entities in .json format to entity files in .csv format',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # positional arguments
    parser.add_argument('entities', help='file with entities in .json format')
    parser.add_argument('entitiesDir', help='directory with entities files')
    # optional arguments
    parser.add_argument(
        '-ne', '--common_entities_nameCheck', action='append', nargs=2,
        help="regex and replacement for entity name check, e.g. '-' '_' for to replace hyphens for underscores or '$special' '\\L' for lowercase")
    parser.add_argument(
        '-s', '--soft', required=False,
        help='soft name policy - change intents and entities names without error.',
        action='store_true', default="")
    parser.add_argument('-v', '--verbose', required=False,
                        help='verbosity', action='store_true')
    parser.add_argument('--log', type=str.upper, default=None,
                        choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)
    NAME_POLICY = 'soft' if args.soft else 'hard'

    with openFile(args.entities, 'r') as entitiesFile:
        allEntities = json.load(entitiesFile)

    sysEntityNames = []
    # process all entities
    for entity in allEntities:
        # system entity: remember its name for the shared file
        if entity["entity"].strip().lower().startswith("sys-"):
            # issue #82: make entity name check parameter-dependent
            #sysEntityNames.append(toEntityName(NAME_POLICY, entity["entity"]))
            sysEntityNames.append(entity["entity"])
            continue
        # normal entity: build one csv row per value
        rows = []
        for valueItem in entity["values"]:
            row = []
            # synonyms entities
            if 'synonyms' in valueItem:
                row.append(valueItem["value"].strip())
                # empty-string synonyms are ignored when exported from WA json
                row.extend(s.strip() for s in valueItem['synonyms'] if s.strip() != '')
            # for pattern entities add tilde to the value
            if 'patterns' in valueItem:
                row.append("~" + valueItem["value"].strip())
                row.extend(p.strip() for p in valueItem["patterns"])
            rows.append(row)
        # new entity file
        safeName = toEntityName(NAME_POLICY, args.common_entities_nameCheck,
                                entity["entity"])
        entityFileName = os.path.join(args.entitiesDir, safeName) + ".csv"
        with openFile(entityFileName, "w") as entityFile:
            entityFile.writelines(';'.join(row) + "\n" for row in rows)

    # write file with system entities
    with openFile(os.path.join(args.entitiesDir, "system_entities.csv"),
                  'w') as systemEntitiesFile:
        systemEntitiesFile.write(
            "# a special list for the system entities - only one value at each line\n")
        for name in sysEntityNames:
            systemEntitiesFile.write(name + "\n")

    logger.verbose("Entities from file '%s' were successfully extracted\n",
                   args.entities)
def main(argv):
    """Deploy a workspace JSON file to the Watson Conversation service.

    Creates a new workspace, or overwrites an existing one when a matching
    workspace is found (via 'conversation_workspace_id' or a unique
    workspace name).  Optionally saves the resulting configuration and
    generates an HTML redirect page pointing at a test client for the
    deployed workspace.
    """
    parser = argparse.ArgumentParser(
        description="Deploys a workspace in json format to the Watson Conversation Service. "
                    "If there is no 'conversation_workspace_id' provided and the "
                    "'conversation_workspace_name_unique' is set to 'true', it uploads "
                    "a workspace to the place specified by the 'conversation_workspace_name'",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-of', '--common_outputs_directory', required=False,
                        help='directory where the outputs are stored')
    parser.add_argument('-ow', '--common_outputs_workspace', required=False,
                        help='name of the json file with workspace')
    parser.add_argument('-c', '--common_configFilePaths',
                        help='configuration file', action='append')
    parser.add_argument('-oc', '--common_output_config',
                        help='output configuration file')
    parser.add_argument('-cu', '--conversation_url', required=False,
                        help='url of the conversation service API')
    parser.add_argument('-cv', '--conversation_version', required=False,
                        help='version of the conversation service API')
    parser.add_argument('-cn', '--conversation_username', required=False,
                        help='username of the conversation service instance')
    parser.add_argument('-cp', '--conversation_password', required=False,
                        help='password of the conversation service instance')
    parser.add_argument(
        '-cid', '--conversation_workspace_id', required=False,
        help='workspace_id of the application. If a workspace id is provided, previous workspace content is overwritten, otherwise a new workspace is created ')
    parser.add_argument('-wn', '--conversation_workspace_name', required=False,
                        help='name of the workspace')
    parser.add_argument(
        '-wnu', '--conversation_workspace_name_unique', required=False,
        help='true if the workspace name should be unique across specified assistant')
    parser.add_argument('-v', '--verbose', required=False,
                        help='verbosity', action='store_true')
    parser.add_argument('--log', type=str.upper, default=None,
                        choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)
    config = Cfg(args)
    logger.info('STARTING: ' + os.path.basename(__file__))

    # workspace info
    try:
        workspaceFilePath = os.path.join(
            getRequiredParameter(config, 'common_outputs_directory'),
            getRequiredParameter(config, 'common_outputs_workspace'))
        with openFile(workspaceFilePath, 'r') as workspaceFile:
            workspace = json.load(workspaceFile)
    except IOError:
        logger.error('Cannot load workspace file %s', workspaceFilePath)
        sys.exit(1)

    # workspace name (optional override of the name inside the file)
    workspaceName = getOptionalParameter(config, 'conversation_workspace_name')
    if workspaceName:
        workspace['name'] = workspaceName

    # workspace language (optional override)
    workspaceLanguage = getOptionalParameter(config, 'conversation_language')
    if workspaceLanguage:
        workspace['language'] = workspaceLanguage

    # credentials (required)
    username = getRequiredParameter(config, 'conversation_username')
    password = getRequiredParameter(config, 'conversation_password')
    # url (required)
    workspacesUrl = getRequiredParameter(config, 'conversation_url')
    # version (required)
    version = getRequiredParameter(config, 'conversation_version')

    # workspace id: resolve whether we update an existing workspace or create one
    workspaces = filterWorkspaces(
        config, getWorkspaces(workspacesUrl, version, username, password))
    if len(workspaces) > 1:
        # more than one workspace with the same name -> ambiguous target
        logger.error('There are more than one workspace with this name, do not know which one to update.')
        # consistency: use sys.exit like the rest of this function
        sys.exit(1)
    elif len(workspaces) == 1:
        workspaceId = workspaces[0]['workspace_id']
        logger.info("Updating existing workspace.")
    else:
        workspaceId = ""
        logger.info("Creating new workspace.")

    requestUrl = workspacesUrl + '/' + workspaceId + '?version=' + version

    # create/update workspace
    response = requests.post(requestUrl,
                             auth=(username, password),
                             headers={'Content-Type': 'application/json'},
                             data=json.dumps(workspace, indent=4))
    responseJson = response.json()
    logger.verbose("response: %s", responseJson)
    if not errorsInResponse(responseJson):
        logger.info('Workspace successfully uploaded.')
    else:
        logger.error('Cannot upload workspace.')
        sys.exit(1)

    if not getOptionalParameter(config, 'conversation_workspace_id'):
        setattr(config, 'conversation_workspace_id',
                responseJson['workspace_id'])
    logger.info('WCS WORKSPACE_ID: %s', responseJson['workspace_id'])

    outputConfigFile = getOptionalParameter(config, 'common_output_config')
    if outputConfigFile:
        config.saveConfiguration(outputConfigFile)

    clientName = getOptionalParameter(config, 'context_client_name')
    if clientName:
        # Assembling uri of the client
        clientv2URL = 'https://clientv2-latest.mybluemix.net/#defaultMinMode=true'
        clientv2URL += '&prefered_workspace_id=' + getattr(
            config, 'conversation_workspace_id')
        clientv2URL += '&prefered_workspace_name=' + getattr(
            config, 'conversation_workspace_name')
        clientv2URL += '&shared_examples_service=&url=http://zito.mybluemix.net'
        # NOTE(review): the credential spans below were redacted ('******')
        # in the source; reconstructed as reading from the config — confirm.
        clientv2URL += '&username=' + getattr(config, 'conversation_username')
        clientv2URL += '&custom_ui.title=' + getattr(
            config, 'conversation_workspace_name')
        clientv2URL += '&password=' + getattr(config, 'conversation_password')
        clientv2URL += '&custom_ui.machine_img='
        clientv2URL += '&custom_ui.user_img='
        clientv2URL += '&context.user_name=' + getattr(config,
                                                       'context_client_name')
        # BUGFIX: unicode() does not exist in Python 3; str() produces the
        # same formatted timestamp.
        clientv2URL += '&context.link_build_date=' + str(
            datetime.datetime.now().strftime("%y-%m-%d-%H-%M"))
        clientv2URL += '&prefered_tts=none'
        clientv2URL += '&bluemix_tts.username=xx'
        clientv2URL += '&bluemix_tts.password=xx'
        clientv2URL += '&compact_mode=true'
        clientv2URL += '&compact_switch_enabled=true'
        # BUGFIX: the leading '&' was missing, fusing this flag onto the
        # previous query parameter's value.
        clientv2URL += '&developer_switch_enabled=false'
        logger.info('clientv2URL=%s', clientv2URL)

        # create file with automatic redirect
        clientFileName = getOptionalParameter(config, 'common_outputs_client')
        if clientFileName:
            clientFilePath = os.path.join(
                getRequiredParameter(config, 'common_outputs_directory'),
                clientFileName)
            try:
                # the 'with' block closes the file; the explicit close()
                # the original called inside it was redundant
                with openFile(clientFilePath, "w") as clientFile:
                    clientFile.write(
                        '<meta http-equiv="refresh" content="0; url=' +
                        clientv2URL + '" />')
                    clientFile.write('<p><a href="' + clientv2URL +
                                     '">Redirect</a></p>')
            except IOError:
                logger.error('Cannot write to %s', clientFilePath)
                sys.exit(1)

    logger.info('FINISHING: ' + os.path.basename(__file__))