def test_missingParametersCombinationString(self):
     """A single string-named parameter combination absent from the config is reported as critical."""
     namespace = argparse.Namespace(common_configFilePaths=None)
     configuration = Cfg(namespace)
     expectedMessage = "CRITICAL Combination 0: 'parameterName'"
     self.t_exitCodeAndLogMessage(
         1, expectedMessage,
         [configuration, 'parameterName'])
 def test_noParametersCombination(self):
     """Calling the retrieval without any combination at all should fail critically."""
     namespace = argparse.Namespace(common_configFilePaths=None)
     configuration = Cfg(namespace)
     expectedMessage = 'CRITICAL no parameters combination provided to be retrieved'
     self.t_exitCodeAndLogMessage(1, expectedMessage, [configuration])
 def test_missingParametersCombinationMix(self):
     """Two combinations (a two-parameter list and a string) both absent from config are reported."""
     namespace = argparse.Namespace(common_configFilePaths=None)
     configuration = Cfg(namespace)
     combinations = [['parameterNameA', 'parameterNameB'], 'parameterNameC']
     self.t_exitCodeAndLogMessage(
         1, "CRITICAL Combination 1: 'parameterNameC'",
         [configuration] + combinations)
 def test_parametersCombinationString(self):
     """A single string-named combination present in config is returned as a dict."""
     namespace = argparse.Namespace(common_configFilePaths=None,
                                    parameterName='parameterNameValue')
     configuration = Cfg(namespace)
     result = wawCommons.getParametersCombination(configuration, 'parameterName')
     assert result == {'parameterName': 'parameterNameValue'}
 def test_parametersCombinationMixC(self):
     """When only the string-named combination is set, it alone is returned (variant C)."""
     namespace = argparse.Namespace(common_configFilePaths=None,
                                    parameterNameC='parameterNameCValue')
     configuration = Cfg(namespace)
     result = wawCommons.getParametersCombination(
         configuration, 'parameterNameC', ['parameterNameA', 'parameterNameB'])
     assert result == {'parameterNameC': 'parameterNameCValue'}
 def test_badParametersCombinationType(self):
     """An unsupported combination type (here a dict) should trigger a critical error."""
     namespace = argparse.Namespace(common_configFilePaths=None)
     configuration = Cfg(namespace)
     expectedMessage = ("CRITICAL arguments could be only parameter names or "
                        "array of parameters names, arg type 'dict'")
     self.t_exitCodeAndLogMessage(1, expectedMessage, [configuration, {}])
 def test_missingParametersCombinationList(self):
     """A list-typed combination fully absent from config is reported as critical."""
     namespace = argparse.Namespace(common_configFilePaths=None)
     configuration = Cfg(namespace)
     expectedMessage = "CRITICAL Combination 0: '['parameterNameA', 'parameterNameB']'"
     self.t_exitCodeAndLogMessage(
         1, expectedMessage,
         [configuration, ['parameterNameA', 'parameterNameB']])
 def test_missingParametersCombinationListPart(self):
     """A partially set list combination reports only the still-missing parameters."""
     namespace = argparse.Namespace(common_configFilePaths=None,
                                    parameterNameA='parameterNameAValue')
     configuration = Cfg(namespace)
     self.t_exitCodeAndLogMessage(
         1, "missing parameters: '['parameterNameB']'",
         [configuration, ['parameterNameA', 'parameterNameB']])
def main(argv):
    """Insert a JSON fragment into a workspace JSON file, in place.

    Reads the workspace JSON and the JSON fragment to include (paths come from
    the config), replaces the value stored under ``includejsondata_targetkey``
    inside the workspace's dialog nodes, and writes the workspace back to the
    same file.

    :param argv: command-line arguments (list of str), parsed by argparse
    """
    parser = argparse.ArgumentParser(description='This script takes a workspace JSON as one parameter and another JSON (i.e., piece of context data structure) and put the second one into desired place in the first one. This happens inplace.', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # arguments
    parser.add_argument('-c', '--common_configFilePaths', help='configuaration file', action='append')
    parser.add_argument('-w','--common_outputs_workspace', required=False, help='filename of the original workspace JSON')
    parser.add_argument('-d','--common_outputs_directory', required=False, help='directory, where the workspace is located')
    parser.add_argument('-j','--includejsondata_jsonfile', required=False, help='file with JSON you want to include')
    parser.add_argument('-t','--includejsondata_targetkey', required=False, help='the key, where you want to add your JSON, i.e., "data_structure" : null; where you want to replace null, you would put "data_strucute" as this parameter')
    # optional arguments
    parser.add_argument('-v','--verbose', required=False, help='verbosity', action='store_true')
    parser.add_argument('--log', type=str.upper, default=None, choices=list(logging._levelToName.values()))
    #init the parameters
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    config = Cfg(args)

    logger.info('STARTING: ' + os.path.basename(__file__))

    # get required parameters
    # workspace: build the path once (it is needed for reading, error
    # reporting and writing back)
    workspacePath = os.path.join(getRequiredParameter(config, 'common_outputs_directory'), getRequiredParameter(config, 'common_outputs_workspace'))
    with codecs.open(workspacePath, 'r', encoding='utf8') as inputpath:
        try:
            workspaceInput = json.load(inputpath)
        except ValueError:  # json.JSONDecodeError subclasses ValueError; bare except hid real errors
            logger.error('Workspace JSON is not valid JSON: %s', workspacePath)
            exit(1)
    # json to add (single path, no join needed)
    jsonIncludePath = getRequiredParameter(config, 'includejsondata_jsonfile')
    with codecs.open(jsonIncludePath, 'r', encoding='utf8') as jsonincludepath:
        try:
            jsonInclude = json.load(jsonincludepath)
        except ValueError:
            logger.error('JSON to include is not valid JSON: %s', jsonIncludePath)
            exit(1)
    # target element
    targetKey = getRequiredParameter(config, 'includejsondata_targetkey')

    # find the target key and add the json
    replacedValuesNumber = 0
    if 'dialog_nodes' in workspaceInput:
        workspaceInput['dialog_nodes'], replacedValuesNumber = replaceValue(workspaceInput['dialog_nodes'], targetKey, jsonInclude)
    else:
        logger.warning('Workspace does not contain \'dialog_nodes\'')

    # writing the file (same path the workspace was read from)
    with codecs.open(workspacePath, 'w', encoding='utf8') as outfile:
        json.dump(workspaceInput, outfile, indent=4)

    if replacedValuesNumber == 0:
        logger.warning('Target key not found.')
    else:
        logger.info('Writing workspaces with added JSON successfull.')

    logger.info('FINISHING: ' + os.path.basename(__file__))
 def test_moreParametersCombinationProvidedAB(self):
     """Setting a full list combination and the string one together is an error (variant AB)."""
     namespace = argparse.Namespace(common_configFilePaths=None,
                                    parameterNameA='parameterNameAValue',
                                    parameterNameB='parameterNameBValue',
                                    parameterNameC='parameterNameCValue')
     configuration = Cfg(namespace)
     expectedMessage = ("CRITICAL only one combination of parameters can be set, "
                        "combination already set: '['parameterNameA', 'parameterNameB']', "
                        "another argument set: 'parameterNameC'")
     self.t_exitCodeAndLogMessage(
         1, expectedMessage,
         [configuration, ['parameterNameA', 'parameterNameB'], 'parameterNameC'])
 def test_parametersCombinationList(self):
     """A fully set list combination is returned as a name-to-value dict."""
     namespace = argparse.Namespace(common_configFilePaths=None,
                                    parameterNameA='parameterNameAValue',
                                    parameterNameB='parameterNameBValue')
     configuration = Cfg(namespace)
     result = wawCommons.getParametersCombination(
         configuration, ['parameterNameA', 'parameterNameB'])
     assert result == {
         'parameterNameA': 'parameterNameAValue',
         'parameterNameB': 'parameterNameBValue',
     }
def main(argv):
    """Concatenate intents, entities and dialog JSONs into a WCS workspace JSON.

    Every artifact is optional: each one configured in the config is read from
    the common outputs directory and added to the workspace structure, which is
    then written to ``common_outputs_workspace`` (when configured).

    :param argv: command-line arguments (list of str), parsed by argparse
    """
    parser = argparse.ArgumentParser(description='Concatenate intents, entities and dialogue jsons to Watson Conversation Service workspace .json format', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-c', '--common_configFilePaths', help='configuaration file', action='append')
    parser.add_argument('-oc', '--common_output_config', help='output configuration file')
    parser.add_argument('-of', '--common_outputs_directory', required=False, help='directory where the otputs will be stored (outputs is default)')
    parser.add_argument('-oi', '--common_outputs_intents', required=False, help='json file with intents')
    parser.add_argument('-oe', '--common_outputs_entities', required=False, help='json file with entities')
    parser.add_argument('-od', '--common_outputs_dialogs', required=False, help='json file with dialogs')
    parser.add_argument('-ox', '--common_outputs_counterexamples', required=False, help='json file with counterexamples')
    parser.add_argument('-ow', '--common_outputs_workspace', required=False, help='json file with workspace')
    parser.add_argument('-wn','--conversation_workspace_name', required=False, help='name of this workspace')
    parser.add_argument('-wl','--conversation_language', required=False, help='language of generated workspace')
    parser.add_argument('-wd','--conversation_description', required=False, help='description')
    parser.add_argument('-v','--verbose', required=False, help='verbosity', action='store_true')
    parser.add_argument('--log', type=str.upper, default=None, choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    config = Cfg(args)
    logger.info('STARTING: ' + os.path.basename(__file__))

    # getattr with a default is exactly equivalent to the hasattr/getattr
    # pairs it replaces (hasattr is defined via AttributeError)
    workspace = {
        'name': getattr(config, 'conversation_workspace_name', 'default_workspace_name'),
        'language': getattr(config, 'conversation_language', 'en'),
        'description': getattr(config, 'conversation_description', ''),
    }

    if not hasattr(config, 'common_outputs_directory'):
        logger.info('outputs_directory is not defined!')
        exit(1)
    outputsDirectory = getattr(config, 'common_outputs_directory')

    # process intents
    if hasattr(config, 'common_outputs_intents'):
        with openFile(os.path.join(outputsDirectory, getattr(config, 'common_outputs_intents')), 'r', encoding='utf8') as intentsFile:
            workspace['intents'] = json.load(intentsFile)
    else:
        logger.info('output_intents not specified, omitting intents.')

    # process entities
    if hasattr(config, 'common_outputs_entities'):
        with openFile(os.path.join(outputsDirectory, getattr(config, 'common_outputs_entities')), 'r', encoding='utf8') as entitiesFile:
            workspace['entities'] = json.load(entitiesFile)
    else:
        logger.info('output_entities not specified, omitting entities.')

    # process dialog
    if hasattr(config, 'common_outputs_dialogs'):
        with openFile(os.path.join(outputsDirectory, getattr(config, 'common_outputs_dialogs')), 'r', encoding='utf8') as dialogFile:
            workspace['dialog_nodes'] = json.load(dialogFile)
    else:
        logger.info('outputs_dialogs not specified, omitting dialog.')

    # process counterexamples: stored on disk in "intent format", the
    # workspace wants a flat list of {"text": "example sentence"} items
    if hasattr(config, 'common_outputs_counterexamples'):
        counterexamplesJSON = []
        with openFile(os.path.join(outputsDirectory, getattr(config, 'common_outputs_counterexamples')), 'r', encoding='utf8') as counterexamplesFile:
            for intentExampleJSON in json.load(counterexamplesFile):
                counterexamplesJSON.extend(intentExampleJSON['examples'])
        workspace['counterexamples'] = counterexamplesJSON
    else:
        logger.info('outputs_counterexamples not specified, omitting counterexamples.')

    if hasattr(config, 'common_outputs_workspace'):
        with openFile(os.path.join(outputsDirectory, getattr(config, 'common_outputs_workspace')), 'w', encoding='utf8') as outputFile:
            outputFile.write(json.dumps(workspace, indent=4, ensure_ascii=False))
    else:
        # NOTE(review): despite this message, nothing is actually written to
        # the console in this branch — confirm whether stdout output was intended.
        logger.info('output_workspace not specified, generating to console.')

    logger.info('FINISHING: ' + os.path.basename(__file__))
# --- Esempio n. 13 (score: 0) — scraper artifact separating code examples ---
def main(argv):
    """Clean the generated directories and the outputs directory.

    Each configured path is removed with ``shutil.rmtree`` when it exists;
    either outcome (removed / not present) is logged.

    Bug fixed: the removal branches previously logged "<path> does not exist."
    right after successfully removing the path (and one message dropped the
    path entirely); they now log "<path> has been removed.".

    :param argv: command-line arguments (list of str), parsed by argparse
    """
    parser = argparse.ArgumentParser(
        description='Clean generated directories.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-c',
                        '--common_configFilePaths',
                        help='configuaration file',
                        action='append')
    parser.add_argument('-oc',
                        '--common_output_config',
                        help='output configuration file')
    parser.add_argument(
        '-od',
        '--common_outputs_directory',
        required=False,
        help='directory where the otputs will be stored (outputs is default)')
    parser.add_argument('-oi',
                        '--common_outputs_intents',
                        help='file with output json with all the intents')
    parser.add_argument('-oe',
                        '--common_outputs_entities',
                        help='file with output json with all the entities')
    parser.add_argument('-v',
                        '--verbose',
                        required=False,
                        help='verbosity',
                        action='store_true')
    parser.add_argument(
        '-s',
        '--common_soft',
        required=False,
        help=
        'soft name policy - change intents and entities names without error.',
        action='store_true',
        default="")
    parser.add_argument('--log',
                        type=str.upper,
                        default=None,
                        choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    config = Cfg(args)

    logger.info('STARTING: ' + os.path.basename(__file__))

    def _removeIfExists(path):
        # Remove a directory tree when present; log either outcome.
        if os.path.exists(path):
            shutil.rmtree(path)
            logger.verbose('%s has been removed.', path)
        else:
            logger.verbose('%s does not exist.', path)

    _removeIfExists(config.common_generated_dialogs[0])
    _removeIfExists(config.common_generated_intents[0])
    _removeIfExists(config.common_generated_entities[0])
    _removeIfExists(config.common_outputs_directory)

    logger.info('FINISHING: ' + os.path.basename(__file__))
    def test_deleteById(self, envVarNameUsername, envVarNamePassword):
        """Tests if a workspace can be deleted by its id.

        Deploys two workspaces with the same name, deletes one of them by the
        id recorded in the deploy script's output config, and verifies that
        exactly the other one survives and that the delete script's output
        config no longer carries a workspace id.
        """

        # use outputPath instead of dataBasePath when workspace_deploy script will be able to take workspace
        # and config file from different directories (workspace should be taken from
        # dataBasePath and config should be saved to outputs directory)
        createOutputConfigFilename = 'createWorkspaceOutput.cfg'
        createOutputConfigPath = os.path.abspath(
            os.path.join(self.dataBasePath, createOutputConfigFilename))
        deleteOutputConfigFilename = 'deleteWorkspaceOutput.cfg'
        deleteOutputConfigPath = os.path.abspath(
            os.path.join(self.dataBasePath, deleteOutputConfigFilename))

        workspaceName = 'deleteById_workspace'

        # deploy test workspace (its id is written to createOutputConfigPath)
        deployParams = list(self.deployParamsBase)
        deployParams.extend([
            '--common_output_config', createOutputConfigPath,
            '--conversation_workspace_name', workspaceName
        ])
        workspace_deploy.main(deployParams)
        # deploy one more workspace with the same name (must survive the delete)
        deployParamsMore = list(self.deployParamsBase)
        deployParamsMore.extend(
            ['--conversation_workspace_name', workspaceName])
        workspace_deploy.main(deployParamsMore)

        # try to delete workspace by its id (id is obtained from output config of deploy script)
        deleteParams = list(self.deleteParamsBase)
        deleteParams.extend([
            '-c', createOutputConfigPath, '--common_output_config',
            deleteOutputConfigPath
        ])
        self.t_noExceptionAndLogMessage(
            "One workspace has been successfully deleted", [deleteParams])

        # parse output config of deploy script (contains workspace id to delete)
        parser = argparse.ArgumentParser()
        parser.add_argument('-c',
                            '--common_configFilePaths',
                            help='configuaration file',
                            action='append')
        args = parser.parse_args(
            ['--common_configFilePaths', createOutputConfigPath])
        createOutputConfig = Cfg(args)

        workspaces = getWorkspaces(self.workspacesUrl, self.version,
                                   self.username, self.password)

        # in workspaces on server there should be no workspace with id from config file
        workspacesFound = 0
        for workspace in workspaces:
            if workspace['workspace_id'] == getRequiredParameter(
                    createOutputConfig, 'conversation_workspace_id'):
                workspacesFound += 1

        assert workspacesFound == 0

        # there should be still one workspace left (even with the same name)
        assert len(workspaces) == 1

        # check if workspace_id is not present in the output config of delete script
        parser = argparse.ArgumentParser()
        parser.add_argument('-c',
                            '--common_configFilePaths',
                            help='configuaration file',
                            action='append')
        args = parser.parse_args(
            ['--common_configFilePaths', deleteOutputConfigPath])
        deleteOutputConfig = Cfg(args)

        # idiom fix: `hasattr(...) == False` replaced by `not hasattr(...)`
        assert not hasattr(deleteOutputConfig, 'conversation_workspace_id')
def main(argv):
    """Create dialog nodes with answers to intents from MS Excel files.

    Parses every .xlsx file named by --common_xls (files or directories of
    files), converts the parsed blocks to dialog data, resolves cross
    references, and saves the result to the file system.

    :param argv: command-line arguments (list of str), parsed by argparse
    """
    parser = argparse.ArgumentParser(
        description='Creates dialog nodes with answers to intents .',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # optional arguments
    parser.add_argument('-x',
                        '--common_xls',
                        required=False,
                        help='file with MSExcel formated dialog',
                        action='append')
    parser.add_argument('-gd',
                        '--common_generated_dialogs',
                        nargs='?',
                        help='generated dialog file')
    parser.add_argument('-gi',
                        '--common_generated_intents',
                        nargs='?',
                        help='directory for generated intents')
    parser.add_argument('-ge',
                        '--common_generated_entities',
                        nargs='?',
                        help='directory for generated entities')
    parser.add_argument('-c',
                        '--common_configFilePaths',
                        help='configuaration file',
                        action='append')
    parser.add_argument('-oc',
                        '--common_output_config',
                        help='output configuration file')
    parser.add_argument('-v',
                        '--verbose',
                        required=False,
                        help='verbosity',
                        action='store_true')
    parser.add_argument('--log',
                        type=str.upper,
                        default=None,
                        choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    config = Cfg(args)

    logger.info('STARTING: ' + os.path.basename(__file__))

    # (removed dead locals `name_policy` and `allDataBlocks`: both were
    # assigned but never used anywhere in this function)
    if not hasattr(config, 'common_xls'):
        logger.error('xls is not defined')
        exit(1)
    if not hasattr(config, 'common_generated_dialogs'):
        logger.verbose('generated_dialogs parameter is not defined')
    if not hasattr(config, 'common_generated_intents'):
        logger.verbose('generated_intents parameter is not defined')
    if not hasattr(config, 'common_generated_entities'):
        logger.verbose('generated_entities parameter is not defined')

    xlsxHandler = XLSXHandler(config)

    logger.info(getattr(config, 'common_xls'))
    for fileOrFolder in getattr(config, 'common_xls'):
        logger.verbose('Searching in path: %s', fileOrFolder)
        if os.path.isdir(fileOrFolder):
            for xlsFile in os.listdir(fileOrFolder):
                xlsPath = os.path.join(fileOrFolder, xlsFile)
                # accept only regular .xlsx files, skipping Excel lock
                # files (~...) and hidden files (....)
                if os.path.isfile(xlsPath) and xlsFile.endswith('.xlsx') and \
                        not xlsFile.startswith('~') and not xlsFile.startswith('.'):
                    xlsxHandler.parseXLSXIntoDataBlocks(xlsPath)
                else:
                    logger.warning(
                        'The file %s skipped due to failing file selection policy check. '
                        'It should be .xlsx file not starting with ~ or .(dot).',
                        xlsPath)

        elif os.path.exists(fileOrFolder):
            xlsxHandler.parseXLSXIntoDataBlocks(fileOrFolder)
        # NOTE(review): non-existent paths are silently skipped here —
        # consider logging a warning; confirm intended behavior.

    xlsxHandler.convertBlocksToDialogData()  # Blocks -> DialogData
    xlsxHandler.updateReferences()  # Resolving cross references
    saveDialogDataToFileSystem(xlsxHandler.getDialogData(), XMLHandler(),
                               config)

    logger.info('FINISHING: ' + os.path.basename(__file__))
# --- Esempio n. 16 (score: 0) — scraper artifact separating code examples ---
def main(argv):
    """Test all dialog flows from the input file against a deployed workspace.

    Waits for the workspace (looked up by name) to finish training, then sends
    every input line's 'input_message' payload to the message endpoint, chaining
    context between turns of the same 'dialog_id', and writes each response as
    one JSON line to the output file.

    :param argv: command-line arguments (list of str), parsed by argparse
    """
    parser = argparse.ArgumentParser(
        description=
        'Tests all dialog flows from given file and save received responses to output file',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # positional arguments
    parser.add_argument(
        'inputFileName',
        help=
        'file with test jsons to be sent to conversation service. (One at each line at key \'input\'.)'
    )
    parser.add_argument(
        'outputFileName',
        help=
        'file where to store received data from conversation service. (One response at each line.)'
    )
    # optional arguments
    parser.add_argument('-c',
                        '--common_configFilePaths',
                        help='configuaration file',
                        action='append')
    parser.add_argument('-v',
                        '--verbose',
                        required=False,
                        help='verbosity',
                        action='store_true')
    parser.add_argument('--log',
                        type=str.upper,
                        default=None,
                        choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    config = Cfg(args)

    workspacesUrl = getRequiredParameter(config, 'conversation_url')
    version = getRequiredParameter(config, 'conversation_version')
    username = getRequiredParameter(config, 'conversation_username')
    password = getRequiredParameter(config, 'conversation_password')
    workspaces = filterWorkspaces(
        config, getWorkspaces(workspacesUrl, version, username, password))
    if len(workspaces) > 1:
        # if there is more than one workspace with the same name -> error
        logger.error(
            'There are more than one workspace with this name, do not know which one to test.'
        )
        exit(1)
    elif len(workspaces) == 1:
        workspaceId = workspaces[0]['workspace_id']
    else:
        logger.error('There is no workspace with this name, cannot test it.')
        exit(1)

    # wait until workspace is done with training
    checkWorkspaceTime = 0
    requestUrl = workspacesUrl + '/' + workspaceId + '?version=' + version
    while True:
        logger.verbose("requestUrl: %s", requestUrl)
        response = requests.get(requestUrl, auth=(username, password))
        if response.status_code == 200:
            responseJson = response.json()
            if errorsInResponse(responseJson):
                sys.exit(1)
            logger.verbose("response: %s", responseJson)
            status = responseJson['status']
            logger.info('WCS WORKSPACE STATUS: %s', status)
            if status == 'Available':
                break
            else:
                # sleep some time and check messages again
                if checkWorkspaceTime > CHECK_WORKSPACE_TIME_MAX:
                    # Fix: the log previously reported CHECK_MESSAGES_TIME_MAX
                    # although the comparison uses CHECK_WORKSPACE_TIME_MAX
                    logger.error(
                        'Workspace have not become available before timeout, timeout: %d, response: %s',
                        CHECK_WORKSPACE_TIME_MAX,
                        json.dumps(responseJson,
                                   indent=4,
                                   sort_keys=True,
                                   ensure_ascii=False).encode('utf8'))
                    sys.exit(1)
                time.sleep(CHECK_WORKSPACE_TIME_DELAY)
                checkWorkspaceTime = checkWorkspaceTime + CHECK_WORKSPACE_TIME_DELAY
        elif response.status_code == 400:
            logger.error('WA not available.')
            sys.exit(1)
        else:
            # NOTE(review): this branch retries immediately without sleeping or
            # counting toward the timeout — confirm whether a delay/exit is intended.
            logger.error('Unknown status code:%s.', response.status_code)

    # run tests
    url = workspacesUrl + '/' + workspaceId + '/message?version=' + version
    receivedOutputJson = []
    try:
        with openFile(args.inputFileName, "r") as inputFile:
            try:
                with openFile(args.outputFileName, "w") as outputFile:
                    first = True
                    dialogId = ""
                    # for every input line
                    for inputLine in inputFile:
                        loadedJson = json.loads(inputLine)
                        inputJson = loadedJson[
                            'input_message']  # input json for tests
                        if dialogId and dialogId == loadedJson['dialog_id']:
                            # same dialog as the previous turn: carry its context over
                            if receivedOutputJson and 'context' in receivedOutputJson and receivedOutputJson[
                                    'context']:
                                inputJson['context'] = receivedOutputJson[
                                    'context']  # use context from last dialog turn
                        dialogId = loadedJson['dialog_id']
                        logger.verbose("url: %s", url)
                        response = requests.post(
                            url,
                            auth=(username, password),
                            headers={'Content-Type': 'application/json'},
                            data=json.dumps(inputJson,
                                            indent=4,
                                            ensure_ascii=False).encode('utf8'))
                        if response.status_code == 200:
                            receivedOutputJson = response.json()
                            if not first:
                                outputFile.write("\n")
                            outputFile.write(
                                json.dumps(receivedOutputJson,
                                           ensure_ascii=False))
                            first = False
                        elif response.status_code == 400:
                            logger.error('Error while testing.')
                            errorsInResponse(response.json())
                            sys.exit(1)
                        else:
                            logger.error('Unknown status code:%s.',
                                         response.status_code)
                            sys.exit(1)
            except IOError:
                logger.error('Cannot open test output file %s',
                             args.outputFileName)
                sys.exit(1)
    except IOError:
        logger.error('Cannot open test input file %s', args.inputFileName)
        sys.exit(1)

    logger.info('FINISHING: ' + os.path.basename(__file__))
def main(argv):
    '''
    Scripts takes input json file that represents test that should be run against
    Cloud Functions and produce output that extends input json file by results
    from CFs and evaluation.

    Inputs and expected outputs can contain string values that starts with '::'
    (e.g. "key": "::valueToBeReplaced1") which will be replaced by matching 
    configuration parameters or by values specified by parameter 'replace'
    (format \'valueToBeReplaced1:replacement1,valueToBeReplaced2:replacement2\')).

    Input json file example:
    [
        {
            "name": "test example 1", # OPTIONAL
            "type": "EXACT_MATCH", # OPTIONAL (DEFAULT = EXACT_MATCH, OPTIONS = [EXACT_MATCH])
            "cf_package": "<CLOUD FUNCTIONS PACKAGE NAME>", # OPTIONAL (could be provided directly to script, at least one has to be specified, test level overrides global script one)
            "cf_function": "<CLOUD FUNCTIONS SPECIFIC FUNCTION TO BE TESTED>", # --||--
            "input": <OBJECT> | <@PATH/TO/FILE>, # payload to be send to CF (could be specified as a relative or absolute path to the file that contains json file, e.g. "input": "@inputs/test_example_1.json")
            "outputExpected": <OBJECT> | <@PATH/TO/FILE>, # expected payload to be return from CF (--||--)
        },
        {
            "name": "test example 2",
            ...
              rest of the test definition
            ...
        }
    ]

    Output json file example:
    [
        {
            "name": "test example 1",
            ...
              rest of the input test definition
            ...
            "outputReturned": <OBJECT>, # returned payload from CF
            "result": <0 - test passed, 1 - test failed>
            "diff": <OBJECT> # if test passed then "diff" is Null, else contains object that represents differences
        }
    ]
    '''
    parser = argparse.ArgumentParser(description='Tests all tests specified in given file against Cloud Functions and save test outputs to output file', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # positional arguments
    parser.add_argument('inputFileName', help='File with json array containing tests.')
    parser.add_argument('outputFileName', help='File where to store test outputs.')
    # optional arguments
    parser.add_argument('-c', '--common_configFilePaths', help='configuaration file', action='append')
    parser.add_argument('--cloudfunctions_url', required=False, help='url of cloud functions API')
    parser.add_argument('--cloudfunctions_namespace', required=False, help='cloud functions namespace')
    parser.add_argument('--cloudfunctions_package', required=False, help='cloud functions package name')
    parser.add_argument('--cloudfunctions_function', required=False, help='cloud functions specific function to be tested')
    parser.add_argument('--cloudfunctions_apikey', required=False, help="cloud functions apikey")
    parser.add_argument('--cloudfunctions_username', required=False, help='cloud functions user name')
    parser.add_argument('--cloudfunctions_password', required=False, help='cloud functions password')
    parser.add_argument('-v','--verbose', required=False, help='verbosity', action='store_true')
    parser.add_argument('--log', type=str.upper, default=None, choices=list(logging._levelToName.values()))
    parser.add_argument('--replace', required=False, help='string values to be replaced in input and expected output json (format \'valueToBeReplaced1:replacement1,valueToBeReplaced2:replacement2\')')
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    config = Cfg(args)

    logger.info('STARTING: '+ os.path.basename(__file__))

    url = getRequiredParameter(config, 'cloudfunctions_url')
    namespace = getRequiredParameter(config, 'cloudfunctions_namespace')
    # either an apikey or a username+password pair must be configured
    auth = getParametersCombination(config, 'cloudfunctions_apikey', ['cloudfunctions_password', 'cloudfunctions_username'])
    package = getOptionalParameter(config, 'cloudfunctions_package')
    function = getOptionalParameter(config, 'cloudfunctions_function')

    if 'cloudfunctions_apikey' in auth:
        username, password = convertApikeyToUsernameAndPassword(auth['cloudfunctions_apikey'])
    else:
        username = auth['cloudfunctions_username']
        password = auth['cloudfunctions_password']

    try:
        inputFile = open(args.inputFileName, 'r')
    except IOError:
        logger.critical('Cannot open test input file %s', args.inputFileName)
        sys.exit(1)

    # open the output file up front so a bad path fails before any test is run
    try:
        outputFile = open(args.outputFileName, 'w')
    except IOError:
        logger.critical('Cannot open test output file %s', args.outputFileName)
        sys.exit(1)

    try:
        inputJson = json.load(inputFile)
    except ValueError as e:
        logger.critical('Cannot decode json from test input file %s, error: %s', args.inputFileName, str(e))
        sys.exit(1)
    finally:
        # BUGFIX: the input file handle was previously never closed
        inputFile.close()

    if not isinstance(inputJson, list):
        logger.critical('Input test json is not array!')
        sys.exit(1)

    # build the replacement dictionary: every configuration parameter plus the
    # explicit pairs from the 'replace' parameter
    replaceDict = {}
    for attr in dir(config):
        if not attr.startswith("__"):
            if attr == 'replace':
                # format \'valueToBeReplaced1:replacement1,valueToBeReplaced2:replacement2\'
                replacementsString = getattr(config, attr)
                for replacementString in replacementsString.split(','):
                    replacementStringSplit = replacementString.split(':')
                    if len(replacementStringSplit) != 2 or not replacementStringSplit[0] or not replacementStringSplit[1]:
                        logger.critical('Invalid format of \'replace\' parameter, valid format is \'valueToBeReplaced1:replacement1,valueToBeReplaced2:replacement2\'')
                        sys.exit(1)
                    replaceDict[replacementStringSplit[0]] = replacementStringSplit[1]
            else:
                replaceDict[attr] = getattr(config, attr)

    def _resolvePayload(payload, kind):
        """Resolve a test payload: a string starting with '@' is a (relative to the
        test input file or absolute) path to a json file containing the payload,
        anything else is the payload itself.

        Returns (ok, payloadJson); ok is False when the referenced file cannot be
        read or parsed (the caller should then skip the test)."""
        # BUGFIX: the original wrapped .startswith in a bare 'except: pass' which
        # silently swallowed every error raised while loading the payload file
        if isinstance(payload, str) and payload.startswith('@'):
            path = os.path.join(os.path.dirname(args.inputFileName), payload[1:])
            logger.debug('Loading %s payload from file: %s', kind, path)
            try:
                with open(path, 'r') as payloadFile:
                    return True, json.load(payloadFile)
            except IOError:
                logger.error('Cannot open %s payload from file: %s', kind, path)
                return False, None
            except ValueError as e:
                logger.error('Cannot decode json from %s payload from file %s, error: %s', kind, path, str(e))
                return False, None
        logger.debug('%s payload provided inside the test', kind.capitalize())
        return True, payload

    # run tests
    # BUGFIX: enumerate keeps the test numbering correct even when a test is
    # skipped via 'continue' (the manual counter was only bumped at loop end)
    for testCounter, test in enumerate(inputJson):
        if not isinstance(test, dict):
            logger.error('Input test array element %d is not dictionary. Each test has to be dictionary, please see doc!', testCounter)
            continue
        logger.info('Test number: %d, name: %s', testCounter, (test['name'] if 'name' in test else '-'))
        # per-test package/function override the script-level ones
        testUrl = \
            url + ('' if url.endswith('/') else '/') + \
            namespace + '/actions/' + (test['cf_package'] if 'cf_package' in test else package) + '/' + \
            (test['cf_function'] if 'cf_function' in test else function) + \
            '?blocking=true&result=true'
        logger.info('Tested function url: %s', testUrl)

        # load test input payload json
        ok, testInputJson = _resolvePayload(test['input'], 'input')
        if not ok:
            continue

        # load test expected output payload json
        ok, testOutputExpectedJson = _resolvePayload(test['outputExpected'], 'expected output')
        if not ok:
            continue

        logger.debug('Replacing values in input and expected output jsons by configuration parameters.')

        for target, value in replaceDict.items():
            testInputJson, replacementNumber = replaceValue(testInputJson, '::' + target, value, False)
            if replacementNumber > 0:
                logger.debug('Replaced configuration parameter \'%s\' in input json, number of occurences: %d.', target, replacementNumber)
            testOutputExpectedJson, replacementNumber = replaceValue(testOutputExpectedJson, '::' + target, value, False)
            if replacementNumber > 0:
                logger.debug('Replaced configuration parameter \'%s\' in expected output json, number of occurences: %d.', target, replacementNumber)

        # call CF
        logger.debug('Sending input json: %s', json.dumps(testInputJson, ensure_ascii=False).encode('utf8'))
        response = requests.post(
            testUrl, 
            auth=(username, password), 
            headers={'Content-Type': 'application/json'}, 
            data=json.dumps(testInputJson, ensure_ascii=False).encode('utf8'))

        responseContentType = response.headers.get('content-type')
        if responseContentType != 'application/json':
            logger.error('Response content type is not json, content type: %s, response:\n%s', responseContentType, response.text)
            continue

        # check status
        if response.status_code == 200:
            testOutputReturnedJson = response.json()
            logger.debug('Received output json: %s', json.dumps(testOutputReturnedJson, ensure_ascii=False).encode('utf8'))
            test['outputReturned'] = testOutputReturnedJson

            # evaluate test
            if 'type' not in test or test['type'] == 'EXACT_MATCH':
                testResultString = DeepDiff(testOutputExpectedJson, testOutputReturnedJson, ignore_order=True).json
                testResultJson = json.loads(testResultString)
                if testResultJson == {}:
                    test['result'] = 0
                else:
                    test['result'] = 1
                    test['diff'] = testResultJson
            else:
                logger.error('Unknown test type: %s', test['type'])
        elif response.status_code in [202, 403, 404, 408, 502]:
            # 202 Accepted activation request (should not happen while sending 'blocking=true&result=true')
            # 403 Forbidden (could be just for specific package or function)
            # 404 Not Found (action or package could be incorrectly specified for given test)
            # 408 Request Timeout (could happen e.g. for CF that calls some REST APIs, e.g. Discovery service)
            # 502 Bad Gateway (when the CF raises exception, e.g. bad params where provided)
            # => Could be issue just for given test, so we don't want to stop whole testing.
            # BUGFIX: 502 was documented here but previously fell through to the fatal branch
            logger.error('Unexpected response status: %d, response: %s', response.status_code, json.dumps(response.json(), ensure_ascii=False).encode('utf8'))
        else:
            # 401 Unauthorized (while we use same credentials for all tests then we want to end after the first test returns bad authentification)
            # 500 Internal Server Error (could happen that IBM Cloud has several issue and is not able to handle incoming requests, then it would be probably same for all tests)
            # => We don't want to continue with testing.
            logger.critical('Unexpected response status (cannot continue with testing): %d, response: %s', response.status_code, json.dumps(response.json(), ensure_ascii=False).encode('utf8'))
            sys.exit(1)

    outputFile.write(json.dumps(inputJson, indent=4, ensure_ascii=False) + '\n')
    outputFile.close()
    logger.info('FINISHING: '+ os.path.basename(__file__))
def main(argv):
    """Converts intent csv files (one example per line, '#' starts a comment) to
    the .json workspace format of Watson Conversation Service.

    Output goes to the file given by common_outputs_directory /
    common_outputs_intents, or to stdout when those are not configured."""
    parser = argparse.ArgumentParser(
        description=
        'Converts intent csv files to .json format of Watson Conversation Service',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-c',
                        '--common_configFilePaths',
                        help='configuaration file',
                        action='append')
    parser.add_argument('-oc',
                        '--common_output_config',
                        help='output configuration file')
    parser.add_argument(
        '-ii',
        '--common_intents',
        help=
        'directory with intent csv files to be processed (all of them will be included in output json)',
        action='append')  #-gi is functionsally equivalent to -ii
    parser.add_argument(
        '-gi',
        '--common_generated_intents',
        help=
        'directory with generated intent csv files to be processed (all of them will be included in output json)',
        action='append')
    parser.add_argument(
        '-od',
        '--common_outputs_directory',
        required=False,
        help='directory where the otputs will be stored (outputs is default)')
    parser.add_argument('-oi',
                        '--common_outputs_intents',
                        help='file with output json with all the intents')
    parser.add_argument(
        '-ni',
        '--common_intents_nameCheck',
        action='append',
        nargs=2,
        help=
        "regex and replacement for intent name check, e.g. '-' '_' for to replace hyphens for underscores or '$special' '\\L' for lowercase"
    )
    parser.add_argument(
        '-s',
        '--soft',
        required=False,
        help=
        'soft name policy - change intents and entities names without error.',
        action='store_true',
        default="")
    parser.add_argument('-v',
                        '--verbose',
                        required=False,
                        help='verbosity',
                        action='store_true')
    parser.add_argument('--log',
                        type=str.upper,
                        default=None,
                        choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    config = Cfg(args)

    # 'hard' policy rejects invalid intent names, 'soft' silently fixes them
    NAME_POLICY = 'soft' if args.soft else 'hard'
    logger.info('STARTING: ' + os.path.basename(__file__))

    if not hasattr(config, 'common_intents'):
        logger.info('intents parameter is not defined.')
    if not hasattr(config, 'common_generated_intents'):
        logger.info('generated_intents parameter is not defined, ignoring')
    if not hasattr(config, 'common_outputs_intents'):
        logger.info(
            'Outputs_intents parameter is not defined, output will be generated to console.'
        )

    intents = []

    # BUGFIX: default to [] so a missing 'common_intents' parameter no longer
    # raises AttributeError right after the informational message above
    pathList = getattr(config, 'common_intents', [])
    if hasattr(config, 'common_generated_intents'):
        pathList = pathList + getattr(config, 'common_generated_intents')

    # each csv file becomes one intent named after the file (sans extension)
    filesAtPath = getFilesAtPath(pathList)
    for intentFileName in sorted(filesAtPath):
        intentName = toIntentName(
            NAME_POLICY, args.common_intents_nameCheck,
            os.path.splitext(os.path.basename(intentFileName))[0])
        with openFile(intentFileName, 'r', encoding='utf8') as intentFile:
            intent = {}
            intent['intent'] = intentName
            examples = []
            for line in intentFile:
                # remove comments
                line = line.split('#')[0]
                line = line.rstrip().lower()
                #non-ascii characters fix
                #line = line.encode('utf-8')
                if line:
                    example = processExample(line, intentName, examples)
                    #adding to the list
                    if example:
                        examples.append(example)

            intent['examples'] = examples
            intents.append(intent)

    # write to the configured output file, or print to console as a fallback
    if hasattr(config, 'common_outputs_directory') and hasattr(
            config, 'common_outputs_intents'):
        if not os.path.exists(getattr(config, 'common_outputs_directory')):
            os.makedirs(getattr(config, 'common_outputs_directory'))
            logger.info('Created new output directory ' +
                        getattr(config, 'common_outputs_directory'))
        with codecs.open(os.path.join(
                getattr(config, 'common_outputs_directory'),
                getattr(config, 'common_outputs_intents')),
                         'w',
                         encoding='utf8') as outputFile:
            outputFile.write(json.dumps(intents, indent=4, ensure_ascii=False))
    else:
        print(json.dumps(intents, indent=4, ensure_ascii=False))

    logger.info('FINISHING: ' + os.path.basename(__file__))
def main(argv):
    """Deletes the cloudfunctions package specified in the configuration file or as CLI argument.

    Deletes every action in the package first (sequences before the plain
    actions they reference), then deletes the package itself. Exits with
    status 1 on the first failed API call."""
    parser = argparse.ArgumentParser(
        description="Deletes cloud functions package.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-v',
                        '--verbose',
                        required=False,
                        help='verbosity',
                        action='store_true')
    parser.add_argument('-c',
                        '--common_configFilePaths',
                        help="configuration file",
                        action='append')
    parser.add_argument('--common_functions',
                        required=False,
                        help="directory where the cloud functions are located")
    parser.add_argument('--cloudfunctions_namespace',
                        required=False,
                        help="cloud functions namespace")
    parser.add_argument('--cloudfunctions_apikey',
                        required=False,
                        help="cloud functions apikey")
    parser.add_argument('--cloudfunctions_username',
                        required=False,
                        help="cloud functions user name")
    parser.add_argument('--cloudfunctions_password',
                        required=False,
                        help="cloud functions password")
    parser.add_argument('--cloudfunctions_package',
                        required=False,
                        help="cloud functions package name")
    parser.add_argument('--cloudfunctions_url',
                        required=False,
                        help="url of cloud functions API")
    parser.add_argument('--log',
                        type=str.upper,
                        default=None,
                        choices=list(logging._levelToName.values()))

    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    def handleResponse(response):
        """Get response code and show an error if it's not OK"""
        code = response.status_code
        if code != requests.codes.ok:
            if code == 401:
                logger.error(
                    "Authorization error. Check your credentials. (Error code "
                    + str(code) + ")")
            elif code == 403:
                logger.error(
                    "Access is forbidden. Check your credentials and permissions. (Error code "
                    + str(code) + ")")
            elif code == 404:
                logger.error(
                    "The resource could not be found. Check your cloudfunctions url and namespace. (Error code "
                    + str(code) + ")")
            elif code >= 500:
                logger.error("Internal server error. (Error code " +
                             str(code) + ")")
            else:
                logger.error("Unexpected error code: " + str(code))

            errorsInResponse(response.json())
            return False
        return True

    def isActionSequence(action):
        """Return True when the action is an OpenWhisk sequence
        (annotation key 'exec' with value 'sequence')."""
        # robustness: some action records may carry no 'annotations' at all
        for annotation in action.get('annotations', []):
            if annotation.get('key') == 'exec' and annotation.get('value') == 'sequence':
                return True
        return False

    config = Cfg(args)
    logger.info('STARTING: ' + os.path.basename(__file__))

    namespace = getRequiredParameter(config, 'cloudfunctions_namespace')
    urlNamespace = quote(namespace)
    # either an apikey or a username+password pair must be configured
    auth = getParametersCombination(
        config, 'cloudfunctions_apikey',
        ['cloudfunctions_password', 'cloudfunctions_username'])
    package = getRequiredParameter(config, 'cloudfunctions_package')
    cloudfunctionsUrl = getRequiredParameter(config, 'cloudfunctions_url')
    functionDir = getRequiredParameter(config, 'common_functions')

    if 'cloudfunctions_apikey' in auth:
        username, password = convertApikeyToUsernameAndPassword(
            auth['cloudfunctions_apikey'])
    else:
        username = auth['cloudfunctions_username']
        password = auth['cloudfunctions_password']

    logger.info("Will delete cloud functions in package '" + package + "'.")

    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
    packageUrl = cloudfunctionsUrl + '/' + urlNamespace + '/packages/' + package
    response = requests.get(packageUrl,
                            auth=(username, password),
                            headers={'Content-Type': 'application/json'})
    if not handleResponse(response):
        logger.critical("Unable to get information about package '" + package +
                        "'.")
        sys.exit(1)

    actions = response.json()['actions']
    # put the sequences at the beginning so they are deleted before the
    # plain actions they reference
    # BUGFIX: without reverse=True the sort put sequences (True > False) last
    actions.sort(key=isActionSequence, reverse=True)

    for action in actions:
        name = action['name']
        actionUrl = cloudfunctionsUrl + '/' + urlNamespace + '/actions/' + package + '/' + name
        logger.verbose("Deleting action '" + name + "' at " + actionUrl)
        response = requests.delete(
            actionUrl,
            auth=(username, password),
            headers={'Content-Type': 'application/json'})
        if not handleResponse(response):
            # BUGFIX: opening quote around the action name was missing
            logger.critical("Unable to delete action '" + name + "' at " +
                            actionUrl)
            sys.exit(1)
        logger.verbose("Action deleted.")

    logger.verbose("Deleting package '" + package + "' at " + packageUrl)
    response = requests.delete(packageUrl,
                               auth=(username, password),
                               headers={'Content-Type': 'application/json'})
    if not handleResponse(response):
        logger.critical("Unable to delete package '" + package + "' at " +
                        packageUrl)
        sys.exit(1)
    logger.verbose("Package deleted.")
    logger.info("Cloud functions in package successfully deleted.")
# Esempio n. 20 (Italian: "Example no. 20") / "0" — extraction artifact: these
# two residue lines replaced the lost "def ..." header of the truncated,
# syntactically orphaned fragment that follows.
    printf('\nSTARTING: ' + os.path.basename(__file__) + '\n')
    parser = argparse.ArgumentParser(description='Converts dialog nodes from .xml format to Bluemix conversation service workspace .json format', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-dm','--common_dialog_main', required=False, help='main dialog file with dialogue nodes in xml format')
    parser.add_argument('-c','--common_configFilePaths', help='configuaration file', action='append')
    parser.add_argument('-oc', '--common_output_config', help='output configuration file')
    parser.add_argument('-s', '--common_schema', required=False, help='schema file')
    parser.add_argument('-of', '--common_outputs_directory', required=False, help='directory where the otputs will be stored (outputs is default)')
    parser.add_argument('-od', '--common_outputs_dialogs', required=False, help='name of generated file (dialogs.xml is the default)')
    #CF parameters are specific to Cloud Functions Credentials placement from config file and will be replaced in the future by a separate script
    parser.add_argument('-cfn','--cloudfunctions_namespace', required=False, help='cloud functions namespace')
    parser.add_argument('-cfu','--cloudfunctions_username', required=False, help='cloud functions username')
    parser.add_argument('-cfp','--cloudfunctions_password', required=False, help='cloud functions password')
    parser.add_argument('-cfa','--cloudfunctions_package', required=False, help='cloud functions package')
    parser.add_argument('-v','--common_verbose', required=False, help='verbosity', action='store_true')
    args = parser.parse_args(sys.argv[1:])
    config = Cfg(args);
    VERBOSE = hasattr(config, 'common_verbose')

    if hasattr(config, 'cloudfunctions_namespace') and hasattr(config, 'cloudfunctions_package'):
        setattr(config, 'cloudfunctions_path_to_actions', '/' + '/'.join([getattr(config, 'cloudfunctions_namespace').strip("/"), getattr(config, 'cloudfunctions_package').strip("/")]).strip("/") + '/')

    # load dialogue from XML
    if hasattr(config, 'common_dialog_main'):
        dialogTree = LET.parse(getattr(config, 'common_dialog_main'))
    else:
        dialogTree = LET.parse(sys.stdin)

    # load schema
    schemaDirname, this_filename = os.path.split(os.path.abspath(__file__))
    if not hasattr(config, 'common_schema') or getattr(config, 'common_schema') is None:
        setattr(config, 'common_schema', schemaDirname+'/../data_spec/dialog_schema.xml')
def main(argv):
    """Converts dialog nodes from .xml format to Bluemix conversation service
    workspace .json format.

    Reads the dialog tree from common_dialog_main (or stdin), optionally
    validates it against common_schema, transforms it and writes the resulting
    node list to common_outputs_directory/common_outputs_dialogs or stdout."""
    parser = argparse.ArgumentParser(
        description=
        'Converts dialog nodes from .xml format to Bluemix conversation service workspace .json format',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        '-dm',
        '--common_dialog_main',
        required=False,
        help='main dialog file with dialogue nodes in xml format')
    parser.add_argument('-c',
                        '--common_configFilePaths',
                        help='configuaration file',
                        action='append')
    parser.add_argument('-oc',
                        '--common_output_config',
                        help='output configuration file')
    parser.add_argument('-s',
                        '--common_schema',
                        required=False,
                        help='schema file')
    parser.add_argument('-sc',
                        '--common_scope',
                        required=False,
                        help='scope of dialog, e.g. type-local')
    parser.add_argument(
        '-of',
        '--common_outputs_directory',
        required=False,
        help='directory where the outputs will be stored (outputs is default)')
    parser.add_argument(
        '-od',
        '--common_outputs_dialogs',
        required=False,
        help='name of generated file (dialog.json is the default)')
    parser.add_argument('-v',
                        '--verbose',
                        required=False,
                        help='verbosity',
                        action='store_true')
    parser.add_argument('--log',
                        type=str.upper,
                        default=None,
                        choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)
    # config is consumed by the node-processing helpers, hence module-global
    global config

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    config = Cfg(args)

    logger.info('STARTING: ' + os.path.basename(__file__))

    # XML namespaces
    global XSI_NAMESPACE
    global XSI
    global NSMAP
    XSI_NAMESPACE = "http://www.w3.org/2001/XMLSchema-instance"
    XSI = "{%s}" % XSI_NAMESPACE
    NSMAP = {"xsi": XSI_NAMESPACE}

    # load dialogue from XML
    if hasattr(config, 'common_dialog_main'):
        #TODO might need UTF-8
        dialogTree = LET.parse(getattr(config, 'common_dialog_main'))
    else:
        dialogTree = LET.parse(sys.stdin)

    # load schema and validate the dialog tree against it (validation is
    # skipped entirely when no schema is configured)
    schemaParam = getOptionalParameter(config, 'common_schema')
    if schemaParam:
        # schema path is resolved relative to this script's directory
        schemaDirname = os.path.split(os.path.abspath(__file__))[0]
        schemaFile = os.path.join(schemaDirname, schemaParam)
        if not os.path.exists(schemaFile):
            logger.error('Schema file %s not found.', schemaFile)
            sys.exit(1)  # consistency: use sys.exit like the rest of the file
        #TODO might need UTF-8
        schemaTree = LET.parse(schemaFile)
        global schema
        schema = LET.XMLSchema(schemaTree)
        validate(dialogTree)

    # process dialog tree
    root = dialogTree.getroot()
    global rootGlobal
    rootGlobal = root
    importNodes(root, config)

    # remove all comments
    removeAllComments(dialogTree)

    # remove nodes which are out of specified scope
    removeOutOfScopeNodes(dialogTree)

    # find all node names
    global names
    names = findAllNodeNames(dialogTree)

    # BUGFIX: removed the unused 'parent_map' computation - it called the
    # deprecated ElementTree.getiterator(), which is gone in Python 3.9+
    generateNodes(root, None, DEFAULT_ABORT, DEFAULT_AGAIN, DEFAULT_BACK,
                  DEFAULT_REPEAT, DEFAULT_GENERIC)

    # create dialog structure for JSON
    dialogNodes = []

    # convert XML tree to JSON structure
    printNodes(root, None, dialogNodes)

    # write to the configured output file, or print to console as a fallback
    if hasattr(config, 'common_outputs_directory') and hasattr(
            config, 'common_outputs_dialogs'):
        if not os.path.exists(getattr(config, 'common_outputs_directory')):
            os.makedirs(getattr(config, 'common_outputs_directory'))
            logger.info("Created new output directory %s",
                        getattr(config, 'common_outputs_directory'))
        with io.open(os.path.join(getattr(config, 'common_outputs_directory'),
                                  getattr(config, 'common_outputs_dialogs')),
                     'w',
                     encoding='utf-8') as outputFile:
            outputFile.write(
                json.dumps(dialogNodes, indent=4, ensure_ascii=False))
        logger.info(
            "File %s created",
            os.path.join(getattr(config, 'common_outputs_directory'),
                         getattr(config, 'common_outputs_dialogs')))
    else:
        print(json.dumps(dialogNodes, indent=4, ensure_ascii=False))

    if hasattr(config, 'common_output_config'):
        config.saveConfiguration(getattr(config, 'common_output_config'))

    logger.info('FINISHING: ' + os.path.basename(__file__))
def main(argv):
    """Deploy cloud functions (and sequences of them) to IBM Cloud Functions.

    Steps, in order:
      1. Pre-parse argv for ``--cloudfunctions_sequences`` so that one
         ``--cloudfunctions_sequence_<name>`` argument can be registered
         per sequence before the real parsing pass.
      2. Build the full argument parser, merge with config files (Cfg).
      3. Create/overwrite the target cloudfunctions package via REST PUT.
      4. Upload every function file found in ``common_functions`` (zip
         archives are inspected to determine their runtime).
      5. Deploy each configured sequence referencing the uploaded functions.

    Exits the process with status 1 on any failed REST call.
    """
    # parse sequence names - because we need to get the name first and
    # then create corresponding arguments for the main parser
    sequenceSubparser = argparse.ArgumentParser()
    sequenceSubparser.add_argument('--cloudfunctions_sequences', nargs='+')
    # Strip help flags so this throwaway pre-parser does not print help
    # and exit before the real parser (with all arguments) gets a chance to.
    argvWithoutHelp = list(argv)
    if "--help" in argv: argvWithoutHelp.remove("--help")
    if "-h" in argv: argvWithoutHelp.remove("-h")
    sequenceNames = sequenceSubparser.parse_known_args(
        argvWithoutHelp)[0].cloudfunctions_sequences or []

    parser = argparse.ArgumentParser(
        description="Deploys the cloud functions",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-v',
                        '--verbose',
                        required=False,
                        help='verbosity',
                        action='store_true')
    parser.add_argument('-c',
                        '--common_configFilePaths',
                        help="configuaration file",
                        action='append')
    parser.add_argument('--common_functions',
                        required=False,
                        help="directory where the cloud functions are located")
    parser.add_argument('--cloudfunctions_namespace',
                        required=False,
                        help="cloud functions namespace")
    parser.add_argument('--cloudfunctions_apikey',
                        required=False,
                        help="cloud functions apikey")
    parser.add_argument('--cloudfunctions_username',
                        required=False,
                        help="cloud functions user name")
    parser.add_argument('--cloudfunctions_password',
                        required=False,
                        help="cloud functions password")
    parser.add_argument('--cloudfunctions_package',
                        required=False,
                        help="cloud functions package name")
    parser.add_argument('--cloudfunctions_url',
                        required=False,
                        help="url of cloud functions API")
    parser.add_argument('--log',
                        type=str.upper,
                        default=None,
                        choices=list(logging._levelToName.values()))
    parser.add_argument('--cloudfunctions_sequences',
                        nargs='+',
                        required=False,
                        help="cloud functions sequence names")

    # One optional '--cloudfunctions_<runtime>_version' argument per known runtime.
    for runtime in list(interpretedRuntimes.values()) + list(
            compiledRuntimes.values()):
        parser.add_argument('--cloudfunctions_' + runtime + '_version',
                            required=False,
                            help="cloud functions " + runtime + " version")

    # Add arguments for each sequence to be able to define the functions in the sequence
    for sequenceName in sequenceNames:
        try:
            parser.add_argument("--cloudfunctions_sequence_" + sequenceName,
                                required=True,
                                help="functions in sequence '" + sequenceName +
                                "'")
        except argparse.ArgumentError as e:
            # argparse raises ArgumentError on a duplicate option string;
            # surface it as a clearer "duplicate sequence name" error.
            if "conflicting option" in str(e):
                # from None is needed in order to show only the custom exception and not the whole traceback
                # (It would read as 'During handling of the above exception, another exception has occurred', but we DID handle it)
                raise argparse.ArgumentError(
                    None, "Duplicate sequence name: " + sequenceName) from None
            else:
                raise e

    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    logger.info('STARTING: ' + os.path.basename(__file__))

    def handleResponse(response):
        """Get response code and show an error if it's not OK"""
        # Returns True on HTTP 200; otherwise logs a code-specific error,
        # logs any errors embedded in the JSON body, and returns False.
        code = response.status_code
        if code != requests.codes.ok:
            if code == 401:
                logger.error(
                    "Authorization error. Check your credentials. (Error code "
                    + str(code) + ")")
            elif code == 403:
                logger.error(
                    "Access is forbidden. Check your credentials and permissions. (Error code "
                    + str(code) + ")")
            elif code == 404:
                logger.error(
                    "The resource could not be found. Check your cloudfunctions url and namespace. (Error code "
                    + str(code) + ")")
            elif code == 408:
                logger.error("Request Timeout. (Error code " + str(code) + ")")
            elif code >= 500:
                logger.error("Internal server error. (Error code " +
                             str(code) + ")")
            else:
                logger.error("Unexpected error code: " + str(code))

            errorsInResponse(response.json())
            return False
        return True

    config = Cfg(args)

    namespace = getRequiredParameter(config, 'cloudfunctions_namespace')
    urlNamespace = quote(namespace)
    # Auth is either an apikey OR a username+password pair (exactly one combination).
    auth = getParametersCombination(
        config, 'cloudfunctions_apikey',
        ['cloudfunctions_password', 'cloudfunctions_username'])
    package = getRequiredParameter(config, 'cloudfunctions_package')
    cloudFunctionsUrl = getRequiredParameter(config, 'cloudfunctions_url')
    functionDir = getRequiredParameter(config, 'common_functions')
    # If sequence names are already defined (from console), do nothing. Else look for them in the configuration.
    if not sequenceNames:
        sequenceNames = getOptionalParameter(config,
                                             'cloudfunctions_sequences') or []
    # SequenceNames has to be a list
    if type(sequenceNames) is str:
        sequenceNames = [sequenceNames]
    # Create a dict of {<seqName>: [<functions 1>, <function2> ,...]}
    sequences = {
        seqName: getRequiredParameter(config,
                                      "cloudfunctions_sequence_" + seqName)
        for seqName in sequenceNames
    }

    if 'cloudfunctions_apikey' in auth:
        username, password = convertApikeyToUsernameAndPassword(
            auth['cloudfunctions_apikey'])
    else:
        username = auth['cloudfunctions_username']
        password = auth['cloudfunctions_password']

    # Map runtime name -> '<runtime>:<version>' kind string; version falls
    # back to the literal 'default' when not configured.
    # NOTE(review): 'ext' is unused in this loop — only the runtime names matter here.
    runtimeVersions = {}
    for ext, runtime in list(interpretedRuntimes.items()) + list(
            compiledRuntimes.items()):
        runtimeVersions[runtime] = runtime + ':' + getattr(
            config, 'cloudfunctions_' + runtime + '_version', 'default')

    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
    packageUrl = cloudFunctionsUrl + '/' + urlNamespace + '/packages/' + package + '?overwrite=true'
    logger.info("Will create cloudfunctions package %s.", package)
    response = requests.put(packageUrl,
                            auth=(username, password),
                            headers={'Content-Type': 'application/json'},
                            data='{}')
    if not handleResponse(response):
        logger.critical("Cannot create cloud functions package %s.", package)
        sys.exit(1)
    else:
        logger.info('Cloud functions package successfully uploaded')

    # Collect candidate function files: every known interpreted/compiled
    # extension plus compressed archives.
    filesAtPath = getFilesAtPath(functionDir, [
        '*' + ext for ext in (list(interpretedRuntimes) +
                              list(compiledRuntimes) + compressedFiles)
    ])

    logger.info("Will deploy functions at paths %s.", functionDir)

    for functionFilePath in filesAtPath:
        fileName = os.path.basename(functionFilePath)
        (funcName, ext) = os.path.splitext(fileName)

        runtime = None
        binary = False
        # if the file is zip, it's necessary to look inside
        if ext == '.zip':
            runtime = _getZipPackageType(functionFilePath)
            if not runtime:
                logger.warning(
                    "Cannot determine function type from zip file '%s'. Skipping!",
                    functionFilePath)
                continue
            binary = True
        else:
            if ext in interpretedRuntimes:
                runtime = interpretedRuntimes[ext]
                binary = False
            elif ext in compiledRuntimes:
                runtime = compiledRuntimes[ext]
                binary = True
            else:
                logger.warning(
                    "Cannot determine function type of '%s'. Skipping!",
                    functionFilePath)
                continue

        functionUrl = cloudFunctionsUrl + '/' + urlNamespace + '/actions/' + package + '/' + funcName + '?overwrite=true'

        # Binary payloads are base64-encoded; text sources are sent verbatim.
        # NOTE(review): both open() calls leak the file handle (no with/close).
        if binary:
            content = base64.b64encode(open(functionFilePath,
                                            'rb').read()).decode('utf-8')
        else:
            content = open(functionFilePath, 'r').read()
        payload = {
            'exec': {
                'kind': runtimeVersions[runtime],
                'binary': binary,
                'code': content
            }
        }

        logger.verbose("Deploying function %s", funcName)
        response = requests.put(functionUrl,
                                auth=(username, password),
                                headers={'Content-Type': 'application/json'},
                                data=json.dumps(payload),
                                verify=False)
        if not handleResponse(response):
            logger.critical("Cannot deploy cloud function %s.", funcName)
            sys.exit(1)
        else:
            logger.verbose('Cloud function %s successfully deployed.',
                           funcName)
    logger.info("Cloudfunctions successfully deployed.")

    if sequences:
        logger.info("Will deploy cloudfunction sequences.")

    # Sequences are deployed last so that the functions they reference
    # already exist in the package.
    for seqName in sequences:
        sequenceUrl = cloudFunctionsUrl + '/' + urlNamespace + '/actions/' + package + '/' + seqName + '?overwrite=true'
        functionNames = sequences[seqName]
        # Sequence components must be fully qualified: /<namespace>/<package>/<function>
        fullFunctionNames = [
            namespace + '/' + package + '/' + functionName
            for functionName in functionNames
        ]
        payload = {
            'exec': {
                'kind': 'sequence',
                'binary': False,
                'components': fullFunctionNames
            }
        }
        logger.verbose("Deploying cloudfunctions sequence '%s': %s", seqName,
                       functionNames)
        response = requests.put(sequenceUrl,
                                auth=(username, password),
                                headers={'Content-Type': 'application/json'},
                                data=json.dumps(payload),
                                verify=False)
        if not handleResponse(response):
            logger.critical("Cannot deploy cloudfunctions sequence %s",
                            seqName)
            sys.exit(1)
        else:
            logger.verbose("Sequence '%s' deployed.", seqName)
    if sequences:
        logger.info("Cloudfunction sequences successfully deployed.")
    logger.info('FINISHING: ' + os.path.basename(__file__))
Esempio n. 23
0
from cfgCommons import Cfg
from wawCommons import printf, eprintf
import shutil

if __name__ == '__main__':
    # Clean-up script: deletes the generated dialogs/intents/entities
    # directories referenced by the configuration (if they exist).
    printf('\nSTARTING: ' + os.path.basename(__file__) + '\n')
    parser = argparse.ArgumentParser(description='Clean generated directories.',formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-c', '--common_configFilePaths', help='configuaration file', action='append')
    parser.add_argument('-oc', '--common_output_config', help='output configuration file')
    parser.add_argument('-od', '--common_outputs_directory', required=False, help='directory where the otputs will be stored (outputs is default)')
    parser.add_argument('-oi', '--common_outputs_intents', help='file with output json with all the intents')
    parser.add_argument('-oe', '--common_outputs_entities', help='file with output json with all the entities')
    parser.add_argument('-v','--common_verbose', required=False, help='verbosity', action='store_true')
    parser.add_argument('-s', '--common_soft', required=False, help='soft name policy - change intents and entities names without error.', action='store_true', default="")
    args = parser.parse_args(sys.argv[1:])
    config = Cfg(args)
    VERBOSE = hasattr(config, 'common_verbose')

    # Generated dialogs directory.
    if os.path.exists(config.common_generated_dialogs[0]):
        shutil.rmtree(config.common_generated_dialogs[0])
        # BUGFIX: originally logged '%s does not exist.' after a successful delete.
        if VERBOSE: printf('%s removed.', config.common_generated_dialogs[0])
    else:
        # BUGFIX: originally read 'common_config.generated_dialogs' — an
        # undefined name that raised NameError whenever this branch ran.
        if VERBOSE: printf('%s does not exist.', config.common_generated_dialogs[0])

    # Generated intents directory.
    if os.path.exists(config.common_generated_intents[0]):
        shutil.rmtree(config.common_generated_intents[0])
        # BUGFIX: wrong message after delete (was 'does not exist').
        if VERBOSE: printf('%s removed.', config.common_generated_intents[0])
    else:
        # BUGFIX: typo 'doess' fixed.
        if VERBOSE: printf('%s does not exist.', config.common_generated_intents[0])

    # Generated entities directory (now reported the same way as the others).
    if os.path.exists(config.common_generated_entities[0]):
        shutil.rmtree(config.common_generated_entities[0])
        if VERBOSE: printf('%s removed.', config.common_generated_entities[0])
    else:
        if VERBOSE: printf('%s does not exist.', config.common_generated_entities[0])
def main(argv):
    """Delete Watson Conversation workspaces selected by the configuration.

    Builds the argument parser, merges arguments with config files (Cfg),
    fetches all workspaces from the service, filters them (by id, name or
    name pattern via filterWorkspaces) and deletes each match through the
    REST API. On a successful delete the stale 'conversation_workspace_id'
    attribute is dropped from the configuration, which is optionally saved
    to 'common_output_config'. Exits with status 1 on any delete failure.
    """
    logger.info('STARTING: ' + os.path.basename(__file__))
    parser = argparse.ArgumentParser(
        description=
        'Deletes Bluemix conversation service workspace and deletes workspace id from config file.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-c',
                        '--common_configFilePaths',
                        help='configuaration file',
                        action='append')
    parser.add_argument('-oc',
                        '--common_output_config',
                        help='output configuration file')
    parser.add_argument('-cu',
                        '--conversation_url',
                        required=False,
                        help='url of the conversation service API')
    parser.add_argument('-cv',
                        '--conversation_version',
                        required=False,
                        help='version of the conversation service API')
    parser.add_argument('-cn',
                        '--conversation_username',
                        required=False,
                        help='username of the conversation service instance')
    parser.add_argument('-cp',
                        '--conversation_password',
                        required=False,
                        help='password of the conversation service instance')
    parser.add_argument('-cid',
                        '--conversation_workspace_id',
                        required=False,
                        help='workspace_id of the application.')
    parser.add_argument('-wn',
                        '--conversation_workspace_name',
                        required=False,
                        help='name of the workspace')
    parser.add_argument(
        '-wnm',
        '--conversation_workspace_match_by_name',
        required=False,
        help=
        'true if the workspace name should be matched by name (or pattern if defined)'
    )
    parser.add_argument(
        '-wnp',
        '--conversation_workspace_name_pattern',
        required=False,
        help='regex pattern specifying a name of workspaces to be deleted')
    parser.add_argument('-v',
                        '--verbose',
                        required=False,
                        help='verbosity',
                        action='store_true')
    parser.add_argument('--log',
                        type=str.upper,
                        default=None,
                        choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    config = Cfg(args)

    # load credentials
    version = getRequiredParameter(config, 'conversation_version')
    workspacesUrl = getRequiredParameter(config, 'conversation_url')
    username = getRequiredParameter(config, 'conversation_username')
    password = getRequiredParameter(config, 'conversation_password')
    # Fetch and filter the candidate workspaces; the helpers terminate via
    # sys.exit on failure, hence the SystemExit handler (bound 'e' is unused).
    try:
        workspaces = filterWorkspaces(
            config, getWorkspaces(workspacesUrl, version, username, password))
    except SystemExit as e:
        logger.error("Failed to retrieve workspaces to delete.")
        sys.exit(1)

    nWorkspacesDeleted = 0
    for workspace in workspaces:
        # delete workspace
        requestUrl = workspacesUrl + '/' + workspace[
            'workspace_id'] + '?version=' + version
        response = requests.delete(requestUrl,
                                   auth=(username, password),
                                   headers={'Accept': 'text/html'})
        responseJson = response.json()
        # check errors during upload
        errorsInResponse(responseJson)

        if response.status_code == 200:
            nWorkspacesDeleted += 1
            logger.info("Workspace '%s' was successfully deleted",
                        workspace['name'])
            # delete workspaceId from config file
            if hasattr(config, 'conversation_workspace_id'):
                delattr(config, 'conversation_workspace_id')
        elif response.status_code == 400:
            logger.error(
                "Error while deleting workspace  '%s', status code '%s' (invalid request)",
                workspace['name'], response.status_code)
            sys.exit(1)
        else:
            logger.error(
                "Error while deleting workspace  '%s', status code '%s'",
                workspace['name'], response.status_code)
            sys.exit(1)

    # Summary log message based on how many workspaces were removed.
    if not nWorkspacesDeleted:
        logger.info("No workspace has been deleted")
    elif nWorkspacesDeleted == 1:
        logger.info("One workspace has been successfully deleted")
    else:
        logger.info(
            str(nWorkspacesDeleted) +
            " workspaces have been successfully deleted")

    # Persist the (possibly modified) configuration when an output path is set.
    outputConfigFile = getOptionalParameter(config, 'common_output_config')
    if outputConfigFile:
        config.saveConfiguration(outputConfigFile)
        logger.info("Configuration was saved to %s", outputConfigFile)
def main(argv):
    """Deploy a workspace JSON file to the Watson Conversation Service.

    Loads the workspace from common_outputs_directory/common_outputs_workspace,
    optionally overrides its name and language from the configuration, then
    either updates the single existing workspace matched by filterWorkspaces
    or creates a new one. On success the workspace id is written back into
    the configuration (and optionally saved); when 'context_client_name' is
    configured, an HTML redirect file pointing at the test client is written.
    Exits the process with status 1 on failure.
    """
    parser = argparse.ArgumentParser(description="Deploys a workspace in json format\
     to the Watson Conversation Service. If there is no 'conversation_workspace_id' provided\
     and the 'conversation_workspace_name_unique' is set to 'true', it uploads\
     a workspace to the place specified by the 'conversation_workspace_name'"                                                                             ,\
      formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-of',
                        '--common_outputs_directory',
                        required=False,
                        help='directory where the otputs are stored')
    parser.add_argument('-ow',
                        '--common_outputs_workspace',
                        required=False,
                        help='name of the json file with workspace')
    parser.add_argument('-c',
                        '--common_configFilePaths',
                        help='configuaration file',
                        action='append')
    parser.add_argument('-oc',
                        '--common_output_config',
                        help='output configuration file')
    parser.add_argument('-cu',
                        '--conversation_url',
                        required=False,
                        help='url of the conversation service API')
    parser.add_argument('-cv',
                        '--conversation_version',
                        required=False,
                        help='version of the conversation service API')
    parser.add_argument('-cn',
                        '--conversation_username',
                        required=False,
                        help='username of the conversation service instance')
    parser.add_argument('-cp',
                        '--conversation_password',
                        required=False,
                        help='password of the conversation service instance')
    parser.add_argument(
        '-cid',
        '--conversation_workspace_id',
        required=False,
        help=
        'workspace_id of the application. If a workspace id is provided, previous workspace content is overwritten, otherwise a new workspace is created '
    )
    parser.add_argument('-wn',
                        '--conversation_workspace_name',
                        required=False,
                        help='name of the workspace')
    parser.add_argument(
        '-wnu',
        '--conversation_workspace_name_unique',
        required=False,
        help=
        'true if the workspace name should be unique across apecified assistant'
    )
    parser.add_argument('-v',
                        '--verbose',
                        required=False,
                        help='verbosity',
                        action='store_true')
    parser.add_argument('--log',
                        type=str.upper,
                        default=None,
                        choices=list(logging._levelToName.values()))
    args = parser.parse_args(argv)

    if __name__ == '__main__':
        setLoggerConfig(args.log, args.verbose)

    config = Cfg(args)
    logger.info('STARTING: ' + os.path.basename(__file__))

    # Load the workspace JSON produced by the build steps.
    try:
        workspaceFilePath = os.path.join(
            getRequiredParameter(config, 'common_outputs_directory'),
            getRequiredParameter(config, 'common_outputs_workspace'))
        with openFile(workspaceFilePath, 'r') as workspaceFile:
            workspace = json.load(workspaceFile)
    except IOError:
        logger.error('Cannot load workspace file %s', workspaceFilePath)
        sys.exit(1)
    # Optional overrides for workspace name and language.
    workspaceName = getOptionalParameter(config, 'conversation_workspace_name')
    if workspaceName: workspace['name'] = workspaceName
    workspaceLanguage = getOptionalParameter(config, 'conversation_language')
    if workspaceLanguage: workspace['language'] = workspaceLanguage

    # credentials (required)
    username = getRequiredParameter(config, 'conversation_username')
    password = getRequiredParameter(config, 'conversation_password')
    # url (required)
    workspacesUrl = getRequiredParameter(config, 'conversation_url')
    # version (required)
    version = getRequiredParameter(config, 'conversation_version')
    # Resolve the target workspace id: exactly one match -> update it,
    # no match -> create a new workspace, several matches -> ambiguous, abort.
    workspaces = filterWorkspaces(
        config, getWorkspaces(workspacesUrl, version, username, password))
    if len(workspaces) > 1:
        logger.error(
            'There are more than one workspace with this name, do not know which one to update.'
        )
        exit(1)
    elif len(workspaces) == 1:
        workspaceId = workspaces[0]['workspace_id']
        logger.info("Updating existing workspace.")
    else:
        workspaceId = ""
        logger.info("Creating new workspace.")

    requestUrl = workspacesUrl + '/' + workspaceId + '?version=' + version

    # create/update workspace
    response = requests.post(requestUrl,
                             auth=(username, password),
                             headers={'Content-Type': 'application/json'},
                             data=json.dumps(workspace, indent=4))
    responseJson = response.json()

    logger.verbose("response: %s", responseJson)
    if not errorsInResponse(responseJson):
        logger.info('Workspace successfully uploaded.')
    else:
        logger.error('Cannot upload workspace.')
        sys.exit(1)

    # Remember the id of a freshly created workspace in the configuration.
    if not getOptionalParameter(config, 'conversation_workspace_id'):
        setattr(config, 'conversation_workspace_id',
                responseJson['workspace_id'])
        logger.info('WCS WORKSPACE_ID: %s', responseJson['workspace_id'])

    outputConfigFile = getOptionalParameter(config, 'common_output_config')
    if outputConfigFile:
        config.saveConfiguration(outputConfigFile)

    clientName = getOptionalParameter(config, 'context_client_name')
    if clientName:
        # Assembling uri of the client
        clientv2URL = 'https://clientv2-latest.mybluemix.net/#defaultMinMode=true'
        clientv2URL += '&prefered_workspace_id=' + getattr(
            config, 'conversation_workspace_id')
        clientv2URL += '&prefered_workspace_name=' + getattr(
            config, 'conversation_workspace_name')
        clientv2URL += '&shared_examples_service=&url=http://zito.mybluemix.net'
        # BUGFIX: the username/password parameters were corrupted in the
        # source ("'&username='******'..." was a syntax error); rebuilt
        # following the getattr pattern of the surrounding parameters.
        clientv2URL += '&username=' + getattr(config, 'conversation_username')
        clientv2URL += '&custom_ui.title=' + getattr(
            config, 'conversation_workspace_name')
        clientv2URL += '&password=' + getattr(config, 'conversation_password')
        clientv2URL += '&custom_ui.machine_img='
        clientv2URL += '&custom_ui.user_img='
        clientv2URL += '&context.user_name=' + getattr(config,
                                                       'context_client_name')
        # BUGFIX: 'unicode' does not exist in Python 3; str() keeps the
        # same formatted-timestamp value.
        clientv2URL += '&context.link_build_date=' + str(
            datetime.datetime.now().strftime("%y-%m-%d-%H-%M"))
        clientv2URL += '&prefered_tts=none'
        clientv2URL += '&bluemix_tts.username=xx'
        clientv2URL += '&bluemix_tts.password=xx'
        clientv2URL += '&compact_mode=true'
        clientv2URL += '&compact_switch_enabled=true'
        # BUGFIX: the '&' separator was missing, which glued this parameter
        # onto the previous one ('...=truedeveloper_switch_enabled=false').
        clientv2URL += '&developer_switch_enabled=false'
        logger.info('clientv2URL=%s', clientv2URL)

        # create file with automatic redirect
        clientFileName = getOptionalParameter(config, 'common_outputs_client')
        if clientFileName:
            clientFilePath = os.path.join(
                getRequiredParameter(config, 'common_outputs_directory'),
                clientFileName)
            try:
                # The with-statement closes the file; the original redundant
                # clientFile.close() after the block was removed.
                with openFile(clientFilePath, "w") as clientFile:
                    clientFile.write(
                        '<meta http-equiv="refresh" content=\"0; url=' +
                        clientv2URL + '\" />')
                    clientFile.write('<p><a href=\"' + clientv2URL +
                                     '\">Redirect</a></p>')
            except IOError:
                logger.error('Cannot write to %s', clientFilePath)
                sys.exit(1)

    logger.info('FINISHING: ' + os.path.basename(__file__))