    def get_log_files(self):
        """Get the execution logs directory and log file."""

        if self.logs_startdir is not None:
            if self.logs_startdir == self.res_startdir:
                logs_execdir = self.results_execdir
            else:
                logs_execdir = file_Utils.createDir_addtimestamp(
                    self.logs_startdir, self.nameonly)
            logfile = self.get_exec_file_by_type("Logs", logs_execdir)

        elif self.logs_startdir is None:
            colocate = False
            logs_location = xml_Utils.getChildTextbyParentTag(
                self.filepath, 'Details', 'Logsdir')
            results_location = xml_Utils.getChildTextbyParentTag(
                self.filepath, 'Details', 'Resultsdir')

            #get default logs and results directory
            default_xml = Tools.__path__[0] + os.sep + 'w_settings.xml'
            default_logsdir = get_credentials(default_xml, 'def_dir',
                                              ['Logsdir'], 'Setting')
            default_resultsdir = get_credentials(default_xml, 'def_dir',
                                                 ['Resultsdir'], 'Setting')
            #use the default directory if user didn't define it in test case/test suite/project
            if results_location is None or results_location is False:
                if default_resultsdir['Resultsdir'] is not None:
                    results_location = default_resultsdir['Resultsdir']

            if logs_location is None or logs_location is False:
                if default_logsdir['Logsdir'] is not None:
                    logs_location = default_logsdir['Logsdir']

            if logs_location is None or logs_location is False\
            or str(logs_location).strip() == "":
                logs_execdir = self.create_def_exec_dir()
                logfile = self.get_exec_file_by_type('Logs', logs_execdir)

            elif logs_location is not None and logs_location is not False:
                logs_location_rel = str(logs_location).strip()
                logs_location = file_Utils.getAbsPath(
                    logs_location_rel, os.path.dirname(self.filepath))
                results_location_rel = str(results_location).strip()
                results_location = file_Utils.getAbsPath(
                    results_location_rel, os.path.dirname(self.filepath))
                if logs_location == results_location:
                    colocate = True

                logfile, logs_execdir = self.checkdir_create_file(
                    logs_location, 'Logs', colocate)

        # print "printing logs_execdir: ", logs_execdir
        logsdir = os.path.dirname(logfile)
        return logfile, logsdir, logs_execdir
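
A minimal standalone sketch of the fallback order used above (explicit start dir, then the Logsdir value from the test file, then the w_settings default, then a generated execution dir); the helper below is illustrative and not part of the framework.

def resolve_logs_dir(logs_startdir, logsdir_from_xml, default_logsdir, make_default_dir):
    # Mirrors the precedence get_log_files applies when choosing the logs dir.
    if logs_startdir:                              # 1. explicit start directory wins
        return logs_startdir
    if logsdir_from_xml and str(logsdir_from_xml).strip():
        return str(logsdir_from_xml).strip()       # 2. <Logsdir> from the test file
    if default_logsdir:
        return default_logsdir                     # 3. default from w_settings.xml
    return make_default_dir()                      # 4. fall back to a generated exec dir

# e.g. no start dir and no value in the test file -> the w_settings default is used
print(resolve_logs_dir(None, None, "/tmp/warrior_logs", lambda: "/tmp/default_exec"))
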
Example #2
    def get_result_files(self):
        """Get execution results dir and files """

        if self.res_startdir is not None:
            results_execdir = file_Utils.createDir_addtimestamp(self.res_startdir, self.nameonly)
            rfile = self.get_exec_file_by_type("Results", results_execdir)
        elif self.res_startdir is None:
            results_location = xml_Utils.getChildTextbyParentTag(self.filepath,
                                                                 'Details', 'Resultsdir')

            #get default results directory
            default_xml = Tools.__path__[0] + os.sep + 'w_settings.xml'
            default_resultsdir = get_credentials(default_xml, 'def_dir', ['Resultsdir'], 'Setting')
            #use the default directory if user didn't define it in test case/test suite/project
            if results_location is None or results_location is False:
                if default_resultsdir['Resultsdir'] is not None:
                    results_location = default_resultsdir['Resultsdir']

            if results_location is None or results_location is False\
            or str(results_location).strip() == "":
                results_execdir = self.create_def_exec_dir() #proj_exec_dir
                rfile = self.get_exec_file_by_type("Results", results_execdir)

            elif results_location is not None and results_location is not False:
                results_location_rel = str(results_location).strip()
                results_location = file_Utils.getAbsPath(results_location_rel,
                                                         os.path.dirname(self.filepath))
                rfile, results_execdir = self.checkdir_create_file(results_location, "Results")

        # print "printing results_execdir: ", results_execdir
        resultfile = file_Utils.getNewExtension(rfile, "xml")
        resultsdir = os.path.dirname(resultfile)
        return resultfile, resultsdir, results_execdir
def convert_xml_to_csv(input_file,
                       mapping_file=None,
                       output_csv_file_path=None,
                       overwrite="yes"):
    """
        It takes xml file path as input and converts to csv.

        Arguments:
            1. input_file: Takes xml file path as input
            2. mapping_file: If a mapping file path is given, it is used to map
               columns with the meaningful name as recognized by the user else
               the tags in the xml file will be used as column names in
               the csv file.
            3. output_csv_file_path: If user gives the output_csv_file_path,
               creating an csv file in that path else creating
               csv file in the path from where he have given xml file.
        Returns:
            Returns output csv file path.
    """
    count = 0
    try:
        dict_response = xml_Utils.convert_xml_to_list_of_dict(input_file)
        if mapping_file:
            mapping_dict = data_Utils.get_credentials(mapping_file,
                                                      'mapping_scheme')

            mapping_dictionary = {v: k for k, v in mapping_dict.items()}
        else:
            mapping_dictionary = {}

        if output_csv_file_path:
            output_csv_file = output_csv_file_path
        else:
            output_csv_file = input_file.replace(".xml", ".csv")

        if overwrite == "no":
            output_csv_file = file_Utils.addTimeDate(output_csv_file)

        f = open(output_csv_file, 'wb+')
        csvwriter = csv.writer(f)
        for element in dict_response:
            if count == 0:
                header = list(element.keys())
                for index, val in enumerate(header):
                    for key, value in mapping_dictionary.items():
                        if val == value:
                            header[index] = key
                csvwriter.writerow(header)
                count += 1
            csvwriter.writerow(list(element.values()))
        f.close()

    except Exception as exception:
        print_exception(exception)
        output_csv_file = None

    return output_csv_file
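
A hedged usage sketch for the converter above; the file paths and mapping file are illustrative.

# Convert data.xml to csv, renaming columns via mapping.xml, and keep any
# existing csv by letting addTimeDate stamp the new file name.
csv_path = convert_xml_to_csv("/tmp/data.xml",
                              mapping_file="/tmp/mapping.xml",
                              overwrite="no")
if csv_path is None:
    print("conversion failed, see the logged exception")
else:
    print("csv written to {0}".format(csv_path))
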
Example #4
    def _create_cs_obj(self, system_name):
        '''Initializes the CloudShell object, logs on to CloudShell and
        returns the CloudShell object.
        '''
        keys_for_credentials = ['ip', 'cloudshell_port', 'username',
                                'password', 'domain']
        credentials = data_Utils.get_credentials(self.datafile, system_name,
                                                 keys_for_credentials)

        return cs(credentials['ip'], credentials['username'],
                  credentials['password'], credentials['domain'])
Example #5
    def testset_calibration(self, system_name):
        """
        Check if the test set calibration is current if less than 1 year old,
        otherwise, re-calibration is required.
        """
        wdesc = "Check if Lab Test set calibration is current."
        #Resolve system_name and subsystem_list
        system_name, subsystem_list = Utils.data_Utils.resolve_system_subsystem_list(
            self.datafile, system_name)
        output_dict = {}
        status = True
        result = False
        attempt = 1 if subsystem_list is None else len(subsystem_list)
        for i in range(attempt):
            Utils.testcase_Utils.pSubStep(wdesc)
            #Get name from the list when it's not 'None', otherwise, set it to 'None'
            subsystem_name = subsystem_list[i] if subsystem_list is not None else None
            call_system_name = system_name if subsystem_name is None \
            else "{0}[{1}]".format(system_name, subsystem_name)
            credentials = get_credentials(
                self.datafile, call_system_name,
                ['calibration', 'user', 'location', 'testdata'])
            pNote("system={0}".format(call_system_name))
            #Demo Framework testdata capability
            testdatafile = file_Utils.getAbsPath(
                credentials["testdata"], os.path.dirname(self.datafile))
            add_info = Utils.xml_Utils.getElementWithTagAttribValueMatch(
                testdatafile, 'add_info', 'name', 'testdata')
            if add_info is not None:
                info_text = Utils.xml_Utils.get_text_from_direct_child(
                    add_info, 'info')
                pNote(info_text)
                pNote(testdatafile)

            if credentials is not None and credentials is not False:
                calibrated_date = credentials["calibration"]
                num_of_year = 1
                pass_msg = "Lab Test set {0} calibration is current, "\
                           "re-calibration is NOT required."\
                           .format(call_system_name)
                fail_msg = "Lab Test set {0} calibration is NOT current, it's "\
                           "more than than 1 year old. Re-calibration is "\
                           "required".format(call_system_name)
                result = Utils.demo_utils.lab_eqpt_status(
                    calibrated_date, num_of_year, pass_msg, fail_msg)

            Utils.data_Utils.update_datarepository(output_dict)
            Utils.testcase_Utils.report_substep_status(result)
            status = status and result

        return status, output_dict
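
A standalone sketch of the age check this keyword delegates to Utils.demo_utils.lab_eqpt_status (assumed here to compare a date string against a year threshold); the date format and helper below are illustrative, not the framework implementation.

from datetime import datetime

def is_current(date_str, max_years, fmt="%Y-%m-%d"):
    # Assumed check: equipment is "current" if the given date is less than
    # max_years old (365-day years, leap days ignored).
    age_days = (datetime.now() - datetime.strptime(date_str, fmt)).days
    return age_days < max_years * 365

print(is_current("2024-01-15", 1))   # e.g. a calibration date checked against a 1-year window
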
Example #6
    def pc_replacement(self, system_name):
        """
        Verify lab PC is current if less than 4 years old, otherwise
        a replacement is required.
        """
        wdesc = "Check if lab PC is current or need replacement"
        #Resolve system_name and subsystem_list
        system_name, subsystem_list = Utils.data_Utils.resolve_system_subsystem_list(
            self.datafile, system_name)
        output_dict = {}
        status = True
        result = False
        attempt = 1 if subsystem_list is None else len(subsystem_list)
        for i in range(attempt):
            Utils.testcase_Utils.pSubStep(wdesc)
            #Get name from the list when it's not 'None', otherwise, set it to 'None'
            subsystem_name = subsystem_list[i] if subsystem_list is not None else None
            call_system_name = system_name if subsystem_name is None \
            else "{0}[{1}]".format(system_name, subsystem_name)
            credentials = get_credentials(self.datafile, call_system_name,
                                          ['dom', 'user', 'os', 'testdata'])
            pNote("system={0}".format(call_system_name))
            #Demo Framework testdata capability
            testdatafile = file_Utils.getAbsPath(
                credentials["testdata"], os.path.dirname(self.datafile))
            add_info = Utils.xml_Utils.getElementWithTagAttribValueMatch(
                testdatafile, 'add_info', 'name', 'testdata')
            if add_info is not None:
                info_text = Utils.xml_Utils.get_text_from_direct_child(
                    add_info, 'info')
                pNote(info_text)
                pNote(testdatafile)

            if credentials is not None and credentials is not False:
                num_of_year = 4
                date_of_mfg = credentials["dom"]
                pass_msg = "Lab PC {0} is current, it's less than 4 years old."\
                           " A replacement is NOT required."\
                           .format(call_system_name)
                fail_msg = "Lab PC {0} is NOT current, it's more than than 4 "\
                           "years old. Please schedule for a replacement."\
                           .format(call_system_name)
                result = Utils.demo_utils.lab_eqpt_status(
                    date_of_mfg, num_of_year, pass_msg, fail_msg)

            Utils.data_Utils.update_datarepository(output_dict)
            Utils.testcase_Utils.report_substep_status(result)
            status = status and result

        return status, output_dict
Example #7
    def get_credential_value(self, arg, system):
        """get the value of arg in data file corresponding to system
        """
        datafile = self.data_repository['wt_datafile']
        var = arg
        if not hasattr(self, 'tag_dict'):
            self.tag_dict = data_Utils.get_credentials(datafile, system)
        if isinstance(arg, str) and arg.startswith("wtag"):
            var = arg.split("=")[1].strip()
            if var in self.tag_dict:
                value = self.tag_dict[var]
                # substitute environment/datarepo variables in the value and return
                if isinstance(value, (str, list, dict)):
                    return data_Utils.substitute_var_patterns(value)
                else:
                    return value
        return var
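
A hedged call pattern for the helper above; kw_obj stands in for an instance of the owning keyword class, and the tag and system names are illustrative.

# A plain value passes through unchanged, while a "wtag=<name>" argument is
# looked up in the system's tag dictionary from the datafile (with
# environment/datarepo variable patterns substituted in the value).
value = kw_obj.get_credential_value("wtag=mgmt_ip", "server1")   # looked up
literal = kw_obj.get_credential_value("10.0.0.1", "server1")     # returned as-is
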
def decide_overwrite_var(namespace):
    """options provided in cli get preference over the ones provided inside tests
    """
    overwrite = {}
    if namespace.datafile:
        if namespace.datafile[0] != os.sep:
            namespace.datafile = os.getcwd() + os.sep + namespace.datafile
        overwrite['ow_datafile'] = namespace.datafile
    #namespace for random tc execution
    if namespace.random_tc_execution:
        overwrite['random_tc_execution'] = namespace.random_tc_execution
    #namespace for wrapperfile
    if namespace.wrapperfile:
        if namespace.wrapperfile[0] != os.sep:
            namespace.wrapperfile = os.getcwd() + os.sep + namespace.wrapperfile
        overwrite['ow_testwrapperfile'] = namespace.wrapperfile
    if namespace.resultdir:
        if namespace.resultdir[0] != os.sep:
            namespace.resultdir = os.getcwd() + os.sep + namespace.resultdir
        overwrite['ow_resultdir'] = namespace.resultdir
    if namespace.logdir:
        if namespace.logdir[0] != os.sep:
            namespace.logdir = os.getcwd() + os.sep + namespace.logdir
        overwrite['ow_logdir'] = namespace.logdir
    if namespace.outputdir:
        if namespace.outputdir[0] != os.sep:
            namespace.outputdir = os.getcwd() + os.sep + namespace.outputdir
        overwrite['ow_resultdir'] = namespace.outputdir
        overwrite['ow_logdir'] = namespace.outputdir
    if all([namespace.outputdir,
            any([namespace.resultdir, namespace.logdir])]):
        print_error("outputdir shouldn't be used with resultdir or logdir")
        exit(1)
    if namespace.jobid:
        settings_xml = Tools.__path__[0] + os.sep + 'w_settings.xml'
        job_url = get_credentials(settings_xml, 'job_url', ['url'], 'Setting')
        if job_url['url'] is not None:
            url = job_url['url']
        else:
            print_info("jobid is specified but no job url found in w_settings")
            print_info("Using jobid only in JUnit file")
            url = ""
        overwrite['jobid'] = url + str(namespace.jobid)
    return overwrite
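
A hedged usage sketch for decide_overwrite_var; the namespace normally comes from Warrior's CLI parser, so argparse.Namespace is used here only to show the relative-path handling, and the attribute values are illustrative.

import argparse

ns = argparse.Namespace(datafile="data/env.xml", random_tc_execution=False,
                        wrapperfile=None, resultdir=None, logdir=None,
                        outputdir="exec_out", jobid=None)
ow = decide_overwrite_var(ns)
# Relative paths get anchored to the current working directory, and outputdir
# feeds both ow_resultdir and ow_logdir.
print(ow["ow_datafile"])
print(ow["ow_resultdir"] == ow["ow_logdir"])   # True
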
Example #9
    def cs_create_topology_reservation(self, system_name, reservation_name,
                                       duration_in_mins, notify_on_start,
                                       notify_on_end, notify_mins_before_end,
                                       topology_full_path):
        """Defines a reservation to be started immediately

        :Datafile usage:
            Tags or attributes to be used in input datafile for the system
            or subsystem. If both tag and attribute are provided, the attribute
            will be used.
            1. username   = name of the cloudshell user

        :Arguments:
            1. system_name(string) = Name of the UAP system from the datafile
            2. reservation_name(string) = Specify the name of the reservation.
            3. duration_in_mins(int) = Specify the length of the reservation.
            4. notify_on_start(bool) = Indicate whether to notify the
               reservation owner when the reservation starts.
            5. notify_on_end(bool) = Indicate whether to notify the reservation
               owner when the reservation ends.
            6. notify_mins_before_end(int) = Notification Minutes Before End -
               Indicate the number of minutes before the end of the reservation
               to send out a Notify On End alert to the reservation owner.
               (0 = disabled)
            7. topology_full_path(string) = Specify the full topology name. Include
               the full path from the root to the topology, separated by slashes.
               For example: FolderName/Topologies/TopologyName

        :Returns:
            1. status(bool)= True/False
            2. output_dict = consists of following key value pairs:
               1. domain_id: Domain Id returned after login to cloudshell.
               2. reservation_id: Reservation Id returned after successful
                  creation of resources.
        """

        wdesc = "Create Topology Reservation in CloudShell API Host"
        testcase_Utils.pSubStep(wdesc)
        testcase_Utils.pNote(file_Utils.getDateTime())

        testcase_Utils.pNote("cs_create_topology_reservation, cs obj-{}".\
                             format(cloud_shell), "info")

        keys_for_credentials = ['username']
        credentials = data_Utils.get_credentials(self.datafile, system_name,
                                                 keys_for_credentials)

        status = False
        output_dict = {}
        try:
            xml_resp = cloud_shell.CreateImmediateTopologyReservation(
                reservation_name, credentials['username'],
                int(duration_in_mins), notify_on_start, notify_on_end,
                int(notify_mins_before_end), topology_full_path)
            if xml_resp is not None:
                reservation_id = xml_resp.Reservation.Id
                output_dict = {
                    'domain_id':
                    cloud_shell.domain,
                    '{0}_{1}_reservationId'.format(system_name, reservation_name):
                    reservation_id
                }
                testcase_Utils.pNote("\n\n *** Cloudshell CreateTopologyReservation"
                                     " successfull for ResName-\"{}\" ResId-{}\n".\
                                     format(reservation_name,
                                            output_dict['{0}_{1}_reservationId'.\
                                                        format(system_name, reservation_name)]),
                                     "info")
                status = True
            else:
                testcase_Utils.pNote(
                    "\n\n *** Cloudshell CreateTopologyReservation"
                    " failed for \"{}\"".format(reservation_name), "warning")
        except Exception as exception:
            print_exception(exception)

        testcase_Utils.report_substep_status(status)
        return status, output_dict
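
A hypothetical keyword call (kw stands in for an instance of the keyword class; system, reservation and topology names are illustrative): reserve a topology for 60 minutes and alert the owner 10 minutes before the reservation ends.

status, out = kw.cs_create_topology_reservation(
    "cloudshell_server", "nightly_run", 60, True, True, 10,
    "Lab/Topologies/Regression")
if status:
    print(out["cloudshell_server_nightly_run_reservationId"])
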
Example #10
    def cs_create_reservation(self, system_name, reservation_name,
                              duration_in_mins, notify_on_start, notify_on_end,
                              notify_mins_before_end):
        """
        Defines a reservation to be created.

        This keyword only defines the reservation with all its details by saving
        the details in the data repository. Actual creation is done by using the
        cs_add_topology_to_reservation keyword by providing the reservation name
        to it.

        :Datafile usage:
            Tags or attributes to be used in input datafile for the system
            or subsystem. If both tag and attribute are provided, the attribute
            will be used.
            1. username   = name of the cloudshell user

        :Arguments:
            1. system_name(string) = Name of the UAP system from the datafile
            2. reservation_name(string) = Specify the name of the reservation.
            3. duration_in_mins(int) = Specify the length of the reservation.
            4. notify_on_start(bool) = Indicate whether to notify the
               reservation owner when the reservation starts.
            5. notify_on_end(bool) = Indicate whether to notify the reservation
               owner when the reservation ends.
            6. notify_mins_before_end(int) = Notification Minutes Before End -
               Indicate the number of minutes before the end of the reservation
               to send out a Notify On End alert to the reservation owner.
               (0 = disabled)

        :Returns:
            1. status(bool)= True/False
            2. output_dict = consists of following key value pairs:
               1. domain_id: Domain Id returned after login to cloudshell.
               2. reservation_id: Reservation Id returned after successful
                  creation of resources.
        """

        wdesc = "Save reservation details for the reservation name provided"
        testcase_Utils.pSubStep(wdesc)
        testcase_Utils.pNote(file_Utils.getDateTime())

        testcase_Utils.pNote("save reservation, cs obj-{}".\
                             format(cloud_shell), "info")

        keys_for_credentials = ['username']
        credentials = data_Utils.get_credentials(self.datafile, system_name,
                                                 keys_for_credentials)

        status = True
        output_dict = {}
        try:
            pNote("This keyword will only collect the reservation information "\
                  "and save the details in data repository.\n")
            pNote("In order to create reservation in cloudshell, execute this keyword and then "\
                  "use the keyword 'cs_add_topology_to_reservation' and provide the "\
                  "reservation_name to it, 'cs_add_topology_to_reservation' keyword will use "\
                  "the reservation  details for he reservation_name provided, create a "\
                  "reservation and add topology to the reservation.")

            res_key = "{0}_{1}_cs_rsrv_details".format(system_name,
                                                       reservation_name)
            output_dict = {
                res_key: {
                    "reservation_name": reservation_name,
                    "username": credentials['username'],
                    "duration": duration_in_mins,
                    "notify_on_start": notify_on_start,
                    "notify_on_end": notify_on_end,
                    "notify_mins_before_end": notify_mins_before_end
                }
            }

        except Exception as exception:
            pNote("Saving reservation details for reservation_name={0} failed!!"\
                  .format(reservation_name))
            print_exception(exception)
            status = False
        else:
            pNote("Sucessfully saved reservation details for reservation_name={0}"\
                  .format(reservation_name))

        testcase_Utils.report_substep_status(status)
        return status, output_dict
Example #11
    def gnmi_subscribe(self, system_name, option, q_query, qt_querytype="once",
                       polling_interval="30s", stop_after=None, verify=None,
                       external_system=None, external_system_session=None,
                       streaming_duration="0s", timestamp="''", user_arg=""):
        """
        The gnmi Subscribe keyword will perform get (subscribe "once" is a get)
        and polling operations and store the output json in the data dictionary.
        :param system_name: server System name as in data file, server where gnmi client
         will send out its request.
        :param q_query: query xpath as string e.g. "/system/services/sftp/sftp-server"
                        can have multiple query separated by ","
                         e.g. "/system/services/sftp/sftp-server,/interfaces/interface"
        :param qt_querytype: Type of query as string.
                            qt_querytype must be one of: once, polling or streaming.
                             (default "once")
        :param polling_interval: Interval at which to poll in seconds if polling
        is specified for query_type.(default 30s)
        :param stop_after: User can specify a time after that polling or streaming
        will be stopped. (default None)
        :param verify: user provided string to verify; multiple strings can be provided separated by ","
                       e.g. '"sftp-server-port": "2202", "sftp-server-enabled": "true"'.
                       Verify string also has regular expression support.
        :param external_system: External system name mentioned as in data file.
                                This is optional if user want to execute gnmi from a different
                                server other than the warrior framework host machine.
        :param external_system_session: External system session.
        :param streaming_duration: Length of time to collect streaming queries (0 is infinite).
         (default 0s)
        :param timestamp:  Specify timestamp formatting in output.
                           One of (<empty string>, on, raw, <FORMAT>)
                           where <empty string> is disabled,
                           on is human readable,
                           raw is int64 nanos since epoch,
                           and <FORMAT> is according to golang time.Format(<FORMAT>)
        :param user_arg: Extra argument placeholder for future use,
         if any new argument the user wants to pass on.
        :return:  True or False and dictionary containing output string and gnmi session
        in case of streaming or polling
        """

        wdesc = "Executing gnmi Subscribe"
        testcase_Utils.pSubStep(wdesc)
        status = False
        result = None
        outputdict = {}
        gnmi_execute = gnmi()
        gnmi_param = ['ip', 'gnmi_port', 'username', 'password', 'prompt',
                      'ca_crt', 'client_crt', 'client_key']
        gnmi_param_dic = data_Utils.get_credentials(self.datafile,
                                                    system_name,
                                                    gnmi_param)
        __gnmi_obj = Utils.data_Utils.get_object_from_datarepository(str(system_name)+"_gnmi_session")
        file_dir = os.path.dirname(os.path.abspath(__file__))
        war_dir = os.path.abspath(os.path.join(file_dir, '../..'))
        binary = os.path.join(war_dir, 'Framework/Gnmi/gnmi_cli')
        
        testcase_Utils.pNote("***** Binary path: {0} *****".format(binary))
        if __gnmi_obj:
            gnmi_obj = __gnmi_obj
        else:
            gnmi_obj = None
        if external_system:
            ext_gnmi_param_dic = data_Utils.get_credentials(self.datafile,
                                                            external_system,
                                                            ['ca_crt', 'client_crt', 'client_key'])

        if external_system is None:
            ca_crt, client_crt, client_key = data_Utils.set_gnmi_cert_params(gnmi_param_dic)
        else:
            ca_crt, client_crt, client_key = data_Utils.set_gnmi_cert_params(ext_gnmi_param_dic)
 
        username = gnmi_param_dic.get('username')
        password = gnmi_param_dic.get('password')
        prompt = gnmi_param_dic.get('prompt')

        cmd_string = gnmi_execute.get_cmd_string(ip=gnmi_param_dic['ip'],
                                                 gnmi_port=gnmi_param_dic['gnmi_port'],
                                                 ca_crt=ca_crt,
                                                 client_crt_path=client_crt,
                                                 client_key=client_key,
                                                 option=option,
                                                 qt_querytype=qt_querytype,
                                                 q_query=q_query,
                                                 polling_interval=polling_interval,
                                                 timestamp=timestamp,
                                                 streaming_duration=streaming_duration,
                                                 user_arg=user_arg)
        status, result, child = gnmi_execute.execute(binary, cmd_string, username,
                                                      password, prompt, external_system,
                                                      external_system_session, stop_after, gnmi_obj)
        if status and verify and result:
            status = gnmi_execute.verify(result, verify)

        if external_system or qt_querytype not in ['polling', 'streaming']:
            outputdict = {'{}_gnmi_result'.format(system_name): result}
        else:
            outputdict = {'{}_gnmi_result'.format(system_name): result,
                          '{}_gnmi_session'.format(system_name): child}
        return status, outputdict
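
A hedged usage sketch (kw, "router1", the xpath and the option value are placeholders; the expected values for option are not documented above): a one-shot "once" subscription behaves like a get, so only <system>_gnmi_result is stored, while polling/streaming additionally keeps the live session under <system>_gnmi_session.

status, out = kw.gnmi_subscribe("router1", option="",
                                q_query="/system/services/sftp/sftp-server",
                                qt_querytype="once",
                                verify='"sftp-server-enabled": "true"')
print(out["router1_gnmi_result"])
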
Example #12
    def gnmi_operations(self, system_name, q_query, operation, verify=None,
                        external_system=None, external_system_session=None, 
                        timestamp="''", user_arg=""):
        """
        will perform the following operations:
        1. Get
        2. Set operation(types: delete/replace/update)
        3. Capabilities
        and store the output JSON in a data dictionary.
        :param system_name: server System name as in data file, server where gnmi client will
        send out its request.
        :param q_query: query xpath as string
        e.g. "/system/services/sftp/sftp-server[sftp-server-port=2202]"
        :param operation: Type of operation(get/set/capabilities).
        :param verify: user provided string to verify; multiple strings can be provided separated by ","
                       e.g. '"sftp-server-port": "2202", "sftp-server-enabled": "true"'.
                       Verify string also has regular expression support.
        :param external_system_session: External system session.
        :param timestamp:  Specify timestamp formatting in output.
                           One of (<empty string>, on, raw, <FORMAT>)
                           where <empty string> is disabled,
                           on is human readable,
                           raw is int64 nanos since epoch,
                           and <FORMAT> is according to golang time.Format(<FORMAT>)
        :param user_arg: Extra argument placeholder for future use,
                         if any new argument the user wants to pass on.
        :return: True or False and dictionary containing output string
        """

        wdesc = "***** Executing gnmi " + operation + " operation *****"
        testcase_Utils.pSubStep(wdesc)
        status = False
        result = None
        outputdict = {}
        gnmi_execute = gnmi()
        gnmi_param = ['ip', 'gnmi_port', 'username',
                      'password', 'prompt', 'ca_crt', 'client_crt', 'client_key']
        gnmi_param_dic = data_Utils.get_credentials(self.datafile,
                                                    system_name,
                                                    gnmi_param)
        __gnmi_obj = Utils.data_Utils.get_object_from_datarepository(str(system_name)+
                                                                     "_gnmi_session")
        file_dir = os.path.dirname(os.path.abspath(__file__))
        war_dir = os.path.abspath(os.path.join(file_dir, '../..'))
        binary = os.path.join(war_dir, 'Framework/Gnmi/gnmi_cli')
        testcase_Utils.pNote("***** Binary path: {0} *****".format(binary))
        if __gnmi_obj:
            gnmi_obj = __gnmi_obj
        else:
            gnmi_obj = None
        if external_system:
            ext_gnmi_param_dic = data_Utils.get_credentials(self.datafile,
                                                            external_system,
                                                            ['ca_crt', 'client_crt', 'client_key'])

        if external_system is None:
            ca_crt, client_crt, client_key = data_Utils.set_gnmi_cert_params(gnmi_param_dic)
        else:
            ca_crt, client_crt, client_key = data_Utils.set_gnmi_cert_params(ext_gnmi_param_dic)
 
        username = gnmi_param_dic.get('username')
        password = gnmi_param_dic.get('password')
        prompt = gnmi_param_dic.get('prompt')

        if operation in "get":
            cmd_string = gnmi_execute.get_cmd_string(ip=gnmi_param_dic['ip'],
                                                     gnmi_port=gnmi_param_dic['gnmi_port'],
                                                     username=username, password=password,
                                                     ca_crt=ca_crt,
                                                     client_crt_path=client_crt,
                                                     client_key=client_key,
                                                     operation=operation, q_query=q_query,
                                                     timestamp=timestamp, user_arg=user_arg)
        else:
            cmd_string = gnmi_execute.get_cmd_string(ip=gnmi_param_dic['ip'],
                                                     gnmi_port=gnmi_param_dic['gnmi_port'],
                                                     username=username, password=password,
                                                     ca_crt=ca_crt,
                                                     client_crt_path=client_crt,
                                                     client_key=client_key,
                                                     operation=operation, q_query=q_query)
        print_info("** {0} Operation in progress **".format(operation))
        if cmd_string:
            status, result, child = gnmi_execute.execute(binary, cmd_string, username,
                                                         password, prompt, external_system,
                                                         external_system_session, None,
                                                         gnmi_obj)
        if status and verify:
            status = gnmi_execute.verify(result, verify)
        outputdict = {'{}_gnmi_result'.format(system_name): result}
        return status, outputdict
Example #13
    def build_server(self, datafile, system_name):
        """
            Take in a system and read all its routes
            Load the routes into Bottle server object
            Start a thread with the bottle server

            return the bottle server adapter and server thread
        """
        app = Bottle()
        # Get system and routes
        system_data = data_Utils.get_credentials(datafile, system_name)
        self.datafile = datafile

        route_file = system_data['mapping_file']
        if route_file:
            route_file = getAbsPath(route_file, getDirName(datafile))
        # Loop through each route
        for route in data_Utils.get_all_system_or_subsystem(route_file):
            route_name = route.get('name')
            if route_name[0] != '/':
                route_name = '/' + route_name

            # Group request condition with the same method together
            route_methods = {}
            for request in route:
                request_method = request.find('request_method').text.upper()
                if request_method not in route_methods:
                    route_methods[request_method] = [request]
                else:
                    route_methods[request_method].append(request)

            # Build route with the grouped conditions
            for method_type, same_type_methods in route_methods.items():
                # A route can have general response and conditional response
                specific_res = []
                general_res = {}

                for method in same_type_methods:
                    dict_of_info = {}
                    method_req = {}
                    method_res = {}

                    # Get all info from the condition
                    for info in iter(method):
                        if info.tag in dict_of_info:
                            dict_of_info[info.tag].append(info.text)
                        else:
                            dict_of_info[info.tag] = [info.text]

                    # Extract request/response related info
                    for key, value in dict_of_info.items():
                        if key in request_verify_list:
                            method_req = {key: value}
                        elif key in response_list:
                            method_res[key] = value

                    if any([
                            key in request_verify_list
                            for key in dict_of_info.keys()
                    ]):
                        # this condition has request/response pair
                        method_combine = method_req
                        method_combine.update(method_res)
                        specific_res.append(method_req)
                        # this ensures that when all verifications fail and no
                        # general response is given, there will still be some responses
                        if any([
                                key in on_fail_response_list
                                for key in dict_of_info.keys()
                        ]):
                            general_res.update(method_res)
                    else:
                        # this condition only has general response
                        general_res.update(method_res)

                app.route(
                    route_name, method_type,
                    self.build_route(route_name, method_type, specific_res,
                                     general_res))

        # Build a class to hold the server so it can be closed easily
        port = 5000 if "port" not in system_data else int(system_data["port"])
        server = ServerHandler(host="0.0.0.0", port=port)
        server_thread = threading.Thread(target=run,
                                         kwargs={
                                             "app": app,
                                             "server": server,
                                             "debug": True
                                         })
        server_thread.daemon = True
        server_thread.start()
        sleep(2)

        if server_thread.is_alive():
            return True, {"server": server, "server_thread": server_thread}
        else:
            return False, {}
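
A hedged usage sketch for the mock REST server above; the datafile path and system name are illustrative. The system entry is expected to provide a mapping_file listing the routes (and optionally a port, defaulting to 5000).

ok, handles = kw.build_server("/tmp/testdata.xml", "mock_rest_server")
# handles carries the ServerHandler and the daemon thread running Bottle,
# so the caller can keep a reference and close the mock server later.
print("mock server thread alive: {0}".format(ok))
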
Example #14
    def execute_robot_wrapper(self, system_name, session_name=None):
        """
        This keyword is to execute python scripts which internally calls robot scripts.
        :Arguments:
            1. system_name(string) - Name of the system/subsystem in the datafile
            2. session_name(string) - name of the session to the system
        :Returns:
            1. status(bool)= True/False
        :Datafile usage:
            Tags or attributes to be used in input datafile for the system/subsystem.
            If both tag and attribute are provided, the attribute will be used.
            1. ip = IP address of the system where the python script will be executed
                Default value for ip type is ip, it can take any type of ip's
                to connect to (like ipv4, ipv6, dns etc)
                Users can provide tag/attribute for any ip_type under the
                system in the input datafile and specify the tag/attribute name
                as the value for ip_type argument, then the connection will be
                established using that value
            2. username = username for the session
            3. password = password for the session
            4. end_prompt = prompt expected when the command(python script) execution
                is successful, default value: .*(%|#|\$).
            5. remote = 'yes' when executed in remote system & 'no'(default)
                when executed in local system
            6. file_path = path of the python script to be executed
            7. output_dir = directory path used as outputdir for robot scripts
               available in the python script(in execution machine). All the
               Robot tests listed in the Python script should have same output directory.
            8. local_output_dir = path of the directory in the local system
                where the robot output files from remote system will be copied.
                If this tag is not available or left empty, results will be
                stored in 'home/<username>/robot_wrapper_opdir' directory.
            Note: Tags 1,2,3 & 8 are only required to copy the results from
             remote to local system when the remote(5) argument is set to 'yes'.
        """

        session_id = get_session_id(system_name, session_name)
        session_object = get_object_from_datarepository(session_id)

        credentials = get_credentials(self.datafile, system_name, [
            'ip', 'username', 'password', 'end_prompt', 'remote', 'file_path',
            'output_dir', 'local_output_dir'
        ])

        if not credentials['file_path'] or not credentials['output_dir']:
            pNote(
                "Please provide values for 'file_path & output_dir' "
                "tags in input data_file", 'warning')
            return False

        if credentials['end_prompt']:
            prompt = credentials['end_prompt']
        else:
            prompt = ".*(%|#|\$)"

        data_directory = os.path.dirname(self.datafile)
        abs_filepath = getAbsPath(credentials['file_path'], data_directory)
        abs_output_dir = getAbsPath(credentials['output_dir'], data_directory)

        current_time = time.time()
        if os.path.isfile(abs_filepath):
            command = "python " + abs_filepath
            status = session_object.send_command(".*", prompt, command)[0]
            if status is True:
                pNote("Robot_wrapper script: '{}' execution is successful".
                      format(abs_filepath))
            else:
                pNote(
                    "Robot_wrapper script: '{}' execution failed".format(
                        abs_filepath), 'warning')
        else:
            pNote(
                "Robot_wrapper script: '{}' does not exist".format(
                    abs_filepath), 'warning')
            status = False

        # When executed in remote machine
        if credentials['remote'] and credentials['remote'].upper() == "YES":

            if credentials['local_output_dir']:
                local_output_dir = getAbsPath(credentials['local_output_dir'],
                                              data_directory)
            else:
                local_output_dir = "~/robot_wrapper_opdir"
            get_file_from_remote_server(credentials['ip'],
                                        credentials['username'],
                                        credentials['password'],
                                        abs_output_dir, local_output_dir)
            abs_output_dir = local_output_dir + os.sep + os.path.basename(
                abs_output_dir)
        # Get the modified xml files in the output_dir
        modified_list = get_modified_files(abs_output_dir, current_time,
                                           ".xml")
        # Get the robot xml files from the modified list of files
        robot_xml_list = robot_wrapper_utils.get_robot_xml_files(modified_list)
        # Get results from robot xml files
        robot_test_results = robot_wrapper_utils.get_results_from_robot_xml(
            robot_xml_list)
        # Create junit for robot tests
        robot_wrapper_utils.create_case_junit(robot_test_results)

        return status
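
A hypothetical keyword call: the "robot_host" system must define at least file_path and output_dir in the datafile (plus ip/username/password/local_output_dir when remote is 'yes'), and session_name selects an already established session to that system.

status = kw.execute_robot_wrapper("robot_host", session_name="ssh1")
print("robot wrapper execution passed: {0}".format(status))
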
def convert_csv_or_excel_to_xml(input_file,
                                mapping_file=None,
                                output_xml_file_path=None,
                                overwrite="yes"):
    """
        Takes a file path as input:
        1. If it is an excel file, converts it to csv and then converts
           the csv file to xml.
        2. If it is a csv file, converts it to xml.
        3. The mapping file is used to map the column names in the excel sheet
           to meaningful names as recognized by the code.

    Arguments:
        1. input_file: input_file which is either
           csv file path or excel file path
        2. mapping_file: If a mapping file path is given, it is used to map
           columns with the meaningful name as recognized by the user else
           the spaces in the column names will be replaced by "_" in
           the output xml
        3. output_xml_file_path: If the user gives output_xml_file_path, the
           xml file is created at that path; otherwise it is created
           alongside the input csv or excel file.

    Returns:
        1. output_xml_file_path: Returns the output xml file path
        2. output_dict: Updates the output_dict with the
           json string and with the output xml.
    """
    output_dict = {}
    generate_csv = False
    try:
        if ".xls" in input_file:
            input_file = convert_excel_to_csv(input_file)
            generate_csv = True

        dict_response = convert_csv_to_list_of_dict(input_file)
        json_response = json.dumps(dict_response,
                                   sort_keys=False,
                                   indent=4,
                                   separators=(',', ': '),
                                   encoding="utf-8")

        if mapping_file:
            mapping_dict = data_Utils.get_credentials(mapping_file,
                                                      'mapping_scheme')

            mapping_dictionary = {v: k for k, v in mapping_dict.items()}
        else:
            mapping_dictionary = {}

        result = []
        result.append(
            "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<systems>\n")
        for i in range(len(dict_response)):
            result.append("  <system name=" + "\"" + str(i + 1) + "\">\n")
            for key, value in dict_response[i].items():
                if mapping_dictionary:
                    if key in mapping_dictionary and mapping_dictionary[key]:
                        result.append(("    <{0}>{1}</{0}>\n").format(
                            mapping_dictionary[key], value))
                    else:
                        result.append(
                            ("    <{0}>{1}</{0}>\n").format(key, value))
                else:
                    result.append(("    <{0}>{1}</{0}>\n").format(
                        "_".join(key.split()), value))

            result.append("  </system>\n")
        result.append("</systems>")

        xml_res = ''.join(result)
        if type(input_file) == file:
            input_file = input_file.name

        if generate_csv:
            os.remove(input_file)

        output_dict["{0}_json_response".format(input_file.replace(
            ".csv", ''))] = json_response
        output_dict["{0}_xml_response".format(input_file.replace(
            ".csv", ''))] = xml_res

        if output_xml_file_path:
            output_xml_file = output_xml_file_path
        else:
            output_xml_file = input_file.replace(".csv", ".xml")

        if overwrite == "no":
            output_xml_file = file_Utils.addTimeDate(output_xml_file)

        f = open(output_xml_file, "wb+")
        f.write(xml_res)
        f.close()

    except Exception as exception:
        print_exception(exception)
        output_xml_file = None

    return output_xml_file, output_dict
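
A hedged usage sketch (paths are illustrative): an .xls input is first converted to csv and then to xml; the returned dict also carries the json and xml strings, keyed off the intermediate csv file name.

xml_path, extra = convert_csv_or_excel_to_xml("/tmp/systems.xls",
                                              mapping_file="/tmp/mapping.xml",
                                              overwrite="no")
print(xml_path)
print(sorted(extra.keys()))
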