Example #1
 def Set_S3_Credentials(self, access_key, secret_key, location=None):
     """
     Initializes a connection to an S3 API endpoint
     
     Must be called before any other keywords are used
     
     _access_key_ is the AWS access key
     _secret_key_ is the AWS secret key
     _location_ is the S3 API endpoint. If not set, defaults to AWS. Can
                be in the format https://host:port or http://host:port
     """
     if location:
         host, port, is_secure, path = self._parse_s3_location(location)
         
         logger.debug("Initializing S3Connection to host %s" % host)
         self._conn = S3Connection(access_key, secret_key,
                                   host=host, port=port, is_secure=is_secure,
                                   calling_format=OrdinaryCallingFormat(),
                                   validate_certs=False,
                                   path=path)
     else:
         logger.debug("Initializing S3Connection to AWS default")
         self._conn = S3Connection(access_key, secret_key,
                                   calling_format=OrdinaryCallingFormat(),
                                   validate_certs=False)
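The helper `_parse_s3_location` used above is not shown here; a minimal sketch of what such a parser could look like (an assumption, not the library's actual implementation) is:

 def _parse_s3_location(self, location):
     # Hypothetical parser: splits "http(s)://host:port/path" into the pieces
     # S3Connection expects above. Not the library's real implementation.
     try:
         from urllib.parse import urlparse  # Python 3
     except ImportError:
         from urlparse import urlparse  # Python 2
     parsed = urlparse(location)
     is_secure = parsed.scheme == 'https'
     port = parsed.port or (443 if is_secure else 80)
     return parsed.hostname, port, is_secure, parsed.path or '/'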
    def connect_to_rabbitmq(self, host, port, username='******', password='******', timeout=15, alias=None):
        """
        Connect to a RabbitMq server.

        *Args:*\n
        _host_ - server host name;\n
        _port_ - port number;\n
        _username_ - user name;\n
        _password_ - user password;\n
        _timeout_ - connection timeout;\n
        _alias_ - connection alias;\n

        *Returns:*\n
        Index of the current connection.

        *Raises:*\n
        socket.error if a connection cannot be created.

        *Example:*\n
        | Connect To Rabbitmq | my_host_name | 15672 | guest | guest | alias=rmq |
        """

        port = int(port)
        timeout = int(timeout)
        logger.debug('Connecting using : host=%s, port=%d, username=%s, password=%s, timeout=%d, alias=%s' % (host, port, username, password, timeout, alias))
        self.headers = {"Authorization": "Basic " + base64.b64encode(username + ":" + password)}
        try:
            # pass timeout as a keyword so it is not treated as httplib's 'strict' argument
            self._connection = httplib.HTTPConnection(host, port, timeout=timeout)
            self._connection.connect()
            return self._cache.register(self._connection, alias)
        except socket.error as e:
            raise Exception("Could not connect to RabbitMq", str(e))
Example #3
    def start_process(self, command, *arguments, **configuration):
        """Starts a new process on background.

        See `Specifying command and arguments` and `Process configuration`
        for more information about the arguments.

        Makes the started process the new `active process`. Returns an identifier
        that can be used as a handle to activate the started process if needed.
        """
        config = ProcessConfig(**configuration)
        executable_command = self._cmd(arguments, command, config.shell)
        logger.info('Starting process:\n%s' % executable_command)
        logger.debug('Process configuration:\n%s' % config)
        process = subprocess.Popen(executable_command,
                                   stdout=config.stdout_stream,
                                   stderr=config.stderr_stream,
                                   stdin=subprocess.PIPE,
                                   shell=config.shell,
                                   cwd=config.cwd,
                                   env=config.env,
                                   universal_newlines=True)
        self._results[process] = ExecutionResult(process,
                                                 config.stdout_stream,
                                                 config.stderr_stream)
        return self._processes.register(process, alias=config.alias)
Example #4
def docker_process_args(passed_args_dict, default_args_dict, method_name):
    """Accepts two dicts and combines them preferring passed
        args while filling unspecified args with default values.

    Parameters:
        :param passed_args_dict: A dict of the passed keyword args for the method.

        :param default_args_dict: A dict of the default keyword args for the method.

    Returns:
        :returns dict: returns dict containing passed args, with
        defaults for all other keyword args.
    """
    logger.info("Processing args for %s method" % method_name)
    logger.info(passed_args_dict)
    logger.info(default_args_dict)
    processed_args_dict = {}

    if passed_args_dict is None:
        passed_args_dict = {}

    try:
        for key in default_args_dict:
            if key in passed_args_dict:
                processed_args_dict[key] = passed_args_dict[key]
            else:
                processed_args_dict[key] = default_args_dict[key]
    except TypeError:
        logger.debug("Error: One or both of the passed arguments is not a dictionary")

    return processed_args_dict
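A quick usage sketch for the helper above (the dict contents are hypothetical): keys present in the passed dict win, every other key falls back to its default.

defaults = {'image': 'ubuntu', 'detach': True, 'ports': None}
passed = {'image': 'alpine'}
merged = docker_process_args(passed, defaults, 'create_container')
# merged == {'image': 'alpine', 'detach': True, 'ports': None}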
    def query(self, selectStatement, **named_args):
        """
        Uses the input `selectStatement` to query for the values that
        will be returned as a list of tuples.

        Tip: Unless you want to log all column values of the specified rows,
        try specifying the column names in your select statements
        as much as possible to prevent any unnecessary surprises with schema
        changes and to easily see what your [] indexing is trying to retrieve
        (i.e. instead of `"select * from my_table"`, try
        `"select id, col_1, col_2 from my_table"`).

        For example, given we have a table `person` with the following data:
        | id | first_name  | last_name |
        |  1 | Franz Allan | See       |

        When you do the following:
        | @{queryResults} | Query | SELECT * FROM person |
        | Log Many | @{queryResults} |

        You will get the following:
        [1, 'Franz Allan', 'See']

        Also, you can do something like this:
        | ${queryResults} | Query | SELECT first_name, last_name FROM person |
        | Log | ${queryResults[0][1]}, ${queryResults[0][0]} |

        And get the following:
        See, Franz Allan
        """
        logger.debug("Running query: {query}".format(query=selectStatement))
        logger.debug("Query parameters: {named_args}".format(named_args=named_args))
        with self._dbconnection.begin():
            logger.info(selectStatement)
            return self._dbconnection.execute(selectStatement, **named_args).fetchall()
    def read_single_value_from_table(self, tableName, columnName, whereClause):
        """
        Reads a single value from a table.
        If more than one row satisfies the query, an AssertionError is raised.

        *Arguments:*
            - tableName: string, table name.
            - columnName: string, column name or names divided by comma.
            - whereClause: string, where-clause.

        *Return:*
            - Fetched single value.

        *Examples:*
        | @{queryResult} | Read Single Value From Table | employee \
        | name, surname | age=27 |
        """

        sqlStatement = 'SELECT %s FROM %s WHERE %s' % (columnName, tableName,
                                                       whereClause)

        result = self.query(sqlStatement)

        assert len(result) == 1, \
            ("Expected to have 1 row from '%s' "
                "but got %s rows : %s." % (sqlStatement, len(result), result))
        logger.debug("Got 1 row from %s: %s." % (sqlStatement, result))

        return result[0]
Example #7
 def _print_link(self, path, rc):
     if rc == 0:
         name = path + '.png'
         logger.info('<a href="%s"><img src="%s"></a>' % (name, name), True)
     else:
         logger.debug('Message sequence generation with seqdiag failed. Linking sequence file instead.')
         logger.info('<a href="%s">Message sequence</a>' % path, True)
    def verify_number_of_rows_matching_where(self, tableName, where,
                                             rowNumValue):
        """
        Fetches rows from the table with the given 'tableName' using the
        given where-clause.
        Verifies that the number of fetched rows equals the given
        rowNumValue; otherwise an AssertionError is raised.

        *Arguments:*
            - tableName: string, table name;
            - where: string, where-clause;
            - rowNumValue: int, expected number of rows.

        *Return:*
            - None.

        *Examples:*
        | Verify Number Of Rows Matching Where | TableName | name='John' | 12 |
        """
        selectStatement = "select * from %s where %s" % (tableName, where)
        count = self.rows_count(selectStatement)

        assert count == rowNumValue, ("Expected to get %s row(s) for where-"
                                      "clause statement '%s', but got %s." %
                                      (rowNumValue, where, count))
        logger.debug("Number of rows matching 'where %s' statement equals to"
                     "%s." % (where, rowNumValue))
Example #9
    def verify_resource_id_has_attributes_in_DB(self, resource_id, label, order='', label_tag='', company_typ_cd=''):
        if company_typ_cd != '':
            statement = "Select * from Presentation where PRSNTTN_LABEL='%s' and PRSNTTN_ORDER in ('%s') " \
                    "and PRSNTTN_LABEL_TAG in ('%s') and PRSNTTN_ID in " \
                    "(select PRSNTTN_ID from RESOURC where RSRC_ID='%s' and CMPNY_TYP_CD ='%s')" % (label, order, label_tag, resource_id, company_typ_cd)
        elif order != '' and label_tag != '':
            statement = "Select * from Presentation where PRSNTTN_LABEL='%s' and PRSNTTN_ORDER in ('%s') " \
                    "and PRSNTTN_LABEL_TAG in ('%s') and PRSNTTN_ID in " \
                    "(select PRSNTTN_ID from RESOURC where RSRC_ID='%s')" % (label, order, label_tag, resource_id)
        else:
            statement = "Select * from Presentation where PRSNTTN_LABEL='%s' and PRSNTTN_ID in " \
                    "(select PRSNTTN_ID from RESOURC where RSRC_ID='%s')" % (label, resource_id)

        logger.debug("fun: verify_resource_id_has_attributes_in_DB")
        logger.debug(statement)

        try:
            self.conn.check_if_exists_in_database(statement)
            msg = "==Verified Resource '%s' has attributes in DB Succeed! \nRSRC_ID %s has Label-->'%s' + Order-->'%s' + Tag-->'%s' and Company_Type_CD-->'%s'\n %s"%(resource_id, resource_id, label, order, label_tag, company_typ_cd, statement)
            logger.info(msg)
        except AssertionError as e:
            query = "select PRSNTTN_LABEL, PRSNTTN_ORDER, PRSNTTN_LABEL_TAG from Presentation where PRSNTTN_ID in "\
                    "(select PRSNTTN_ID from RESOURC where RSRC_ID='%s')"%(resource_id)
            result = self.conn.query(query)
            admin = self.conn.query("SELECT CMPNY_TYP_CD FROM RESOURC WHERE RSRC_ID='%s'"%resource_id)
            msg = "!!!Expected:['%s', '%s', '%s', '%s'] --> Actual: [%s, %s]"%(label, order, label_tag, company_typ_cd, result[0], admin[0][0])

            logger.warn(e.message)
            raise AssertionError(msg)
    def get_json_file(self, path):
        """Returns [http://goo.gl/o0X6Pp|JSON] object from [http://goo.gl/o0X6Pp|JSON] file
        with all variables replaced.

        Arguments:
        - ``path``: The path to JSON file.

        Examples:

        _request.json_
        | [{
        |     "bad": ${false},
        |     "good": ${true},
        |     "key": 5.5,
        |     "key2": "value2"
        | }]

        | @{var} = | Get JSON File | request.json |
        | # [{'key2': 'value2', 'bad': False, 'good': True, 'key': Decimal('5.5')}] |
        """
        content = self._os.get_file(path)
        content = self._builtin.replace_variables(content)
        content = sub(r'(False|True)', lambda match: match.group(1).lower(), content)
        logger.debug(content)
        return self.json_loads(content)
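Robot Framework renders variables like ${false} and ${true} as Python's False/True, which json.loads() rejects; the sub() call above lowercases them first. A standalone illustration of that step (the sample content is hypothetical):

from re import sub
import json

content = '[{"bad": False, "good": True, "key": 5.5}]'
content = sub(r'(False|True)', lambda match: match.group(1).lower(), content)
assert json.loads(content) == [{"bad": False, "good": True, "key": 5.5}]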
    def _execute_sql(self, sqlStatement, commitNeeded=False):
        """
        Executes sql.

        *Arguments:*
            - sqlStatement: string, sql statement.
            - commitNeeded: bool, if True - commit will be performed after
             executing statement.

        *Return:*
            - Database cursor object.
        """

        cur = None  # make sure 'cur' exists for the finally clause even if cursor() fails
        try:
            cur = self._connectionCache.current.connection.cursor()

            logger.debug("Executing: %s" % sqlStatement)
            cur.execute(sqlStatement)
            if commitNeeded:
                self._connectionCache.current.connection.commit()
            return cur

        finally:
            if cur:
                logger.debug("Rolling back: %s" % sqlStatement)
                self._connectionCache.current.connection.rollback()
Example #12
 def update_entity(self, type, id, update_field_value_map, parent_type=None, parent_id=None):
     """
     The lock/unlock and check-out/check-in calls are implemented as per the manual, but they always return a 404 error
     and the update succeeds without them, so those calls are commented out for now.
     :param type:
     :param id:
     :param update_field_value_map:
     :param parent_type:
     :param parent_id:
     :return:
     """
     url = self._base_url
     if parent_type is not None:
         url += '/%ss/%s' % (parent_type, parent_id)  # hacky~~ again TODO
     url += '/%ss/%s' % (type, id)
     # lock entity first
     # self.lock_entity(url)
     # self.check_out_entity(url,'Checking out by auto test script')
     update_field_value_map['_type'] = type
     xml = self._convert_entity_to_xml(update_field_value_map)
     logger.debug(url)
     logger.debug(xml)
     local_headers = self.headers.copy()
     local_headers['Content-type'] = 'application/xml'
     http = httplib2.Http()
     resp_header, resp_content = http.request(url, 'PUT', body=xml, headers=local_headers)
     if resp_header['status'] != '200':  # OK
         # raise RuntimeError(
         logger.warn(resp_header)
         logger.warn(resp_content)
         logger.warn(
             'Not able to update entity with fields %s, see the log for response details' % update_field_value_map)
     # self.check_in_entity(url, 'Checking out by auto test script')
     # self.unlock_entity(url)
     return self._parse_alm_entities(resp_content)
    def table_must_contain_more_than_number_of_rows(self, tableName,
                                                    rowsNumber):
        """
        Checks that the table contains more rows than the specified number.
        If the table contains that many rows or fewer, an AssertionError
        is raised.

        *Arguments:*
            - tableName: string, table name.
            - rowsNumber: int, rows number.

        *Return:*
            - None

        *Examples:*
        | Table Must Contain More Than Number Of Rows | employee | 1000 |
        """

        selectStatement = "SELECT * FROM %s" % tableName

        rowsCount = self.rows_count(selectStatement)
        assert rowsCount > rowsNumber,\
            ('Table %s has %s row(s) but should have more than %s row(s).'
                % (tableName, rowsCount, rowsNumber))
        logger.debug("Table %s contains %s row(s)." % (tableName, rowsCount))
Example #14
 def select_user(self, user_id, error=None):
     """
     Assumes current browser is on the user selection page
     :param user_id:
     :return:
     """
     xpath_candidates = ("xpath=//select[@name='dnqualifier']",)
     for xpath in xpath_candidates:
         logger.debug("Try xpath %s " % xpath)
         logger.warn("Try xpath %s " % xpath)
         ele = self._element_find(xpath, True, False)
         if ele is not None:
             ele = self._element_find("xpath=//option[@value='%s']" % user_id, True, False)
             if ele is not None:
                 self.select_from_list(xpath, user_id)
             else:
                 logger.warn("xpath=//option[contains(@value,'%s')]" % user_id, True)
                 ele = self._element_find("xpath=//option[contains(@value,'%s')]" % user_id, True, False)
                 if ele is not None:
                     full_value = ele.get_attribute("value")
                     self.select_from_list(xpath, full_value)
                 else:
                     raise RuntimeError("Not able to find user selection by value %s " % user_id)
             return
     raise AssertionError("Could not find user selection")
    def get_transaction_isolation_level(self):
        """
        Gets transaction isolation level.

        *Note:* This method works only with 'psycopg2' driver.

        *Arguments:*
            - None

        *Return:*
            - Value that contains the name of the transaction isolation level.
            Possible return values are: TRANSACTION_READ_UNCOMMITTED, \
            TRANSACTION_READ_COMMITTED, TRANSACTION_REPEATABLE_READ, \
            TRANSACTION_SERIALIZABLE or TRANSACTION_NONE.

        *Examples:*
        | ${isolation_level} | Get Transaction Isolation Level |
        """

        # // TODO: Extend this method to work with other drivers.
        driverName = self._connectionCache.current.driverName
        assert driverName == 'psycopg2',\
            "Impossible to use this keyword with '%s' driver." % driverName

        selectStatement = "SELECT CURRENT_SETTING('transaction_isolation');"

        result = 'TRANSACTION_%s' % \
                 self.query(selectStatement)[0][0].replace(' ', '_').upper()
        logger.debug("Transaction isolation level is %s." % result)

        return result
    def transaction_isolation_level_must_be(self, transactionLevel):
        """
        Checks that the transaction isolation level equals the specified one.
        If it does not, an AssertionError is raised.

        *Note:* This method works only with 'psycopg2' driver.

        *Arguments:*
            - transactionLevel: string, transaction isolation level.
            Possible values are: TRANSACTION_READ_UNCOMMITTED, \
            TRANSACTION_READ_COMMITTED, TRANSACTION_REPEATABLE_READ, \
            TRANSACTION_SERIALIZABLE or TRANSACTION_NONE.

        *Return:*
            - None

        *Examples:*
        | Transaction Isolation Level Must Be | TRANSACTION_READ_COMMITTED |
        """

        result = self.get_transaction_isolation_level()

        assert transactionLevel.upper() == result, \
            ("Expected transaction isolation level '%s' "
                "but current is '%s'." % (transactionLevel.upper(), result))
        logger.debug("Transaction isolation level is %s." % result)
Example #17
 def load_template(self, template, *args, **kws):
    msg = Pos3Msg(self.templatemap[template])
    
    for arg in args:
       if arg.count('=') == 1:
          k, v = arg.split('=')
          kws[k] = v
    kws.update(self.fields)
    for k, v in kws.items():
          msg.setField(k, v)
          
    if not self.checkedin and template not in ['TEMPLATE_POSCHECKIN_1', 'TEMPLATE_POSCHECKIN_2']:
       self.check_in()
          
    if not kws or 'voucherNo' not in kws.keys():
       msg.setField('voucherNo', self._get_next_voucherNo())
    
    if self.batchNo and (not kws or 'batchNo' not in kws.keys()):
       msg.setField('batchNo', self.batchNo)
     
    if not kws or 'transactionRandId' not in kws.keys():
       msg.setField('transactionRandId', self.generate_transactionRandId())
                 
    self.sendmsg = msg.constructMsg()
    logger.debug('===Sending Message:'+self.sendmsg)
    return self.sendmsg
    def check_primary_key_columns_for_table(self, tableName, columns):
        """
        Checks that the specified columns equal the table's primary key columns.
        If the actual primary key columns differ from the specified ones,
        an AssertionError is raised.

        *Note:* This method works only with 'psycopg2' driver.

        *Arguments:*
            - tableName: string, table name.
            - columns: list or string divided by comma, table columns.

        *Return:*
            - Primary key columns.

        *Examples:*
        | ${key_columns} | Check Primary Key Columns For Table \
        | id, password_hash |
        """

        tableColumns = self.get_primary_key_columns_for_table(tableName)

        if not isinstance(columns, list):
            columns = [c.strip().lower() for c in columns.split(',')]

        assert set(tableColumns) == set(columns),\
            ('Primary key columns %s in table %s do not match '
             'with specified %s.' % (tableColumns, tableName, columns))
        logger.debug("Primary key columns %s in table %s match with specified"
                     " %s." % (tableColumns, tableName, columns))
Example #19
def get_all_stats(path):
    logger.info('Getting stats from <a href="file://%s">%s</a>' % (path, path),
                html=True)
    stats_line = _get_stats_line(path)
    logger.debug('Stats line: %s' % stats_line)
    total, tags, suite = eval(stats_line)
    return total, tags, suite
Example #20
 def __set_isolation_level(self, level):
     try:
         self._dbconnection.set_isolation_level(level)
     except:
         logger.debug("Error setting isolation level")
         self._dbconnection.rollback()
         raise
    def createTestCase(self, testProjectId, testSuiteId, testCaseName, summary, authorlogin, steps=''):
        """
        Creates a new test case.

        *Args:*\n
        _testProjectId_ - ID of the project the test case should be added to;\n
        _testSuiteId_ - ID of the test suite the test case should be added to;\n
        _testCaseName_ - test case name;\n
        _summary_ - test case description;\n
        _authorlogin_ - name of the user on whose behalf the test case is created;\n
        _steps_ - test case execution steps;

        *Returns:*\n
        ID of the created test case.
        """

        data = {"devKey": self.devKey,
                "testprojectid": testProjectId,
                "testsuiteid": testSuiteId,
                "testcasename": testCaseName,
                "summary": summary,
                "authorlogin": authorlogin}
        data['actiononduplicatedname'] = 'block'
        data['checkduplicatedname'] = 'true'
        data['steps'] = steps
        answer = self.server.tl.createTestCase(data)
        out = answer[0]
        logger.debug(out)
        testcaseid = out['id']
        return testcaseid
    def db_elements_are_not_equal(self, selectStatement, firstAliasOrIndex,
                                  secondAliasOrIndex):
        """
        Checks that the results of the query performed on two
        databases are not equal.
        If the fetched elements are equal, an AssertionError
        is raised.

        *Arguments:*
            - selectStatement: string, sql select statement.
            - firstAliasOrIndex: string or int, alias or index of \
            first database.
            - secondAliasOrIndex: string or int, alias or index of \
            second database.

        *Return:*
            - None

        *Examples:*
        | DB Elements Are Not Equal | select name, surname from employee | \
        SomeCompanyDB1 | SomeCompanyDB2 |
        """

        result = self._get_elements_from_two_db(selectStatement,
                                                firstAliasOrIndex,
                                                secondAliasOrIndex)

        assert set(result[0]) != set(result[1]), \
            ("Expected to have not equal elements but they equal. "
             "Query result:'%s'" % (result[0]))
        logger.debug("Results fetched from %s on %s and %s databases are"
                     " not equal." % (selectStatement, firstAliasOrIndex,
                                      secondAliasOrIndex))
Example #23
 def _cmp_bool(src, dst):
     try:
         if bool(src) == bool(dst):
             return True
     except:
         logger.debug("_cmp_bool: expect bool but get %s" % str(type(src)))
         logger.debug("_cmp_bool: Can not convert to bool: %s" % str(src))
     return False
 def run_ansible_extended_params(self, paramDict):
     """
     Run Ansible by passing Runner a dict of parameters
     @param paramDict:
     """
     result = ansible.runner.Runner(**paramDict).run()
     logger.debug(result)
     return result
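A quick usage sketch for the keyword above, assuming `lib` is an instance of the library class; the parameter names follow the pre-2.0 ansible.runner.Runner constructor and the values here are hypothetical.

params = {
    'module_name': 'ping',    # Ansible module to run
    'pattern': 'webservers',  # host pattern from the inventory
    'forks': 5,               # number of parallel worker processes
}
result = lib.run_ansible_extended_params(params)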
Example #25
 def Create_Bucket(self, bucket):
     """
     Creates a bucket
     
     _bucket_: name of bucket to create
     """
     logger.debug("Creating bucket %s" % bucket)
     self._conn.create_bucket(bucket)
Example #26
 def user_should_not_have_permissions_in_xroads7_web_service(self, user_login_id, *permissions):
     all_permission = self.clt.service.getAllUserPerms(user_login_id)
     logger.debug("All permissions: %s " % ', '.join(all_permission))
     actually_in = Xroads7WS.none_in_long(list(permissions), all_permission)
     logger.debug("Permissions not expected but assigned : %s " % ', '.join(actually_in))
     if len(actually_in) > 0:
         raise AssertionError("User %s should NOT have any of permissions %s. But actually %s are available"
                              % (user_login_id, list(permissions), actually_in))
Example #27
 def user_should_have_permissions_in_xroads7_web_service(self, user_login_id, *permissions):
     all_permission = self.clt.service.getAllUserPerms(user_login_id)
     logger.debug("All permissions: %s " % ', '.join(all_permission))
     actually_not_in = Xroads7WS.all_in_long(list(permissions), all_permission)
     logger.debug("Permissions expected but missing: %s " % ', '.join(actually_not_in))
     if len(actually_not_in) > 0:
         raise AssertionError("User %s should have these permissions %s. But actually %s are not available"
                              % (user_login_id, list(permissions), actually_not_in))
 def _normalize(self, elements):
     # Apparently IEDriver has returned invalid data earlier, and recently
     # ChromeDriver has sometimes returned None:
     # https://github.com/SeleniumHQ/selenium/issues/4555
     if not isinstance(elements, list):
         logger.debug("WebDriver find returned %s" % elements)
         return []
     return elements
Example #29
    def __init__(self, display_size=(1280, 1024)):
        logger.debug(" ... in ext_robot constructor ... ")
        if sys.platform.startswith("linux"):
            from pyvirtualdisplay import Display

            self.display = Display(visible=0, size=display_size)
        else:
            self.display = None
Example #30
 def _eval_math_expr(src, dst):
     if not isinstance(src, (int, long, float)):
         logger.debug("_eval_math_expr: receives src non-int/long/float: %s" % str(src))
         return False
     for t in (int, long, float):
         if isinstance(src, t) and eval(str(t(src)).join(dst.split('y'))):
             return True
     logger.debug("_eval_math_expr: '%s' not satisfied math expr '%s'" % (str(src), dst))
     return False
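For clarity, the eval() line above substitutes the numeric src for the placeholder 'y' in the expression string dst. A tiny illustration with hypothetical values:

src, dst = 5, 'y > 3'
# str(int(5)).join('y > 3'.split('y'))  ->  '5 > 3'
# eval('5 > 3')                         ->  True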
Example #31
 def _remotelib(self):
     if self.__remotelib is None:
         uri = BuiltIn().get_variable_value('${PABOTLIBURI}')
         logger.debug('PabotLib URI %r' % uri)
         self.__remotelib = Remote(uri) if uri else None
     return self.__remotelib
Example #32
    def __init__(self, ifname):
        """
        Instantiate a new DnsmasqDhcpServerWrapper object that observes a dnsmasq DHCP server via D-Bus
        """
        self._lease_database = DhcpServerLeaseList()
        self._ifname = ifname  # We store the interface but dnsmasq does not provide information concerning the interface in its D-Bus announcements... so we cannot use it for now
        # This also means that we can have only one instance of dnsmasq on the machine, or leases for all interfaces will mix in our database

        self._watched_macaddr = None  # The MAC address on which we are currently waiting for a lease to be allocated (or renewed)
        self.watched_macaddr_got_lease_event = threading.Event(
        )  # At initialisation, event is cleared

        self._dbus_loop = gobject.MainLoop()
        self._bus = dbus.SystemBus()
        wait_bus_owner_timeout = 5  # Wait for 5s to have an owner for the bus name we are expecting
        logger.debug('Going to wait for an owner on bus name ' +
                     DnsmasqDhcpServerWrapper.DNSMASQ_DBUS_NAME)
        while not self._bus.name_has_owner(
                DnsmasqDhcpServerWrapper.DNSMASQ_DBUS_NAME):
            time.sleep(0.2)
            wait_bus_owner_timeout -= 0.2
            if wait_bus_owner_timeout <= 0:  # We timeout without having an owner for the expected bus name
                raise Exception('No owner found for bus name ' +
                                DnsmasqDhcpServerWrapper.DNSMASQ_DBUS_NAME)

        logger.debug('Got an owner for bus name ' +
                     DnsmasqDhcpServerWrapper.DNSMASQ_DBUS_NAME)
        gobject.threads_init(
        )  # Allow the mainloop to run as an independent thread
        dbus.mainloop.glib.threads_init()

        dbus_object_name = DnsmasqDhcpServerWrapper.DNSMASQ_DBUS_OBJECT_PATH
        logger.debug('Going to communicate with object ' + dbus_object_name)
        self._dnsmasq_proxy = self._bus.get_object(
            DnsmasqDhcpServerWrapper.DNSMASQ_DBUS_SERVICE_INTERFACE,
            dbus_object_name)  # Required to attach to signals
        self._dbus_iface = dbus.Interface(
            self._dnsmasq_proxy, DnsmasqDhcpServerWrapper.
            DNSMASQ_DBUS_SERVICE_INTERFACE)  # Required to invoke methods

        logger.debug("Connected to D-Bus")
        self._dnsmasq_proxy.connect_to_signal(
            "DhcpLeaseAdded",
            self._handleDhcpLeaseAdded,
            dbus_interface=DnsmasqDhcpServerWrapper.
            DNSMASQ_DBUS_SERVICE_INTERFACE,
            message_keyword='dbus_message'
        )  # Handle the IpConfigApplied signal

        self._dnsmasq_proxy.connect_to_signal(
            "DhcpLeaseUpdated",
            self._handleDhcpLeaseUpdated,
            dbus_interface=DnsmasqDhcpServerWrapper.
            DNSMASQ_DBUS_SERVICE_INTERFACE,
            message_keyword='dbus_message'
        )  # Handle the IpConfigApplied signal

        self._dnsmasq_proxy.connect_to_signal(
            "DhcpLeaseDeleted",
            self._handleDhcpLeaseDeleted,
            dbus_interface=DnsmasqDhcpServerWrapper.
            DNSMASQ_DBUS_SERVICE_INTERFACE,
            message_keyword='dbus_message'
        )  # Handle the IpConfigApplied signal

        self._dbus_loop_thread = threading.Thread(
            target=self._loopHandleDbus
        )  # Start handling D-Bus messages in a background thread
        self._dbus_loop_thread.setDaemon(
            True
        )  # D-Bus loop should be forced to terminate when main program exits
        self._dbus_loop_thread.start()

        self._bus.watch_name_owner(
            DnsmasqDhcpServerWrapper.DNSMASQ_DBUS_NAME,
            self._handleBusOwnerChanged
        )  # Install a callback to run when the bus owner changes

        self._getversion_unlock_event = threading.Event(
        )  # Create a new threading event that will allow the GetVersion() D-Bus call below to execute within a timed limit

        self._getversion_unlock_event.clear()
        self._remote_version = ''
        self._dbus_iface.GetVersion(reply_handler=self._getVersionUnlock,
                                    error_handler=self._getVersionError)
        if not self._getversion_unlock_event.wait(
                4):  # We give 4s for slave to answer the GetVersion() request
            raise Exception('TimeoutOnGetVersion')
        else:
            logger.debug('dnsmasq version: ' + self._remote_version)

        self.reset()
    def __init__(self):
        if os.path.isfile(self.get_convert_path):
            logger.debug("Convert file exits path, path used :" +
                         self.get_convert_path)
        else:
            message = "Missing file convert.exe"
            logger.debug(message)

        if os.path.isfile(self.get_compare_path):
            logger.debug("Compare file exits , path used :" +
                         self.get_compare_path)
        else:
            message = "Missing file compare.exe"
            logger.debug(message)

        if os.path.isfile(self.get_identify_path):
            logger.debug("Identify file exits, path used :" +
                         self.get_identify_path)
        else:
            message = "Missing file identify.exe"
            logger.debug(message)
Example #34
 def _on_message_list(self, client, userdata, message):
     logger.debug('Received message: %s on topic: %s with QoS: %s' %
                  (str(message.payload), message.topic, str(message.qos)))
     self._messages.append(message.payload)
Example #35
 def __init__(self, channel, baudrate):
     self._tag = self.__class__.__name__ + ' '
     logger.debug(self._tag + "__init__ called")
     # PCAN Setting
     self._pcanbasic = PCANBasic()
     self._channel = CanComm.CHANNELS[channel]
     self._baudrate = CanComm.BAUDRATES[baudrate]
     if platform.system() == 'Windows':
         self._hwtype = CanComm.HWTYPES['ISA-82C200']
         self._ioport = CanComm.IOPORTS['0100']
         self._interrupt = CanComm.INTERRUPTS['3']
     else:
         self._hwtype = CanComm.HWTYPES['ISA-82C200']
         self._ioport = CanComm.IOPORTS['0100']
         self._interrupt = CanComm.INTERRUPTS['11']
     # CanMsg Setting
     self._tbox011 = None
     self._sas300 = None
     self._ems302 = None
     self._ems303 = None
     self._abs330 = None
     self._ems360 = None
     self._bcm350 = None
     self._bcm365 = None
     self._bcm383 = None
     self._ic34a = None
     self._ic367 = None
     self._ic380 = None
     self._ac378 = None
     self._peps341 = None
     self._tcu328 = None
     self._bcm401 = None
     # Timer Setting
     # self._tmr_read = None
     # Threading Setting
     self._lock = threading.RLock()
     self._alive = False
     self._transmitter_thread = None
     self._transmit_by_cycle = []
     self._can_matrix_dict = {
         'LF_DOOR_REQ': self._on_request_lf_door,  # left front door switch status
         'RF_DOOR_REQ': self._on_request_rf_door,  # right front door switch status
         'LR_DOOR_REQ': self._on_request_lr_door,  # left rear door switch status
         'RR_DOOR_REQ': self._on_request_rr_door,  # right rear door switch status
         'TRUNK_DOOR_REQ': self._on_request_trunk_door,  # trunk switch status
         'LF_WINDOW_REQ': self._on_request_lf_window,  # left front window switch status
         'RF_WINDOW_REQ': self._on_request_rf_window,  # right front window switch status
         'LR_WINDOW_REQ': self._on_request_lr_window,  # left rear window switch status
         'RR_WINDOW_REQ': self._on_request_rr_window,  # right rear window switch status
         'ROOF_WINDOW_REQ': self._on_request_roof_window,  # sunroof switch status
         'AC_REQ': self._on_request_ac,  # air conditioner switch status
         'FRONT_DEFROST_REQ': self._on_request_front_defrost,  # A/C front defrost switch status
         'REAR_DEFROST_REQ': self._on_request_rear_defrost,  # A/C rear defrost switch status
         'AC_TEMPERATURE_REQ': self._on_request_ac_temperature,  # A/C temperature
         'LOCK_DOOR_REQ': self._on_request_lock_door,  # driver's (left front) door lock switch status
         'ENGINE_REQ': self._on_request_engine,  # engine status
         'WIPER_REQ': self._on_request_wiper,  # wiper switch status
         'HANDBRAKE_REQ': self._on_request_handbrake,  # handbrake status
         'FRONT_DEFROST_STS': self._on_front_defrost_status,  # front defrost status
         'PEPS_POWER_REQ': self._on_request_peps_power,  # PEPS power status
         'GEAR_POS_REQ': self._on_request_gear_pos,  # gear position
         'LF_TIRE_PRESSURE_REQ': self._on_request_lf_tire_pressure,  # left front tire pressure
         'LR_TIRE_PRESSURE_REQ': self._on_request_lr_tire_pressure,  # left rear tire pressure
         'RF_TIRE_PRESSURE_REQ': self._on_request_rf_tire_pressure,  # right front tire pressure
         'RR_TIRE_PRESSURE_REQ': self._on_request_rr_tire_pressure,  # right rear tire pressure
         'BATTERY_VOLTAGE_REQ': self._on_request_battery_voltage,  # battery voltage
         'FUEL_LEVEL_REQ': self._on_request_fuel_level,  # remaining fuel level
         'REMAIN_MILEAGE_REQ': self._on_request_remain_mileage,  # remaining mileage
         'BELT_REQ': self._on_request_belt,  # seat belt fastened or not
         'FRONT_FOG_LAMP_REQ': self._on_request_front_fog_lamp,  # low-beam lamp status
         'REAR_FOG_LAMP_REQ': self._on_request_rear_fog_lamp,  # high-beam lamp status
         'G_VALUE_REQ': self._on_request_g_value,  # G value
         'LIGHT_INTENSITY_REQ': self._on_request_light_intensity,  # light intensity
         'CURR_FUEL_CONSUMPTION_REQ': self._on_request_curr_fuel_consumption,  # instantaneous fuel consumption
         'CURR_SPEED_REQ': self._on_request_curr_speed,  # current speed
         'ENGINE_SPEED_REQ': self._on_request_engine_speed,  # current engine speed
         'STEERING_ANGLE_REQ': self._on_request_steering_angle,  # steering wheel angle, positive left / negative right
         'ACCELERATOR_PEDAL_ANGLE_REQ': self._on_request_accelerator_pedal_angle,  # accelerator pedal angle
         'BRAKE_PEDAL_ANGLE_REQ': self._on_request_brake_pedal_angle,  # brake pedal angle
         'CLUTCH_PEDAL_ANGLE_REQ': self._on_request_clutch_pedal_angle,  # clutch pedal angle
         'TOTAL_MILEAGE_REQ': self._on_request_total_mileage,  # total mileage
         # vehicle position
         # current tracking status
         'AVERAGE_FUEL_CONSUMPTION_REQ': self._on_request_average_fuel_consumption,  # average fuel consumption
     }
Example #36
        def pre_process_request(self):
            if len(list(self.request_headers.items())) > 0:
                logger.debug("Request headers:")
                for name, value in self.request_headers.items():
                    logger.debug("%s: %s" % (name, value))
            else:
                logger.debug("No request headers set")

            if self.request_body is None:
                logger.debug("No request body set")
            else:
                logger.debug("Request body:")
                logger.debug(self.request_body)
Example #37
    def _create_session(self, alias, url, headers, cookies, auth, timeout,
                        proxies, verify, debug, max_retries, backoff_factor,
                        disable_warnings, retry_status_list,
                        retry_method_list):

        logger.debug('Creating session: %s' % alias)
        s = session = requests.Session()
        s.headers.update(headers)
        s.auth = auth if auth else s.auth
        s.proxies = proxies if proxies else s.proxies

        try:
            max_retries = int(max_retries)
            retry_status_list = [int(x) for x in retry_status_list
                                 ] if retry_status_list else None
        except ValueError as err:
            raise ValueError("Error converting session parameter: %s" % err)

        if max_retries > 0:
            retry = Retry(total=max_retries,
                          backoff_factor=backoff_factor,
                          status_forcelist=retry_status_list,
                          method_whitelist=retry_method_list)
            http = requests.adapters.HTTPAdapter(max_retries=retry)
            https = requests.adapters.HTTPAdapter(max_retries=retry)

            # Replace the session's original adapters
            s.mount('http://', http)
            s.mount('https://', https)

        # Disable requests warnings. Useful when you have a large number of test cases;
        # otherwise you will observe a drastic increase in the size of the Robot log.html and output.xml files.
        if disable_warnings:
            logging.basicConfig(
            )  # you need to initialize logging, otherwise you will not see anything from requests
            logging.getLogger().setLevel(logging.ERROR)
            requests_log = logging.getLogger("requests")
            requests_log.setLevel(logging.ERROR)
            requests_log.propagate = True
            if not verify:
                requests.packages.urllib3.disable_warnings()

        # verify can be a Boolean or a String
        if isinstance(verify, bool):
            s.verify = verify
        elif isinstance(verify, str) or isinstance(verify, unicode):
            if verify.lower() == 'true' or verify.lower() == 'false':
                s.verify = self.builtin.convert_to_boolean(verify)
            else:
                # String for CA_BUNDLE, not a Boolean String
                s.verify = verify
        else:
            # not a Boolean nor a String
            s.verify = verify

        # these can't be passed into the Session anymore
        self.timeout = float(timeout) if timeout is not None else None
        self.cookies = cookies
        self.verify = verify if self.builtin.convert_to_boolean(
            verify) != True else None

        s.url = url

        # Enable http verbosity
        if int(debug) >= 1:
            self.debug = int(debug)
            httplib.HTTPConnection.debuglevel = self.debug

        self._cache.register(session, alias=alias)
        return session
Example #38
def my_keyword(arg):
    logger.debug('Got argument %s.' % arg)
    logger.info('<i>This</i> is a boring example.', html=True)
    logger.debug('<i>This</i> is a boring example.', html=True)
Example #39
    def completeRegister(self, completeUid):
        '''Complete register keyword.
        :param completeUid
        :return completeRegisterResp'''
        '''/ITAS/reg/registration/request/listPage.do
        get current userid'''

        path = '/ITAS/reg/registration/request/listPage.do'
        listPageUrl = _globalVariables.BASEURL + path
        listPageResp = _globalVariables.SESSION.get(listPageUrl)

        rightBoundary = 'currentUserUid\" value=\''
        leftBoundary = '\'/>'
        currentUserUid = _GloableKeywords.subString(listPageResp.text,
                                                    rightBoundary,
                                                    leftBoundary)
        '''search data'''
        searchDict = self.searchRegister(completeUid)

        completeRegisterData['parameter.requestUid'] = searchDict[
            'creg08RequestUid']
        completeRegisterData['taxpayerRequest.creg07RequestUid'] = searchDict[
            'creg08RequestUid']
        completeRegisterData[
            'registrationRequest.creg08RequestUid'] = searchDict[
                'creg08RequestUid']
        completeRegisterData[
            'registrationRequest.creg08RequestDate'] = searchDict[
                'creg08RequestDate']
        completeRegisterData[
            'registrationRequest.creg08CaptureOfficer'] = searchDict[
                'creg08CaptureOfficer']
        completeRegisterData['oldIdNumber'] = searchDict['creg08IdNumber']
        completeRegisterData['treg00NaturalPerson.creg00IdNo'] = searchDict[
            'creg08IdNumber']
        completeRegisterData[
            'registrationRequest.creg08Applicant'] = searchDict[
                'creg08Applicant']
        axBizProcInstId = searchDict['creg08AxBizTaskId']
        '''/ITAS/reg/registration/request/process.do'''

        path = '/ITAS/reg/registration/request/process.do'
        query ='axBizBackUrl=/ITAS/reg/registration/request/listPage.do' \
               '&axBizViewFlowFormPageUrl=com/cacss/itas/reg/registration.edit' \
               '&requestType=TXP_REG&requestUid='+completeUid+'&requestStatus=APPROVED' \
               '&axBizProcInstId='+axBizProcInstId+'&currentUserUid='+currentUserUid +\
               '&rnd='+str(random.random())

        processUrl = _globalVariables.BASEURL + path + '?' + query
        processResp = _globalVariables.SESSION.post(processUrl)
        leftBoundary = 'var reqProcId = "'
        rightBoundary = '";'
        axBizTaskId = _GloableKeywords.subString(processResp.text,
                                                 leftBoundary, rightBoundary)
        completeRegisterData[
            'treg02Individual.creg02IndividualUid'] = _GloableKeywords.subString(
                processResp.text, 'creg02IndividualUid" value="', '" />')
        completeRegisterData[
            'treg00NaturalPerson.creg00NaturalPersonUid'] = _GloableKeywords.subString(
                processResp.text, 'creg02IndividualUid" value="', '" />')
        completeRegisterData[
            'postalAddress.creg26AddressUid'] = _GloableKeywords.subString(
                processResp.text, 'postalAddress.creg26AddressUid" value="',
                '"/>')
        completeRegisterData[
            'parameter.creg01Tin'] = _GloableKeywords.subString(
                processResp.text, 'creg01Tin" value="', '"/>')
        completeRegisterData[
            'individualResidentialAddr.creg26AddressUid'] = _GloableKeywords.subString(
                processResp.text,
                'individualResidentialAddr.creg26AddressUid" value="', '"/>')
        completeRegisterData['creg01Tin'] = _GloableKeywords.subString(
            processResp.text, 'creg01Tin" value="', '"/>')
        completeRegisterData['creg01TaxpayerUid'] = _GloableKeywords.subString(
            processResp.text, 'creg01TaxpayerUid" value="', '"/>')
        completeRegisterData['creg01PostAddrUid'] = _GloableKeywords.subString(
            processResp.text, 'creg01PostAddrUid" value="', '"/>')
        completeRegisterData[
            'creg01PhysicalAddrUid" value="'] = _GloableKeywords.subString(
                processResp.text, 'creg01PhysicalAddrUid" value="', '"/>')
        '''taxtype data'''

        path = '/ITAS/reg/taxtype/config.do'
        query = 'taxpayerCategory=IND&taxpayerType=INDSP&requestUid=' + completeUid + '&branchUid=&requestStatus=APPROVED&requestType=TXP_REG'
        configTaxTypeUrl = _globalVariables.BASEURL + path + '?' + query
        configTaxTypeData = {
            'axgrid_listcols':
            'cret09TaxTypeName,registeredTaxType.creg06EffectiveDate,registeredTaxType.creg06Fin,registeredTaxType.creg06Status,registeredTaxType.taxpayer.creg06IsRegular,registeredTaxType.creg06ExpiryDate,registeredTaxType.taxpayer.functions',
            'axgridid': 'taxtypeview',
            'taxtypeview_pageSize': 10,
            'taxtypeview_sort': '',
            'taxtypeview_startIndex': 0
        }
        configTaxTypeResp = _globalVariables.SESSION.post(
            configTaxTypeUrl, configTaxTypeData)

        logger.debug('configTaxTypeResp.text' + configTaxTypeResp.text)

        convertResult = _GloableKeywords.parseRespToDictOrCuple(
            configTaxTypeResp.text)
        completeRegisterData[
            'registeredTaxType[0].cret09TaxtypeUid'] = convertResult[
                'cret09TaxtypeUid']
        completeRegisterData[
            'registeredTaxType[0].creg06EffectiveDate'] = convertResult[
                'registeredTaxType']['creg06EffectiveDate']
        completeRegisterData[
            'registeredTaxType[0].creg06RegistTaxtypeUid'] = convertResult[
                'registeredTaxType']['creg06RegistTaxtypeUid']
        completeRegisterData['registeredTaxType[0].creg06Fin'] = convertResult[
            'registeredTaxType']['creg06Fin']
        '''attachment data'''

        path = '/ITAS/reg/attachment/list.do'
        query = 'entityId='+completeUid+'&attachmentTable=TREG05_ATTACHMENT&requestUid='+completeUid+\
                '&requestStatus=APPROVED&requestType=TXP_REG&taxpayerCategory=TAXPAYER_TYPE,ID_TYPE,TAX_TYPE'
        attachmentTaxTypeUrl = _globalVariables.BASEURL + path + '?' + query
        attachmentTypeData = {
            'attachmentview_pageSize': 10,
            'attachmentview_sort': '',
            'attachmentview_startIndex': 0,
            'axgrid_listcols':
            'ccmn00DocumentTypecode,ccmn00IsUpload,ccmn00UploadDate,ccmn00Note,ccmn00ArchivedDate',
            'axgridid': 'attachmentview'
        }
        attachmentTypeResp = _globalVariables.SESSION.post(
            attachmentTaxTypeUrl, attachmentTypeData)

        attachmentResult = _GloableKeywords.parseRespToDictOrCuple(
            attachmentTypeResp.text)
        for i in range(3):
            completeRegisterData[
                'attachments[' + str(i) +
                '].ccmn00IsUpload'] = attachmentResult[i]['ccmn00IsUpload']
            completeRegisterData[
                'attachments[' + str(i) +
                '].ccmn00Barcode'] = attachmentResult[i]['ccmn00Barcode']
            completeRegisterData[
                'attachments[' + str(i) +
                '].ccmn00AttachUid'] = attachmentResult[i]['ccmn00AttachUid']
            completeRegisterData[
                'attachments[' + str(i) +
                '].ccmn00DocumentTypecode'] = attachmentResult[i][
                    'ccmn00DocumentTypecode']
            completeRegisterData[
                'attachments[' + str(i) +
                '].ccmn00Entity'] = attachmentResult[i]['ccmn00Entity']
            completeRegisterData[
                'attachments[' + str(i) +
                '].mandatory'] = attachmentResult[i]['mandatory']
            completeRegisterData[
                'attachments[' + str(i) +
                '].ccmn00IsConf'] = attachmentResult[i]['ccmn00IsConf']
            completeRegisterData[
                'attachments[' + str(i) +
                '].ccmn00UploadDate'] = attachmentResult[i]['ccmn00UploadDate']
            completeRegisterData['attachments[' + str(i) +
                                 '].ccmn00ArchivedDate'] = attachmentResult[i][
                                     'ccmn00ArchivedDate']
        '''account data'''

        path = '/ITAS/reg/account/list.do'
        query = 'taxpayerUid=&requestStatus=APPROVED&requestType=TXP_REG&requestUid=' + completeUid
        accountUrl = _globalVariables.BASEURL + path + '?' + query
        accountData = {
            'accountview_pageSize': 10,
            'accountview_sort': '',
            'accountview_startIndex': 0,
            'axgrid_listcols':
            'creg04BankName,creg04BranchName,creg04BranchNo,creg04AccountType,creg04AccountNo,creg04Holder,creg04IsShared',
            'axgridid': 'accountview'
        }
        accountResp = _globalVariables.SESSION.post(accountUrl, accountData)

        accountResult = _GloableKeywords.parseRespToDictOrCuple(
            accountResp.text)
        completeRegisterData['accounts[0].creg04AccountNo'] = accountResult[
            'creg04AccountNo']
        completeRegisterData['accounts[0].creg04Holder'] = accountResult[
            'creg04Holder']
        completeRegisterData['accounts[0].creg04BankName'] = accountResult[
            'creg04BankName']
        completeRegisterData['accounts[0].creg04AccountType'] = accountResult[
            'creg04AccountType']
        completeRegisterData['accounts[0].creg04BranchName'] = accountResult[
            'creg04BranchName']
        completeRegisterData['accounts[0].creg04BranchNo'] = accountResult[
            'creg04BranchNo']
        completeRegisterData['oldFirstName'] = accountResult['creg04Holder']
        completeRegisterData['parameter.account_holder_name'] = accountResult[
            'creg04Holder']
        completeRegisterData[
            'treg00NaturalPerson.creg00FirstName'] = accountResult[
                'creg04Holder']
        completeRegisterData[
            'treg00NaturalPerson.creg00Email'] = accountResult[
                'creg04Holder'] + '@qq.com'
        completeRegisterData['accounts[0].creg04AccountUid'] = accountResult[
            'creg04AccountUid']
        '''deal with lastname'''

        completeRegisterData['oldLastName'] = searchDict[
            'creg08Applicant'].split(accountResult['creg04Holder'])[1][1:]
        completeRegisterData[
            'treg00NaturalPerson.creg00LastName'] = searchDict[
                'creg08Applicant'].split(accountResult['creg04Holder'])[1][1:]
        '''submit.do'''

        path = '/ITAS/UserFlow/submit.do'
        query = 'axBizIsNew=false&axBizIsFlow=true&axBizFlowAction=complete&axBizTaskId='+axBizTaskId+ \
                '&axBizEntityType=com.cacss.itas.reg.entity.Taxpayer'
        completeUrl = _globalVariables.BASEURL + path + '?' + query
        completeResp = _globalVariables.SESSION.post(completeUrl,
                                                     completeRegisterData)

        return completeResp.text
Example #40
    def get_elements_from_dom_content(*args, **kwargs):
        try:
            args, kwargs, locator = _equal_sign_handler(args, kwargs, fn)
            msg = None
            params = signature(fn).parameters
            args, kwargs = _args_to_kwargs(params, args, kwargs)
            timeout = get_timeout(**kwargs)
            logger.debug('Timeout is {} sec'.format(timeout))

            try:
                if 'go_to' not in str(fn) and 'switch_window' not in str(fn):
                    frame.wait_page_loaded()
            except UnexpectedAlertPresentException as e:
                if not CONFIG["HandleAlerts"]:
                    raise QWebUnexpectedAlert(str(e))
                logger.debug('Got {}. Trying to retry..'.format(e))
                time.sleep(SHORT_DELAY)
            start = time.time()
            while time.time() < timeout + start:
                try:
                    kwargs['timeout'] = float(timeout + start - time.time())
                    config.set_config('FrameTimeout',
                                      float(timeout + start - time.time()))
                    return fn(*args, **kwargs)
                except (QWebUnexpectedConditionError, QWebTimeoutError) as e:
                    logger.warn('Got {}'.format(e))
                except (InvalidSelectorException, NoSuchElementException,
                        QWebElementNotFoundError,
                        UnexpectedAlertPresentException,
                        QWebStalingElementError,
                        StaleElementReferenceException,
                        QWebIconNotFoundError) as e:
                    time.sleep(SHORT_DELAY)
                    logger.debug(
                        'Got exception: {}. Trying to retry..'.format(e))
                except InvalidSessionIdException:
                    CONFIG.set_value("OSScreenshots", True)
                    raise QWebBrowserError(
                        "Browser session lost. Did browser crash?")
                except (WebDriverException, QWebDriverError) as e:
                    if any(s in str(e) for s in FATAL_MESSAGES):
                        CONFIG.set_value("OSScreenshots", True)
                        raise QWebBrowserError(e)
                    logger.info(
                        'From timeout decorator: Webdriver exception. Retrying..'
                    )
                    logger.info(e)
                    time.sleep(SHORT_DELAY)
                    err = QWebDriverError
                    msg = e
                except QWebValueError as ve:
                    logger.debug(
                        'Got QWebValueError: {}. Trying to retry..'.format(ve))
                    err = QWebValueError
                    msg = ve
                    time.sleep(SHORT_DELAY)
            if msg:
                raise err(msg)
            if 'count' in str(fn):
                return 0
            if 'is_text' in str(fn) or 'is_no_text' in str(fn):
                return False
            raise QWebElementNotFoundError(
                'Unable to find element for locator {} in {} sec'.format(
                    locator, timeout))
        except QWebSearchingMode:
            pass
Example #41
 def _encode_message(self, message_fields, header_fields):
     msg = self._get_message_template().encode(message_fields,
                                               header_fields)
     logger.debug('%s' % repr(msg))
     return msg
Example #42
 def _log_start(self, command, config):
     if is_list_like(command):
         command = self.join_command_line(command)
     logger.info('Starting process:\n%s' % system_decode(command))
     logger.debug('Process configuration:\n%s' % config)
Example #43
def open_browser(executable_path="chromedriver",
                 chrome_args=None,
                 desired_capabilities=None,
                 **kwargs):
    """Open Chrome browser instance and cache the driver.

    Parameters
    ----------
    executable_path : str (Default "chromedriver")
        path to the executable. If the default is used it assumes the
        executable is in the $PATH.
    port : int (Default 0)
        port you would like the service to run, if left as 0, a free port will
        be found.
    desired_capabilities : dict (Default None)
        Dictionary object with non-browser specific capabilities only, such as
        "proxy" or "loggingPref".
    chrome_args : list of str (Default None)
        Optional Chrome arguments to modify browser settings.
    """
    options = Options()
    logger.debug('opt: {}'.format(options))

    # To re-use an existing browser session, set the variable
    # BROWSER_REUSE_ENABLED to True. When enabled, the web driver
    # connection details are written to an argument file, which makes it
    # possible to re-attach to the current Chrome session later.
    #
    # When the variables BROWSER_SESSION_ID and BROWSER_EXECUTOR_URL are
    # set from that argument file, OpenBrowser connects to the existing
    # session instead of opening a new one; the new Remote WebDriver is
    # created in headless mode.
    chrome_path = kwargs.get(
        'chrome_path', None) or BuiltIn().get_variable_value('${CHROME_PATH}')
    if chrome_path:
        options.binary_location = chrome_path
    browser_reuse, session_id, executor_url = check_browser_reuse(**kwargs)
    logger.debug('browser_reuse: {}, session_id: {}, executor_url:  {}'.format(
        browser_reuse, session_id, executor_url))
    if browser_reuse and session_id and executor_url:
        options.add_argument("headless")

        # Suppress the "DevTools listening on ..." message from being printed
        options.add_experimental_option('excludeSwitches', ['enable-logging'])

        driver = Remote(command_executor=executor_url,
                        desired_capabilities=options.to_capabilities())
        BuiltIn().set_global_variable('${BROWSER_REMOTE_SESSION_ID}',
                                      driver.session_id)
        driver.session_id = session_id
    else:
        if user.is_root():
            options.add_argument("no-sandbox")
        if chrome_args:
            if any('headless' in _.lower() for _ in chrome_args):
                CONFIG.set_value('Headless', True)
            for item in chrome_args:
                options.add_argument(item.lstrip())
        # options.add_argument("start-maximized")
        options.add_argument("--disable-notifications")
        if 'headless' in kwargs:
            CONFIG.set_value('Headless', True)
            options.add_argument("headless")
        if 'prefs' in kwargs:
            if isinstance(kwargs.get('prefs'), dict):
                prefs = kwargs.get('prefs')
            else:
                prefs = util.prefs_to_dict(kwargs.get('prefs').strip())
            options.add_experimental_option('prefs', prefs)

        driver = Chrome(BuiltIn().get_variable_value('${CHROMEDRIVER_PATH}')
                        or executable_path,
                        options=options,
                        desired_capabilities=desired_capabilities)

        browser_reuse_enabled = util.par2bool(
            BuiltIn().get_variable_value('${BROWSER_REUSE_ENABLED}')) or False
        if browser_reuse_enabled:
            # Write WebDriver session info to RF arguments file for re-use
            write_browser_session_argsfile(driver.session_id,
                                           driver.command_executor._url)  # pylint: disable=protected-access

            # Clear possible existing global values
            BuiltIn().set_global_variable('${BROWSER_SESSION_ID}', None)
            BuiltIn().set_global_variable('${BROWSER_EXECUTOR_URL}', None)

    browser.cache_browser(driver)
    return driver
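A hypothetical usage sketch of the keyword above (the argument values are illustrations, not defaults): extra Chrome switches go in chrome_args, and Chrome preferences can be passed via prefs either as a dict or as a string parsed by util.prefs_to_dict.

driver = open_browser(
    executable_path="chromedriver",
    chrome_args=["--headless", "--window-size=1920,1080"],
    prefs={"download.default_directory": "/tmp/downloads"},
)
# When ${BROWSER_REUSE_ENABLED} is True, this session id and the executor URL
# are written to the argument file so a later run can re-attach to the browser.
print(driver.session_id)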
Example #44
 def _log_response(method, response):
     logger.debug('%s Response : status=%s, reason=%s\n' %
                  (method.upper(), response.status_code, response.reason) +
                  response.text)
Example #45
def LoadYaml(filepath):
    """Load YAML file and return dict"""
    with open(filepath, 'r') as f:
        doc = yaml.safe_load(f)
    logger.debug(doc)
    return doc
Example #46
 def _normalize_result(self, elements):
     if not isinstance(elements, list):
         logger.debug("WebDriver find returned %s" % elements)
         return []
     return elements
Example #47
def GetPID():
    """Returns PID of this process"""
    mypid = os.getpid()
    logger.debug("My PID is %d" % mypid)
    return mypid
Example #48
def payload_is_superset_of_expected(payload, expected, **kwargs):
    """
    Checks that the expected result is contained in the given payload.
    In other words: the payload has at least everything that is expected
    AND may contain additional content beyond it.

    # DOCTEST EXAMPLES

        ## TEST_01
        >>> a = '{"1": "one", "2": [1,2,3], "3": 3}'
        >>> b = '{"1": "one", "2": [1,2,3]}'
        >>> payload_is_superset_of_expected(a, b)
        True

        ## TEST_02
        >>> a = '{"1": "one", "2": [1]}'
        >>> b = '{"1": "one", "2": [1,2,3]}'
        >>> payload_is_superset_of_expected(a, b)
        Traceback (most recent call last):
        jsonlib.JsonCompareError: Actual payload doesn't meet expectation!

    TODO: create a dictionary with proper names for relevant changes
    # changes that are relevant / test should FAIL
    # change                     meaning
    #   type_changes               expected vs. got
    #   values_changed             expected vs. got
    #      new_value                  expected value
    #      old_value                  received value
    #   repetition_change          expected vs. got
    #   dictionary_item_added      missing in response but was expected
    #   iterable_item_added        missing in response but was expected

    # changes that can be ignored / test should PASS
    # change                     meaning
    #   dictionary_item_removed    is in the response but was NOT expected
    #   set_item_removed           is in the response but was NOT expected
    #   iterable_item_removed      is in the response but was NOT expected
    #   unprocessed                should be handled separately if it occurs
    """

    logger.debug(f"type(payload): {type(payload)}")
    logger.debug(f"type(expected): {type(expected)}")

    diff = compare_jsons(payload, expected, **kwargs)

    changes = [
        "type_changes",
        "values_changed",
        "repetition_change",
        "dictionary_item_added",
        "iterable_item_added",
        "set_item_removed",
        "dictionary_item_removed",
        "iterable_item_removed",
    ]

    changes_to_ignore = [
        "set_item_removed",
        "dictionary_item_removed",
        "iterable_item_removed",
    ]

    critical_changes = [
        "type_changes",
        "values_changed",
        "repetition_change",
        "dictionary_item_added",
        "iterable_item_added",
    ]

    if diff != {}:
        for change in changes:
            # check whether the change is critical or can be ignored
            if change in critical_changes and change in diff:
                logger.error("Critical changes detected!")
                raise JsonCompareError("Actual payload doesn't meet expectation!")

            elif change in changes_to_ignore and change in diff:
                logger.info("Changes detected, but not relevant.")
                return True
    else:
        logger.info("NO difference between payloads.")
        return True
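To see why the *_removed categories are treated as ignorable here, note the direction of the comparison: compare_jsons(payload, expected) diffs the payload against the expected document, so extra content in the payload shows up as removed items, while content that is expected but missing shows up as added items. A small illustration with the deepdiff package (the values are hypothetical):

from deepdiff import DeepDiff

payload = {"1": "one", "2": [1, 2, 3], "3": 3}    # extra key "3"
expected = {"1": "one", "2": [1, 2, 3]}

diff = DeepDiff(payload, expected, ignore_order=True)
print("dictionary_item_removed" in diff)   # True  -> extra payload content, ignorable
print("dictionary_item_added" in diff)     # False -> nothing expected is missing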
Example #49
 def _log_response(method, response):
     # TODO big responses should be truncated to avoid huge logs
     logger.debug('%s Response : status=%s, reason=%s\n' %
                  (method.upper(), response.status_code, response.reason) +
                  response.text)
Example #50
def compare_jsons(
    json_1,
    json_2,
    exclude_paths=None,
    ignore_order=True,
    report_repetition=False,
    ignore_string_case=False,
    ignore_type_subclasses=False,
    verbose_level=2,
    **kwargs,
):
    """
    :json_1: valid JSON string \n
    :json_2: valid JSON string \n

    # DOCTEST EXAMPLES

        ## TEST_01
        >>> a = '{"1": "one", "2": 2, "3": null}'
        >>> b = '{"1": "one", "2": 2, "3": null}'
        >>> compare_jsons(a, b)
        {}

        ## TEST_02
        >>> a = '{"1": "one", "2": 2}'
        >>> b = '{"1": "one", "2": 22}'
        >>> compare_jsons(a, b, exclude_paths="root['2']")
        {}

        ## TEST_03
        >>> a = '{"1": "one"}'
        >>> b = '{"1": "ONE"}'
        >>> compare_jsons(a, b, ignore_string_case=True)
        {}
    """

    logger.debug("BEFORE TRY BLOCK")
    logger.debug("json_1 type: {}".format(type(json_1)))
    logger.debug("json_2 type: {}".format(type(json_2)))

    # if inputs are dictionaries take them as they are otherwise
    # try to convert to a python object (dict)
    if isinstance(json_1, dict):
        actual = json_1
    else:
        try:
            actual = json.loads(json_1)
        except (JSONDecodeError, TypeError) as error:
            raise JsonCompareError(f"Only VALID JSON strings accepted! ERROR: {error}")
    if isinstance(json_2, dict):
        expected = json_2
    else:
        try:
            expected = json.loads(json_2)
        except (JSONDecodeError, TypeError) as error:
            raise JsonCompareError(f"Only VALID JSON strings accepted! ERROR: {error}")

    logger.debug("AFTER TRY BLOCK")
    logger.debug(f"ACTUAL: {type(actual)}")
    logger.debug(f"EXPECTED: {type(expected)}")

    logger.debug(f"EXCLUDED PATHS: {exclude_paths} - (type: {type(exclude_paths)})")
    logger.debug(f"IGNORE ORDER: {ignore_order} - (type: {type(ignore_order)})")
    logger.debug(
        f"IGNORE_STRING_CASE: {ignore_string_case} - (type: {type(ignore_string_case)})"
    )
    logger.debug(f"IGNORE_TYPE_SUBCLASSES: {ignore_type_subclasses}")
    logger.debug(f"VERBOSE_LEVEL: {verbose_level}")
    logger.debug(f"KWARGS: {kwargs}")

    diff = DeepDiff(
        actual,
        expected,
        exclude_paths=exclude_paths,
        ignore_order=ignore_order,
        report_repetition=report_repetition,
        ignore_string_case=ignore_string_case,
        ignore_type_subclasses=ignore_type_subclasses,
        verbose_level=verbose_level,
        **kwargs,
    )

    # logger.debug(f"DIFF: {diff}")

    changes = [
        "type_changes",
        "values_changed",
        "repetition_change",
        "dictionary_item_added",
        "iterable_item_added",
        "dictionary_item_removed",
        "iterable_item_removed",
    ]

    change_counter = 0
    for change in changes:
        if change in diff:
            change_counter += 1
            logger.debug(f"{change_counter}. CHANGE ({change}): \n{diff[change]}\n\n")

    return diff.to_dict()
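A brief illustration of the default ignore_order=True behaviour of this wrapper (hypothetical values): differences that are only a matter of list ordering produce an empty diff, while passing ignore_order=False reports them as changed values.

print(compare_jsons('{"x": [1, 2, 3]}', '{"x": [3, 2, 1]}'))                        # {}
strict = compare_jsons('{"x": [1, 2, 3]}', '{"x": [3, 2, 1]}', ignore_order=False)
print("values_changed" in strict)                                                   # True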
Example #51
 def _on_message(self, client, userdata, message):
     logger.debug('Received message: %s on topic: %s with QoS: %s' %
                  (str(message.payload), message.topic, str(message.qos)))
     self._verified = re.match(self._payload, str(message.payload))
Example #52
def Check_JSON(j):
    d = demjson.decode(j, strict=True)
    logger.debug('got json %s' % d)
    assert len(d) > 0
    assert 'error' not in d
    return d
Example #53
 def debug(self, msg, html=False):
     logger.debug(msg, html)
Example #54
def debug(msg: Any, html=False):
    logger.debug(msg, html)
Example #55
 def on_request(self, item, data):
     logger.debug(self._tag + ": on_request ")
     self._can_matrix_dict[item](data)
     return True
Example #56
def _read_file(path, title):
    with open(path) as file:
        content = file.read()
    logger.debug('%s:\n%s' % (title, content))
    return content.splitlines()
Example #57
    def end_test(self, data, _result):
        """ Called when a test case ends. """
        logger.debug('end_test')

        result = Result(tcid=data.name)
        result.campaign = os.environ.get('JOB_NAME', data.parent.longname)
        if _result.message:
            result.execution.note = _result.message
        result.execution.duration = float(_result.elapsedtime)
        result.execution.environment.framework.name = __robot_info__.project_name
        result.execution.environment.framework.version = __robot_info__.version
        result.execution.sut.commit_id = os.environ.get('GIT_COMMIT', "")
        result.execution.sut.branch = os.environ.get('GIT_BRANCH', "")
        result.job.id = os.environ.get('BUILD_TAG', str(uuid.uuid1()))
        try:
            result.execution.verdict = _result.status.lower()
        except Exception as error:  # pylint: disable=broad-except
            logger.warn(error)
            result.execution.verdict = 'inconclusive'

        profiling = dict(
            suite=dict(duration=_result.parent.elapsedtime,
                       numtests=_result.parent.test_count),
            generated_at=datetime.datetime.now().isoformat(),
        )
        if _result.tags:
            profiling['tags'] = []
        for tag in _result.tags:
            profiling['tags'].append(tag)
        result.execution.profiling = profiling

        for key in self._variables:
            value = self._variables[key]
            if key in ('LOG_FILE', 'OUTPUT_FILE', 'REPORT_FILE'):
                if not self._store_logs:
                    logger.debug('Ignore logs')
                    continue
                if not os.path.exists(value):
                    logger.debug(f"{key} file {value} not exists")
                    continue
                file_stats = os.stat(value)
                if file_stats.st_size / (1024 * 1024) > 1.0:
                    logger.debug('avoid uploading huge log files')
                    continue
                file = File()
                file.mime_type = mimetypes.guess_type(value)[0]
                file.name = os.path.basename(value)
                with open(value, "r") as fobj:
                    content = fobj.read()
                file.set_data(content)
                continue

        dut = None
        for key in self._metadata:
            value = self._metadata[key]

            # Dut
            if key.startswith('DUT') and not dut:
                dut = Dut()
                dut.type = 'hw'
                result.execution.append_dut(dut)
            if key == 'DUT_TYPE':
                dut.type = value
            if key == 'DUT_SERIAL_NUMBER':
                dut.serial_number = value
            # elif key == 'DUT_PLATFORM':
            #   dut.platform = value
            elif key == 'DUT_VERSION':
                dut.ver = value
            elif key == 'DUT_VENDOR':
                dut.vendor = value
            elif key == 'DUT_MODEL':
                dut.model = value
            elif key == 'DUT_PROVIDER':
                dut.provider = Provider()
                dut.provider.name = value

            # Sut
            elif key == 'SUT_COMPONENT':
                result.execution.sut.append_cut(value)
            elif key == 'SUT_FEATURE':
                result.execution.sut.append_fut(value)
            elif key == 'SUT_COMMIT_ID':
                result.execution.sut.commit_id = value
            elif key == 'SUT_BRANCH':
                result.execution.sut.branch = value

        self._results.append(result)
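A hook with this end_test(data, result) signature matches Robot Framework's listener interface version 3, registered with the --listener option. A minimal, hypothetical listener using the same hook (not the reporter class shown above) could look like this:

# Registered with e.g.:  robot --listener MyListener.py tests/
class MyListener:
    ROBOT_LISTENER_API_VERSION = 3

    def __init__(self):
        self.results = []

    def end_test(self, data, result):
        # `data` describes the test case, `result` carries its outcome.
        self.results.append((data.name, result.status, result.elapsedtime))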
Example #58
def GetType(oftype):
    """Returns type of object"""
    mtype = type(oftype)
    logger.debug("Get type: %s" % mtype)
    return mtype
Example #59
 def _my_id(self):
     if self.__my_id is None:
         my_id = BuiltIn().get_variable_value('${CALLER_ID}')
         logger.debug('Caller ID is  %r' % my_id)
         self.__my_id = my_id if my_id else None
     return self.__my_id
Example #60
 def emit(self, record):
     message, error = self._get_message(record)
     method = self._get_logger_method(record.levelno)
     method(message)
     if error:
         logger.debug(error)
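Such an emit bridge is typically installed as a standard logging handler so that records from plain `logging` calls end up in the Robot Framework log. A possible setup (assumed names, simplified to a single log level) is:

import logging

from robot.api import logger


class RobotFrameworkHandler(logging.Handler):
    def emit(self, record):
        # A fuller handler would pick logger.info/warn/error from record.levelno,
        # as the example above does via _get_logger_method().
        logger.debug(self.format(record))


lib_logger = logging.getLogger("mylib")
lib_logger.setLevel(logging.DEBUG)
lib_logger.addHandler(RobotFrameworkHandler())
lib_logger.debug("now visible in the Robot Framework log")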