Example no. 1
def linear_solver(self, old_par, A, b, npar, na):
    """
    Invert for the moment tensor, either unconstrained or with a
    zero-trace constraint (enforced via a Lagrange multiplier).
    """
    AA = np.zeros([na, na])
    bb = np.zeros(na)
    AA[0:npar, 0:npar] = A
    bb[0:npar] = b
    if self.config.zero_trace:
        # Bordered system: the extra row/column forces the trace of the
        # updated moment tensor (its first three components) to zero.
        bb[na - 1] = - np.sum(old_par[0:3])
        AA[0:6, na - 1] = np.array([1, 1, 1, 0, 0, 0])
        AA[na - 1, 0:6] = np.array([1, 1, 1, 0, 0, 0])
        AA[na - 1, na - 1] = 0.0
    try:
        dm = np.linalg.solve(AA, bb)
    except np.linalg.LinAlgError:
        logger.error('Matrix is singular (LinAlgError)')
        raise ValueError("Check Matrix Singularity")
    new_par = old_par[0:npar] + dm[0:npar]
    return new_par
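
The `zero_trace` branch above builds a bordered (Lagrange-multiplier) system: the extra row and column force the sum of the first three updated components, i.e. the trace of the moment tensor, to zero. A minimal self-contained sketch of the same idea, with a made-up symmetric positive-definite `A` and random `b`/`old_par` standing in for the real normal equations:

import numpy as np

npar, na = 6, 7                       # na = npar + 1: one extra unknown for the Lagrange multiplier
rng = np.random.default_rng(0)
A = rng.normal(size=(npar, npar))
A = A @ A.T + npar * np.eye(npar)     # synthetic SPD matrix standing in for the normal equations
b = rng.normal(size=npar)
old_par = rng.normal(size=npar)       # synthetic current moment-tensor estimate

AA = np.zeros((na, na))
bb = np.zeros(na)
AA[:npar, :npar] = A
bb[:npar] = b
# Constraint row/column: sum(dm[:3]) must equal -sum(old_par[:3]),
# so the updated tensor old_par + dm has zero trace.
AA[:npar, na - 1] = [1, 1, 1, 0, 0, 0]
AA[na - 1, :npar] = [1, 1, 1, 0, 0, 0]
bb[na - 1] = -np.sum(old_par[:3])

dm = np.linalg.solve(AA, bb)
new_par = old_par + dm[:npar]
print(np.sum(new_par[:3]))            # ~0 up to floating-point error
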
import logging
import os
import subprocess

logger = logging.getLogger(__name__)


def run_command_with_output(command_description, command):
    process_output = ""

    logger.info("Starting process: " + command_description)
    logger.info("Running command: " + command)

    with subprocess.Popen(command,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          bufsize=1,
                          universal_newlines=True,
                          shell=True) as process:
        # Stream stdout line by line, then collect whatever was written to stderr.
        for line in process.stdout:
            process_output += line
        errors = os.linesep.join(process.stderr.readlines())
    # The context manager waits for the process, so returncode is set here.
    if process.returncode != 0:
        logger.error(command_description + " failed!" + os.linesep + errors)
        raise subprocess.CalledProcessError(process.returncode, process.args)
    else:
        logger.info(command_description + " completed successfully")
    return process_output
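
A usage sketch for the helper above; the shell command and description are arbitrary examples, and any non-zero exit status surfaces as `subprocess.CalledProcessError`:

import subprocess

try:
    # Hypothetical invocation; shell=True in the helper means any shell command string works.
    listing = run_command_with_output("list working directory", "ls -la")
    print(listing)
except subprocess.CalledProcessError as err:
    print("command failed with exit code", err.returncode)
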
Example no. 3
    def add_to_cluster(self, cluster, res):
        """ Specification:
          0. Search and handle `master` tag in `cluster_name`
          1. Imports `cluster_name`, seeks and sets (`install` xor `setup`) and (`serve` or `start`) callables
          2. Installs `cluster_name`
          3. Serves `cluster_name`
        """
        args = cluster['args'] if 'args' in cluster else tuple()

        kwargs = update_d({
            'domain': self.dns_name,
            'node_name': self.node_name,
            'public_ipv4': self.node.public_ips[-1],
            'cache': {},
            'cluster_name': cluster.get('cluster_name')
        }, cluster['kwargs'] if 'kwargs' in cluster else {})
        cluster_type = cluster['module'].replace('-', '_')
        cluster_path = '/'.join(ifilter(None, (cluster_type, kwargs['cluster_name'])))
        kwargs.update(cluster_path=cluster_path)

        if ':' in cluster_type:
            cluster_type, _, tag = cluster_type.rpartition(':')
            del _
        else:
            tag = None

        kwargs.update(tag=tag)

        if tag == 'master':
            kwargs.update(master=True)
        if hasattr(self.node, 'private_ips') and len(self.node.private_ips):
            kwargs.update(private_ipv4=self.node.private_ips[-1])

        guessed_os = self.guess_os()

        # import `cluster_type`
        try:
            setattr(self, 'fab', getattr(__import__(cluster_type, globals(), locals(), [guessed_os], -1), guessed_os))
        except AttributeError as e:
            if e.message != "'module' object has no attribute '{os}'".format(os=guessed_os):
                raise
            raise ImportError('Cannot `import {os} from {cluster_type}`'.format(os=guessed_os,
                                                                                cluster_type=cluster_type))
        fab_dir = dir(self.fab)
        # Sort functions like so: `step0`, `step1`
        func_names = sorted(
            (j for j in fab_dir if not j.startswith('_') and str.isdigit(j[-1])),
            key=lambda s: int(''.join(takewhile(str.isdigit, s[::-1]))[::-1] or -1)
        )
        if 'run_cmds' in cluster:
            mapping = {'>=': operator.ge, '<': operator.lt,
                       '>': operator.gt, '<=': operator.le}  # TODO: There must be a full list somewhere!

            def dict_type(run_cmds, func_names):
                op = mapping[run_cmds['op']]
                return [func_name for func_name in func_names
                        if op(int(''.join(takewhile(str.isdigit, func_name[::-1]))[::-1]),
                              int(run_cmds['val']))]

            run_cmds_type = type(cluster['run_cmds'])
            if 'exclude' in cluster['run_cmds']:
                func_names = tuple(ifilter(lambda func: func not in cluster['run_cmds']['exclude'], func_names))
            func_names = dict_type(cluster['run_cmds'], func_names)

            '''{
                DictType: dict_type(cluster['run_cmds'], func_names)
            }.get(run_cmds_type, raise_f(NotImplementedError, '{!s} unexpected for run_cmds'.format(run_cmds_type)))'''

        if not func_names:
            try:
                get_attr = lambda a, b: a if hasattr(self.fab, a) else b if hasattr(self.fab, b) else raise_f(
                    AttributeError, '`{a}` nor `{b}`'.format(a=a, b=b))
                func_names = (
                    get_attr('install', 'setup'),
                    get_attr('serve', 'start')
                )
            except AttributeError as e:
                logger.error('{e} found in {cluster_type}'.format(e=e, cluster_type=cluster_type))
                raise AttributeError(
                    'Function names in {cluster_type} must end in a number'.format(cluster_type=cluster_type)
                )  # 'must'!

            logger.warn('Deprecation: Function names in {cluster_type} should end in a number'.format(
                cluster_type=cluster_type)
            )

        self.handle_deprecations(func_names)

        for idx, step in enumerate(func_names):
            exec_output = execute(getattr(self.fab, step), *args, **kwargs)[self.dns_name]

            if idx == 0:
                res[self.dns_name] = {cluster_path: {step: exec_output}}
                if tag == 'master':
                    save_node_info('master', [self.node_name], folder=cluster_type, marshall=json)
            else:
                res[self.dns_name][cluster_path][step] = exec_output

        save_node_info(self.node_name, node_to_dict(self.node), folder=cluster_path, marshall=json)
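
The step-selection logic in `add_to_cluster` orders the fabric module's public functions by their trailing number and optionally filters them with a `run_cmds` rule of the form `{'op': ..., 'val': ...}`. A standalone sketch of that behaviour, with made-up function names:

import operator
from itertools import takewhile

fab_dir = ['install0', 'configure1', 'serve2', '_private_helper', 'notes']  # hypothetical dir(fab)

def trailing_number(name):
    # Extract the run of digits at the end of the name, e.g. 'configure1' -> 1.
    return int(''.join(takewhile(str.isdigit, name[::-1]))[::-1])

# Keep public names that end in a digit, ordered by that trailing number.
func_names = sorted(
    (j for j in fab_dir if not j.startswith('_') and j[-1].isdigit()),
    key=trailing_number,
)  # -> ['install0', 'configure1', 'serve2']

# run_cmds filter: keep only the steps whose number satisfies the comparison.
mapping = {'>=': operator.ge, '<': operator.lt, '>': operator.gt, '<=': operator.le}
run_cmds = {'op': '>=', 'val': 1}     # e.g. skip the install step on an already-provisioned node
op = mapping[run_cmds['op']]
selected = [name for name in func_names if op(trailing_number(name), int(run_cmds['val']))]
print(selected)                        # ['configure1', 'serve2']
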
def test_second():
    global supported_commands
    try:
        tests_file_path = 'tests_descriptions.json'
        logger.info(
            f'---------------------- run test {tests_file_path} ----------------------'
        )
        saved_values = {}
        test_data = {}
        with open(tests_file_path, 'r', encoding='utf-8') as file:
            test_data = json.load(file)

        for query in test_data:
            if query['commands_before_running'] != {}:
                logger.info(
                    f"commands_before_running: {query['commands_before_running']}"
                )
                for command in query['commands_before_running']:
                    logger.info(
                        f"run: {command} {query['commands_before_running'][command]}"
                    )
                    supported_commands[command]['func'](
                        **(query['commands_before_running'][command]))

            if 'saved_before' in query['params']:
                for type_param, array_name_params in query['params']['saved_before'].items():
                    for name in array_name_params:
                        query['params'][type_param][name] = saved_values[name]

            if 'saved_before' in query['custom_headers']:
                for name_header in query['custom_headers']['saved_before']:
                    query['custom_headers']['headers'][name_header] = saved_values[name_header]

            # print(f"params: {query['params']}")
            # print(f"custom_headers: {query['custom_headers']}")

            url = str(query['url']).format(**query['params']['path'])
            method = str(query['method']).lower()
            response = http_query.http_query_by_type[method](
                url=url,
                headers=query['custom_headers']['headers'],
                query_params=query['params']['query'],
                json_data=query['request_body_json'],
            )
            # checks.check_mimetype(mimetype=response.content_type)
            checks.check_mimetype(mimetype=response.headers['content-type'])
            checks.check_http_code(code=response.status_code)
            response_data_json = response.json()
            checks.check_structure_successful_response(
                response=response_data_json)
            for scan_area, pattern in query['checks'].items():
                if pattern is not None and pattern != {} and pattern != []:
                    area = str(scan_area).replace('in_', '')
                    ok, res_comparison = compare(pattern,
                                                 response_data_json[area])

                    assert ok is True, f'A result that does not match the pattern was found.\nResult of comparing the url [{url}] response:\n{res_comparison}'

            for param_for_save in query['saves_from_body']:
                saved_values[param_for_save] = response_data_json['body'][
                    param_for_save]
        # show_dialog()
        return True
    except AssertionError as ex:
        logger.error(ex)
        print(f'[Error]{ex}')
        raise MyValueError(f'Test failed: {ex}')
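
`test_second` drives everything from `tests_descriptions.json`, one query description per entry. The shape below is reconstructed only from the keys the test reads; the endpoint, header names, and values are hypothetical:

# Hypothetical tests_descriptions.json entry, shown as a Python literal.
example_query = {
    "url": "/api/v1/users/{user_id}",             # formatted with params['path']
    "method": "GET",                              # lowercased before lookup in http_query.http_query_by_type
    "commands_before_running": {},                # command name -> kwargs for supported_commands[name]['func']
    "params": {
        "path": {"user_id": 42},
        "query": {"verbose": "true"},
        # "saved_before": {"path": ["user_id"]},  # copy previously saved values into these params
    },
    "custom_headers": {
        "headers": {"Accept": "application/json"},
        # "saved_before": ["X-Auth-Token"],       # copy previously saved values into these headers
    },
    "request_body_json": None,
    "checks": {"in_body": {"id": 42}},            # pattern compared against response_data_json['body']
    "saves_from_body": ["id"],                    # response body fields stored in saved_values
}
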