Example #1
    def test_parse_semver_update_major(self, mock_requests_get: mock.MagicMock):
        """ Check whether the value of the 'derived-semantic-version' property was set correctly.
        Module compilation status is 'passed-with-warnings', so a major version update is expected.
        Expected 'derived-semantic-version': 2.0.0

        Arguments:
        :param mock_requests_get    (mock.MagicMock) The requests.get() method is patched to return only the necessary modules
        """
        modules = self.payloads['modulesComplicatedAlgorithms_prepare_json']['module']
        modules = sorted(modules, key=lambda k: k['revision'])
        # List of modules returned from patched /api/search/modules GET request
        modules[1].pop('derived-semantic-version')
        existing_modules = {}
        existing_modules['module'] = deepcopy(modules[:2])
        mock_requests_get.return_value.json.return_value = existing_modules

        module_to_parse = modules[1]
        all_modules = {}
        all_modules['module'] = [module_to_parse]

        complicatedAlgorithms = ModulesComplicatedAlgorithms(yc_gc.logs_dir, self.yangcatalog_api_prefix,
                                                             yc_gc.credentials, self.save_file_dir,
                                                             yc_gc.temp_dir, all_modules, yc_gc.yang_models, yc_gc.temp_dir,
                                                             yc_gc.json_ytree)

        complicatedAlgorithms.parse_semver()

        self.assertNotEqual(len(complicatedAlgorithms.new_modules), 0)
        name = module_to_parse['name']
        revision = module_to_parse['revision']
        new_module = complicatedAlgorithms.new_modules[name].get(revision, {})
        self.assertEqual(new_module.get('derived-semantic-version'), '2.0.0')
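The decorator that supplies mock_requests_get is not visible in this excerpt. Below is a minimal, self-contained sketch of the wiring these tests rely on; the patch target 'requests.get' is an assumption for illustration, since the real tests would patch whatever reference the module under test uses.

# Minimal sketch of the mock wiring assumed by the tests above; the
# patch target 'requests.get' is an illustrative assumption.
import unittest
from unittest import mock

import requests


class PatchingSketch(unittest.TestCase):
    @mock.patch('requests.get')
    def test_patched_get(self, mock_requests_get: mock.MagicMock):
        # Any requests.get() call now returns the canned payload,
        # exactly as the tests above configure it.
        mock_requests_get.return_value.json.return_value = {'module': []}
        response = requests.get('https://example.invalid/api/search/modules')
        self.assertEqual(response.json(), {'module': []})


if __name__ == '__main__':
    unittest.main()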
Example #2
    def test_parse_semver_update_versions(self, mock_requests_get: mock.MagicMock):
        """ Check whether the value of the 'derived-semantic-version' property was set correctly.
        A module between two other revisions is parsed, which means the algorithm will loop through all
        the available module revisions and assign a 'derived-semantic-version' to each of them.
        Expected 'derived-semantic-version' order: '1.0.0', '2.0.0', '3.0.0', '4.0.0', '4.1.0', '4.1.1'

        Arguments:
        :param mock_requests_get    (mock.MagicMock) The requests.get() method is patched to return only the necessary modules
        """
        expected_semver_order = ['1.0.0', '2.0.0', '3.0.0', '4.0.0', '4.1.0', '4.1.1']
        modules = self.payloads['modulesComplicatedAlgorithms_prepare_json']['module']
        modules = sorted(modules, key=lambda k: k['revision'])
        # List of modules returned from patched /api/search/modules GET request
        existing_modules = {}
        existing_modules['module'] = deepcopy(modules[:4] + modules[5:])

        mock_requests_get.return_value.json.return_value = existing_modules

        module_to_parse = modules[4]
        all_modules = {}
        all_modules['module'] = [module_to_parse]

        complicatedAlgorithms = ModulesComplicatedAlgorithms(yc_gc.logs_dir, self.yangcatalog_api_prefix,
                                                             yc_gc.credentials, self.save_file_dir,
                                                             yc_gc.temp_dir, all_modules, yc_gc.yang_models, yc_gc.temp_dir,
                                                             yc_gc.json_ytree)

        complicatedAlgorithms.parse_semver()

        self.assertIn('2020-05-01', complicatedAlgorithms.new_modules['semver-test'])
        self.assertEqual(complicatedAlgorithms.new_modules['semver-test']['2020-05-01']['derived-semantic-version'], '4.1.0')
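The docstring promises the full order '1.0.0' through '4.1.1', but the test only asserts the one re-parsed revision. A hedged sketch of checking the whole sequence, assuming new_modules is keyed as name -> revision -> properties; the helper below is hypothetical, not part of the test suite.

def assert_semver_order(new_modules, name, revisions, expected):
    # Hypothetical helper: collect 'derived-semantic-version' for each
    # revision in order and compare against the expected sequence.
    actual = [new_modules[name][rev]['derived-semantic-version'] for rev in revisions]
    assert actual == expected, '{} != {}'.format(actual, expected)


# Usage with made-up data shaped like complicatedAlgorithms.new_modules:
new_modules = {'semver-test': {'2020-05-01': {'derived-semantic-version': '4.1.0'}}}
assert_semver_order(new_modules, 'semver-test', ['2020-05-01'], ['4.1.0'])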
Example #3
    def test_parse_non_requests_combined(self, mock_requests_get: mock.MagicMock):
        module = self.payloads['parse_tree_type']['module'][2]
        all_modules = {'module': [module]}
        mock_requests_get.return_value.json.return_value = {'module': []}

        complicatedAlgorithms = ModulesComplicatedAlgorithms(yc_gc.logs_dir, self.yangcatalog_api_prefix,
                                                             yc_gc.credentials, self.save_file_dir,
                                                             yc_gc.temp_dir, all_modules, yc_gc.yang_models, yc_gc.temp_dir,
                                                             yc_gc.json_ytree)
        complicatedAlgorithms.parse_non_requests()
        name = module['name']
        revision = module['revision']
        self.assertEqual(complicatedAlgorithms.new_modules[name][revision]['tree-type'], 'nmda-compatible')
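All of these tests read results through the same two-level mapping on new_modules. A tiny illustration of that shape with made-up values; the module name and revision below are hypothetical.

# new_modules is keyed by module name, then by revision, holding the
# resolved properties; names and values here are made up.
new_modules = {
    'example-module': {
        '2020-05-01': {
            'tree-type': 'nmda-compatible',
            'derived-semantic-version': '1.0.0',
        },
    },
}
assert new_modules['example-module']['2020-05-01']['tree-type'] == 'nmda-compatible'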
Example #4
    def test_parse_dependents(self, mock_requests_get: mock.MagicMock):
        payload = self.payloads['parse_dependents']
        all_modules = {'module': payload[0]['new']}
        mock_requests_get.return_value.json.return_value = {'module': payload[0]['existing']}

        complicatedAlgorithms = ModulesComplicatedAlgorithms(yc_gc.logs_dir, self.yangcatalog_api_prefix,
                                                             yc_gc.credentials, self.save_file_dir,
                                                             yc_gc.temp_dir, all_modules, yc_gc.yang_models, yc_gc.temp_dir,
                                                             yc_gc.json_ytree)
        complicatedAlgorithms.parse_requests()
        new = complicatedAlgorithms.new_modules
        self.assertIn({'name': 'n1', 'revision': '1'}, new['e1']['1']['dependents'])
        self.assertIn({'name': 'n2', 'revision': '1'}, new['e1']['1']['dependents'])
        self.assertNotIn('1', new['e2'])
        self.assertIn({'name': 'n2', 'revision': '1'}, new['n1']['1']['dependents'])
        self.assertIn({'name': 'e2', 'revision': '1'}, new['n1']['1']['dependents'])
        self.assertNotIn('1', new['n2'])
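The parse_dependents fixture itself is not shown. Here is a sketch of a payload consistent with the assertions above, assuming dependents are derived from each module's 'dependencies' entries; the field name and the exact module stubs are assumptions.

# Hypothetical fixture consistent with the assertions above: n1 and n2
# depend on e1, while n2 and e2 depend on n1, so e1 and n1 gain
# dependents and e2 and n2 gain none.
parse_dependents_payload = [{
    'new': [
        {'name': 'n1', 'revision': '1', 'dependencies': [{'name': 'e1'}]},
        {'name': 'n2', 'revision': '1',
         'dependencies': [{'name': 'e1'}, {'name': 'n1'}]},
    ],
    'existing': [
        {'name': 'e1', 'revision': '1'},
        {'name': 'e2', 'revision': '1', 'dependencies': [{'name': 'n1'}]},
    ],
}]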
Example #5
def main(scriptConf=None):
    start_time = int(time.time())
    if scriptConf is None:
        scriptConf = ScriptConfig()
    log_directory = scriptConf.log_directory
    LOGGER = log.get_logger('reviseTreeType',
                            '{}/parseAndPopulate.log'.format(log_directory))
    LOGGER.info('Starting Cron job for reviseTreeType')
    api_protocol = scriptConf.api_protocol
    ip = scriptConf.ip
    api_port = scriptConf.api_port
    is_uwsgi = scriptConf.is_uwsgi
    separator = ':'
    suffix = api_port
    if is_uwsgi == 'True':
        separator = '/'
        suffix = 'api'
    yangcatalog_api_prefix = '{}://{}{}{}/'.format(api_protocol, ip, separator,
                                                   suffix)
    credentials = scriptConf.credentials
    save_file_dir = scriptConf.save_file_dir
    direc = '/var/yang/tmp'
    yang_models = scriptConf.yang_models
    temp_dir = scriptConf.temp_dir
    json_ytree = scriptConf.json_ytree
    complicatedAlgorithms = ModulesComplicatedAlgorithms(
        log_directory, yangcatalog_api_prefix, credentials, save_file_dir,
        direc, {}, yang_models, temp_dir, json_ytree)
    response = requests.get('{}search/modules'.format(yangcatalog_api_prefix))
    if response.status_code != 200:
        LOGGER.error('Failed to fetch list of modules')
        job_log(start_time,
                temp_dir,
                os.path.basename(__file__),
                error=response.text,
                status='Fail')
        return
    modules_revise = []
    modules = response.json()['module']
    for module in modules:
        if module.get('tree-type') == 'nmda-compatible':
            if not complicatedAlgorithms.check_if_latest_revision(module):
                modules_revise.append(module)
    LOGGER.info('Resolving tree-types for {} modules'.format(
        len(modules_revise)))
    complicatedAlgorithms.resolve_tree_type({'module': modules_revise})
    complicatedAlgorithms.populate()
    LOGGER.info('Job finished successfully')
    job_log(start_time, temp_dir, os.path.basename(__file__), status='Success')
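The prefix construction above yields one of two URL shapes depending on is_uwsgi. The same logic extracted as a small function, with both resulting forms spelled out; the hostnames below are examples.

def build_api_prefix(api_protocol, ip, api_port, is_uwsgi):
    # Same branch as in main() above, extracted for illustration.
    separator = ':'
    suffix = api_port
    if is_uwsgi == 'True':
        separator = '/'
        suffix = 'api'
    return '{}://{}{}{}/'.format(api_protocol, ip, separator, suffix)


assert build_api_prefix('https', 'localhost', '8443', 'False') == 'https://localhost:8443/'
assert build_api_prefix('https', 'example.org', '8443', 'True') == 'https://example.org/api/'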
Example #6
def on_request(ch, method, props, body):
    """Function called when something was sent from API sender. This function
    will process all the requests that would take too long to process for API.
    When the processing is done we will sent back the result of the request
    which can be either 'Failed' or 'Finished successfully' with corespondent
    correlation id. If the request 'Failed' it will sent back also a reason why
    it failed.
            Arguments:
                :param body: (str) String of arguments that need to be processed
                separated by '#'.
    """
    try:
        if sys.version_info >= (3, 4):
            body = body.decode(encoding='utf-8', errors='strict')
        LOGGER.info('Received request with body {}'.format(body))
        arguments = body.split('#')
        if body == 'run_ietf':
            LOGGER.info('Running all ietf and openconfig modules')
            final_response = run_ietf()
        elif 'github' == arguments[-1]:
            LOGGER.info('Github automated message starting to populate')
            paths_plus = arguments[arguments.index('repoLocalDir'):]
            LOGGER.info('paths plus {}'.format(paths_plus))
            arguments = arguments[:arguments.index('repoLocalDir')]
            LOGGER.info('arguments {}'.format(arguments))
            paths = paths_plus[1:-2]
            LOGGER.info('paths {}'.format(paths))
            try:
                for path in paths:
                    with open(temp_dir + "/log_trigger.txt", "w") as f:
                        local_dir = paths_plus[-2]
                        arguments = arguments + [
                            "--dir", local_dir + "/" + path
                        ]
                        subprocess.check_call(arguments, stderr=f)
                final_response = __response_type[1]
            except subprocess.CalledProcessError as e:
                final_response = __response_type[0]
                mf = messageFactory.MessageFactory()
                mf.send_automated_procedure_failed(
                    arguments, temp_dir + "/log_no_sdo_api.txt")
                LOGGER.error(
                    'check log_trigger.txt: error calling the populate.py process; stdout: {}, stderr: {}'
                    .format(e.stdout, e.stderr))
            except:
                final_response = __response_type[0]
                LOGGER.error(
                    'check log_trigger.txt: failed to process the github message with error {}'
                    .format(sys.exc_info()[0]))
        else:
            global all_modules
            all_modules = None
            if arguments[-3] == 'DELETE':
                LOGGER.info('Deleting single module')
                if 'http' in arguments[0]:
                    final_response = process_module_deletion(arguments)
                    credentials = arguments[3:5]
                else:
                    final_response = process_vendor_deletion(arguments)
                    credentials = arguments[7:9]
            elif arguments[-3] == 'DELETE_MULTIPLE':
                LOGGER.info('Deleting multiple modules')
                final_response = process_module_deletion(arguments, True)
                credentials = arguments[3:5]
            elif '--sdo' in arguments[2]:
                final_response = process_sdo(arguments)
                credentials = arguments[11:13]
                direc = arguments[6]
                shutil.rmtree(direc)
            else:
                final_response = process_vendor(arguments)
                credentials = arguments[10:12]
                direc = arguments[5]
                shutil.rmtree(direc)
            if final_response.split('#split#')[0] == __response_type[1]:
                res = make_cache(credentials)
                if res.status_code != 201:
                    final_response = __response_type[0] + '#split#Server error-> could not reload cache'

                if all_modules:
                    complicatedAlgorithms = ModulesComplicatedAlgorithms(
                        log_directory, yangcatalog_api_prefix, credentials,
                        confd_protocol, confd_ip, confdPort, save_file_dir,
                        None, all_modules, yang_models, temp_dir)
                    complicatedAlgorithms.parse_non_requests()
                    complicatedAlgorithms.parse_requests()
                    complicatedAlgorithms.populate()
    except Exception as e:
        final_response = __response_type[0]
        LOGGER.error("receiver failed with message {}".format(e))
    LOGGER.info('Receiver is done with id - {} and message = {}'.format(
        props.correlation_id, str(final_response)))

    with open('{}/correlation_ids'.format(temp_dir), 'r') as f:
        lines = f.readlines()
    with open('{}/correlation_ids'.format(temp_dir), 'w') as f:
        for line in lines:
            if props.correlation_id in line:
                new_line = '{} -- {} - {}\n'.format(datetime.now().ctime(),
                                                    props.correlation_id,
                                                    str(final_response))
                f.write(new_line)
            else:
                f.write(line)
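The closing read-modify-write of the correlation_ids file, distilled into a standalone function for clarity. This is a sketch of the same pattern, not a drop-in replacement.

from datetime import datetime


def record_result(path, correlation_id, final_response):
    # Distilled from on_request() above: rewrite the line holding this
    # correlation id with a timestamped result and keep all other lines.
    with open(path, 'r') as f:
        lines = f.readlines()
    with open(path, 'w') as f:
        for line in lines:
            if correlation_id in line:
                f.write('{} -- {} - {}\n'.format(
                    datetime.now().ctime(), correlation_id, final_response))
            else:
                f.write(line)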
Example #7
                 'Request with body {} on path {} failed with {}'.format(
                     json_implementations_data, url, response.text))
 if body_to_send != '':
     LOGGER.info('Sending files for indexing')
     send_to_indexing(body_to_send,
                      args.credentials,
                      set_key=key,
                      apiIp=args.api_ip)
 if not args.api:
     thread = None
     if not args.force_indexing:
         thread = threading.Thread(target=reload_cache_in_parallel)
         thread.start()
         LOGGER.info('Run complicated algorithms')
         complicatedAlgorithms = ModulesComplicatedAlgorithms(
             log_directory, yangcatalog_api_prefix, args.credentials,
             args.protocol, args.ip, args.port, args.save_file_dir, direc,
             None, yang_models, temp_dir)
         complicatedAlgorithms.parse_non_requests()
         LOGGER.info('Waiting for cache reload to finish')
         thread.join()
         complicatedAlgorithms.parse_requests()
         LOGGER.info('Populating with new data of complicated algorithms')
         complicatedAlgorithms.populate()
     else:
         url = (yangcatalog_api_prefix + 'load-cache')
         LOGGER.info('{}'.format(url))
         response = requests.post(url,
                                  None,
                                  auth=(args.credentials[0],
                                        args.credentials[1]))
         if response.status_code != 201:
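This fragment overlaps the cache reload with parse_non_requests() and only joins the thread before parse_requests(). Here is the bare pattern, reduced to a runnable sketch; the worker body is a stand-in.

import threading
import time


def reload_cache_in_parallel():
    # Stand-in for the real cache reload request.
    time.sleep(0.1)


thread = threading.Thread(target=reload_cache_in_parallel)
thread.start()
# parse_non_requests() would run here, overlapping the cache reload.
thread.join()
# parse_requests() and populate() only run once the reload has finished.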
Example #8
def main(scriptConf=None):
    start_time = int(time.time())
    if scriptConf is None:
        scriptConf = ScriptConfig()
    api_protocol = scriptConf.api_protocol
    ip = scriptConf.ip
    api_port = scriptConf.api_port
    is_uwsgi = scriptConf.is_uwsgi
    temp_dir = scriptConf.temp_dir
    log_directory = scriptConf.log_directory
    save_file_dir = scriptConf.save_file_dir
    yang_models = scriptConf.yang_models
    credentials = scriptConf.credentials
    json_ytree = scriptConf.json_ytree

    LOGGER = log.get_logger('sandbox', '{}/sandbox.log'.format(log_directory))

    separator = ':'
    suffix = api_port
    if is_uwsgi == 'True':
        separator = '/'
        suffix = 'api'

    yangcatalog_api_prefix = '{}://{}{}{}/'.format(api_protocol, ip, separator,
                                                   suffix)
    # yangcatalog_api_prefix = 'https://yangcatalog.org/api/'
    url = '{}search/modules'.format(yangcatalog_api_prefix)
    LOGGER.info('Getting all the modules from: {}'.format(url))
    response = requests.get(url, headers={'Accept': 'application/json'})

    all_existing_modules = response.json().get('module', [])

    global path
    path = '{}/semver_prepare.json'.format(temp_dir)

    all_modules = get_list_of_unique_modules(all_existing_modules)
    LOGGER.info('Number of unique modules: {}'.format(
        len(all_modules['module'])))

    # Uncomment the next line to read data from the file semver_prepare.json
    # all_modules = load_from_json(path)

    # Initialize ModulesComplicatedAlgorithms
    direc = '/var/yang/tmp'

    num_of_modules = len(all_modules['module'])
    chunk_size = 100
    chunks = (num_of_modules - 1) // chunk_size + 1
    for i in range(chunks):
        try:
            LOGGER.info('Processing chunk {} out of {}'.format(i, chunks))
            batch = all_modules['module'][i * chunk_size:(i + 1) * chunk_size]
            batch_modules = {'module': batch}
            recursion_limit = sys.getrecursionlimit()
            sys.setrecursionlimit(50000)
            complicatedAlgorithms = ModulesComplicatedAlgorithms(
                log_directory, yangcatalog_api_prefix, credentials,
                save_file_dir, direc, batch_modules, yang_models, temp_dir,
                json_ytree)
            complicatedAlgorithms.parse_semver()
            sys.setrecursionlimit(recursion_limit)
            complicatedAlgorithms.populate()
        except:
            LOGGER.exception(
                'Exception occurred while running ModulesComplicatedAlgorithms'
            )
            continue

    messages = [{
        'label': 'Number of modules checked',
        'message': num_of_modules
    }]
    end = time.time()
    LOGGER.info(
        'Populate took {} seconds with the main and complicated algorithm'.
        format(int(end - start_time)))
    filename = os.path.basename(__file__).split('.py')[0]
    job_log(start_time,
            temp_dir,
            filename,
            messages=messages,
            status='Success')
    LOGGER.info('Job finished successfully')
Example #9
    direc = '/var/yang/tmp'

    num_of_modules = len(all_existing_modules['module'])
    chunk_size = 100
    chunks = (num_of_modules - 1) // chunk_size + 1
    for i in range(chunks):
        try:
            LOGGER.info('Processing chunk {} out of {}'.format(i, chunks))
            batch = all_existing_modules['module'][i * chunk_size:(i + 1) *
                                                   chunk_size]
            batch_modules = {'module': batch}

            recursion_limit = sys.getrecursionlimit()
            sys.setrecursionlimit(50000)
            complicatedAlgorithms = ModulesComplicatedAlgorithms(
                log_directory, yangcatalog_api_prefix, credentials,
                save_file_dir, direc, batch_modules, yang_models, temp_dir,
                json_ytree)
            complicatedAlgorithms.parse_non_requests()
            sys.setrecursionlimit(recursion_limit)
            complicatedAlgorithms.populate()
        except:
            LOGGER.exception(
                'Exception occurred while running ModulesComplicatedAlgorithms'
            )
            continue

    end = time.time()
    LOGGER.info(
        'Populate took {} seconds with the main and complicated algorithm'.
        format(int(end - start)))
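Both loops above use the same ceiling-division chunking. The arithmetic isolated as a generator, with a quick check of the edge behaviour; this helper is an illustration, not part of the original scripts.

def iter_chunks(items, chunk_size=100):
    # (n - 1) // chunk_size + 1 chunks, same arithmetic as above.
    chunks = (len(items) - 1) // chunk_size + 1
    for i in range(chunks):
        yield items[i * chunk_size:(i + 1) * chunk_size]


assert list(iter_chunks(list(range(5)), 2)) == [[0, 1], [2, 3], [4]]
assert list(iter_chunks([], 2)) == []  # zero chunks for an empty list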
Example #10
def main(scriptConf=None):
    if scriptConf is None:
        scriptConf = ScriptConfig()
    args = scriptConf.args
    log_directory = scriptConf.log_directory
    is_uwsgi = scriptConf.is_uwsgi
    yang_models = scriptConf.yang_models
    temp_dir = scriptConf.temp_dir
    cache_dir = scriptConf.cache_dir
    json_ytree = scriptConf.json_ytree
    global LOGGER
    LOGGER = log.get_logger('populate', '{}/parseAndPopulate.log'.format(log_directory))

    separator = ':'
    suffix = args.api_port
    if is_uwsgi == 'True':
        separator = '/'
        suffix = 'api'
    yangcatalog_api_prefix = '{}://{}{}{}/'.format(args.api_protocol, args.api_ip, separator, suffix)
    confdService = ConfdService()
    redisConnection = RedisConnection()
    LOGGER.info('Starting the populate script')
    start = time.time()
    if args.api:
        json_dir = args.dir
    else:
        json_dir = create_dir_name(temp_dir)
        os.makedirs(json_dir)
    LOGGER.info('Calling runCapabilities script')
    try:
        runCapabilities = import_module('parseAndPopulate.runCapabilities')
        script_conf = configure_runCapabilities(runCapabilities, args, json_dir)
        runCapabilities.main(scriptConf=script_conf)
    except Exception as e:
        LOGGER.exception('runCapabilities error:\n{}'.format(e))
        raise e

    body_to_send = {}
    if args.notify_indexing:
        LOGGER.info('Sending files for indexing')
        body_to_send = prepare_for_es_indexing(yangcatalog_api_prefix, os.path.join(json_dir, 'prepare.json'),
                                               LOGGER, args.save_file_dir, force_indexing=args.force_indexing)

    LOGGER.info('Populating yang catalog with data. Starting to add modules')
    with open(os.path.join(json_dir, 'prepare.json')) as data_file:
        data = data_file.read()
    modules = json.loads(data).get('module', [])
    errors = confdService.patch_modules(modules)
    redisConnection.populate_modules(modules)

    # Add vendor data from normal.json, if it was produced
    if os.path.exists(os.path.join(json_dir, 'normal.json')):
        LOGGER.info('Starting to add vendors')
        with open(os.path.join(json_dir, 'normal.json')) as data:
            try:
                vendors = json.loads(data.read())['vendors']['vendor']
            except KeyError as e:
                LOGGER.error('No files were parsed. This probably means the directory is missing capability xml files')
                raise e
        errors = errors or confdService.patch_vendors(vendors)
        redisConnection.populate_implementation(vendors)
    if body_to_send:
        LOGGER.info('Sending files for indexing')
        send_for_es_indexing(body_to_send, LOGGER, scriptConf.changes_cache_path, scriptConf.delete_cache_path,
                             scriptConf.lock_file)
    if modules:
        process_reload_cache = multiprocessing.Process(target=reload_cache_in_parallel,
                                                       args=(args.credentials, yangcatalog_api_prefix,))
        process_reload_cache.start()
        LOGGER.info('Running ModulesComplicatedAlgorithms from populate.py script')
        recursion_limit = sys.getrecursionlimit()
        sys.setrecursionlimit(50000)
        complicatedAlgorithms = ModulesComplicatedAlgorithms(log_directory, yangcatalog_api_prefix,
                                                             args.credentials, args.save_file_dir, json_dir, None,
                                                             yang_models, temp_dir, json_ytree)
        complicatedAlgorithms.parse_non_requests()
        LOGGER.info('Waiting for cache reload to finish')
        process_reload_cache.join()
        complicatedAlgorithms.parse_requests()
        sys.setrecursionlimit(recursion_limit)
        LOGGER.info('Populating with new data of complicated algorithms')
        complicatedAlgorithms.populate()
        end = time.time()
        LOGGER.info('Populate took {} seconds with the main and complicated algorithm'.format(int(end - start)))

        # Keep new hashes only if the ConfD was patched successfully
        if not errors:
            path = os.path.join(json_dir, 'temp_hashes.json')
            fileHasher = FileHasher('backend_files_modification_hashes', cache_dir, not args.force_parsing, log_directory)
            updated_hashes = fileHasher.load_hashed_files_list(path)
            if updated_hashes:
                fileHasher.merge_and_dump_hashed_files_list(updated_hashes)

    LOGGER.info('Populate script finished successfully')
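The save/raise/restore dance around sys.setrecursionlimit() appears in several of these scripts. A small context manager packaging the same pattern; this is an illustration, not part of the original codebase.

import sys
from contextlib import contextmanager


@contextmanager
def recursion_limit(limit):
    # Same save/restore pattern as above, with the restore guaranteed
    # even if parsing raises.
    previous = sys.getrecursionlimit()
    sys.setrecursionlimit(limit)
    try:
        yield
    finally:
        sys.setrecursionlimit(previous)


# Usage sketch:
# with recursion_limit(50000):
#     complicatedAlgorithms.parse_requests()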