Example #1
def prepare_for_es_removal(yc_api_prefix: str, modules_to_delete: list, save_file_dir: str, LOGGER: logging.Logger):
    redis_connection = RedisConnection()  # reuse one connection for all deletions
    for mod in modules_to_delete:
        name, revision_organization = mod.split('@')
        revision = revision_organization.split('/')[0]
        path_to_delete_local = '{}/{}@{}.yang'.format(save_file_dir, name, revision)
        data = {'input': {'dependents': [{'name': name}]}}

        response = requests.post('{}search-filter'.format(yc_api_prefix), json=data)
        if response.status_code == 200:
            data = response.json()
            modules = data['yang-catalog:modules']['module']
            for module in modules:  # avoid shadowing the outer 'mod'
                redis_key = '{}@{}/{}'.format(module['name'], module['revision'], module['organization'])
                redis_connection.delete_dependent(redis_key, name)
        if os.path.exists(path_to_delete_local):
            os.remove(path_to_delete_local)

    post_body = {}
    if modules_to_delete:
        post_body = {'modules-to-delete': modules_to_delete}
        LOGGER.debug('Modules to delete:\n{}'.format(json.dumps(post_body, indent=2)))
        mf = messageFactory.MessageFactory()
        mf.send_removed_yang_files(json.dumps(post_body, indent=4))

    return post_body
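
A minimal usage sketch (the prefix, directory, and module key below are hypothetical; module keys follow the name@revision/organization convention parsed above):

logger = logging.getLogger('es-removal-demo')
deleted = prepare_for_es_removal(
    'https://yangcatalog.org/api/',       # hypothetical API prefix
    ['ietf-interfaces@2018-02-20/ietf'],  # hypothetical module key
    '/var/yang/all_modules',              # hypothetical save-file directory
    logger,
)
# 'deleted' is {'modules-to-delete': [...]} when modules were removed, else {}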
Example #2
def prepare_for_es_indexing(yc_api_prefix: str, modules_to_index: str, LOGGER: logging.Logger,
                            save_file_dir: str, force_indexing: bool = False):
    """ Sends the POST request which will activate indexing script for modules which will
    help to speed up process of searching. It will create a json body of all the modules
    containing module name and path where the module can be found if we are adding new
    modules.

    Arguments:
        :param yc_api_prefix        (str) prefix for sending request to API
        :param modules_to_index     (str) path to the prepare.json file generated while parsing
        :param LOOGER               (logging.Logger) formated logger with the specified name
        :param save_file_dir        (str) path to the directory where all the yang files will be saved
        :param force_indexing       (bool) Whether or not we should force indexing even if module exists in cache.
    """
    mf = messageFactory.MessageFactory()
    es_manager = ESManager()
    with open(modules_to_index, 'r') as f:
        sdos_json = json.load(f)
        LOGGER.debug('{} modules loaded from prepare.json'.format(len(sdos_json.get('module', []))))
    post_body = {}
    load_new_files_to_github = False
    for module in sdos_json.get('module', []):
        url = '{}search/modules/{},{},{}'.format(yc_api_prefix,
                                                 module['name'], module['revision'], module['organization'])
        response = requests.get(url, headers=json_headers)
        code = response.status_code

        in_es = False
        in_redis = code in (200, 201, 204)
        if in_redis:
            in_es = es_manager.document_exists(ESIndices.MODULES, module)
        else:
            load_new_files_to_github = True

        if force_indexing or not in_es or not in_redis:
            path = '{}/{}@{}.yang'.format(save_file_dir, module.get('name'), module.get('revision'))
            key = '{}@{}/{}'.format(module['name'], module['revision'], module['organization'])
            post_body[key] = path

    if post_body:
        post_body = {'modules-to-index': post_body}
        LOGGER.debug('Modules to index:\n{}'.format(json.dumps(post_body, indent=2)))
        mf.send_added_new_yang_files(json.dumps(post_body, indent=4))
    if load_new_files_to_github:
        try:
            LOGGER.info('Calling draftPull.py script')
            module = __import__('ietfYangDraftPull', fromlist=['draftPull'])
            submodule = getattr(module, 'draftPull')
            submodule.main()
        except Exception:
            LOGGER.exception('Error occurred while running draftPull.py script')
    return post_body
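
A minimal usage sketch (paths and prefix are hypothetical; prepare.json is the file produced by the parsing step):

logger = logging.getLogger('es-indexing-demo')
body = prepare_for_es_indexing(
    'https://yangcatalog.org/api/',   # hypothetical API prefix
    '/var/yang/tmp/prepare.json',     # hypothetical prepare.json path
    logger,
    '/var/yang/all_modules',          # hypothetical save-file directory
    force_indexing=False,
)
# 'body' is {'modules-to-index': {'name@revision/organization': '<path>.yang', ...}}
# when anything needs indexing, else {}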
Example #3
def trigger_populate():
    app.logger.info('Trigger populate if necessary')
    repoutil.pull(ac.d_yang_models_dir)
    try:
        assert request.is_json and request.json, 'Expected a JSON payload'
        commits = request.json.get('commits')
        paths = set()
        new = []
        mod = []
        if commits:
            for commit in commits:
                for added in commit.get('added') or []:
                    if 'platform-metadata.json' in added:
                        directory = '/'.join(added.split('/')[:-1])
                        paths.add(directory)
                        new.append(directory)
                for modified in commit.get('modified') or []:
                    if 'platform-metadata.json' in modified:
                        directory = '/'.join(modified.split('/')[:-1])
                        paths.add(directory)
                        mod.append(directory)
        if paths:
            mf = messageFactory.MessageFactory()
            mf.send_new_modified_platform_metadata(new, mod)
            app.logger.info('Forking the repo')
            try:
                populate_path = os.path.join(os.environ['BACKEND'], 'parseAndPopulate/populate.py')
                arguments = ['python', populate_path, '--api-protocol', ac.g_protocol_api,
                             '--api-port', ac.w_api_port, '--api-ip', ac.w_ip,
                             '--result-html-dir', ac.w_result_html_dir,
                             '--credentials', ac.s_confd_credentials[0], ac.s_confd_credentials[1],
                             '--save-file-dir', ac.d_save_file_dir, 'repoLocalDir']
                arguments = arguments + list(paths) + [ac.d_yang_models_dir, 'github']
                ac.sender.send('#'.join(arguments))
            except Exception:
                app.logger.exception('Could not populate after git push')
    except Exception as e:
        app.logger.error('Automated github webhook failure - {}'.format(e))

    return {'info': 'Success'}
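
For reference, a sketch of the webhook payload shape this handler consumes (field names follow the GitHub push-event format; the values are illustrative only):

example_payload = {
    'commits': [{
        'added': ['vendor/cisco/xr/771/platform-metadata.json'],      # illustrative path
        'modified': ['vendor/cisco/xr/761/platform-metadata.json'],   # illustrative path
    }]
}
# Each matching file path is reduced to its directory, e.g. 'vendor/cisco/xr/771',
# before being handed to populate.py.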
Example #4
    def _patch(self, data: list, type: str, log_file: str) -> bool:
        errors = False
        chunk_size = 500
        failed_data = {}
        # Split the data into chunks of at most 500 items per PATCH request.
        chunks = [data[i:i + chunk_size] for i in range(0, len(data), chunk_size)]
        path = '{}/restconf/data/yang-catalog:catalog/{}/'.format(self.confd_prefix, type)
        self.LOGGER.debug('Sending PATCH request to patch multiple {}'.format(type))
        for i, chunk in enumerate(chunks, start=1):
            self.LOGGER.debug('Processing chunk {} out of {}'.format(i, len(chunks)))
            patch_data = {type: {type.rstrip('s'): chunk}}
            patch_json = json.dumps(patch_data)
            response = requests.patch(path, patch_json, auth=(self.credentials[0], self.credentials[1]),
                                      headers=confd_headers)
            if response.status_code == 400:
                self.LOGGER.warning('Failed to batch patch {}, falling back to patching individually'.format(type))
                for datum in chunk:
                    patch_data = {type: {type.rstrip('s'): [datum]}}
                    patch_json = json.dumps(patch_data)
                    response = requests.patch(path, patch_json, auth=(self.credentials[0], self.credentials[1]),
                                              headers=confd_headers)
                    if response.status_code == 400:
                        errors = True
                        with open(os.path.join(self.log_directory, log_file), 'a') as f:
                            if type == 'modules':
                                name_revision = '{}@{}'.format(datum['name'], datum['revision'])
                                self.LOGGER.error('Failed to patch {} {}'.format(type.rstrip('s'), name_revision))
                                try:
                                    failed_data[name_revision] = json.loads(response.text)
                                except json.decoder.JSONDecodeError:
                                    self.LOGGER.exception('No text in response')
                                f.write('{}@{} error: {}\n'.format(datum['name'], datum['revision'], response.text))
                            elif type == 'vendors':
                                platform_name = datum['platforms']['platform'][0]['name']
                                vendor_platform = '{} {}'.format(datum['name'], platform_name)
                                self.LOGGER.error('Failed to patch {} {}'.format(type.rstrip('s'), vendor_platform))
                                try:
                                    failed_data[vendor_platform] = json.loads(response.text)
                                except json.decoder.JSONDecodeError:
                                    self.LOGGER.exception('No text in response')
                                f.write('{} {} error: {}\n'.format(datum['name'], platform_name, response.text))
        if failed_data:
            mf = messageFactory.MessageFactory()
            mf.send_confd_writing_failures(type, failed_data)
        return errors
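
The chunking idiom used above is worth isolating; a standalone illustration:

data = list(range(7))
chunk_size = 3
chunks = [data[i:i + chunk_size] for i in range(0, len(data), chunk_size)]
# chunks == [[0, 1, 2], [3, 4, 5], [6]] - the last chunk may be shorter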
Example #5
def check_local():
    """Authorize sender if it is Travis, if travis job was sent from yang-catalog
    repository and job passed fine and Travis run a job on pushed patch, create
    a pull request to YangModules repository. If the job passed on this pull request,
    merge the pull request and remove the repository at yang-catalog repository
            :return response to the request
    """
    app.logger.info('Starting pull request job')
    body = json.loads(request.form['payload'])
    app.logger.info('Body of travis {}'.format(json.dumps(body)))
    app.logger.info('type of job {}'.format(body['type']))
    try:
        check_authorized(request.headers['SIGNATURE'], request.form['payload'])
        app.logger.info('Authorization successful')
    except Exception:
        app.logger.exception('Authorization failed. Request did not come from Travis')
        mf = messageFactory.MessageFactory()
        mf.send_travis_auth_failed()
        abort(401)

    github_repos_url = '{}/repos'.format(github_api)
    yang_models_url = '{}/YangModels/yang'.format(github_repos_url)

    verify_commit = False
    app.logger.info('Checking commit SHA if it is the commit sent by yang-catalog user.')
    if body['repository']['owner_name'] == 'yang-catalog':
        commit_sha = body['commit']
    else:
        commit_sha = body['head_commit']
    try:
        with open(ac.d_commit_dir, 'r') as commit_file:
            for line in commit_file:
                if commit_sha in line:
                    verify_commit = True
                    break
    except Exception:
        abort(404)

    token_header_value = 'token {}'.format(ac.s_yang_catalog_token)
    if verify_commit:
        app.logger.info('commit verified')
        if body['repository']['owner_name'] == 'yang-catalog':
            if body['result_message'] == 'Passed':
                if body['type'] in ['push', 'api']:
                    # After build was successful only locally
                    json_body = {
                        'title': 'Cronjob - every day pull and update of ietf draft yang files.',
                        'body': 'ietf extracted yang modules',
                        'head': 'yang-catalog:main',
                        'base': 'main'
                    }

                    url = '{}/pulls'.format(yang_models_url)
                    r = requests.post(url, json=json_body, headers={'Authorization': token_header_value})
                    if r.status_code == 201:
                        app.logger.info('Pull request created successfully')
                        return ({'info': 'Success'}, 201)
                    else:
                        app.logger.error('Could not create a pull request {}'.format(r.status_code))
                        abort(400)
            else:
                app.logger.warning('Travis job did not pass.')
                return ({'info': 'Failed'}, 406)
        elif body['repository']['owner_name'] == 'YangModels':
            if body['result_message'] == 'Passed':
                if body['type'] == 'pull_request':
                    # If build was successful on pull request
                    admin_token_header_value = 'token {}'.format(ac.s_admin_token)
                    pull_number = body['pull_request_number']
                    app.logger.info('Pull request {} was successful. Sending review.'.format(pull_number))
                    url = '{}/repos/YangModels/yang/pulls/{}/reviews'.format(github_api, pull_number)
                    data = json.dumps({
                        'body': 'AUTOMATED YANG CATALOG APPROVAL',
                        'event': 'APPROVE'
                    })
                    response = requests.post(url, data, headers={'Authorization': admin_token_header_value})
                    app.logger.info('Review response code {}. Review response {}.'.format(
                        response.status_code, response.text))
                    data = json.dumps({'commit-title': 'Travis job passed',
                                       'sha': body['head_commit']})
                    response = requests.put('{}/repos/YangModels/yang/pulls/{}/merge'.format(github_api, pull_number),
                                            data, headers={'Authorization': admin_token_header_value})
                    app.logger.info('Merge response code {}. Merge response {}.'.format(response.status_code, response.text))
                    return ({'info': 'Success'}, 201)
            else:
                app.logger.warning('Travis job did not pass. Removing pull request')
                pull_number = body['pull_request_number']
                json_body = {
                    'title': 'Cron job - every day pull and update of ietf draft yang files.',
                    'body': 'ietf extracted yang modules',
                    'state': 'closed',
                    'base': 'main'
                }
                requests.patch('{}/repos/YangModels/yang/pulls/{}'.format(github_api, pull_number), json=json_body,
                               headers={'Authorization': token_header_value})
                app.logger.warning('Travis job did not pass.')
                return ({'info': 'Failed'}, 406)
        else:
            app.logger.warning('Owner name verification failed. Owner -> {}'.format(body['repository']['owner_name']))
            return ({'Error': 'Owner verification failed'}, 401)
    else:
        app.logger.info('Commit verification failed. Commit sent by someone else. '
                        'Not doing anything.')
    return ({'Error': 'Failed'}, 500)
Example #6
def main(scriptConf=None):
    start_time = int(time.time())
    if scriptConf is None:
        scriptConf = ScriptConfig()
    args = scriptConf.args

    config_path = args.config_path
    config = create_config(config_path)
    token = config.get('Secrets-Section', 'yang-catalog-token')
    username = config.get('General-Section', 'repository-username')
    commit_dir = config.get('Directory-Section', 'commit-dir')
    config_name = config.get('General-Section', 'repo-config-name')
    config_email = config.get('General-Section', 'repo-config-email')
    log_directory = config.get('Directory-Section', 'logs')
    temp_dir = config.get('Directory-Section', 'temp')
    exceptions = config.get('Directory-Section', 'exceptions')
    yang_models = config.get('Directory-Section', 'yang-models-dir')
    ietf_draft_url = config.get('Web-Section', 'ietf-draft-private-url')
    ietf_rfc_url = config.get('Web-Section', 'ietf-RFC-tar-private-url')
    is_production = config.get('General-Section', 'is-prod') == 'True'
    LOGGER = log.get_logger('draftPull',
                            '{}/jobs/draft-pull.log'.format(log_directory))
    LOGGER.info('Starting Cron job IETF pull request')

    repo_name = 'yang'
    repourl = 'https://{}@github.com/{}/{}.git'.format(token, username,
                                                       repo_name)
    commit_author = {'name': config_name, 'email': config_email}

    draftPullUtility.update_forked_repository(yang_models, LOGGER)
    repo = draftPullUtility.clone_forked_repository(repourl, commit_author,
                                                    LOGGER)

    if not repo:
        error_message = 'Failed to clone repository {}/{}'.format(
            username, repo_name)
        job_log(start_time,
                temp_dir,
                error=error_message,
                status='Fail',
                filename=os.path.basename(__file__))
        sys.exit()

    try:
        # Get rfc.tgz file
        response = requests.get(ietf_rfc_url)
        tgz_path = '{}/rfc.tgz'.format(repo.local_dir)
        extract_to = '{}/standard/ietf/RFCtemp'.format(repo.local_dir)
        with open(tgz_path, 'wb') as zfile:
            zfile.write(response.content)
        tar_opened = draftPullUtility.extract_rfc_tgz(tgz_path, extract_to,
                                                      LOGGER)
        if tar_opened:
            diff_files = []
            new_files = []

            temp_rfc_yang_files = glob.glob(
                '{}/standard/ietf/RFCtemp/*.yang'.format(repo.local_dir))
            for temp_rfc_yang_file in temp_rfc_yang_files:
                file_name = os.path.basename(temp_rfc_yang_file)
                rfc_yang_file = temp_rfc_yang_file.replace('RFCtemp', 'RFC')

                if not os.path.exists(rfc_yang_file):
                    new_files.append(file_name)
                    continue

                same = filecmp.cmp(rfc_yang_file, temp_rfc_yang_file)
                if not same:
                    diff_files.append(file_name)

            shutil.rmtree('{}/standard/ietf/RFCtemp'.format(repo.local_dir))

            with open(exceptions, 'r') as exceptions_file:
                remove_from_new = exceptions_file.read().split('\n')
            new_files = [
                file_name for file_name in new_files
                if file_name not in remove_from_new
            ]

            if args.send_message:
                if new_files or diff_files:
                    LOGGER.info(
                        'new or modified RFC files found. Sending an E-mail')
                    mf = messageFactory.MessageFactory()
                    mf.send_new_rfc_message(new_files, diff_files)

        # Experimental draft modules
        experimental_path = '{}/experimental/ietf-extracted-YANG-modules'.format(
            repo.local_dir)
        # exist_ok: be happy if someone already created the path
        os.makedirs(experimental_path, exist_ok=True)

        LOGGER.info('Updating IETF drafts download links')
        draftPullUtility.get_draft_module_content(ietf_draft_url,
                                                  experimental_path, LOGGER)

        LOGGER.info('Checking module filenames without revision in {}'.format(
            experimental_path))
        draftPullUtility.check_name_no_revision_exist(experimental_path,
                                                      LOGGER)

        LOGGER.info(
            'Checking for early revision in {}'.format(experimental_path))
        draftPullUtility.check_early_revisions(experimental_path, LOGGER)

        messages = []
        try:
            # Add commit and push to the forked repository
            LOGGER.info('Adding all untracked files locally')
            untracked_files = repo.repo.untracked_files
            repo.add_untracked_remove_deleted()
            LOGGER.info('Committing all files locally')
            repo.commit_all(
                'Cronjob - every day pull of ietf draft yang files.')
            LOGGER.info('Pushing files to forked repository')
            commit_hash = repo.repo.head.commit
            LOGGER.info('Commit hash {}'.format(commit_hash))
            with open(commit_dir, 'w+') as f:
                f.write('{}\n'.format(commit_hash))
            if is_production:
                LOGGER.info(
                    'Pushing untracked and modified files to remote repository'
                )
                repo.push()
            else:
                LOGGER.info(
                    'DEV environment - not pushing changes into remote repository'
                )
                LOGGER.debug(
                    'List of all untracked and modified files:\n{}'.format(
                        '\n'.join(untracked_files)))
        except GitCommandError as e:
            message = 'Error while pushing procedure - git command error: \n {} \n git command out: \n {}'.format(
                e.stderr, e.stdout)
            if 'Your branch is up to date' in e.stdout:
                LOGGER.warning(message)
                messages = [{
                    'label': 'Pull request created',
                    'message': 'False - branch is up to date'
                }]
            else:
                LOGGER.exception(
                    'Error while pushing procedure - Git command error')
                raise e
        except Exception as e:
            LOGGER.exception('Error while pushing procedure {}'.format(
                sys.exc_info()[0]))
            raise type(e)('Error while pushing procedure')
    except Exception as e:
        LOGGER.exception('Exception found while running draftPull script')
        job_log(start_time,
                temp_dir,
                error=str(e),
                status='Fail',
                filename=os.path.basename(__file__))
        raise e

    if not messages:
        messages = [{
            'label': 'Pull request created',
            'message': 'True - {}'.format(commit_hash)  # pyright: ignore
        }]
    job_log(start_time,
            temp_dir,
            messages=messages,
            status='Success',
            filename=os.path.basename(__file__))
    LOGGER.info('Job finished successfully')
Example #7
    def parse_semver(self):
        def get_revision_datetime(module: dict):
            rev = module['revision'].split('-')
            try:
                date = datetime(int(rev[0]), int(rev[1]), int(rev[2]))
            except Exception:
                LOGGER.error(
                    'Failed to process revision for {}: (rev: {})'.format(
                        module['name'], rev))
                try:
                    # A revision of February 29th in a non-leap year is invalid;
                    # clamp it to the 28th.
                    if int(rev[1]) == 2 and int(rev[2]) == 29:
                        date = datetime(int(rev[0]), int(rev[1]), 28)
                    else:
                        date = datetime(1970, 1, 1)
                except Exception:
                    date = datetime(1970, 1, 1)
            return date

        def increment_semver(old: str, significance: int):
            versions = old.split('.')
            versions = list(map(int, versions))
            versions[significance] += 1
            versions[significance +
                     1:] = [0] * len(versions[significance + 1:])
            return '{}.{}.{}'.format(*versions)
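        # Illustrative behaviour: increment_semver('1.2.3', 0) -> '2.0.0',
        # increment_semver('1.2.3', 1) -> '1.3.0',
        # increment_semver('1.2.3', 2) -> '1.2.4'.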

        def update_semver(old_details: dict, new_module: dict,
                          significance: int):
            upgraded_version = increment_semver(old_details['semver'],
                                                significance)
            new_module['derived-semantic-version'] = upgraded_version
            add_to_new_modules(new_module)

        def trees_match(new, old) -> bool:
            if type(new) != type(old):
                return False
            elif isinstance(new, dict):
                new.pop('description', None)
                old.pop('description', None)
                return new.keys() == old.keys() and all(
                    (trees_match(new[i], old[i]) for i in new))
            elif isinstance(new, list):
                return len(new) == len(old) and all(
                    any((trees_match(i, j) for j in old)) for i in new)
            elif type(new) in (str, set, bool):
                return new == old
            else:
                assert False

        def get_trees(new: dict, old: dict):
            new_name_revision = '{}@{}'.format(new['name'], new['revision'])
            old_name_revision = '{}@{}'.format(old['name'], old['revision'])
            new_schema = '{}/{}.yang'.format(self._save_file_dir,
                                             new_name_revision)
            old_schema = '{}/{}.yang'.format(self._save_file_dir,
                                             old_name_revision)
            new_schema_exist = self._check_schema_file(new)
            old_schema_exist = self._check_schema_file(old)
            new_tree_path = '{}/{}.json'.format(self.json_ytree,
                                                new_name_revision)
            old_tree_path = '{}/{}.json'.format(self.json_ytree,
                                                old_name_revision)

            if old_schema_exist and new_schema_exist:
                ctx, new_schema_ctx = context_check_update_from(
                    old_schema, new_schema, self._yang_models,
                    self._save_file_dir)
                if len(ctx.errors) == 0:
                    if os.path.exists(new_tree_path) and os.path.exists(
                            old_tree_path):
                        with open(new_tree_path) as nf, open(
                                old_tree_path) as of:
                            new_yang_tree = json.load(nf)
                            old_yang_tree = json.load(of)
                    else:
                        with open(old_schema, 'r', errors='ignore') as f:
                            old_schema_ctx = ctx.add_module(
                                old_schema, f.read())
                        if ctx.opts.tree_path is not None:
                            path = ctx.opts.tree_path.split('/')
                            if path[0] == '':
                                path = path[1:]
                        else:
                            path = None
                        retry = 5
                        while retry:
                            try:
                                ctx.validate()
                                break
                            except Exception as e:
                                retry -= 1
                                if retry == 0:
                                    raise e
                        try:
                            f = io.StringIO()
                            emit_json_tree([new_schema_ctx], f, ctx)
                            new_yang_tree = f.getvalue()
                            with open(new_tree_path, 'w') as f:
                                f.write(new_yang_tree)
                        except Exception:
                            new_yang_tree = ''
                        try:
                            f = io.StringIO()
                            emit_json_tree([old_schema_ctx], f, ctx)
                            old_yang_tree = f.getvalue()
                            with open(old_tree_path, 'w') as f:
                                f.write(old_yang_tree)
                        except Exception:
                            # Different sentinel from the new tree above, so a
                            # double failure does not count as matching trees.
                            old_yang_tree = '2'
                    return (new_yang_tree, old_yang_tree)
                else:
                    raise Exception

        def add_to_new_modules(new_module: dict):
            name = new_module['name']
            revision = new_module['revision']
            if (revision not in self._existing_modules_dict[name]
                    or self._existing_modules_dict[name][revision].get(
                        'derived-semantic-version') !=
                    new_module['derived-semantic-version']):
                LOGGER.info('semver {} vs {} for module {}@{}'.format(
                    self._existing_modules_dict[name].get(
                        revision, {}).get('derived-semantic-version'),
                    new_module['derived-semantic-version'], name, revision))
                if revision not in self.new_modules[name]:
                    self.new_modules[name][revision] = new_module
                else:
                    self.new_modules[name][revision][
                        'derived-semantic-version'] = new_module[
                            'derived-semantic-version']

        for z, new_module in enumerate(self._all_modules.get('module', []), start=1):
            name = new_module['name']
            new_revision = new_module['revision']
            name_revision = '{}@{}'.format(name, new_revision)
            data = defaultdict(dict)
            # Get all other available revisions of the module
            for m in self._existing_modules_dict[new_module['name']].values():
                if m['revision'] != new_module['revision']:
                    data[m['name']][m['revision']] = deepcopy(m)

            LOGGER.info('Searching semver for {}. {} out of {}'.format(
                name_revision, z, len(self._all_modules['module'])))
            if not data:
                # If there is no other revision for this module
                new_module['derived-semantic-version'] = '1.0.0'
                add_to_new_modules(new_module)
            else:
                # If there is at least one revision for this module
                date = get_revision_datetime(new_module)
                module_temp = {
                    'name': name,
                    'revision': new_revision,
                    'organization': new_module['organization'],
                    'compilation': new_module.get('compilation-status', 'PENDING'),
                    'date': date,
                    'schema': new_module['schema'],
                }
                mod_details: t.List[t.Dict[str, str]] = [module_temp]

                # Loop through all other available revisions of the module
                revision = ''
                try:
                    for mod in [
                            revision_data for revisions in data.values()
                            for revision_data in revisions.values()
                    ]:
                        revision = mod['revision']
                        if revision == new_module['revision']:
                            continue
                        module_temp = {
                            'name': name,
                            'revision': revision,
                            'date': get_revision_datetime(mod),
                            'compilation': mod.get('compilation-status', 'PENDING'),
                            'semver': mod['derived-semantic-version'],
                            'organization': mod['organization'],
                            'schema': mod['schema'],
                        }
                        mod_details.append(module_temp)
                except KeyError as e:
                    LOGGER.error(
                        'Existing module {}@{} is missing the {} field'.format(
                            name, revision, e))
                    if str(e) == 'derived-semantic-version':
                        LOGGER.error('Cannot resolve semver for {}@{}'.format(
                            name, new_revision))
                        continue

                data[name][new_revision] = new_module
                mod_details = sorted(mod_details, key=lambda k: k['date'])
                # If we are adding a new module to the end (latest revision) of existing modules with this name
                # and all modules with this name have semver already assigned except for the last one
                if mod_details[-1]['date'] == date:
                    if mod_details[-1]['compilation'] != 'passed':
                        # Compilation failed - update the major version.
                        update_semver(mod_details[-2], new_module, 0)
                    else:
                        if mod_details[-2]['compilation'] != 'passed':
                            update_semver(mod_details[-2], new_module, 0)
                        else:
                            try:
                                trees = get_trees(mod_details[-1], mod_details[-2])
                                # if schemas do not exist, trees will be None
                                if trees:
                                    new_yang_tree, old_yang_tree = trees
                                    if trees_match(new_yang_tree, old_yang_tree):
                                        # yang trees are the same - update only the patch version
                                        update_semver(mod_details[-2], new_module, 2)
                                    else:
                                        # yang trees have changed - update minor version
                                        update_semver(mod_details[-2], new_module, 1)
                            except Exception:
                                # pyang found an error - update major version
                                update_semver(mod_details[-2], new_module, 0)
                # If we are adding new module in the middle (between two revisions) of existing modules with this name
                else:
                    name = mod_details[0]['name']
                    revision = mod_details[0]['revision']
                    mod_details[0]['semver'] = '1.0.0'
                    response = data[name][revision]
                    response['derived-semantic-version'] = '1.0.0'
                    add_to_new_modules(response)

                    for x in range(1, len(mod_details)):
                        name = mod_details[x]['name']
                        revision = mod_details[x]['revision']
                        module = data[name][revision]
                        if mod_details[x]['compilation'] != 'passed':
                            update_semver(mod_details[x - 1], module, 0)
                            mod_details[x]['semver'] = increment_semver(mod_details[x - 1]['semver'], 0)
                        else:
                            # The previous revision failed compilation - update major version
                            if mod_details[x - 1]['compilation'] != 'passed':
                                update_semver(mod_details[x - 1], module, 0)
                                mod_details[x]['semver'] = increment_semver(mod_details[x - 1]['semver'], 0)
                            else:
                                # Both actual and previous revisions have the compilation status 'passed'
                                try:
                                    trees = get_trees(mod_details[x], mod_details[x - 1])
                                    # if schemas do not exist, trees will be None
                                    if trees:
                                        new_yang_tree, old_yang_tree = trees
                                        if trees_match(new_yang_tree, old_yang_tree):
                                            # yang trees are the same - update only the patch version
                                            update_semver(mod_details[x - 1], module, 2)
                                            mod_details[x]['semver'] = increment_semver(mod_details[x - 1]['semver'], 2)
                                        else:
                                            # yang trees have changed - update minor version
                                            update_semver(mod_details[x - 1], module, 1)
                                            mod_details[x]['semver'] = increment_semver(mod_details[x - 1]['semver'], 1)
                                except Exception:
                                    # pyang found an error - update major version
                                    update_semver(mod_details[x - 1], module, 0)
                                    mod_details[x]['semver'] = increment_semver(mod_details[x - 1]['semver'], 0)

        if self._unavailable_modules:
            mf = messageFactory.MessageFactory()
            mf.send_github_unavailable_schemas(self._unavailable_modules)
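
The version-bump policy implemented above can be condensed into a small standalone sketch (a paraphrase for illustration, not part of the original module):

def derive_bump(prev_compilation: str, new_compilation: str, trees_equal: bool) -> int:
    """Return the semver position to bump: 0 = major, 1 = minor, 2 = patch."""
    if new_compilation != 'passed' or prev_compilation != 'passed':
        return 0  # a failed compilation forces a major bump
    # In the real code, pyang or tree-generation errors also force a major bump.
    return 2 if trees_equal else 1  # identical trees: patch; changed trees: minor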
Example #8
def on_request(ch, method, props, body):
    """Function called when something was sent from API sender. This function
    will process all the requests that would take too long to process for API.
    When the processing is done we will sent back the result of the request
    which can be either 'Failed' or 'Finished successfully' with corespondent
    correlation id. If the request 'Failed' it will sent back also a reason why
    it failed.
            Arguments:
                :param body: (str) String of arguments that need to be processed
                separated by '#'.
    """
    try:
        if sys.version_info >= (3, 4):
            body = body.decode(encoding='utf-8', errors='strict')
        LOGGER.info('Received request with body {}'.format(body))
        arguments = body.split('#')
        if body == 'run_ietf':
            LOGGER.info('Running all ietf and openconfig modules')
            final_response = run_ietf()
        elif 'github' == arguments[-1]:
            LOGGER.info('Github automated message starting to populate')
            paths_plus = arguments[arguments.index('repoLocalDir'):]
            LOGGER.info('paths plus {}'.format(paths_plus))
            arguments = arguments[:arguments.index('repoLocalDir')]
            LOGGER.info('arguments {}'.format(arguments))
            paths = paths_plus[1:-2]
            LOGGER.info('paths {}'.format(paths))
            try:
                local_dir = paths_plus[-2]
                for path in paths:
                    # Build the argument list per path so that '--dir' flags do
                    # not accumulate across iterations.
                    call_args = arguments + ['--dir', local_dir + '/' + path]
                    with open(temp_dir + '/log_trigger.txt', 'w') as f:
                        subprocess.check_call(call_args, stderr=f)
                final_response = __response_type[1]
            except subprocess.CalledProcessError as e:
                final_response = __response_type[0]
                mf = messageFactory.MessageFactory()
                mf.send_automated_procedure_failed(
                    arguments, temp_dir + "/log_no_sdo_api.txt")
                LOGGER.error(
                    'check log_trigger.txt Error calling process populate.py because {}\n\n with error {}'
                    .format(e.stdout, e.stderr))
            except Exception:
                final_response = __response_type[0]
                LOGGER.error(
                    "check log_trigger.txt failed to process github message with error {}"
                    .format(sys.exc_info()[0]))
        else:
            global all_modules
            all_modules = None
            if arguments[-3] == 'DELETE':
                LOGGER.info('Deleting single module')
                if 'http' in arguments[0]:
                    final_response = process_module_deletion(arguments)
                    credentials = arguments[3:5]
                else:
                    final_response = process_vendor_deletion(arguments)
                    credentials = arguments[7:9]
            elif arguments[-3] == 'DELETE_MULTIPLE':
                LOGGER.info('Deleting multiple modules')
                final_response = process_module_deletion(arguments, True)
                credentials = arguments[3:5]
            elif '--sdo' in arguments[2]:
                final_response = process_sdo(arguments)
                credentials = arguments[11:13]
                direc = arguments[6]
                shutil.rmtree(direc)
            else:
                final_response = process_vendor(arguments)
                credentials = arguments[10:12]
                direc = arguments[5]
                shutil.rmtree(direc)
            if final_response.split('#split#')[0] == __response_type[1]:
                res = make_cache(credentials)
                if res.status_code != 201:
                    final_response = __response_type[
                        0] + '#split#Server error-> could not reload cache'

                if all_modules:
                    complicatedAlgorithms = ModulesComplicatedAlgorithms(
                        log_directory, yangcatalog_api_prefix, credentials,
                        confd_protocol, confd_ip, confdPort, save_file_dir,
                        None, all_modules, yang_models, temp_dir)
                    complicatedAlgorithms.parse_non_requests()
                    complicatedAlgorithms.parse_requests()
                    complicatedAlgorithms.populate()
    except Exception as e:
        final_response = __response_type[0]
        LOGGER.error("receiver failed with message {}".format(e))
    LOGGER.info('Receiver is done with id - {} and message = {}'.format(
        props.correlation_id, str(final_response)))

    with open('{}/correlation_ids'.format(temp_dir), 'r') as f:
        lines = f.readlines()
    with open('{}/correlation_ids'.format(temp_dir), 'w') as f:
        for line in lines:
            if props.correlation_id in line:
                new_line = '{} -- {} - {}\n'.format(datetime.now().ctime(),
                                                    props.correlation_id,
                                                    str(final_response))
                f.write(new_line)
            else:
                f.write(line)
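
For context, the body this consumer parses is a '#'-joined argument string; a minimal sketch of the sender side (the values are hypothetical, patterned on the trigger_populate example above):

arguments = ['python', 'parseAndPopulate/populate.py', '--api-ip', '127.0.0.1',
             'repoLocalDir', 'vendor/cisco/xr/771', '/var/yang/YangModels/yang', 'github']
message = '#'.join(arguments)  # what on_request receives as its body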
Example #9
def prepare_to_indexing(yc_api_prefix,
                        modules_to_index,
                        credentials,
                        apiIp=None,
                        sdo_type=False,
                        delete=False,
                        from_api=True,
                        force_indexing=True,
                        LOOGER_temp=None,
                        saveFilesDir=None,
                        tempDir=None):
    """ Sends the POST request which will activate indexing script for modules which will
    help to speed up process of searching. It will create a json body of all the modules
    containing module name and path where the module can be found if we are adding new
    modules. Other situation can be if we need to delete module. In this case we are sending
    list of modules that need to be deleted.
            Arguments:
                :param apiIp: apiIp in case we can not use receiver s because other module is calling this method
                :param LOOGER_temp: LOGGER in case we can not use receiver s because other module is calling this method
                :param yc_api_prefix: (str) prefix for sending request to api
                :param modules_to_index: (json file) prepare.json file generated while parsing
                    all the modules. This file is used to iterate through all the modules.
                :param credentials: (list) Basic authorization credentials - username, password
                    respectively.
                :param sdo_type: (bool) Whether or not it is sdo that needs to be sent.
                :param delete: (bool) Whether or not we are deleting module.
                :param from_api: (bool) Whether or not api sent the request to index.
                :param set_key: (str) String containing key to confirm that it is receiver that sends data. This is
                    is verified before indexing takes place.
                :param force_indexing: (bool) Whether or not we should force indexing even if module exists in cache.
    """
    global api_ip
    global LOGGER
    global save_file_dir
    global temp_dir
    if tempDir is not None:
        temp_dir = tempDir
    if apiIp is not None:
        api_ip = apiIp
    if LOOGER_temp is not None:
        LOGGER = LOOGER_temp
    if saveFilesDir is not None:
        save_file_dir = saveFilesDir
    LOGGER.info('Sending data for indexing')
    mf = messageFactory.MessageFactory()
    if delete:
        body_to_send = json.dumps({'modules-to-delete': modules_to_index},
                                  indent=4)

        mf.send_removed_yang_files(body_to_send)
        for mod in modules_to_index:
            name, revision_organization = mod.split('@')
            revision, organization = revision_organization.split('/')
            path_to_delete_local = "{}/{}@{}.yang".format(
                save_file_dir, name, revision)
            data = {'input': {'dependents': [{'name': name}]}}

            response = requests.post(yc_api_prefix + 'search-filter',
                                     auth=(credentials[0], credentials[1]),
                                     # 'data' already contains the 'input' wrapper
                                     json=data)
            if response.status_code == 201:
                modules = response.json()
                for mod in modules:
                    m_name = mod['name']
                    m_rev = mod['revision']
                    m_org = mod['organization']
                    url = ('{}://{}:{}/api/config/catalog/modules/module/'
                           '{},{},{}/dependents/{}'.format(
                               confd_protocol, confd_ip, confdPort, m_name,
                               m_rev, m_org, name))
                    requests.delete(url,
                                    auth=(credentials[0], credentials[1]),
                                    headers={
                                        'Content-Type':
                                        'application/vnd.yang.data+json'
                                    })
            if os.path.exists(path_to_delete_local):
                os.remove(path_to_delete_local)
    else:
        with open(modules_to_index, 'r') as f:
            sdos_json = json.load(f)
        post_body = {}
        load_new_files_to_github = False
        if from_api:
            if sdo_type:
                prefix = 'sdo/'
            else:
                prefix = 'vendor/'

            for module in sdos_json['module']:
                url = '{}search/modules/{},{},{}'.format(
                    yc_api_prefix, module['name'], module['revision'],
                    module['organization'])
                response = requests.get(url,
                                        auth=(credentials[0], credentials[1]),
                                        headers={
                                            'Content-Type':
                                            'application/vnd.yang.data+json',
                                            'Accept':
                                            'application/vnd.yang.data+json'
                                        })
                code = response.status_code
                if force_indexing or code not in (200, 201, 204):
                    if module.get('schema'):
                        path = prefix + module['schema'].split(
                            'githubusercontent.com/')[1]
                        path = os.path.abspath(temp_dir + '/' + path)
                    else:
                        path = 'module does not exist'
                    post_body['{}@{}/{}'.format(module['name'], module['revision'],
                                                module['organization'])] = path
        else:
            for module in sdos_json['module']:
                url = '{}search/modules/{},{},{}'.format(
                    yc_api_prefix, module['name'], module['revision'],
                    module['organization'])
                response = requests.get(url,
                                        auth=(credentials[0], credentials[1]),
                                        headers={
                                            'Content-Type':
                                            'application/vnd.yang.data+json',
                                            'Accept':
                                            'application/vnd.yang.data+json'
                                        })
                code = response.status_code

                if code not in (200, 201, 204):
                    load_new_files_to_github = True
                if force_indexing or code not in (200, 201, 204):
                    path = '{}/{}@{}.yang'.format(save_file_dir,
                                                  module.get('name'),
                                                  module.get('revision'))
                    post_body['{}@{}/{}'.format(module['name'], module['revision'],
                                                module['organization'])] = path

        if post_body:
            body_to_send = json.dumps({'modules-to-index': post_body}, indent=4)
        else:
            body_to_send = ''
        if post_body and not force_indexing:
            mf.send_added_new_yang_files(body_to_send)
        if load_new_files_to_github:
            LOGGER.info('Starting a new process to populate github')
            cmd = ['python', '../ietfYangDraftPull/draftPull.py']
            proc = subprocess.Popen(cmd, close_fds=True)
            LOGGER.info('Populating github with process {}'.format(proc))
    return body_to_send
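
A minimal usage sketch of the deletion path (the credentials, paths, and module key are hypothetical, and the module-level ConfD globals are assumed to be configured):

body = prepare_to_indexing(
    'https://yangcatalog.org/api/',
    ['ietf-interfaces@2018-02-20/ietf'],  # name@revision/organization keys
    ['admin', 'secret'],                  # hypothetical credentials
    delete=True,
    LOOGER_temp=logging.getLogger('indexing-demo'),
    saveFilesDir='/var/yang/all_modules',
    tempDir='/var/yang/tmp',
)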
Example #10
    def on_request_thread_safe(self, properties, body_raw: bytes):
        """Function called when something was sent from API sender. This function
        will process all the requests that would take too long to process for API.
        When the processing is done we will sent back the result of the request
        which can be either 'Failed' or 'Finished successfully' with corespondent
        correlation ID. If the request 'Failed' it will sent back also a reason why
        it failed.
                Arguments:
                    :param body: (str) String of arguments that need to be processed
                    separated by '#'.
        """
        config_reloaded = False
        status: StatusMessage
        details: str = ''

        try:
            body = body_raw.decode()
            arguments = body.split('#')
            if body == 'run_ietf':
                self.LOGGER.info('Running all ietf and openconfig modules')
                status, details = self.run_ietf()
            elif body == 'reload_config':
                status = self.load_config()
                config_reloaded = True
            elif 'run_ping' == arguments[0]:
                status = self.run_ping(arguments[1])
            elif 'run_script' == arguments[0]:
                status = self.run_script(arguments[1:])
            elif 'github' == arguments[-1]:
                self.LOGGER.info(
                    'Github automated message starting to populate')
                paths_plus = arguments[arguments.index('repoLocalDir'):]
                self.LOGGER.info('paths plus {}'.format(paths_plus))
                arguments = arguments[:arguments.index('repoLocalDir')]
                self.LOGGER.info('arguments {}'.format(arguments))
                paths = paths_plus[1:-2]
                self.LOGGER.info('paths {}'.format(paths))
                try:
                    local_dir = paths_plus[-2]
                    for path in paths:
                        # Build the argument list per path so that '--dir' flags
                        # do not accumulate across iterations.
                        call_args = arguments + ['--dir', local_dir + '/' + path]
                        if self._notify_indexing:
                            call_args.append('--notify-indexing')
                        with open(self.temp_dir + '/log_trigger.txt', 'w') as f:
                            subprocess.check_call(call_args, stderr=f)
                    status = StatusMessage.SUCCESS
                except subprocess.CalledProcessError as e:
                    status = StatusMessage.FAIL
                    mf = messageFactory.MessageFactory()
                    mf.send_automated_procedure_failed(
                        arguments, self.temp_dir + '/log_no_sdo_api.txt')
                    self.LOGGER.error(
                        'check log_trigger.txt Error calling process populate.py because {}\n\n with error {}'
                        .format(e.output, e.stderr))
                except Exception:
                    status = StatusMessage.FAIL
                    self.LOGGER.error(
                        'check log_trigger.txt failed to process github message with error {}'
                        .format(sys.exc_info()[0]))
            else:
                direc = ''
                if arguments[0] == 'DELETE-VENDORS':
                    status = self.process_vendor_deletion(arguments)
                    credentials = arguments[1:3]
                elif arguments[0] == 'DELETE-MODULES':
                    status, details = self.process_module_deletion(arguments)
                    credentials = arguments[1:3]
                elif arguments[0] == 'POPULATE-MODULES':
                    status, details = self.process(arguments)
                    i = arguments.index('--credentials')
                    credentials = arguments[i + 1:i + 3]
                    i = arguments.index('--dir')
                    direc = arguments[i + 1]
                    shutil.rmtree(direc)
                elif arguments[0] == 'POPULATE-VENDORS':
                    status, details = self.process(arguments)
                    i = arguments.index('--credentials')
                    credentials = arguments[i + 1:i + 3]
                    i = arguments.index('--dir')
                    direc = arguments[i + 1]
                    shutil.rmtree(direc)
                else:
                    assert False, 'Invalid request type'

                if status == StatusMessage.SUCCESS:
                    response = self.make_cache(credentials)
                    code = response.status_code
                    if code not in (200, 201, 204):
                        status = StatusMessage.FAIL
                        details = 'Server error-> could not reload cache'
        except Exception:
            status = StatusMessage.FAIL
            self.LOGGER.exception('receiver.py failed')
        final_response = status.value if not details else '{}#split#{}'.format(
            status.value, details)
        self.LOGGER.info(
            'Receiver is done with id - {} and message = {}'.format(
                properties.correlation_id, final_response))

        with open('{}/correlation_ids'.format(self.temp_dir), 'r') as f:
            lines = f.readlines()
        with open('{}/correlation_ids'.format(self.temp_dir), 'w') as f:
            for line in lines:
                if properties.correlation_id in line:
                    new_line = '{} -- {} - {}\n'.format(
                        datetime.now().ctime(), properties.correlation_id,
                        str(final_response))
                    f.write(new_line)
                else:
                    f.write(line)
        if config_reloaded:
            assert self.channel, 'Should only be called from self.channel.start_consuming()'
            self.channel.stop_consuming()
Example #11
                    if not same:
                        diff_files.append(file_name)
                else:
                    new_files.append(file_name)
    shutil.rmtree(repo.localdir + '/standard/ietf/RFCtemp')
    os.remove(repo.localdir + '/rfc.tgz')

    with open(exceptions, 'r') as exceptions_file:
        remove_from_new = exceptions_file.read().split('\n')
    new_files = [file_name for file_name in new_files if file_name not in remove_from_new]

    if new_files or diff_files:
        LOGGER.warning('new or modified RFC files found. Sending an E-mail')
        mf = messageFactory.MessageFactory()
        mf.send_new_rfc_message(new_files, diff_files)

    for key in ietf_draft_json:
        yang_download_link = ietf_draft_json[key][2].split('href="')[1].split(
            '">Download')[0]
        yang_download_link = yang_download_link.replace(
            'new.yangcatalog.org', 'yangcatalog.org')
        try:
            yang_raw = requests.get(yang_download_link).text
            # Use a context manager so the file handle is always closed.
            with open(
                    repo.localdir + '/experimental/ietf-extracted-YANG-modules/'
                    + key, 'w+') as yang_file:
                yang_file.write(yang_raw)
        except Exception:
            LOGGER.warning('{} - {}'.format(key, yang_download_link))
Example #12
    def __init__(self):
        self.__mf = messageFactory.MessageFactory()
        # Capture the date once so that month and day stay consistent.
        today = datetime.now().date()
        self.month = today.month
        self.day = today.day
        self.users = RedisUsersConnection()