Example #1
def trigger_populate():
    """Handle a GitHub push webhook: queue a populate job for directories with changed platform-metadata.json files."""
    app.logger.info('Trigger populate if necessary')
    repoutil.pull(ac.d_yang_models_dir)
    try:
        assert request.json, 'expected a JSON body from the GitHub webhook'
        commits = request.json.get('commits')
        paths = set()
        new = []
        mod = []
        if commits:
            # collect the directories of every added/modified platform-metadata.json
            for commit in commits:
                for added in commit.get('added') or []:
                    if 'platform-metadata.json' in added:
                        path = os.path.dirname(added)
                        paths.add(path)
                        new.append(path)
                for modified in commit.get('modified') or []:
                    if 'platform-metadata.json' in modified:
                        path = os.path.dirname(modified)
                        paths.add(path)
                        mod.append(path)
        if paths:
            mf = messageFactory.MessageFactory()
            mf.send_new_modified_platform_metadata(new, mod)
            app.logger.info('Forking the repo')
            try:
                populate_path = os.path.join(os.environ['BACKEND'], 'parseAndPopulate/populate.py')
                arguments = ['python', populate_path, '--api-protocol', ac.g_protocol_api,
                             '--api-port', ac.w_api_port, '--api-ip', ac.w_ip,
                             '--result-html-dir', ac.w_result_html_dir,
                             '--credentials', ac.s_confd_credentials[0], ac.s_confd_credentials[1],
                             '--save-file-dir', ac.d_save_file_dir, 'repoLocalDir']
                arguments = arguments + list(paths) + [ac.d_yang_models_dir, 'github']
                ac.sender.send('#'.join(arguments))
            except Exception:
                app.logger.exception('Could not populate after git push')
    except Exception as e:
        app.logger.error('Automated github webhook failure - {}'.format(e))

    return {'info': 'Success'}
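
The handler above only reads the commits[].added and commits[].modified lists of a GitHub push event. A minimal sketch of a payload that would queue a populate run (the repository paths are illustrative):

# Hypothetical push-event fragment; trigger_populate() collects the parent
# directories of every added/modified platform-metadata.json file.
push_event = {
    'commits': [
        {
            'added': ['vendor/cisco/xr/701/platform-metadata.json'],
            'modified': ['vendor/cisco/xr/702/platform-metadata.json'],
        },
    ],
}
# For this payload: paths == {'vendor/cisco/xr/701', 'vendor/cisco/xr/702'},
# new == ['vendor/cisco/xr/701'], mod == ['vendor/cisco/xr/702'].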
Example #2
def get_repo(repo_url: str, owner: str, repo_name: str) -> repoutil.RepoUtil:
    """Return a repo handle, reusing the local YangModels/yang clone when possible."""
    if owner == 'YangModels' and repo_name == 'yang':
        app.logger.info(
            'Using repo already downloaded from {}'.format(repo_url))
        repoutil.pull(ac.d_yang_models_dir)
        try:
            yang_models_repo = repoutil.load(ac.d_yang_models_dir, github_url)
        except InvalidGitRepositoryError:
            raise Exception("Couldn't load YangModels/yang from directory")
        return yang_models_repo
    else:
        app.logger.info('Downloading repo {}'.format(repo_url))
        try:
            repo = repoutil.ModifiableRepoUtil(repo_url)
            return repo
        except GitCommandError as e:
            abort(
                400,
                description=
                'bad request - could not clone the Github repository. Please check owner,'
                ' repository and path of the request - {}'.format(e.stderr))
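
A sketch of how get_repo might be called (the URL and names are illustrative; RepoUtil exposes local_dir, as the other examples show):

# Illustrative call: reuses the already-downloaded YangModels/yang clone.
repo = get_repo('https://github.com/YangModels/yang', 'YangModels', 'yang')
print(repo.local_dir)  # local working directory of the clone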
Example #3
    def test_pull(self):
        ru.pull(self.repo.local_dir)
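
The fixture used by the test is not shown; a minimal sketch of a test class, assuming repoutil is imported as ru and that ModifiableRepoUtil clones the given URL (as in example #2):

import unittest

import repoutil as ru


class TestRepoutil(unittest.TestCase):

    def setUp(self):
        # hypothetical fixture: clone a repository that test_pull can update
        self.repo = ru.ModifiableRepoUtil('https://github.com/YangModels/yang')

    def test_pull(self):
        ru.pull(self.repo.local_dir)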
Example #4
def add_vendors():
    """Endpoint is used to add new vendors using the API.
    PUT request is used for updating each vendor in request body.
    POST request is used for creating new vendors that are not in ConfD/Redis yet.
    First it checks if the sent request is ok and if so, it will send another request
    to the receiver which will work on adding/updating vendors while this request
    will send a "job_id" in the response back to the user.
    User is able to check the success of the job using this "job_id".

    :return response with "job_id" that the user can use to check whether
            the job is still running or Failed or Finished successfully.
    """
    if not request.json:
        abort(
            400,
            description=
            'bad request - you need to input json body that conforms with'
            ' platform-implementation-metadata.yang module. Received no json')
    body = request.json

    platforms_contents = body.get('platforms')
    if platforms_contents is None:
        abort(
            400,
            description=
            'bad request - "platforms" json object is missing and is mandatory'
        )
    platform_list = platforms_contents.get('platform')
    if platform_list is None:
        abort(400,
              description=
              'bad request - "platform" json list is missing and is mandatory')

    app.logger.info('Adding vendor with body\n{}'.format(
        json.dumps(body, indent=2)))
    tree_created = False
    authorization = authorize_for_vendors(request, body)
    if authorization is not True:
        abort(401,
              description='User not authorized to supply data for this {}'.
              format(authorization))

    dst_path = os.path.join(
        ac.d_save_requests, 'vendor-{}.json'.format(
            datetime.utcnow().strftime(backup_date_format)))
    if not os.path.exists(ac.d_save_requests):
        os.mkdir(ac.d_save_requests)
    with open(dst_path, 'w') as f:
        json.dump(body, f)

    response = app.confdService.put_platform_metadata(json.dumps(body))

    if response.status_code not in (200, 201, 204):
        abort(
            400,
            description=
            'The body you have provided could not be parsed. ConfD error text:\n{}\n'
            'Error code: {}'.format(response.text, response.status_code))

    # find the first unused numbered directory under the temp dir
    direc_num = 0
    while os.path.isdir(os.path.join(ac.d_temp, str(direc_num))):
        direc_num += 1
    direc = os.path.join(ac.d_temp, str(direc_num))
    try:
        os.makedirs(direc)
    except OSError as e:
        # be happy if someone already created the path
        if e.errno != errno.EEXIST:
            raise

    repos: t.Dict[str, repoutil.RepoUtil] = {}
    missing_msg = 'bad request - at least one platform object is missing mandatory field {}'
    for platform in platform_list:
        module_list_file = platform.get('module-list-file')
        if module_list_file is None:
            abort(400, description=missing_msg.format('module-list-file'))
        xml_path = module_list_file.get('path')
        if xml_path is None:
            abort(400,
                  description=missing_msg.format('module-list-file["path"]'))
        file_name = os.path.basename(xml_path)
        repo_name = module_list_file.get('repository')
        if repo_name is None:
            abort(400,
                  description=missing_msg.format(
                      'module-list-file["repository"]'))
        owner = module_list_file.get('owner')
        if owner is None:
            abort(400,
                  description=missing_msg.format('module-list-file["owner"]'))
        if request.method == 'POST':
            # POST only creates new vendors: skip any platform whose module
            # list file already exists in the YangModels/yang clone
            repoutil.pull(ac.d_yang_models_dir)
            if os.path.isfile(os.path.join(ac.d_yang_models_dir, xml_path)):
                continue

        dir_in_repo = os.path.dirname(xml_path)
        repo_url = os.path.join(github_url, owner, repo_name)

        if repo_url not in repos:
            repos[repo_url] = get_repo(repo_url, owner, repo_name)

        # needed to later construct the schema
        module_list_file['commit-hash'] = repos[repo_url].get_commit_hash(
            branch=module_list_file.get('branch', 'HEAD'))

        save_to = os.path.join(direc, owner,
                               repo_name.split('.')[0], dir_in_repo)

        try:
            shutil.copytree(os.path.join(repos[repo_url].local_dir,
                                         dir_in_repo),
                            save_to,
                            ignore=shutil.ignore_patterns(
                                '*.json', '*.xml', '*.sh', '*.md', '*.txt',
                                '*.bin'))
        except OSError:
            # ignore copy errors, e.g. when the destination already exists
            pass
        json_name = '{}.json'.format(file_name.split('.')[0])
        with open(os.path.join(save_to, json_name), 'w') as f:
            json.dump(platform, f)
        shutil.copy(
            os.path.join(repos[repo_url].local_dir, module_list_file['path']),
            save_to)
        tree_created = True

    arguments = [
        'POPULATE-VENDORS', '--dir', direc, '--api', '--credentials',
        ac.s_confd_credentials[0], ac.s_confd_credentials[1],
        repr(tree_created)
    ]
    job_id = ac.sender.send('#'.join(arguments))
    app.logger.info('Running populate.py with job_id {}'.format(job_id))
    return ({'info': 'Verification successful', 'job-id': job_id}, 202)
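
A minimal request body that passes the validation above (all values are illustrative; the authoritative schema is the platform-implementation-metadata.yang module):

# Illustrative payload: add_vendors() requires platforms.platform[] and, per
# platform, a module-list-file with path, repository and owner ('branch' is
# optional and defaults to 'HEAD' when the commit hash is resolved).
body = {
    'platforms': {
        'platform': [
            {
                'module-list-file': {
                    'path': 'vendor/cisco/xr/701/capabilities-ncs5500.xml',
                    'repository': 'yang',
                    'owner': 'YangModels',
                },
            },
        ],
    },
}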
Example #5
parser.add_argument('--config-path',
                    type=str,
                    default='/etc/yangcatalog/yangcatalog.conf',
                    help='Set path to config file')
args = parser.parse_args()
config_path = args.config_path
config = ConfigParser.ConfigParser(
    interpolation=ConfigParser.ExtendedInterpolation())
config.read(config_path)
dbHost = config.get('Validate-Section', 'dbIp')
dbName = config.get('Validate-Section', 'dbName')
dbUser = config.get('Validate-Section', 'dbUser')
dbPass = config.get('Validate-Section', 'dbPassword')
dbData = connect()
yang_models = config.get('Directory-Section', 'yang_models_dir')
pull(yang_models)
vendor_path = None
sdo_path = None
for row in dbData:
    while True:
        print('The user ' + row[5] + ' ' + row[6] + ' (' + row[1] + ')' +
              ' is from organization ' + row[4])
        vendor_access = query_yes_no('Do they need vendor access?')
        if vendor_access:
            vendor_path = query_create('What is their vendor branch ')
        sdo_access = query_yes_no('Do they need sdo (model) access?')
        if sdo_access:
            sdo_path = query_create('What is their model organization ')
        want_to_create = False
        if sdo_path or vendor_path:
            want_to_create = query_yes_no('Do you want to create user ' +
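
The query_yes_no and query_create helpers are not shown in this fragment; a minimal sketch of query_yes_no, assuming it simply prompts until it can return a bool:

def query_yes_no(question: str) -> bool:
    # hypothetical helper: ask on stdin until the user answers yes or no
    valid = {'yes': True, 'y': True, 'no': False, 'n': False}
    while True:
        answer = input('{} [y/n]: '.format(question)).strip().lower()
        if answer in valid:
            return valid[answer]
        print("Please answer 'yes' or 'no'.")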
Example #6
def main():
    parser = argparse.ArgumentParser(
        description='Process changed modules in a git repo')
    parser.add_argument('--config-path',
                        type=str,
                        default=os.environ['YANGCATALOG_CONFIG_PATH'],
                        help='Set path to config file')
    args = parser.parse_args()
    config_path = args.config_path
    config = create_config(config_path)
    log_directory = config.get('Directory-Section', 'logs')
    yang_models = config.get('Directory-Section', 'yang-models-dir')
    changes_cache_path = config.get('Directory-Section', 'changes-cache')
    failed_changes_cache_path = config.get('Directory-Section',
                                           'changes-cache-failed')
    delete_cache_path = config.get('Directory-Section', 'delete-cache')
    lock_file = config.get('Directory-Section', 'lock')
    lock_file_cron = config.get('Directory-Section', 'lock-cron')
    json_ytree = config.get('Directory-Section', 'json-ytree')
    save_file_dir = config.get('Directory-Section', 'save-file-dir')
    threads = int(config.get('General-Section', 'threads'))

    LOGGER = log.get_logger(
        'process_changed_mods',
        os.path.join(log_directory, 'process-changed-mods.log'))
    LOGGER.info('Starting process-changed-mods.py script')

    if os.path.exists(lock_file) or os.path.exists(lock_file_cron):
        # we can exit early since this script is run by a cronjob every 3 minutes
        LOGGER.warning(
            'Temporary lock file used by something else. Exiting script !!!')
        sys.exit()
    try:
        open(lock_file, 'w').close()
        open(lock_file_cron, 'w').close()
    except Exception:
        # remove whichever lock files were actually created
        for lock in (lock_file, lock_file_cron):
            if os.path.exists(lock):
                os.unlink(lock)
        LOGGER.error(
            'Temporary lock file could not be created although it is not locked'
        )
        sys.exit()

    changes_cache = load_changes_cache(changes_cache_path)
    delete_cache = load_delete_cache(delete_cache_path)

    if not changes_cache and not delete_cache:
        LOGGER.info('No new modules are added or removed. Exiting script!!!')
        os.unlink(lock_file)
        os.unlink(lock_file_cron)
        sys.exit()

    LOGGER.info('Pulling latest YangModels/yang repository')
    repoutil.pull(yang_models)

    LOGGER.info('Trying to initialize Elasticsearch indices')
    es_manager = ESManager()
    for index in ESIndices:
        if not es_manager.index_exists(index):
            es_manager.create_index(index)

    logging.getLogger('elasticsearch').setLevel(logging.ERROR)

    LOGGER.info('Running cache files backup')
    backup_cache_files(delete_cache_path)
    backup_cache_files(changes_cache_path)
    os.unlink(lock_file)

    if delete_cache:
        for module in delete_cache:
            name, rev_org = module.split('@')
            revision, organization = rev_org.split('/')
            revision = validate_revision(revision)

            module = {
                'name': name,
                'revision': revision,
                'organization': organization
            }
            es_manager.delete_from_indices(module)

    if changes_cache:
        recursion_limit = sys.getrecursionlimit()
        sys.setrecursionlimit(50000)
        module_count = 0
        try:
            for module_key, module_path in changes_cache.items():
                module_count += 1
                name, rev_org = module_key.split('@')
                revision, organization = rev_org.split('/')
                revision = validate_revision(revision)
                name_revision = '{}@{}'.format(name, revision)

                module = {
                    'name': name,
                    'revision': revision,
                    'organization': organization,
                    'path': module_path
                }
                LOGGER.info('yindex on module {}. module {} out of {}'.format(
                    name_revision, module_count, len(changes_cache)))
                check_file_availability(module, LOGGER)

                try:
                    build_indices(es_manager, module, save_file_dir,
                                  json_ytree, threads, LOGGER)
                except Exception:
                    LOGGER.exception(
                        'Problem while processing module {}'.format(
                            module_key))
                    # remember the failed module so a later run can retry it
                    try:
                        with open(failed_changes_cache_path, 'r') as reader:
                            failed_modules = json.load(reader)
                    except (FileNotFoundError, json.decoder.JSONDecodeError):
                        failed_modules = {}
                    if module_key not in failed_modules:
                        failed_modules[module_key] = module_path
                    with open(failed_changes_cache_path, 'w') as writer:
                        json.dump(failed_modules, writer)
        except Exception:
            sys.setrecursionlimit(recursion_limit)
            os.unlink(lock_file_cron)
            LOGGER.exception('Error while running build_yindex.py script')
            LOGGER.info('Job failed execution')
            sys.exit()

        sys.setrecursionlimit(recursion_limit)
    os.unlink(lock_file_cron)
    LOGGER.info('Job finished successfully')
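
Both caches above key modules as name@revision/organization; a small illustration of the parsing done in the loops (the entry itself is made up):

# Illustrative cache entry, split exactly as in the delete/changes loops.
module_key = 'ietf-interfaces@2018-02-20/ietf'
name, rev_org = module_key.split('@')
revision, organization = rev_org.split('/')
assert (name, revision, organization) == ('ietf-interfaces', '2018-02-20', 'ietf')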