Example #1
def extract_files(request, url: str, latest: bool, working_dir: str, remove_working_dir=True):
    config = create_config()
    yang_models = config.get('Directory-Section', 'save-file-dir')
    xym_parser = XymParser(url, working_dir)
    extracted_modules, xym_response = xym_parser.parse_and_extract()
    return create_output(request, yang_models, url, latest, working_dir, extracted_modules, xym_response,
                         remove_working_dir=remove_working_dir)
Example #2
    def get_dependencies(self):
        self.__ctx.opts.depend_recurse = True
        self.__ctx.opts.depend_ignore = []
        for p in plugin.plugins:
            p.setup_ctx(self.__ctx)
        with open(self.__infile, 'r', encoding='utf-8') as yang_file:
            module = yang_file.read()
            if module is None:
                self.LOG.info('no module provided')
            m = self.__ctx.add_module(self.__infile, module)
            if m is None:
                m = []
            else:
                m = [m]
        self.__ctx.validate()

        config = create_config()
        yang_models = config.get('Directory-Section', 'save-file-dir')
        try:
            f = io.StringIO()
            emit_depend(self.__ctx, m, f)
            out = f.getvalue()
        except Exception as e:
            out = ''

        if len(out.split(':')) == 2 and out.split(':')[1].strip() != '':
            dependencies = out.split(':')[1].strip().split(' ')
        else:
            dependencies = []
        ret = {}
        for dep in dependencies:
            ret[dep] = glob.glob(r'{}/{}@*.yang'.format(yang_models, dep))
        restore_statements()
        del self.__ctx
        return ret
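The split-on-':' logic above assumes that pyang's emit_depend() writes a single line of the form 'module-name : dep-a dep-b'. A minimal sketch with a made-up output string shows how the dependency names are extracted; the sample value is hypothetical and only illustrates the parsing:

# Hypothetical emit_depend() output; the real value depends on the module and the pyang version.
out = 'ietf-interfaces : ietf-yang-types iana-if-type'
parts = out.split(':')
if len(parts) == 2 and parts[1].strip() != '':
    dependencies = parts[1].strip().split(' ')
else:
    dependencies = []
print(dependencies)  # ['ietf-yang-types', 'iana-if-type']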
Example #3
def validate_doc(request):
    doc_type = request.path.split('/')[-1]
    payload_body = try_validate_and_load_data(request)
    doc_name = payload_body.get(doc_type)
    if doc_name.endswith('.txt'):
        doc_name = doc_name[:-4]
    logger.info('validating {} {}'.format(doc_type, doc_name))
    config = create_config()
    tmp = config.get('Directory-Section', 'temp')
    ietf_dir = config.get('Directory-Section', 'ietf-directory')
    if doc_type == 'draft':
        draft_dir = os.path.join(ietf_dir, 'my-id-archive-mirror')
        matching_drafts = fnmatch.filter(os.listdir(draft_dir), '{}*.txt'.format(doc_name))
        if matching_drafts:
            draft_file = sorted(matching_drafts)[-1]
            url = os.path.join(draft_dir, draft_file)
        else:
            url = 'https://tools.ietf.org/id/{!s}.txt'.format(doc_name)
    elif doc_type == 'rfc':
        rfc_file = 'rfc{}.txt'.format(doc_name)
        path = os.path.join(ietf_dir, 'rfc', rfc_file)
        if os.path.exists(path):
            url = path
        else:
            url = 'https://tools.ietf.org/rfc/{}'.format(rfc_file)
    while True:
        suffix = create_random_suffix()
        working_dir = '{}/yangvalidator/yangvalidator-v2-cache-{}'.format(tmp, suffix)
        if not os.path.exists(working_dir):
            break
    return extract_files(request, url, payload_body.get('latest', True), working_dir)
Example #4
def upload_draft_id(request: WSGIRequest, id: t.Optional[str]):
    """ Validate each of the uploaded documents individually in separate temporary cache directory.
    """
    config = create_config()
    tmp = config.get('Directory-Section', 'temp')
    pre_setup_dir = '{}/yangvalidator/{}'.format(tmp, id)

    result = load_pre_setup(pre_setup_dir)
    if isinstance(result, HttpResponse):
        return result
    else:
        setup = result

    status = 200
    latest = setup.get('latest')
    results = []
    working_dirs = []
    try:
        for file in request.FILES.getlist('data'):
            while True:
                suffix = create_random_suffix()
                cache_dir = '{}/yangvalidator/yangvalidator-v2-cache-{}'.format(
                    tmp, suffix)
                if not os.path.exists(cache_dir):
                    break
            os.mkdir(cache_dir)
            working_dirs.append(cache_dir)
            assert file.name is not None
            filepath = os.path.join(cache_dir, file.name)
            with open(filepath, 'wb+') as f:
                for chunk in file.chunks():
                    f.write(chunk)
            extract_files_response = extract_files(request,
                                                   filepath,
                                                   latest,
                                                   cache_dir,
                                                   remove_working_dir=False)
            status = extract_files_response.status_code
            output = json.loads(extract_files_response.content)
            output['document-name'] = file.name
            results.append(output)
    except Exception as e:
        for wd in working_dirs:
            if os.path.exists(wd):
                shutil.rmtree(wd)
        return JsonResponse(
            {
                'Error':
                'Failed to upload and validate documents - {}'.format(e)
            },
            status=400)
    results = results[0] if len(results) == 1 else results
    return JsonResponse(results, status=status, safe=False)
Example #5
def upload_file(request: WSGIRequest, id: str):
    config = create_config()
    yang_models = config.get('Directory-Section', 'save-file-dir')
    tmp = config.get('Directory-Section', 'temp')
    working_dir = '{}/yangvalidator/{}'.format(tmp, id)

    result = load_pre_setup(working_dir)
    if isinstance(result, HttpResponse):
        return result
    else:
        setup = result

    latest = setup.get('latest')
    get_from_options = setup.get('get-from-options')
    try:
        saved_files = []
        for file in request.FILES.getlist('data'):
            assert file.name is not None
            name, ext = os.path.splitext(file.name)

            if ext == '.yang':
                with open(os.path.join(working_dir, file.name), 'wb+') as f:
                    for chunk in file.chunks():
                        f.write(chunk)
                saved_files.append(file.name)

            if ext == '.zip':
                zipfilename = os.path.join(working_dir, file.name)
                with open(zipfilename, 'wb+') as f:
                    for chunk in file.chunks():
                        f.write(chunk)
                zf = ZipFile(zipfilename, 'r')
                zf.extractall(working_dir)
                saved_files.extend([
                    filename for filename in zf.namelist()
                    if filename.endswith('.yang')
                ])
    except Exception as e:
        logger.exception('Error: {} : {}'.format(working_dir, e))
        if os.path.exists(working_dir):
            shutil.rmtree(working_dir)
        return JsonResponse({'Error': 'Failed to get yang files'}, status=400)
    return create_output(request,
                         yang_models,
                         None,
                         latest,
                         working_dir,
                         saved_files,
                         choose_options=get_from_options)
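A minimal client-side sketch of calling such an upload view with the requests library; the endpoint URL and cache id below are hypothetical, only the multipart field name 'data' is taken from request.FILES.getlist('data') above.

import requests

# Hypothetical endpoint and cache id; adjust to the actual URL configuration.
cache_id = 'yangvalidator-v2-cache-AbCdEfGh'
url = 'https://yangvalidator.com/yangvalidator/v2/upload-files/{}'.format(cache_id)
with open('ietf-interfaces@2018-02-20.yang', 'rb') as module:
    response = requests.post(url, files=[('data', module)])
print(response.status_code, response.json())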
Example #6
def upload_setup(request):
    payload_body = try_validate_and_load_data(request)
    latest = payload_body.get('latest', False)
    get_from_options = payload_body.get('get-from-options', False)
    config = create_config()
    tmp = config.get('Directory-Section', 'temp')
    while True:
        suffix = create_random_suffix()
        working_dir = '{}/yangvalidator/yangvalidator-v2-cache-{}'.format(tmp, suffix)
        if not os.path.exists(working_dir):
            break
    os.mkdir(working_dir)
    with open('{}/pre-setup.json'.format(working_dir), 'w') as f:
        json.dump({'latest': latest,
                   'get-from-options': get_from_options
                   }, f)
    return JsonResponse({'output': {'cache': working_dir.split('/')[-1]}})
Example #7
def validate_doc(request: WSGIRequest):
    """ Request contains either the RFC number or the name of the Draft to be validated.
    URL (or path to cached document) is composed according to whether it is a validation of RFC or Draft.
    Cache directory name is generated and both path and URL are passed to the extract_files() method.
    """
    doc_type = request.path.split('/')[-1]
    payload_body = try_validate_and_load_data(request)
    doc_name = payload_body.get(doc_type)
    if not doc_name:
        return JsonResponse(
            {
                'Error':
                'Required property "{}" is missing or empty'.format(doc_type)
            },
            status=400)
    if doc_name.endswith('.txt'):
        doc_name = doc_name[:-4]
    logger.info('validating {} {}'.format(doc_type, doc_name))
    config = create_config()
    tmp = config.get('Directory-Section', 'temp')
    ietf_dir = config.get('Directory-Section', 'ietf-directory')
    url = ''
    if doc_type == 'draft':
        draft_dir = os.path.join(ietf_dir, 'my-id-archive-mirror')
        matching_drafts = fnmatch.filter(os.listdir(draft_dir),
                                         '{}*.txt'.format(doc_name))
        if matching_drafts:
            draft_file = sorted(matching_drafts)[-1]
            url = os.path.join(draft_dir, draft_file)
        else:
            url = 'https://tools.ietf.org/id/{!s}.txt'.format(doc_name)
    elif doc_type == 'rfc':
        rfc_file = 'rfc{}.txt'.format(doc_name)
        path = os.path.join(ietf_dir, 'rfc', rfc_file)
        if os.path.exists(path):
            url = path
        else:
            url = 'https://tools.ietf.org/rfc/{}'.format(rfc_file)
    while True:
        suffix = create_random_suffix()
        cache_dir = '{}/yangvalidator/yangvalidator-v2-cache-{}'.format(
            tmp, suffix)
        if not os.path.exists(cache_dir):
            break
    return extract_files(request, url, payload_body.get('latest', True),
                         cache_dir)
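For illustration, two hedged examples of the JSON body this view expects; the document names are placeholders, but the keys ('rfc' or 'draft', matching the last segment of the request path, and 'latest') come from the code above.

# Hypothetical request bodies; the key must match the last segment of the request path.
rfc_payload = {'rfc': '8341', 'latest': True}
# -> <ietf-directory>/rfc/rfc8341.txt if cached locally, otherwise https://tools.ietf.org/rfc/rfc8341.txt
draft_payload = {'draft': 'draft-ietf-netmod-acl-model-21', 'latest': True}
# -> newest matching file in <ietf-directory>/my-id-archive-mirror, otherwise https://tools.ietf.org/id/<name>.txt
# A trailing '.txt' on the document name is stripped before the lookup.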
Example #8
def copy_dependencies(f):
    config = create_config()
    yang_models = config.get('Directory-Section', 'save-file-dir')
    tmp = config.get('Directory-Section', 'temp')
    out = f.getvalue()
    logger.info('dependencies received in the following format: {}'.format(out))
    letters = string.ascii_letters
    suffix = ''.join(random.choice(letters) for i in range(8))
    dep_dir = '{}/yangvalidator-dependencies-{}'.format(tmp, suffix)
    os.mkdir(dep_dir)
    if len(out.split(':')) == 2:
        dependencies = out.split(':')[1].strip().split(' ')
    else:
        dependencies = []
    for dep in dependencies:
        for file in glob.glob(r'{}/{}@*.yang'.format(yang_models, dep)):
            shutil.copy(file, dep_dir)
    return dep_dir
Example #9
def upload_setup(request: WSGIRequest):
    """ Dump parameters from request into pre-setup.json file.
    This JSON file is stored in a temporary cache directory whose name is sent back in the response.
    """
    payload_body = try_validate_and_load_data(request)
    latest = payload_body.get('latest', False)
    get_from_options = payload_body.get('get-from-options', False)
    config = create_config()
    tmp = config.get('Directory-Section', 'temp')
    while True:
        suffix = create_random_suffix()
        cache_dir = '{}/yangvalidator/yangvalidator-v2-cache-{}'.format(
            tmp, suffix)
        if not os.path.exists(cache_dir):
            break
    os.mkdir(cache_dir)
    with open('{}/pre-setup.json'.format(cache_dir), 'w') as f:
        body = {'latest': latest, 'get-from-options': get_from_options}
        json.dump(body, f)
    change_ownership_recursive(cache_dir)
    return JsonResponse({'output': {'cache': cache_dir.split('/')[-1]}})
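A minimal sketch of the setup round trip from the client side, assuming a hypothetical endpoint URL; the payload keys and the 'output'/'cache' response shape are taken from upload_setup() above.

import requests

# Hypothetical endpoint; only the payload keys and the response shape come from the view above.
response = requests.post('https://yangvalidator.com/yangvalidator/v2/upload-files-setup',
                         json={'latest': True, 'get-from-options': False})
cache_id = response.json()['output']['cache']  # e.g. 'yangvalidator-v2-cache-AbCdEfGh'
# The returned cache id is then used as the 'id' argument of upload_file() / upload_draft_id().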
Example #10
def upload_draft_id(request, id):
    config = create_config()
    tmp = config.get('Directory-Section', 'temp')
    working_dir = '{}/yangvalidator/{}'.format(tmp, id)

    result = load_pre_setup(working_dir, id)
    if isinstance(result, HttpResponse):
        return result
    else:
        setup = result

    latest = setup.get('latest')
    results = []
    working_dirs = []
    try:
        for file in request.FILES.getlist('data'):
            while True:
                suffix = create_random_suffix()
                working_dir = '{}/yangvalidator/yangvalidator-v2-cache-{}'.format(tmp, suffix)
                if not os.path.exists(working_dir):
                    break
            os.mkdir(working_dir)
            working_dirs.append(working_dir)
            filepath = os.path.join(working_dir, file.name)
            with open(filepath, 'wb+') as f:
                for chunk in file.chunks():
                    f.write(chunk)
            output = json.loads(extract_files(request, filepath, latest, working_dir, remove_working_dir=False).content)
            output['document-name'] = file.name
            results.append(output)
    except Exception as e:
        for wd in working_dirs:
            if os.path.exists(wd):
                shutil.rmtree(wd)
        return JsonResponse({'Error': 'Failed to upload and validate documents - {}'.format(e)}, status=400)
    return JsonResponse(results, safe=False)
Example #11
import logging.config
import os

from yangvalidator.create_config import create_config

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

STATIC_ROOT = os.path.join(BASE_DIR, "static")

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
config = create_config()
django_secret_key = config.get('Secrets-Section',
                               'yangvalidator-secret-key').strip('"')
SECRET_KEY = django_secret_key

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False

ALLOWED_HOSTS = [
    'yangcatalog.org', 'www.yangcatalog.org', 'new.yangcatalog.org',
    'localhost', '127.0.0.1', 'yang2.amsl.com', 'www.yangvalidator.com',
    'www.yangvalidator.org', 'yangvalidator.com', 'yangvalidator.org',
    '18.224.127.129'
]

# Application definition
Example #12
def validate(request: WSGIRequest, xym_result=None, json_body=None):
    """
    Validate YANG modules using 4 different validators: yanglint, pyang, confdc, yangdump-pro.
    Check whether they are valid modules according to these validators and, if not, return the problems
    that occurred while validating with each parser.
    :param json_body: json body sent from other function
    :param request: request sent from user
    :return: HTTP response with validated yang modules
    """
    try:
        if json_body is None:
            json_body = try_validate_and_load_data(request)
    except ValueError as e:
        # Missing json content or bad json content
        return JsonResponse({'Error': 'Not a json content - {}'.format(e)}, status=400)
    except IllegalMethodError as e:
        # Method other than POST
        return JsonResponse({'Error': '{}'.format(e)}, status=405)
    to_validate = json_body.get('modules-to-validate')
    if to_validate is None:
        # Missing modules to validate
        return JsonResponse({'Error': 'No module received for validation'}, status=400)
    user_to_validate = to_validate.get('user-modules', [])
    repo_to_validate = to_validate.get('repo-modules', [])
    if len(user_to_validate) == 0 and len(repo_to_validate) == 0:
        # Missing modules to validate
        return JsonResponse({'Error': 'No module received for validation'}, status=400)

    config = create_config()
    tmp = config.get('Directory-Section', 'temp')
    yang_models = config.get('Directory-Section', 'save-file-dir')
    while True:
        suffix = create_random_suffix()
        work_dir = '{}/yangvalidator/yangvalidator-v2-workdir-{}'.format(tmp, suffix)
        if not os.path.exists(work_dir):
            break
    results = {}
    if xym_result is not None:
        results['xym'] = xym_result
    try:
        os.mkdir(work_dir)
        modules_to_validate = []
        skipped_modules = []
        dependencies = json_body.get('dependencies', {})

        # Keep this commented-out code in here. The comment several lines below explains why we want to keep it.
        # user_dependencies = dependencies.get('user-modules', [])

        repo_dependencies = dependencies.get('repo-modules', [])

        # Copy the modules that need to be validated to the working directory
        for group_to_validate, source in ((repo_to_validate, yang_models),
                                          (user_to_validate, os.path.join(tmp, 'yangvalidator', json_body['cache']))):

            for module_to_validate in group_to_validate:
                # skip modules that are in dependencies
                for repo_dependency in repo_dependencies:
                    if repo_dependency.split('@')[0] == module_to_validate.split('@')[0]:
                        skipped_modules.append(module_to_validate)
                        break
                else:
                    shutil.copy(os.path.join(source, module_to_validate), work_dir)
                    modules_to_validate.append(module_to_validate)

        if len(skipped_modules) > 0:
            results['warning'] = 'The following modules {} were skipped from validation because you chose different' \
                                 ' repo modules with the same name as dependencies'.format(', '.join(skipped_modules))

        # The UI sends the user's dependencies anyway for better code readability, but they are not used here anymore.
        # Please keep the following code to understand why we are receiving user_dependencies.
        # These dependencies are already copied to the working directory in the step above, when copying the user
        # modules to validate.
        #
        # for dependency in user_dependencies:
        #     shutil.copy(os.path.join(tmp, json_body['cache'], dependency), work_dir)

        # Copy the rest of the dependencies to the working directory
        for dependency in repo_dependencies:
            shutil.copy(os.path.join(yang_models, dependency), work_dir)
        # Validate each yang file with all parsers, using only the working directory for all dependencies
        for module_to_validate in modules_to_validate:
            results[module_to_validate] = {}
            for Parser, name in ((PyangParser, 'pyang'), (ConfdParser, 'confd'),
                                 (YanglintParser, 'yanglint'), (YangdumpProParser, 'yangdump-pro')):
                parser_results = Parser([work_dir], module_to_validate, work_dir).parse_module()
                results[module_to_validate][name] = parser_results
    except Exception as e:
        results['error'] = 'Failed to parse a document - {}'.format(e)
        logger.exception('Failed to parse module - {}'.format(e))
    finally:
        logger.info('Removing temporary directories')
        if os.path.exists(work_dir):
            shutil.rmtree(work_dir)

        cache_tmp_path = os.path.join(tmp, 'yangvalidator', json_body.get('cache', ''))
        if os.path.exists(cache_tmp_path):
            shutil.rmtree(cache_tmp_path)
    return JsonResponse({'output': results})
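A hedged sketch of the JSON body validate() works with; the module and cache names are invented, but the top-level keys match the code above.

# Hypothetical request body; only the key names are taken from validate() above.
json_body = {
    'modules-to-validate': {
        'user-modules': ['my-module@2024-01-01.yang'],
        'repo-modules': ['ietf-interfaces@2018-02-20.yang'],
    },
    'dependencies': {
        'repo-modules': ['ietf-yang-types@2013-07-15.yang'],
        'user-modules': [],  # still sent by the UI but no longer used, see the comment in the code above
    },
    'cache': 'yangvalidator-v2-cache-AbCdEfGh',
}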
Example #13
def try_validate_and_load_data(request: WSGIRequest):
    """
    Check if the request is a POST and try to parse its byte-string body as JSON
    :param request: request sent from user
    :return: Parsed JSON data
    """
    if request.method != 'POST':
        raise IllegalMethodError(request.method)
    return json.loads(request.body)


def create_random_suffix():
    """
    Create a random suffix used to name a new temp directory
    :return: suffix of 8 random letters
    """
    letters = string.ascii_letters
    return ''.join(random.choice(letters) for i in range(8))
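The loop that combines create_random_suffix() with an os.path.exists() check is repeated in several of the views above; a hypothetical helper (not part of the project) could factor it out, relying on os and create_random_suffix() from this module:

def create_unique_cache_dir(tmp: str, prefix: str = 'yangvalidator-v2-cache') -> str:
    """Hypothetical helper: create and return an unused '<tmp>/yangvalidator/<prefix>-<suffix>' directory."""
    while True:
        suffix = create_random_suffix()
        cache_dir = '{}/yangvalidator/{}-{}'.format(tmp, prefix, suffix)
        if not os.path.exists(cache_dir):
            os.mkdir(cache_dir)
            return cache_dir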


def check_missing_amount_one_only(missing: dict):
    for val in missing.values():
        if len(val) > 1:
            return False
    return True


if not os.path.exists('{}/yangvalidator'.format(create_config().get('Directory-Section', 'temp'))):
    os.mkdir('{}/yangvalidator'.format(create_config().get('Directory-Section', 'temp')))