Example 1
    def __init__(self, log_directory, temp_dir,
                 rabbitmq_host='127.0.0.1',
                 rabbitmq_port=None,
                 rabbitmq_virtual_host=None,
                 rabbitmq_username='******',
                 rabbitmq_password='******'):
        self.LOGGER = log.get_logger('sender', log_directory + '/yang.log')
        self.LOGGER.debug('Initializing sender')
        self.__response_type = ['Failed', 'In progress',
                                'Finished successfully', 'does not exist']

        credentials = pika.PlainCredentials(
            username=rabbitmq_username,
            password=rabbitmq_password)
        # Try to connect to RabbitMQ until it succeeds.
        while True:
            try:
                self.connection = pika.BlockingConnection(
                    pika.ConnectionParameters(
                        host=rabbitmq_host,
                        port=rabbitmq_port,
                        virtual_host=rabbitmq_virtual_host,
                        credentials=credentials,
                        heartbeat=0))
                break
            except pika.exceptions.ConnectionClosed:
                self.LOGGER.debug('Cannot connect to rabbitMQ, trying after a sleep')
                time.sleep(60)

        self.channel = self.connection.channel()
        self.channel.queue_declare(queue='module_queue')
        self.__temp_dir = temp_dir
        self.__response_file = 'correlation_ids'
        self.LOGGER.debug('Sender initialized')
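A minimal usage sketch for this constructor, assuming the class is named Sender (as the 'sender' logger name suggests); the directory paths and message body are hypothetical, and only the 'module_queue' queue name comes from the code above:

# Hypothetical usage -- paths and payload are illustrative, not from the source.
sender = Sender('/var/yang/logs', '/var/yang/tmp',
                rabbitmq_port=5672,
                rabbitmq_virtual_host='/')
# The constructor already declared 'module_queue', so a caller could publish to it:
sender.channel.basic_publish(exchange='',
                             routing_key='module_queue',
                             body='some-job-arguments')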
Example 2
    def __init__(self, private_dir, log_directory):
        LOGGER = log.get_logger(__name__, log_directory + '/parseAndPopulate.log')
        LOGGER.debug('Loading compilation statuses and results')
        self.names = []
        with open(private_dir + '/json_links', 'r') as f:
            for line in f:
                self.names.append(line.replace('.json', '').replace('\n', ''))

        self.status = {}
        self.headers = {}
        for name in self.names:
            with open('{}/{}.json'.format(private_dir, name), 'r') as f:
                self.status[name] = json.load(f)
            if name == 'IETFYANGRFC':
                with open('{}/{}.html'.format(private_dir, name)) as f:
                    html = f.read()
            else:
                with open('{}/{}YANGPageCompilation.html'.format(private_dir, name)) as f:
                    html = f.read()
            ths = html.split('<TH>')
            results = []
            for th in ths:
                res = th.split('</TH>')[0]
                if 'Compilation Result' in res:
                    results.append(res)
            self.headers[name] = results
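The header extraction at the end works by splitting the HTML on '<TH>' and keeping everything before the following '</TH>'. A quick sketch with a made-up HTML fragment:

# Illustrative only -- the HTML fragment is made up.
html = '<TR><TH>Module</TH><TH>Compilation Result (pyang)</TH></TR>'
results = []
for th in html.split('<TH>'):
    res = th.split('</TH>')[0]
    if 'Compilation Result' in res:
        results.append(res)
print(results)  # ['Compilation Result (pyang)']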
Example 3
    def __init__(self, config_path='/etc/yangcatalog/yangcatalog.conf'):
        """Setup Webex teams rooms and smtp

            Arguments:
                :param config_path: (str) path to a yangcatalog.conf file
        """
        def list_matching_rooms(a, title_match):
            return [r for r in a.rooms.list() if title_match in r.title]

        config = ConfigParser.ConfigParser()
        config._interpolation = ConfigParser.ExtendedInterpolation()
        config.read(config_path)
        log_directory = config.get('Directory-Section', 'logs')
        self.LOGGER = log.get_logger(__name__, log_directory + '/yang.log')
        self.LOGGER.info('Initialising Message')
        token = config.get('Message-Section', 'access-token')
        self.__api = CiscoSparkAPI(access_token=token)
        rooms = list_matching_rooms(self.__api, 'YANG Catalog admin')
        self._temp_dir = config.get('Directory-Section', 'temp')

        if len(rooms) == 0:
            self.LOGGER.error('Need at least one room')
            sys.exit(1)
        if len(rooms) != 1:
            self.LOGGER.error('Too many rooms! Refine the name:')
            for r in rooms:
                self.LOGGER.info('{}'.format(r.title))
            sys.exit(1)

        # Ok, we should have just one room if we get here
        self.__room = rooms[0]
        self.__smtp = smtplib.SMTP('localhost')
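A hedged sketch of how these handles might be used later from another method of the same class; the messages.create call and sendmail arguments follow the usual ciscosparkapi/smtplib shapes and are assumptions, not taken from the snippet:

# Sketch only -- addresses and message text are illustrative.
def send_admin_note(self, text):
    self.__api.messages.create(roomId=self.__room.id, text=text)
    self.__smtp.sendmail('no-reply@yangcatalog.org',
                         ['admin@yangcatalog.org'],
                         'Subject: YANG Catalog note\n\n' + text)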
Example 4
    def __init__(self, log_directory, file_name, yangcatalog_api_prefix):
        global LOGGER
        LOGGER = log.get_logger(__name__,
                                log_directory + '/parseAndPopulate.log')
        self.file_name = file_name
        self.name_revision_organization = set()
        self.yang_modules = {}
        self.yangcatalog_api_prefix = yangcatalog_api_prefix
Example 5
    def __init__(self):
        config = create_config()
        self.__confd_ip = config.get('Web-Section', 'confd-ip')
        self.__confd_port = config.get('Web-Section', 'confd-port')
        self.__confd_protocol = config.get('General-Section', 'protocol-confd')
        self.credentials = config.get('Secrets-Section', 'confd-credentials').strip('"').split(' ')
        self.log_directory = config.get('Directory-Section', 'logs')

        self.LOGGER = log.get_logger('confdService', '{}/confdService.log'.format(self.log_directory))
        self.confd_prefix = '{}://{}:{}'.format(self.__confd_protocol, self.__confd_ip, self.__confd_port)
Example 6
    def __init__(self, db: t.Optional[t.Union[int, str]] = None):
        config = create_config()
        self._redis_host = config.get('DB-Section', 'redis-host')
        self._redis_port = config.get('DB-Section', 'redis-port')
        if db is None:
            db = config.get('DB-Section', 'redis-users-db', fallback=2)
        self.redis = Redis(host=self._redis_host, port=self._redis_port,
                           db=db)  # pyright: ignore

        self.log_directory = config.get('Directory-Section', 'logs')
        self.LOGGER = log.get_logger(
            'redisUsersConnection',
            '{}/redisUsersConnection.log'.format(self.log_directory))
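An instantiation sketch, assuming the constructor belongs to a class named RedisUsersConnection (as the logger name suggests): with no argument the users db number comes from the config (falling back to 2), while a test could pass an explicit db.

# Sketch -- db=15 is an arbitrary test value.
users = RedisUsersConnection()            # db resolved from config, fallback 2
test_users = RedisUsersConnection(db=15)  # explicit db for testing
test_users.redis.ping()                   # the plain redis-py client is exposed as .redis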
Example 7
    def __init__(self, log_directory: str, file_name: str,
                 yangcatalog_api_prefix: str):
        """
        Arguments:
            :param log_directory:           (str) directory where the log file is saved
            :param file_name:               (str) name of the file to which the modules are dumped
            :param yangcatalog_api_prefix:  (str) yangcatalog api prefix used for making requests
        """
        global LOGGER
        LOGGER = log.get_logger(
            __name__, '{}/parseAndPopulate.log'.format(log_directory))
        self.file_name = file_name
        self.yang_modules: t.Dict[str, Module] = {}
        self.yangcatalog_api_prefix = yangcatalog_api_prefix
Example 8
    def __init__(self, modules_db: t.Optional[t.Union[int, str]] = None,
                 vendors_db: t.Optional[t.Union[int, str]] = None):
        config = create_config()
        self.log_directory = config.get('Directory-Section', 'logs')
        self._redis_host = config.get('DB-Section', 'redis-host')
        self._redis_port = config.get('DB-Section', 'redis-port')
        if modules_db is None:
            modules_db = config.get('DB-Section', 'redis-modules-db', fallback=1)
        if vendors_db is None:
            vendors_db = config.get('DB-Section', 'redis-vendors-db', fallback=4)
        self.modulesDB = Redis(host=self._redis_host, port=self._redis_port, db=modules_db) # pyright: ignore
        self.vendorsDB = Redis(host=self._redis_host, port=self._redis_port, db=vendors_db) # pyright: ignore

        self.LOGGER = log.get_logger('redisModules', os.path.join(self.log_directory, 'redisModulesConnection.log'))
Example 9
    def __init__(self, directory: str, dumper: Dumper, file_hasher: FileHasher,
                 api: bool, dir_paths: DirPaths):
        """
        Arguments:
            :param directory            (str) the directory containing the files
            :param dumper               (Dumper) Dumper object
            :param file_hasher          (FileHasher) FileHasher object
            :param api                  (bool) whether the request came from API or not
            :param dir_paths            (DirPaths) paths to various needed directories according to configuration
        """

        global LOGGER
        LOGGER = log.get_logger(
            'capability', '{}/parseAndPopulate.log'.format(dir_paths['log']))
        LOGGER.debug('Running {} constructor'.format(self.__class__.__name__))
        self.logger = log.get_logger(
            'repoutil', '{}/parseAndPopulate.log'.format(dir_paths['log']))
        self.dir_paths = dir_paths
        self.dumper = dumper
        self.api = api
        self.file_hasher = file_hasher
        self.directory = directory
        self.parsed_jsons = LoadFiles(dir_paths['private'], dir_paths['log'])
Example 10
def main(scriptConf=None):
    start_time = int(time.time())
    if scriptConf is None:
        scriptConf = ScriptConfig()
    log_directory = scriptConf.log_directory
    LOGGER = log.get_logger('reviseTreeType',
                            '{}/parseAndPopulate.log'.format(log_directory))
    LOGGER.info('Starting Cron job for reviseTreeType')
    api_protocol = scriptConf.api_protocol
    ip = scriptConf.ip
    api_port = scriptConf.api_port
    is_uwsgi = scriptConf.is_uwsgi
    separator = ':'
    suffix = api_port
    if is_uwsgi == 'True':
        separator = '/'
        suffix = 'api'
    yangcatalog_api_prefix = '{}://{}{}{}/'.format(api_protocol, ip, separator,
                                                   suffix)
    credentials = scriptConf.credentials
    save_file_dir = scriptConf.save_file_dir
    direc = '/var/yang/tmp'
    yang_models = scriptConf.yang_models
    temp_dir = scriptConf.temp_dir
    json_ytree = scriptConf.json_ytree
    complicatedAlgorithms = ModulesComplicatedAlgorithms(
        log_directory, yangcatalog_api_prefix, credentials, save_file_dir,
        direc, {}, yang_models, temp_dir, json_ytree)
    response = requests.get('{}search/modules'.format(yangcatalog_api_prefix))
    if response.status_code != 200:
        LOGGER.error('Failed to fetch list of modules')
        job_log(start_time,
                temp_dir,
                os.path.basename(__file__),
                error=response.text,
                status='Fail')
        return
    modules_revise = []
    modules = response.json()['module']
    for module in modules:
        if module.get('tree-type') == 'nmda-compatible':
            if not complicatedAlgorithms.check_if_latest_revision(module):
                modules_revise.append(module)
    LOGGER.info('Resolving tree-types for {} modules'.format(
        len(modules_revise)))
    complicatedAlgorithms.resolve_tree_type({'module': modules_revise})
    complicatedAlgorithms.populate()
    LOGGER.info('Job finished successfully')
    job_log(start_time, temp_dir, os.path.basename(__file__), status='Success')
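The separator/suffix logic above (repeated in several later examples) produces one of two URL shapes. A worked example with illustrative values:

# Illustrative values only.
api_protocol, ip, api_port = 'https', 'yangcatalog.org', 8443
# Behind uwsgi the API lives under a path prefix:
print('{}://{}{}{}/'.format(api_protocol, ip, '/', 'api'))      # https://yangcatalog.org/api/
# Otherwise it is addressed by port:
print('{}://{}{}{}/'.format(api_protocol, ip, ':', api_port))   # https://yangcatalog.org:8443/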
Example 11
    def __init__(self, private_dir: str, log_directory: str):
        """
        Preset LoadFiles class to load all .json files from private directory.
        Filenames of json files are stored in json_links file.

        :param private_dir:     (str) path to the directory with private HTML result files
        :param log_directory:   (str) directory where the log file is saved
        """
        LOGGER = log.get_logger(__name__, '{}/parseAndPopulate.log'.format(log_directory))
        LOGGER.debug('Loading compilation statuses and results')
        excluded_names = ['private', 'IETFCiscoAuthorsYANGPageCompilation']

        self.names = self.load_names(private_dir, LOGGER)
        self.names = [name for name in self.names if name not in excluded_names]
        self.status = {}
        self.headers = {}

        for name in self.names:
            try:
                with open('{}/{}.json'.format(private_dir, name), 'r') as f:
                    self.status[name] = json.load(f)
            except FileNotFoundError:
                self.status[name] = {}
                LOGGER.exception('{}/{}.json file was not found'.format(private_dir, name))
            if name == 'IETFYANGRFC':
                try:
                    with open('{}/{}.html'.format(private_dir, name), 'r') as f:
                        html = f.read()
                except FileNotFoundError:
                    html = ''
                    LOGGER.exception('{}/{}.html file was not found'.format(private_dir, name))
            else:
                try:
                    with open('{}/{}YANGPageCompilation.html'.format(private_dir, name), 'r') as f:
                        html = f.read()
                except FileNotFoundError:
                    html = ''
                    LOGGER.exception('{}/{}YANGPageCompilation.html file was not found'.format(private_dir, name))
            ths = html.split('<TH>')
            results = []
            for th in ths:
                result = th.split('</TH>')[0]
                if 'Compilation Result' in result:
                    results.append(result)
            self.headers[name] = results
        LOGGER.debug('Compilation statuses and results loaded successfully')
Example 12
    def load_config(self) -> StatusMessage:
        config = create_config(self._config_path)
        self._log_directory = config.get('Directory-Section', 'logs')
        self.LOGGER = log.get_logger(
            'receiver', os.path.join(self._log_directory, 'receiver.log'))
        self.LOGGER.info('Loading config')
        logging.getLogger('pika').setLevel(logging.INFO)
        self._api_ip = config.get('Web-Section', 'ip')
        self._api_port = int(config.get('Web-Section', 'api-port'))
        self._api_protocol = config.get('General-Section', 'protocol-api')
        self._notify_indexing = config.get('General-Section', 'notify-index')
        self._save_file_dir = config.get('Directory-Section', 'save-file-dir')
        self._yang_models = config.get('Directory-Section', 'yang-models-dir')
        self._is_uwsgi = config.get('General-Section', 'uwsgi')
        self._rabbitmq_host = config.get('RabbitMQ-Section',
                                         'host',
                                         fallback='127.0.0.1')
        self._rabbitmq_port = int(
            config.get('RabbitMQ-Section', 'port', fallback='5672'))
        self._changes_cache_path = config.get('Directory-Section',
                                              'changes-cache')
        self._delete_cache_path = config.get('Directory-Section',
                                             'delete-cache')
        self._lock_file = config.get('Directory-Section', 'lock')
        rabbitmq_username = config.get('RabbitMQ-Section',
                                       'username',
                                       fallback='guest')
        rabbitmq_password = config.get('Secrets-Section',
                                       'rabbitMq-password',
                                       fallback='guest')
        self.temp_dir = config.get('Directory-Section', 'temp')
        self.json_ytree = config.get('Directory-Section', 'json-ytree')

        self._notify_indexing = self._notify_indexing == 'True'
        separator = ':'
        suffix = self._api_port
        if self._is_uwsgi == 'True':
            separator = '/'
            suffix = 'api'
        self._yangcatalog_api_prefix = '{}://{}{}{}/'.format(
            self._api_protocol, self._api_ip, separator, suffix)
        self._rabbitmq_credentials = pika.PlainCredentials(
            username=rabbitmq_username, password=rabbitmq_password)
        self.LOGGER.info('Config loaded successfully')
        return StatusMessage.SUCCESS
Example 13
    def __init__(self, file_name: str, cache_dir: str, is_active: bool,
                 log_directory: str):
        """
        Arguments:
            :param file_name        (str) name of the file to which the modules hashes are dumped
            :param cache_dir        (str) directory where json file with hashes is saved
            :param is_active        (bool) whether FileHasher is active or not (use hashes to skip module parsing or not)
            :param log_directory    (str) directory where the log file is saved
        """
        self.file_name = file_name
        self.cache_dir = cache_dir
        self.is_active = is_active
        self.LOGGER = log.get_logger(
            __name__, '{}/parseAndPopulate.log'.format(log_directory))
        self.lock = threading.Lock()
        self.validators_versions_bytes = self.get_versions()
        self.files_hashes = self.load_hashed_files_list()
        self.updated_hashes = {}
Example 14
    def __init__(self, log_directory, yangcatalog_api_prefix, credentials,
                 protocol, ip, port, save_file_dir, direc, all_modules,
                 yang_models_dir, temp_dir):
        global LOGGER
        LOGGER = log.get_logger('modulesComplicatedAlgorithms',
                                log_directory + '/parseAndPopulate.log')
        if all_modules is None:
            with open(direc + '/prepare.json', 'r') as f:
                self.__all_modules = json.load(f)
        else:
            self.__all_modules = all_modules
        self.__yangcatalog_api_prefix = yangcatalog_api_prefix
        self.__new_modules = []
        self.__credentials = credentials
        self.__save_file_dir = save_file_dir
        self.__path = None
        self.__prefix = '{}://{}:{}'.format(protocol, ip, port)
        self.__yang_models = yang_models_dir
        self.temp_dir = temp_dir
Example 15
    def __init__(self, config_path=os.environ['YANGCATALOG_CONFIG_PATH']):
        """Setup Webex teams rooms and smtp

            Arguments:
                :param config_path: (str) path to a yangcatalog.conf file
        """
        def list_matching_rooms(a, title_match):
            return [r for r in a.rooms.list() if title_match in r.title]

        config = create_config(config_path)
        log_directory = config.get('Directory-Section', 'logs')
        token = config.get('Secrets-Section', 'webex-access-token')
        self.__email_from = config.get('Message-Section', 'email-from')
        self.__is_production = config.get('General-Section', 'is-prod')
        self.__is_production = self.__is_production == 'True'
        self.__email_to = config.get('Message-Section', 'email-to').split()
        self.__developers_email = config.get('Message-Section',
                                             'developers-email').split()
        self._temp_dir = config.get('Directory-Section', 'temp')
        self.__me = config.get('Web-Section', 'my-uri')

        self.__api = CiscoSparkAPI(access_token=token)
        rooms = list_matching_rooms(self.__api, 'YANG Catalog admin')
        self.__me = self.__me.split('/')[-1]
        self._message_log_file = os.path.join(self._temp_dir,
                                              'message-log.txt')
        self.LOGGER = log.get_logger(__name__,
                                     os.path.join(log_directory, 'yang.log'))
        self.LOGGER.info('Initialising Message Factory')

        if len(rooms) == 0:
            self.LOGGER.error('Need at least one room')
            sys.exit(1)
        if len(rooms) != 1:
            self.LOGGER.error('Too many rooms! Refine the name:')
            for r in rooms:
                self.LOGGER.info('{}'.format(r.title))
            sys.exit(1)

        # Ok, we should have just one room if we get here
        self.__room = rooms[0]
        self.__smtp = smtplib.SMTP('localhost')
Example 16
    def __init__(self, log_directory: str, yangcatalog_api_prefix: str,
                 credentials: list, save_file_dir: str, direc: str,
                 all_modules: t.Optional[dict], yang_models_dir: str,
                 temp_dir: str, json_ytree: str):
        global LOGGER
        LOGGER = log.get_logger(
            'modulesComplicatedAlgorithms',
            '{}/parseAndPopulate.log'.format(log_directory))
        if all_modules is None:
            with open('{}/prepare.json'.format(direc), 'r') as f:
                self._all_modules = json.load(f)
        else:
            self._all_modules = all_modules
        self._yangcatalog_api_prefix = yangcatalog_api_prefix
        self.new_modules = defaultdict(dict)
        self._credentials = credentials
        self._save_file_dir = save_file_dir
        self._yang_models = yang_models_dir
        self.temp_dir = temp_dir
        self.json_ytree = json_ytree
        self._trees = defaultdict(dict)
        self._unavailable_modules = []
        LOGGER.info('get all existing modules')
        response = requests.get('{}search/modules'.format(
            self._yangcatalog_api_prefix),
                                headers=json_headers)
        existing_modules = response.json().get('module', [])
        self._existing_modules_dict = defaultdict(dict)
        self._latest_revisions = {}
        for module in existing_modules:
            # Store latest revision of each module - used in resolving tree-type
            latest_revision = self._latest_revisions.get(module['name'])
            if latest_revision is None:
                self._latest_revisions[module['name']] = module['revision']
            else:
                self._latest_revisions[module['name']] = max(
                    module['revision'], latest_revision)

            self._existing_modules_dict[module['name']][
                module['revision']] = module
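The max() call works for picking the latest revision because revisions are 'YYYY-MM-DD' strings, so lexicographic order matches chronological order:

# 'YYYY-MM-DD' strings compare lexicographically the same as chronologically.
assert max('2018-04-03', '2021-01-14') == '2021-01-14'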
Example 17
    def __init__(self, name: str, path: str, jsons: LoadFiles,
                 dir_paths: DirPaths, git_commit_hash: str, yang_modules: dict,
                 schema_base: str, aditional_info: t.Optional[t.Dict[str, str]],
                 submodule_name: t.Optional[str]):
        """
        Initialize and parse everything out of a module.
        Arguments:
            :param name:            (str) name of the module (not parsed out of the module)
            :param path:            (str) path to yang file being parsed
            :param jsons:           (obj) LoadFiles class containing all the json
                                    and html files with parsed results
            :param dir_paths:       (dict) paths to various needed directories according to configuration
            :param git_commit_hash: (str) name of the git commit hash where we can find the module
            :param yang_modules:    (dict) yang modules we've already parsed
            :param schema_base:     (str) url to a raw module on github up to and not including the
                                    path of the file in the repo
            :param aditional_info:  (dict) some additional information about the module provided by the client
            :param submodule_name:  (str) name of the git submodule the yang module belongs to
        """
        global LOGGER
        LOGGER = log.get_logger(
            'modules', '{}/parseAndPopulate.log'.format(dir_paths['log']))
        config = create_config()
        self._web_uri = config.get('Web-Section',
                                   'my-uri',
                                   fallback='https://yangcatalog.org')
        self.html_result_dir = dir_paths['result']
        self._jsons = jsons
        self._path = path
        self.yang_models = dir_paths['yang_models']

        self._parsed_yang = yangParser.parse(self._path)
        self.implementations: t.List[Implementation] = []
        self._parse_all(name, git_commit_hash, yang_modules, schema_base,
                        dir_paths['save'], aditional_info, submodule_name)
        del self._jsons
Example 18
        type=str,
        help='Set ip address where the api is started. Default -> yangcatalog.org')
    parser.add_argument('--config-path',
                        type=str,
                        default='/etc/yangcatalog/yangcatalog.conf',
                        help='Set path to config file')

    args = parser.parse_args()

    config_path = args.config_path
    config = ConfigParser.ConfigParser()
    config._interpolation = ConfigParser.ExtendedInterpolation()
    config.read(config_path)
    log_directory = config.get('Directory-Section', 'logs')
    LOGGER = log.get_logger('runCapabilities',
                            log_directory + '/parseAndPopulate.log')
    is_uwsgi = config.get('General-Section', 'uwsgi')
    private_dir = config.get('Web-Section', 'private_directory')
    yang_models = config.get('Directory-Section', 'yang_models_dir')

    temp_dir = config.get('Directory-Section', 'temp')

    separator = ':'
    suffix = args.api_port
    if is_uwsgi == 'True':
        separator = '/'
        suffix = 'api'
    yangcatalog_api_prefix = '{}://{}{}{}/'.format(args.api_protocol,
                                                   args.api_ip, separator,
                                                   suffix)
    start = time.time()
Example 19
    protocol = config.get('General-Section', 'protocol-api')
    notify = config.get('DraftPullLocal-Section', 'notify-index')
    save_file_dir = config.get('Directory-Section', 'save-file-dir')
    private_credentials = config.get('General-Section',
                                     'private-secret').split(' ')
    token = config.get('DraftPull-Section', 'yang-catalog-token')
    username = config.get('DraftPull-Section', 'username')
    config_name = config.get('General-Section', 'repo-config-name')
    config_email = config.get('General-Section', 'repo-config-email')
    log_directory = config.get('Directory-Section', 'logs')
    ietf_draft_url = config.get('General-Section', 'ietf-draft-private-url')
    ietf_rfc_url = config.get('General-Section', 'ietf-RFC-tar-private-url')
    yang_models_url_suffix = config.get('General-Section',
                                        'yang-models-repo-url_suffix')
    temp_dir = config.get('Directory-Section', 'temp')
    LOGGER = log.get_logger('draftPullLocal',
                            log_directory + '/jobs/draft-pull-local.log')
    LOGGER.info('Starting cron job IETF pull request local')

    github_credentials = ''
    if len(username) > 0:
        github_credentials = username + ':' + token + '@'

    # Fork and clone the YangModels/yang repository
    LOGGER.info('Cloning repository')
    response = requests.post('https://' + github_credentials +
                             yang_models_url_suffix)
    repo = repoutil.RepoUtil('https://' + token + '@github.com/' + username +
                             '/yang.git')

    repo.clone(config_name, config_email)
    LOGGER.info('Cloning repo to local directory {}'.format(repo.localdir))
Example 20
    def __init__(self, yang_models_dir, log_directory, path, html_result_dir, jsons, temp_dir,
                 is_vendor=False, is_yang_lib=False, data=None,
                 is_vendor_imp_inc=False, run_integrity=False):
        """
        Preset Modules class to parse yang module and save data to it.
        :param yang_models_dir:     (str) directory with all yang modules from
                                    github https://github.com/YangModels/yang
        :param log_directory:       (str) directory where the log file is saved
        :param path:                (str) path to yang file being parsed
        :param html_result_dir:     (str) path to directory with html result
                                    files
        :param jsons:               (obj) LoadFiles class containing all the json
                                    and html files with parsed results
        :param temp_dir:            (str) path to temporary directory
        :param is_vendor:           (boolean) whether we are parsing vendor files (cisco, huawei, ...)
                                    or sdo files (ietf, ieee, ...)
        :param is_yang_lib:         (boolean) whether we are parsing a file from a yang_lib
                                    capability file
        :param data:                (dict) data from the yang_lib capability file with additional
                                    information
        :param is_vendor_imp_inc:   (boolean) Obsolete
        :param run_integrity:       (boolean) whether we are running integrity as well. If true,
                                    part of the parsed data is not needed and is therefore not
                                    parsed
        """
        global LOGGER
        LOGGER = log.get_logger('modules', log_directory + '/parseAndPopulate.log')
        self.run_integrity = run_integrity
        self.__temp_dir = temp_dir
        self.__missing_submodules = []
        self.__missing_modules = []
        self.__missing_namespace = None
        self.__missing_revision = None
        self.is_yang_lib = is_yang_lib
        self.html_result_dir = html_result_dir
        self.jsons = jsons
        self.__is_vendor = is_vendor
        self.revision = '*'
        self.__path = path
        self.features = []
        self.deviations = []
        self.yang_models = yang_models_dir

        if is_vendor:
            if is_yang_lib:
                self.deviations = data['deviations']
                self.features = data['features']
                self.revision = data['revision']
                if self.revision is None:
                    self.revision = '*'
                self.__path = self.__find_file(data['name'], self.revision)
            else:
                self.features = self.__resolve_deviations_and_features(
                    'features=', data)
                self.deviations = self.__resolve_deviations_and_features(
                    'deviations=', data)

                if 'revision' in data:
                    revision_and_more = data.split('revision=')[1]
                    revision = revision_and_more.split('&')[0]
                    self.revision = revision

                self.__path = self.__find_file(data.split('&')[0], self.revision)
        else:
            self.__path = path

        if is_vendor_imp_inc:
            self.__is_vendor = True
        if self.__path:
            self.name = None
            self.organization = None
            self.ietf_wg = None
            self.namespace = None
            self.schema = None
            self.generated_from = None
            self.maturity_level = None
            self.document_name = None
            self.author_email = None
            self.reference = None
            self.tree = None
            self.expired = None
            self.expiration_date = None
            self.module_classification = None
            self.compilation_status = None
            self.compilation_result = {}
            self.prefix = None
            self.yang_version = None
            self.description = None
            self.contact = None
            self.belongs_to = None
            self.submodule = []
            self.dependencies = []
            self.module_type = None
            self.tree_type = None
            self.semver = None
            self.derived_semver = None
            self.implementation = []
            self.imports = []
            self.json_submodules = json.dumps([])
            self.__parsed_yang = yangParser.parse(os.path.abspath(self.__path))
            if self.__parsed_yang is None:
                raise ParseException(path)
        else:
            raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), path.split('&')[0])
Example 21
def main(scriptConf=None):
    start_time = int(time.time())
    if scriptConf is None:
        scriptConf = ScriptConfig()
    revision_updated_modules = 0
    datatracker_failures = []
    args = scriptConf.args
    log_directory = scriptConf.log_directory
    temp_dir = scriptConf.temp_dir
    is_uwsgi = scriptConf.is_uwsgi
    LOGGER = log.get_logger(
        'resolveExpiration',
        '{}/jobs/resolveExpiration.log'.format(log_directory))

    separator = ':'
    suffix = args.api_port
    if is_uwsgi == 'True':
        separator = '/'
        suffix = 'api'
    yangcatalog_api_prefix = '{}://{}{}{}/'.format(args.api_protocol,
                                                   args.api_ip, separator,
                                                   suffix)
    redisConnection = RedisConnection()
    LOGGER.info('Starting Cron job resolve modules expiration')
    try:
        LOGGER.info('Requesting all the modules from {}'.format(
            yangcatalog_api_prefix))
        updated = False

        response = requests.get(
            '{}search/modules'.format(yangcatalog_api_prefix))
        if response.status_code < 200 or response.status_code > 299:
            LOGGER.error('Request on path {} failed with {}'.format(
                yangcatalog_api_prefix, response.text))
        else:
            LOGGER.debug('{} modules fetched from {} successfully'.format(
                len(response.json().get('module', [])),
                yangcatalog_api_prefix))
        modules = response.json().get('module', [])
        i = 1
        for module in modules:
            LOGGER.debug('{} out of {}'.format(i, len(modules)))
            i += 1
            ret = resolve_expiration(module, LOGGER, datatracker_failures,
                                     redisConnection)
            if ret:
                revision_updated_modules += 1
            if not updated:
                updated = ret
        if updated:
            redisConnection.populate_modules(modules)
            url = ('{}load-cache'.format(yangcatalog_api_prefix))
            response = requests.post(url,
                                     None,
                                     auth=(args.credentials[0],
                                           args.credentials[1]))
            LOGGER.info('Cache loaded with status {}'.format(
                response.status_code))
    except Exception as e:
        LOGGER.exception(
            'Exception found while running resolveExpiration script')
        job_log(start_time,
                temp_dir,
                error=str(e),
                status='Fail',
                filename=os.path.basename(__file__))
        raise e
    if len(datatracker_failures) > 0:
        LOGGER.debug(
            'The following references failed to be retrieved from the datatracker:\n {}'.
            format('\n'.join(datatracker_failures)))
    messages = [{
        'label': 'Modules with changed revision',
        'message': revision_updated_modules
    }, {
        'label': 'Datatracker modules failures',
        'message': len(datatracker_failures)
    }]
    job_log(start_time,
            temp_dir,
            messages=messages,
            status='Success',
            filename=os.path.basename(__file__))
    LOGGER.info('Job finished successfully')
Example 22

if __name__ == '__main__':
    parser = argparse.ArgumentParser()

    parser.add_argument('--config-path', type=str, default='/etc/yangcatalog/yangcatalog.conf',
                        help='Set path to config file')
    args = parser.parse_args()
    config_path = args.config_path
    config = ConfigParser.ConfigParser()
    config._interpolation = ConfigParser.ExtendedInterpolation()
    config.read(config_path)
    log_directory = config.get('Directory-Section', 'logs')
    temp_dir = config.get('Directory-Section', 'temp')
    ys_users = config.get('Directory-Section', 'ys_users')
    LOGGER = lo.get_logger('removeUnused', log_directory + '/jobs/removeUnused.log')
    LOGGER.info('Removing unused files')
    current_time = time.time()
    cutoff = current_time - 86400
    for subdir in next(os.walk(temp_dir))[1]:
        if represents_int(subdir):
            creation_time = os.path.getctime('{}/{}'.format(temp_dir, subdir))
            if creation_time < cutoff:
                shutil.rmtree('{}/{}'.format(temp_dir, subdir))

    for user_dir in os.listdir(ys_users):
        abs_path = os.path.abspath('{}/{}'.format(ys_users, user_dir))
        if not abs_path.endswith('yangcat') and not abs_path.endswith('yang'):
            try:
                shutil.rmtree(abs_path)
Example 23
from redisConnections.redisConnection import RedisConnection

import utility.log as log
from utility import confdService
from utility.create_config import create_config
from utility.util import job_log

if __name__ == '__main__':
    start_time = int(time.time())
    config = create_config()
    credentials = config.get('Secrets-Section',
                             'confd-credentials').strip('"').split(' ')
    logs_dir = config.get('Directory-Section', 'logs')
    temp_dir = config.get('Directory-Section', 'temp')

    LOGGER = log.get_logger('healthcheck',
                            os.path.join(logs_dir, 'healthcheck.log'))
    messages = []
    letters = string.ascii_letters
    suffix = ''.join(random.choice(letters) for i in range(6))
    check_module_name = 'confd-full-check-{}'.format(suffix)
    confdService = confdService.ConfdService()
    confdService.delete_modules()
    confdService.delete_vendors()

    LOGGER.info('Running confdFullCheck')
    try:
        redisConnection = RedisConnection()
        yang_catalog_module = redisConnection.get_module(
            'yang-catalog@2018-04-03/ietf')
        module = json.loads(yang_catalog_module)
        error = confdService.patch_modules([module])
Example 24
    config.read(config_path)
    api_ip = config.get('DraftPullLocal-Section', 'api-ip')
    api_port = int(config.get('General-Section', 'api-port'))
    credentials = config.get('General-Section', 'credentials').split(' ')
    token = config.get('DraftPull-Section', 'yang-catalog-token')
    username = config.get('DraftPull-Section', 'username')
    api_protocol = config.get('General-Section', 'protocol-api')
    is_uwsgi = config.get('General-Section', 'uwsgi')
    config_name = config.get('General-Section', 'repo-config-name')
    config_email = config.get('General-Section', 'repo-config-email')
    log_directory = config.get('Directory-Section', 'logs')
    openconfig_models_forked_url = config.get(
        'General-Section', 'openconfig-models-forked-repo-url')
    openconfig_models_url_suffix = config.get(
        'General-Section', 'openconfig-models-repo-url_suffix')
    LOGGER = log.get_logger('openconfigPullLocal',
                            log_directory + '/jobs/openconfig-pull.log')
    LOGGER.info('Starting Cron job openconfig pull request local')

    separator = ':'
    suffix = api_port
    if is_uwsgi == 'True':
        separator = '/'
        suffix = 'api'
    yangcatalog_api_prefix = '{}://{}{}{}/'.format(api_protocol, api_ip,
                                                   separator, suffix)
    github_credentials = ''
    if len(username) > 0:
        github_credentials = username + ':' + token + '@'

    LOGGER.info('Forking repository')
    response = requests.post('https://' + github_credentials +
Example 25
def init_logger(state):
    bp.LOGGER = log.get_logger('yang-search', '{}/yang.log'.format(state.app.config.d_logs))
Example 26
    global rabbitmq_virtual_host
    rabbitmq_virtual_host = config.get('RabbitMQ-Section',
                                       'virtual_host',
                                       fallback='/')
    global rabbitmq_username
    rabbitmq_username = config.get('RabbitMQ-Section',
                                   'username',
                                   fallback='guest')
    global rabbitmq_password
    rabbitmq_password = config.get('RabbitMQ-Section',
                                   'password',
                                   fallback='guest')

    log_directory = config.get('Directory-Section', 'logs')
    global LOGGER
    LOGGER = log.get_logger('receiver', log_directory + '/yang.log')
    global temp_dir
    temp_dir = config.get('Directory-Section', 'temp')
    LOGGER.info('Starting receiver')

    notify_indexing = notify_indexing == 'True'
    global yangcatalog_api_prefix
    separator = ':'
    suffix = api_port
    if is_uwsgi == 'True':
        separator = '/'
        suffix = 'api'
    yangcatalog_api_prefix = '{}://{}{}{}/'.format(api_protocol, api_ip,
Example 27
    def __init__(self, searched_term: str, case_sensitive: bool, searched_fields: list, type: str,
                 schema_types: list, logs_dir: str, es: Elasticsearch, latest_revision: bool,
                 redisConnection: RedisConnection, include_mibs: bool, yang_versions: list, needed_output_colums: list,
                 all_output_columns: list, sub_search: list) -> None:
        """
        Initialization of a search under the Elasticsearch engine. We need to prepare a query
        that will be used to search in Elasticsearch.

        :param searched_term:   (str) String that we are searching for
        :param case_sensitive:  (boolean) Whether we want the search to be case sensitive or not
        :param searched_fields: (list) Fields in which we are searching (can be [argument, description, module])
        :param type:            (str) Can only be regex or term
        :param schema_types     (list) Schema types in which we search (typedef, leaf, leaflist, container, etc.)
        :param logs_dir         (str) Directory with the log files
        :param es               (Elasticsearch) Elasticsearch engine
        :param latest_revision  (boolean) Whether we want to search only for the latest revision of found modules
        :param redisConnection  (RedisConnection) Redis connection to the modules db (db=1)
        :param include_mibs     (boolean) Whether we also want to search for MIBs among the searched modules
        :param yang_versions    (list) List of yang versions that we search for
        :param needed_output_colums (list) output columns that are going to be used within the response json
        :param all_output_columns   (list) all existing output columns, so we can compute which ones to remove
        :param sub_search       (list of dict) search for a specific part of the text in the output received from Elasticsearch
        """
        self.__response_size = 2000
        self.query = \
            {
                'query': {
                    'bool': {
                        'must': [{
                            'bool': {
                                'must': {
                                    'terms': {
                                        'statement': schema_types
                                    }
                                }
                            }
                        }, {
                            'bool': {
                                'should': []
                            }
                        }]
                    }
                },
                'aggs': {
                    'groupby': {
                        'terms': {
                            'field': 'module.keyword',
                            'size': self.__response_size
                        },
                        'aggs': {
                            'latest-revision': {
                                'max': {
                                    'field': 'revision'
                                }
                            }
                        }
                    }
                }
            }
        self.__case_sensitive = case_sensitive
        self.__searched_fields = searched_fields
        self.__searched_term = searched_term
        self.__type = type
        self.__es = es
        self.__redisConnection = redisConnection
        self.__latest_revision = latest_revision
        self.__include_mibs = include_mibs
        self.__yang_versions = yang_versions
        self.__sub_search = sub_search
        self.__current_scroll_id = None
        self.__latest_revisions = {}
        self.__output_columns = needed_output_colums
        self.__remove_columns = list(set(all_output_columns) - set(needed_output_colums))
        self.__row_hashes = []
        self.__missing_modules = []
        self.LOGGER = log.get_logger('yc-elasticsearch', '{}/yang.log'.format(logs_dir))
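A sketch of how the prepared query might be executed; the index name 'yindex' and the elasticsearch-py 7.x search call are assumptions, not taken from the snippet:

# Sketch -- assumes elasticsearch-py 7.x and a hypothetical index name.
from elasticsearch import Elasticsearch

es = Elasticsearch(['localhost:9200'])
response = es.search(index='yindex', body=query)  # query built as in __init__ above
# The 'groupby' aggregation yields one bucket per module, each carrying
# the 'latest-revision' max sub-aggregation:
for bucket in response['aggregations']['groupby']['buckets']:
    print(bucket['key'], bucket['latest-revision']['value'])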
Example 28
def main(scriptConf=None):
    start_time = int(time.time())
    if scriptConf is None:
        scriptConf = ScriptConfig()
    args = scriptConf.args

    config_path = args.config_path
    config = create_config(config_path)
    token = config.get('Secrets-Section', 'yang-catalog-token')
    username = config.get('General-Section', 'repository-username')
    commit_dir = config.get('Directory-Section', 'commit-dir')
    config_name = config.get('General-Section', 'repo-config-name')
    config_email = config.get('General-Section', 'repo-config-email')
    log_directory = config.get('Directory-Section', 'logs')
    temp_dir = config.get('Directory-Section', 'temp')
    exceptions = config.get('Directory-Section', 'exceptions')
    yang_models = config.get('Directory-Section', 'yang-models-dir')
    ietf_draft_url = config.get('Web-Section', 'ietf-draft-private-url')
    ietf_rfc_url = config.get('Web-Section', 'ietf-RFC-tar-private-url')
    is_production = config.get('General-Section', 'is-prod')
    is_production = is_production == 'True'
    LOGGER = log.get_logger('draftPull',
                            '{}/jobs/draft-pull.log'.format(log_directory))
    LOGGER.info('Starting Cron job IETF pull request')

    repo_name = 'yang'
    repourl = 'https://{}@github.com/{}/{}.git'.format(token, username,
                                                       repo_name)
    commit_author = {'name': config_name, 'email': config_email}

    draftPullUtility.update_forked_repository(yang_models, LOGGER)
    repo = draftPullUtility.clone_forked_repository(repourl, commit_author,
                                                    LOGGER)

    if not repo:
        error_message = 'Failed to clone repository {}/{}'.format(
            username, repo_name)
        job_log(start_time,
                temp_dir,
                error=error_message,
                status='Fail',
                filename=os.path.basename(__file__))
        sys.exit()

    try:
        # Get rfc.tgz file
        response = requests.get(ietf_rfc_url)
        tgz_path = '{}/rfc.tgz'.format(repo.local_dir)
        extract_to = '{}/standard/ietf/RFCtemp'.format(repo.local_dir)
        with open(tgz_path, 'wb') as zfile:
            zfile.write(response.content)
        tar_opened = draftPullUtility.extract_rfc_tgz(tgz_path, extract_to,
                                                      LOGGER)
        if tar_opened:
            diff_files = []
            new_files = []

            temp_rfc_yang_files = glob.glob(
                '{}/standard/ietf/RFCtemp/*.yang'.format(repo.local_dir))
            for temp_rfc_yang_file in temp_rfc_yang_files:
                file_name = os.path.basename(temp_rfc_yang_file)
                rfc_yang_file = temp_rfc_yang_file.replace('RFCtemp', 'RFC')

                if not os.path.exists(rfc_yang_file):
                    new_files.append(file_name)
                    continue

                same = filecmp.cmp(rfc_yang_file, temp_rfc_yang_file)
                if not same:
                    diff_files.append(file_name)

            shutil.rmtree('{}/standard/ietf/RFCtemp'.format(repo.local_dir))

            with open(exceptions, 'r') as exceptions_file:
                remove_from_new = exceptions_file.read().split('\n')
            new_files = [
                file_name for file_name in new_files
                if file_name not in remove_from_new
            ]

            if args.send_message:
                if new_files or diff_files:
                    LOGGER.info(
                        'new or modified RFC files found. Sending an E-mail')
                    mf = messageFactory.MessageFactory()
                    mf.send_new_rfc_message(new_files, diff_files)

        # Experimental draft modules
        try:
            os.makedirs('{}/experimental/ietf-extracted-YANG-modules/'.format(
                repo.local_dir))
        except OSError as e:
            # be happy if someone already created the path
            if e.errno != errno.EEXIST:
                raise
        experimental_path = '{}/experimental/ietf-extracted-YANG-modules'.format(
            repo.local_dir)

        LOGGER.info('Updating IETF drafts download links')
        draftPullUtility.get_draft_module_content(ietf_draft_url,
                                                  experimental_path, LOGGER)

        LOGGER.info('Checking module filenames without revision in {}'.format(
            experimental_path))
        draftPullUtility.check_name_no_revision_exist(experimental_path,
                                                      LOGGER)

        LOGGER.info(
            'Checking for early revision in {}'.format(experimental_path))
        draftPullUtility.check_early_revisions(experimental_path, LOGGER)

        messages = []
        try:
            # Add commit and push to the forked repository
            LOGGER.info('Adding all untracked files locally')
            untracked_files = repo.repo.untracked_files
            repo.add_untracked_remove_deleted()
            LOGGER.info('Committing all files locally')
            repo.commit_all(
                'Cronjob - every day pull of ietf draft yang files.')
            LOGGER.info('Pushing files to forked repository')
            commit_hash = repo.repo.head.commit
            LOGGER.info('Commit hash {}'.format(commit_hash))
            with open(commit_dir, 'w+') as f:
                f.write('{}\n'.format(commit_hash))
            if is_production:
                LOGGER.info(
                    'Pushing untracked and modified files to remote repository'
                )
                repo.push()
            else:
                LOGGER.info(
                    'DEV environment - not pushing changes into remote repository'
                )
                LOGGER.debug(
                    'List of all untracked and modified files:\n{}'.format(
                        '\n'.join(untracked_files)))
        except GitCommandError as e:
            message = 'Error while pushing procedure - git command error: \n {} \n git command out: \n {}'.format(
                e.stderr, e.stdout)
            if 'Your branch is up to date' in e.stdout:
                LOGGER.warning(message)
                messages = [{
                    'label': 'Pull request created',
                    'message': 'False - branch is up to date'
                }]
            else:
                LOGGER.exception(
                    'Error while pushing procedure - Git command error')
                raise e
        except Exception as e:
            LOGGER.exception('Error while pushing procedure {}'.format(
                sys.exc_info()[0]))
            raise type(e)('Error while pushing procedure')
    except Exception as e:
        LOGGER.exception('Exception found while running draftPull script')
        job_log(start_time,
                temp_dir,
                error=str(e),
                status='Fail',
                filename=os.path.basename(__file__))
        raise e

    if len(messages) == 0:
        messages = [{
            'label': 'Pull request created',
            'message': 'True - {}'.format(commit_hash)
        }  # pyright: ignore
                    ]
    job_log(start_time,
            temp_dir,
            messages=messages,
            status='Success',
            filename=os.path.basename(__file__))
    LOGGER.info('Job finished successfully')
Example 29
def main(scriptConf=None):
    start_time = int(time.time())
    if scriptConf is None:
        scriptConf = ScriptConfig()
    args = scriptConf.args

    config_path = args.config_path
    config = create_config(config_path)
    yang_models = config.get('Directory-Section', 'yang-models-dir')
    token = config.get('Secrets-Section', 'yang-catalog-token')
    username = config.get('General-Section', 'repository-username')
    commit_dir = config.get('Directory-Section', 'commit-dir')
    config_name = config.get('General-Section', 'repo-config-name')
    config_email = config.get('General-Section', 'repo-config-email')
    log_directory = config.get('Directory-Section', 'logs')
    temp_dir = config.get('Directory-Section', 'temp')
    is_production = config.get('General-Section', 'is-prod')
    is_production = is_production == 'True'
    LOGGER = log.get_logger('ianaPull',
                            '{}/jobs/iana-pull.log'.format(log_directory))
    LOGGER.info('Starting job to pull IANA-maintained modules')

    repo_name = 'yang'
    repourl = 'https://{}@github.com/{}/{}.git'.format(token, username,
                                                       repo_name)
    commit_author = {'name': config_name, 'email': config_email}

    draftPullUtility.update_forked_repository(yang_models, LOGGER)
    repo = draftPullUtility.clone_forked_repository(repourl, commit_author,
                                                    LOGGER)

    if not repo:
        error_message = 'Failed to clone repository {}/{}'.format(
            username, repo_name)
        job_log(start_time,
                temp_dir,
                error=error_message,
                status='Fail',
                filename=os.path.basename(__file__))
        sys.exit()

    try:
        iana_temp_path = os.path.join(temp_dir, 'iana')
        if os.path.exists(iana_temp_path):
            shutil.rmtree(iana_temp_path)
        # call rsync to sync with rsync.iana.org::assignments/yang-parameters/
        subprocess.call([
            'rsync', '-avzq', '--delete',
            'rsync.iana.org::assignments/yang-parameters/', iana_temp_path
        ])
        draftPullUtility.set_permissions(iana_temp_path)
        iana_standard_path = os.path.join(repo.local_dir, 'standard/iana')
        if not os.path.exists(iana_standard_path):
            os.makedirs(iana_standard_path)
        xml_path = os.path.join(iana_temp_path, 'yang-parameters.xml')
        copy2(xml_path,
              '{}/standard/iana/yang-parameters.xml'.format(repo.local_dir))

        # Parse yang-parameters.xml file
        root = ET.parse(xml_path).getroot()
        tag = root.tag
        namespace = tag.split('registry')[0]
        modules = root.iter('{}record'.format(namespace))

        for module in modules:
            data = module.attrib
            for attributes in module:
                prop = attributes.tag.split(namespace)[-1]
                assert attributes.text is not None
                data[prop] = attributes.text

            if data.get('iana') == 'Y' and data.get('file'):
                src = '{}/{}'.format(iana_temp_path, data.get('file'))
                dst = '{}/standard/iana/{}'.format(repo.local_dir,
                                                   data.get('file'))
                copy2(src, dst)

        LOGGER.info('Checking module filenames without revision in {}'.format(
            iana_standard_path))
        draftPullUtility.check_name_no_revision_exist(iana_standard_path,
                                                      LOGGER)

        LOGGER.info(
            'Checking for early revision in {}'.format(iana_standard_path))
        draftPullUtility.check_early_revisions(iana_standard_path, LOGGER)

        messages = []
        try:
            # Add commit and push to the forked repository
            LOGGER.info('Adding all untracked files locally')
            untracked_files = repo.repo.untracked_files
            repo.add_untracked_remove_deleted()
            LOGGER.info('Committing all files locally')
            repo.commit_all('Cronjob - every day pull of iana yang files')
            LOGGER.info('Pushing files to forked repository')
            commit_hash = repo.repo.head.commit
            LOGGER.info('Commit hash {}'.format(commit_hash))
            with open(commit_dir, 'w+') as f:
                f.write('{}\n'.format(commit_hash))
            if is_production:
                LOGGER.info(
                    'Pushing untracked and modified files to remote repository'
                )
                repo.push()
            else:
                LOGGER.info(
                    'DEV environment - not pushing changes into remote repository'
                )
                LOGGER.debug(
                    'List of all untracked and modified files:\n{}'.format(
                        '\n'.join(untracked_files)))
        except GitCommandError as e:
            message = 'Error while pushing procedure - git command error: \n {} \n git command out: \n {}'.format(
                e.stderr, e.stdout)
            if 'Your branch is up to date' in e.stdout:
                LOGGER.warning(message)
                messages = [{
                    'label': 'Pull request created',
                    'message': 'False - branch is up to date'
                }]
            else:
                LOGGER.exception(
                    'Error while pushing procedure - Git command error')
                raise e
        except Exception as e:
            LOGGER.exception('Error while pushing procedure {}'.format(
                sys.exc_info()[0]))
            raise type(e)('Error while pushing procedure')
    except Exception as e:
        LOGGER.exception('Exception found while running ianaPull script')
        job_log(start_time,
                temp_dir,
                error=str(e),
                status='Fail',
                filename=os.path.basename(__file__))
        raise e

    # Remove tmp folder
    LOGGER.info('Removing tmp directory')

    if len(messages) == 0:
        messages = [{
            'label': 'Pull request created',
            'message': 'True - {}'.format(commit_hash)
        }  # pyright: ignore
                    ]
    job_log(start_time,
            temp_dir,
            messages=messages,
            status='Success',
            filename=os.path.basename(__file__))
    LOGGER.info('Job finished successfully')
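A note on the namespace handling in the XML parsing above: ElementTree prefixes every tag with '{namespace-uri}', so splitting the root tag on 'registry' recovers that prefix for iterating the records. A small sketch (the URI is illustrative):

# Illustrative tag -- the real URI comes from yang-parameters.xml.
tag = '{http://www.iana.org/assignments}registry'
namespace = tag.split('registry')[0]    # '{http://www.iana.org/assignments}'
record_tag = '{}record'.format(namespace)
print(record_tag)                       # '{http://www.iana.org/assignments}record'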
Example 30
                              'protocol-api',
                              fallback='http')
    ip = config.get('Web-Section', 'ip', fallback='localhost')
    api_port = int(config.get('Web-Section', 'api-port', fallback=5000))
    is_uwsgi = config.get('General-Section', 'uwsgi', fallback='True')
    temp_dir = config.get('Directory-Section',
                          'temp',
                          fallback='/var/yang/tmp')
    log_directory = config.get('Directory-Section',
                               'logs',
                               fallback='/var/yang/logs')
    credentials = config.get('Secrets-Section',
                             'confd-credentials',
                             fallback='user password').strip('"').split()

    LOGGER = log.get_logger('sandbox', '{}/sandbox.log'.format(log_directory))
    confdService = ConfdService()

    separator = ':'
    suffix = api_port
    if is_uwsgi == 'True':
        separator = '/'
        suffix = 'api'

    yangcatalog_api_prefix = '{}://{}{}{}/'.format(api_protocol, ip, separator,
                                                   suffix)

    # GET all the existing modules of Yangcatalog
    url = '{}search/modules'.format(yangcatalog_api_prefix)
    response = requests.get(url, headers={'Accept': 'application/json'})
    all_existing_modules = response.json().get('module', [])