Example #1
    def populate(self):
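        # Flatten the two-level {name: {revision: module}} mapping into a flat list of modules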
        new_modules = [
            revision for name in self.new_modules.values()
            for revision in name.values()
        ]
        LOGGER.info(
            'Populating with new module data. Amount of new data: {}'.format(
                len(new_modules)))
        confdService = ConfdService()
        confdService.patch_modules(new_modules)

        redisConnection = RedisConnection()
        redisConnection.populate_modules(new_modules)

        if len(new_modules) > 0:
            url = '{}load-cache'.format(self._yangcatalog_api_prefix)
            response = requests.post(url,
                                     None,
                                     auth=(self._credentials[0],
                                           self._credentials[1]))
            if response.status_code != 201:
                LOGGER.warning(
                    'Could not send a load-cache request. Status code: {} Message: {}'
                    .format(response.status_code, response.text))
            else:
                LOGGER.info('load-cache responded with status code {}'.format(
                    response.status_code))
Example #2
def prepare_for_es_removal(yc_api_prefix: str, modules_to_delete: list, save_file_dir: str, LOGGER: logging.Logger):
    for mod in modules_to_delete:
        name, revision_organization = mod.split('@')
        revision = revision_organization.split('/')[0]
        path_to_delete_local = '{}/{}@{}.yang'.format(save_file_dir, name, revision)
        data = {'input': {'dependents': [{'name': name}]}}

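        # Ask the API which modules reference this one as a dependent,
        # then remove this module from their 'dependents' lists in Redis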
        response = requests.post('{}search-filter'.format(yc_api_prefix), json=data)
        if response.status_code == 200:
            data = response.json()
            modules = data['yang-catalog:modules']['module']
            redisConnection = RedisConnection()
            for module in modules:
                redis_key = '{}@{}/{}'.format(module['name'], module['revision'], module['organization'])
                redisConnection.delete_dependent(redis_key, name)
        if os.path.exists(path_to_delete_local):
            os.remove(path_to_delete_local)

    post_body = {}
    if modules_to_delete:
        post_body = {'modules-to-delete': modules_to_delete}
        LOGGER.debug('Modules to delete:\n{}'.format(json.dumps(post_body, indent=2)))
        mf = messageFactory.MessageFactory()
        mf.send_removed_yang_files(json.dumps(post_body, indent=4))

    return post_body
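
A minimal usage sketch for the helper above (hypothetical values; assumes the
imports used in this example and a reachable yangcatalog API):

import logging

LOGGER = logging.getLogger('es-removal-example')
# Keys use the '<name>@<revision>/<organization>' format the helper expects;
# the API prefix and save-file directory below are hypothetical
body = prepare_for_es_removal('https://yangcatalog.org/api/',
                              ['ietf-foo@2020-01-01/ietf'],
                              '/var/yang/all_modules',
                              LOGGER)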
Example #3
 def __init__(self, config_path: str):
     self._config_path = config_path
     self.load_config()
     self.channel = None
     self.connection = None
     self.confdService = ConfdService()
     self.redisConnection = RedisConnection()
     self.LOGGER.info('Receiver started')
Example #4
 def __init__(self, *args, **kwargs):
     super(TestRedisModulesConnectionClass, self).__init__(*args, **kwargs)
     config = create_config()
     self._redis_host = config.get('DB-Section', 'redis-host')
     self._redis_port = config.get('DB-Section', 'redis-port')
     self.resources_path = os.path.join(os.environ['BACKEND'],
                                        'redisConnections/tests/resources')
     self.redisConnection = RedisConnection(modules_db=6, vendors_db=9)
     self.modulesDB = Redis(host=self._redis_host,
                            port=self._redis_port,
                            db=6)  # pyright: ignore
     self.vendorsDB = Redis(host=self._redis_host,
                            port=self._redis_port,
                            db=9)  # pyright: ignore
Example #5
    def __init__(self, *args, **kwargs):
        super(TestReceiverBaseClass, self).__init__(*args, **kwargs)
        config = create_config()

        self.log_directory = config.get('Directory-Section', 'logs')
        self.temp_dir = config.get('Directory-Section', 'temp')
        self.credentials = config.get('Secrets-Section', 'confd-credentials').strip('"').split(' ')
        self.nonietf_dir = config.get('Directory-Section', 'non-ietf-directory')
        self.yang_models = config.get('Directory-Section', 'yang-models-dir')
        self._redis_host = config.get('DB-Section', 'redis-host')
        self._redis_port = config.get('DB-Section', 'redis-port')

        self.redisConnection = RedisConnection(modules_db=6, vendors_db=9)
        self.receiver = Receiver(os.environ['YANGCATALOG_CONFIG_PATH'])
        self.receiver.redisConnection = self.redisConnection
        self.receiver.confdService = MockConfdService() # pyright: ignore
        self.modulesDB = Redis(host=self._redis_host, port=self._redis_port, db=6) # pyright: ignore
        self.vendorsDB = Redis(host=self._redis_host, port=self._redis_port, db=9)  # pyright: ignore
        self.huawei_dir = '{}/vendor/huawei/network-router/8.20.0/ne5000e'.format(self.yang_models)
        self.direc = '{}/receiver_test'.format(self.temp_dir)
        self.resources_path = os.path.join(os.environ['BACKEND'], 'tests/resources')
        self.private_dir = os.path.join(self.resources_path, 'html/private')

        with open(os.path.join(self.resources_path, 'receiver_tests_data.json'), 'r') as f:
            self.test_data = json.load(f)
Example #6
def load_catalog_data():
    config = create_config()
    redis_host = config.get('DB-Section', 'redis-host')
    redis_port = config.get('DB-Section', 'redis-port')
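    # Raw client for the default-db cache, alongside the RedisConnection
    # wrapper that manages the modules/vendors databases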
    redis_cache = redis.Redis(host=redis_host,
                              port=redis_port)  # pyright: ignore
    redisConnection = RedisConnection()
    resources_path = os.path.join(os.environ['BACKEND'], 'tests/resources')
    try:
        print('Loading cache file from path {}'.format(resources_path))
        with open(os.path.join(resources_path, 'cache_data.json'),
                  'r') as file_load:
            catalog_data = json.load(file_load, object_pairs_hook=OrderedDict)
            print('Content of cache file loaded successfully.')
    except (OSError, json.JSONDecodeError):
        print('Failed to load data from .json file')
        sys.exit(1)

    catalog = catalog_data.get('yang-catalog:catalog')
    modules = catalog['modules']['module']
    vendors = catalog['vendors']['vendor']

    for module in modules:
        if (module['name'] == 'yang-catalog'
                and module['revision'] == '2018-04-03'):
            redis_cache.set('yang-catalog@2018-04-03/ietf', json.dumps(module))
            redisConnection.populate_modules([module])
            print('yang-catalog@2018-04-03 module set in Redis')
            break

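    # Round-trip through JSON to get an independent OrderedDict copy
    # of the catalog data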
    catalog_data_json = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(
        json.dumps(catalog_data))['yang-catalog:catalog']
    modules = catalog_data_json['modules']
    vendors = catalog_data_json.get('vendors', {})

    # Fill Redis db=1 with modules data
    modules_data = {
        create_module_key(module): module
        for module in modules.get('module', [])
    }
    redisConnection.set_redis_module(modules_data, 'modules-data')
    print('{} modules set in Redis.'.format(len(modules.get('module', []))))
    redisConnection.populate_implementation(vendors.get('vendor', []))
    redisConnection.reload_vendors_cache()
    print('{} vendors set in Redis.'.format(len(vendors.get('vendor', []))))
Example #7
def main():
    redis_cache = redis.Redis(host='yc-redis', port=6379, db=0)
    redisConnection = RedisConnection()

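    # 'modules-data' holds the whole modules cache as a single JSON blob;
    # fall back to empty JSON if the key is missing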
    data = redis_cache.get('modules-data')
    modules_raw = (data or b'{}').decode('utf-8')
    modules = json.loads(modules_raw).get('module')

    redisConnection.populate_modules(modules)
    redisConnection.reload_modules_cache()
Example #8
def main():
    redis_cache = redis.Redis(host='yc-redis', port=6379, db=0)
    redisConnection = RedisConnection()

    data = redis_cache.get('vendors-data')
    vendors_raw = (data or b'{}').decode('utf-8')
    vendors = json.loads(vendors_raw).get('vendor')

    redisConnection.populate_implementation(vendors)
    redisConnection.reload_vendors_cache()
Example #9
 def __init__(self, import_name):
     self.loading = True
     super(MyFlask, self).__init__(import_name)
     self.ys_set = 'set'
     self.waiting_for_reload = False
     self.special_id = ''
     self.special_id_counter = {}
     self.release_locked = []
     self.permanent_session_lifetime = timedelta(minutes=20)
     self.load_config()
     self.logger.debug('API initialized at {}'.format(
         self.config.yangcatalog_api_prefix))
     self.logger.debug('Starting api')
     self.secret_key = self.config.s_flask_secret_key
     self.confdService = ConfdService()
     self.redisConnection = RedisConnection()
Example #10
def main(scriptConf=None):
    start_time = int(time.time())
    if scriptConf is None:
        scriptConf = ScriptConfig()
    revision_updated_modules = 0
    datatracker_failures = []
    args = scriptConf.args
    log_directory = scriptConf.log_directory
    temp_dir = scriptConf.temp_dir
    is_uwsgi = scriptConf.is_uwsgi
    LOGGER = log.get_logger(
        'resolveExpiration',
        '{}/jobs/resolveExpiration.log'.format(log_directory))

    separator = ':'
    suffix = args.api_port
    if is_uwsgi == 'True':
        separator = '/'
        suffix = 'api'
    yangcatalog_api_prefix = '{}://{}{}{}/'.format(args.api_protocol,
                                                   args.api_ip, separator,
                                                   suffix)
    redisConnection = RedisConnection()
    LOGGER.info('Starting Cron job resolve modules expiration')
    try:
        LOGGER.info('Requesting all the modules from {}'.format(
            yangcatalog_api_prefix))
        updated = False

        response = requests.get(
            '{}search/modules'.format(yangcatalog_api_prefix))
        if response.status_code < 200 or response.status_code > 299:
            LOGGER.error('Request on path {} failed with {}'.format(
                yangcatalog_api_prefix, response.text))
        else:
            LOGGER.debug('{} modules fetched from {} successfully'.format(
                len(response.json().get('module', [])),
                yangcatalog_api_prefix))
        modules = response.json().get('module', [])
        i = 1
        for module in modules:
            LOGGER.debug('{} out of {}'.format(i, len(modules)))
            i += 1
            ret = resolve_expiration(module, LOGGER, datatracker_failures,
                                     redisConnection)
            if ret:
                revision_updated_modules += 1
            if not updated:
                updated = ret
        if updated:
            redisConnection.populate_modules(modules)
            url = '{}load-cache'.format(yangcatalog_api_prefix)
            response = requests.post(url,
                                     None,
                                     auth=(args.credentials[0],
                                           args.credentials[1]))
            LOGGER.info('Cache loaded with status {}'.format(
                response.status_code))
    except Exception as e:
        LOGGER.exception(
            'Exception found while running resolveExpiration script')
        job_log(start_time,
                temp_dir,
                error=str(e),
                status='Fail',
                filename=os.path.basename(__file__))
        raise e
    if len(datatracker_failures) > 0:
        LOGGER.debug(
            'The following references failed to be fetched from the Datatracker:\n {}'
            .format('\n'.join(datatracker_failures)))
    messages = [{
        'label': 'Modules with changed revision',
        'message': revision_updated_modules
    }, {
        'label': 'Datatracker modules failures',
        'message': len(datatracker_failures)
    }]
    job_log(start_time,
            temp_dir,
            messages=messages,
            status='Success',
            filename=os.path.basename(__file__))
    LOGGER.info('Job finished successfully')
Example #11
def resolve_expiration(module: dict, LOGGER: logging.Logger,
                       datatracker_failures: list,
                       redisConnection: RedisConnection):
    """Walks through all the modules and updates them if necessary

        Arguments:
            :param module               (dict) Module with all the metadata
            :param LOGGER               (logging.Logger) formated logger with the specified name
            :param datatracker_failures (list) list of url that failed to get data from Datatracker
            :param redisConnection      (RedisConnection) Connection used to communication with Redis
    """
    reference = module.get('reference')
    expired = 'not-applicable'
    expires = None
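    # Default to 'not-applicable'; ratified modules are marked as not expired,
    # and modules without a Datatracker reference keep the default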
    if module.get('maturity-level') == 'ratified':
        expired = False
        expires = None
    if reference is not None and 'datatracker.ietf.org' in reference:
        ref = reference.split('/')[-1]
        rev = None
        if ref.isdigit():
            ref = reference.split('/')[-2]
            rev = reference.split('/')[-1]
        url = (
            'https://datatracker.ietf.org/api/v1/doc/document/?name={}&states__type=draft&states__slug__in=active,RFC&format=json'
            .format(ref))
        retry = 6
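        # Retry the Datatracker request up to 6 times, 10 seconds apart,
        # before giving up on this module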
        while True:
            try:
                response = requests.get(url)
                break
            except Exception as e:
                retry -= 1
                LOGGER.warning(
                    'Failed to fetch file content of {}'.format(ref))
                time.sleep(10)
                if retry == 0:
                    LOGGER.error(
                        'Failed to fetch file content of {} 6 times in a row - SKIPPING.'
                        .format(ref))
                    LOGGER.error(e)
                    datatracker_failures.append(url)
                    return None

        if response.status_code == 200:
            data = response.json()
            objs = data.get('objects', [])
            if len(objs) == 1:
                if rev == objs[0].get('rev'):
                    rfc = objs[0].get('rfc')
                    if rfc is None:
                        expires = objs[0]['expires']
                        expired = False
                    else:
                        expired = True
                        expires = None
                else:
                    expired = True
                    expires = None
            else:
                expired = True
                expires = None

    expired_changed = __expired_change(module.get('expired'), expired)
    expires_changed = __expires_change(module.get('expires'), expires)

    if expires_changed or expired_changed:
        yang_name_rev = '{}@{}'.format(module['name'], module['revision'])
        LOGGER.info(
            'Module {} changing expiration\nFROM: expires: {} expired: {}\nTO: expires: {} expired: {}'
            .format(yang_name_rev, module.get('expires'),
                    module.get('expired'), expires, expired))

        if expires is not None:
            module['expires'] = expires
        module['expired'] = expired

        if expires is None and module.get('expires') is not None:
            # The 'expires' property no longer has a value, so a delete request
            # has to be sent to Redis to remove the 'expires' property
            result = redisConnection.delete_expires(module)
            module.pop('expires', None)

            if result:
                LOGGER.info(
                    'expires property removed from {}'.format(yang_name_rev))
            else:
                LOGGER.error(
                    'Error while removing expires property from {}'.format(
                        yang_name_rev))
        return True
    else:
        return False
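
The __expired_change and __expires_change helpers are not shown in this
example; a minimal sketch of the comparison they presumably perform
(reporting whether the stored and freshly resolved values differ):

def __expired_change(expired_old, expired_new) -> bool:
    # True when the stored 'expired' flag differs from the resolved one
    return expired_old != expired_new

def __expires_change(expires_old, expires_new) -> bool:
    # True when the stored 'expires' timestamp differs from the resolved one
    return expires_old != expires_new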
Example #12
    logs_dir = config.get('Directory-Section', 'logs')
    temp_dir = config.get('Directory-Section', 'temp')

    LOGGER = log.get_logger('healthcheck',
                            os.path.join(logs_dir, 'healthcheck.log'))
    messages = []
    letters = string.ascii_letters
    suffix = ''.join(random.choice(letters) for i in range(6))
    check_module_name = 'confd-full-check-{}'.format(suffix)
    confd_service = confdService.ConfdService()
    confd_service.delete_modules()
    confd_service.delete_vendors()

    LOGGER.info('Running confdFullCheck')
    try:
        redisConnection = RedisConnection()
        yang_catalog_module = redisConnection.get_module(
            'yang-catalog@2018-04-03/ietf')
        module = json.loads(yang_catalog_module)
        error = confd_service.patch_modules([module])

        if error:
            LOGGER.error(
                'Error occurred while patching yang-catalog@2018-04-03/ietf module'
            )
        else:
            LOGGER.info('yang-catalog@2018-04-03/ietf patched successfully')

        # Change module name to be used only for this check - to not affect real module
        module['name'] = check_module_name
Example #13
class TestRedisModulesConnectionClass(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super(TestRedisModulesConnectionClass, self).__init__(*args, **kwargs)
        config = create_config()
        self._redis_host = config.get('DB-Section', 'redis-host')
        self._redis_port = config.get('DB-Section', 'redis-port')
        self.resources_path = os.path.join(os.environ['BACKEND'],
                                           'redisConnections/tests/resources')
        self.redisConnection = RedisConnection(modules_db=6, vendors_db=9)
        self.modulesDB = Redis(host=self._redis_host,
                               port=self._redis_port,
                               db=6)  # pyright: ignore
        self.vendorsDB = Redis(host=self._redis_host,
                               port=self._redis_port,
                               db=9)  # pyright: ignore

    def setUp(self):
        redis_key = 'ietf-bgp@2021-10-25/ietf'
        with open('{}/ietf-bgp@2021-10-25.json'.format(self.resources_path),
                  'r') as f:
            self.original_data = json.load(f)
        self.modulesDB.set(redis_key, json.dumps(self.original_data))
        self.modulesDB.set('modules-data',
                           json.dumps({redis_key: self.original_data}))

    def tearDown(self):
        self.modulesDB.flushdb()
        self.vendorsDB.flushdb()

    def test_get_module(self):
        name = 'ietf-bgp'
        revision = '2021-10-25'
        organization = 'ietf'
        redis_key = '{}@{}/{}'.format(name, revision, organization)

        raw_data = self.redisConnection.get_module(redis_key)
        data = json.loads(raw_data)

        self.assertNotEqual(data, {})
        self.assertEqual(data.get('name'), name)
        self.assertEqual(data.get('revision'), revision)
        self.assertEqual(data.get('organization'), organization)

    @mock.patch('redisConnections.redisConnection.Redis.get')
    def test_get_module_key_not_exists(self, mock_redis_get: mock.MagicMock):
        mock_redis_get.return_value = None
        redis_key = 'ietf-bgp@2021-10-25/ietf'
        data = self.redisConnection.get_module(redis_key)

        self.assertEqual(data, '{}')

    def test_get_all_modules(self):
        redis_key = 'ietf-bgp@2021-10-25/ietf'
        raw_data = self.redisConnection.get_all_modules()
        data = json.loads(raw_data)

        self.assertNotEqual(raw_data, '{}')
        self.assertIn(redis_key, data)

    @mock.patch('redisConnections.redisConnection.Redis.get')
    def test_get_all_modules_key_not_exists(self,
                                            mock_redis_get: mock.MagicMock):
        mock_redis_get.return_value = None
        data = self.redisConnection.get_all_modules()

        self.assertEqual(data, '{}')

    def test_set_redis_module(self):
        name = 'ietf-bgp'
        revision = '2021-10-25'
        organization = 'ietf'
        redis_key = '{}@{}/{}'.format(name, revision, organization)
        self.modulesDB.flushdb()

        result = self.redisConnection.set_redis_module(self.original_data,
                                                       redis_key)
        raw_data = self.redisConnection.get_module(redis_key)
        data = json.loads(raw_data)

        self.assertTrue(result)
        self.assertNotEqual(data, {})
        self.assertEqual(data.get('name'), name)
        self.assertEqual(data.get('revision'), revision)
        self.assertEqual(data.get('organization'), organization)

    def test_populate_modules_empty_database(self):
        self.modulesDB.flushdb()
        name = 'ietf-bgp'
        revision = '2021-10-25'
        organization = 'ietf'
        redis_key = '{}@{}/{}'.format(name, revision, organization)
        module = deepcopy(self.original_data)

        self.redisConnection.populate_modules([module])
        raw_data = self.redisConnection.get_module(redis_key)
        data = json.loads(raw_data)

        self.assertNotEqual(data, {})
        self.assertEqual(data.get('name'), name)
        self.assertEqual(data.get('revision'), revision)
        self.assertEqual(data.get('organization'), organization)

    def test_reload_modules_cache(self):
        redis_key = 'ietf-bgp@2021-10-25/ietf'
        self.modulesDB.delete('modules-data')
        result = self.redisConnection.reload_modules_cache()
        raw_data = self.redisConnection.get_all_modules()
        data = json.loads(raw_data)

        self.assertTrue(result)
        self.assertNotEqual(raw_data, '{}')
        self.assertIn(redis_key, data)

    def test_reload_modules_cache_empty_database(self):
        self.modulesDB.flushdb()

        result = self.redisConnection.reload_modules_cache()
        data = self.redisConnection.get_all_modules()

        self.assertTrue(result)
        self.assertEqual(data, '{}')

    def test_reload_modules_cache_changed_string_property(self):
        new_description = 'Updated description'
        redis_key = 'ietf-bgp@2021-10-25/ietf'
        module = deepcopy(self.original_data)
        module['description'] = new_description

        self.redisConnection.populate_modules([module])
        result = self.redisConnection.reload_modules_cache()
        raw_data = self.redisConnection.get_all_modules()
        data = json.loads(raw_data)

        self.assertTrue(result)
        self.assertNotEqual(raw_data, '{}')
        self.assertIn(redis_key, data)
        self.assertEqual(
            data.get(redis_key).get('description'), new_description)

    def test_reload_modules_cache_changed_submodule_property(self):
        redis_key = 'ietf-bgp@2021-10-25/ietf'
        module = deepcopy(self.original_data)
        original_length = len(module['submodule'])

        new_submodule = {
            'name': 'yang-catalog',
            'revision': '2018-04-03',
            'schema': 'https://raw.githubusercontent.com/YangModels/yang/[email protected]'
        }

        module['submodule'].append(new_submodule)

        self.redisConnection.populate_modules([module])
        result = self.redisConnection.reload_modules_cache()
        raw_data = self.redisConnection.get_all_modules()
        data = json.loads(raw_data)

        self.assertTrue(result)
        self.assertNotEqual(raw_data, '{}')
        self.assertIn(redis_key, data)
        self.assertIn(new_submodule, data[redis_key]['submodule'])
        self.assertEqual(original_length + 1,
                         len(data[redis_key]['submodule']))

    def test_reload_modules_cache_changed_dependencies_property(self):
        redis_key = 'ietf-bgp@2021-10-25/ietf'
        module = deepcopy(self.original_data)
        original_length = len(module['dependencies'])

        new_dependency = {
            'name': 'yang-catalog',
            'schema': 'https://raw.githubusercontent.com/YangModels/yang/[email protected]'
        }

        module['dependencies'].append(new_dependency)

        self.redisConnection.populate_modules([module])
        result = self.redisConnection.reload_modules_cache()
        raw_data = self.redisConnection.get_all_modules()
        data = json.loads(raw_data)

        self.assertTrue(result)
        self.assertNotEqual(raw_data, '{}')
        self.assertIn(redis_key, data)
        self.assertIn(new_dependency, data[redis_key]['dependencies'])
        self.assertEqual(original_length + 1,
                         len(data[redis_key]['dependencies']))

    def test_reload_modules_cache_updated_scheme_in_dependents_property(self):
        redis_key = 'ietf-bgp@2021-10-25/ietf'
        module = deepcopy(self.original_data)
        original_length = len(module['dependents'])

        new_dependent = {
            'name': 'ietf-bgp-sr',
            'revision': '2018-06-26',
            'schema': 'https://raw.githubusercontent.com/YangModels/yang/master/experimental/ietf-extracted-YANG-modules/[email protected]'
        }

        module['dependents'][1] = new_dependent

        self.redisConnection.populate_modules([module])
        result = self.redisConnection.reload_modules_cache()
        raw_data = self.redisConnection.get_all_modules()
        data = json.loads(raw_data)

        self.assertTrue(result)
        self.assertNotEqual(raw_data, '{}')
        self.assertIn(redis_key, data)
        self.assertIn(new_dependent, data[redis_key]['dependents'])
        self.assertEqual(original_length, len(data[redis_key]['dependents']))

    def test_reload_modules_cache_changed_implementations_property(self):
        redis_key = 'ietf-bgp@2021-10-25/ietf'
        module = deepcopy(self.original_data)
        original_length = len(module['implementations']['implementation'])

        new_implementation = {
            'conformance-type': 'implement',
            'feature': ['candidate'],
            'feature-set': 'ALL',
            'os-type': 'VRP',
            'os-version': 'V800R011C10SPC810',
            'platform': 'ne5000e',
            'software-flavor': 'ALL',
            'software-version': 'V800R011C10SPC810',
            'vendor': 'huawei'
        }

        module['implementations']['implementation'].append(new_implementation)

        self.redisConnection.populate_modules([module])
        result = self.redisConnection.reload_modules_cache()
        raw_data = self.redisConnection.get_all_modules()
        data = json.loads(raw_data)

        self.assertTrue(result)
        self.assertNotEqual(raw_data, '{}')
        self.assertIn(redis_key, data)
        self.assertIn(new_implementation,
                      data[redis_key]['implementations']['implementation'])
        self.assertEqual(
            original_length + 1,
            len(data[redis_key]['implementations']['implementation']))

    def test_reload_modules_cache_duplicite_implementations_property(self):
        redis_key = 'ietf-bgp@2021-10-25/ietf'
        module = deepcopy(self.original_data)
        original_length = len(module['implementations']['implementation'])

        new_implementation = {
            'conformance-type': 'implement',
            'feature': ['candidate'],
            'feature-set': 'ALL',
            'os-type': 'VRP',
            'os-version': 'V800R013C00',
            'platform': 'ne9000',
            'software-flavor': 'ALL',
            'software-version': 'V800R013C00',
            'vendor': 'huawei'
        }

        module['implementations']['implementation'].append(new_implementation)

        self.redisConnection.populate_modules([module])
        result = self.redisConnection.reload_modules_cache()
        raw_data = self.redisConnection.get_all_modules()
        data = json.loads(raw_data)

        self.assertTrue(result)
        self.assertNotEqual(raw_data, '{}')
        self.assertIn(redis_key, data)
        self.assertIn(new_implementation,
                      data[redis_key]['implementations']['implementation'])
        self.assertEqual(
            original_length,
            len(data[redis_key]['implementations']['implementation']))

    def test_delete_modules(self):
        redis_key = 'ietf-bgp@2021-10-25/ietf'

        result = self.redisConnection.delete_modules([redis_key])
        data = self.redisConnection.get_module(redis_key)

        self.assertEqual(result, 1)
        self.assertEqual(data, '{}')

    def test_delete_modules_with_reload_modules_cache(self):
        redis_key = 'ietf-bgp@2021-10-25/ietf'

        delete_result = self.redisConnection.delete_modules([redis_key])
        reload_result = self.redisConnection.reload_modules_cache()
        data = self.redisConnection.get_module(redis_key)
        raw_all_modules_data = self.redisConnection.get_all_modules()
        all_modules_data = json.loads(raw_all_modules_data)

        self.assertEqual(delete_result, 1)
        self.assertTrue(reload_result)
        self.assertEqual(data, '{}')
        self.assertEqual(all_modules_data, {})
        self.assertNotIn(redis_key, all_modules_data)

    def test_delete_modules_non_existing_key(self):
        redis_key = 'random-key'

        result = self.redisConnection.delete_modules([redis_key])

        self.assertEqual(result, 0)

    def test_delete_modules_multiple_keys(self):
        redis_key = 'ietf-bgp@2021-10-25/ietf'
        modules_keys = [redis_key, 'modules-data']

        result = self.redisConnection.delete_modules(modules_keys)
        data = self.redisConnection.get_module(redis_key)
        all_modules_data = self.redisConnection.get_all_modules()

        self.assertEqual(result, 2)
        self.assertEqual(data, '{}')
        self.assertEqual(all_modules_data, '{}')

    def test_delete_modules_multiple_keys_with_non_existing_keys(self):
        redis_key = 'ietf-bgp@2021-10-25/ietf'
        modules_keys = [redis_key, 'random-key']

        result = self.redisConnection.delete_modules(modules_keys)
        data = self.redisConnection.get_module(redis_key)

        self.assertEqual(result, 1)
        self.assertEqual(data, '{}')

    def test_delete_dependent(self):
        redis_key = 'ietf-bgp@2021-10-25/ietf'
        dependent_to_delete = 'ietf-bgp-l3vpn'
        module = deepcopy(self.original_data)
        original_length = len(module['dependents'])

        result = self.redisConnection.delete_dependent(redis_key,
                                                       dependent_to_delete)
        raw_data = self.redisConnection.get_module(redis_key)
        data = json.loads(raw_data)

        dependents_list_names = [
            dependent.get('name') for dependent in data['dependents']
        ]

        self.assertTrue(result)
        self.assertEqual(original_length - 1, len(data['dependents']))
        self.assertNotIn(dependent_to_delete, dependents_list_names)

    def test_delete_dependent_not_existing_dependent(self):
        redis_key = 'ietf-bgp@2021-10-25/ietf'
        dependent_to_delete = 'yang-catalog'
        module = deepcopy(self.original_data)
        original_length = len(module['dependents'])

        result = self.redisConnection.delete_dependent(redis_key,
                                                       dependent_to_delete)
        raw_data = self.redisConnection.get_module(redis_key)
        data = json.loads(raw_data)

        dependents_list_names = [
            dependent.get('name') for dependent in data['dependents']
        ]

        self.assertFalse(result)
        self.assertEqual(original_length, len(data['dependents']))
        self.assertNotIn(dependent_to_delete, dependents_list_names)

    def test_delete_expires(self):
        redis_key = 'ietf-bgp@2021-10-25/ietf'

        module = deepcopy(self.original_data)
        result = self.redisConnection.delete_expires(module)
        raw_data = self.redisConnection.get_module(redis_key)
        data = json.loads(raw_data)

        self.assertTrue(result)
        self.assertNotEqual(raw_data, '{}')
        self.assertNotIn('expires', data)
Example #14
class Receiver:
    def __init__(self, config_path: str):
        self._config_path = config_path
        self.load_config()
        self.channel = None
        self.connection = None
        self.confdService = ConfdService()
        self.redisConnection = RedisConnection()
        self.LOGGER.info('Receiver started')

    def copytree(self, src: str, dst: str):
        for item in os.listdir(src):
            s = os.path.join(src, item)
            d = os.path.join(dst, item)
            if os.path.isdir(s):
                copy_tree(s, d)
            else:
                shutil.copy2(s, d)

    def process(self, arguments: t.List[str]) -> t.Tuple[StatusMessage, str]:
        """Process modules. Calls populate.py script which will parse the modules
        on the given path given by "dir" param. Populate script will also send the
        request to populate ConfD/Redis running on given IP and port. It will also copy all the modules to
        parent directory of this project /api/sdo and finally also call indexing script to update searching.

        Arguments:
            :param arguments    (list) list of arguments sent from API sender
            :return (__response_type) one of the response types which is either
                'Failed' or 'Finished successfully'
        """
        sdo = '--sdo' in arguments
        api = '--api' in arguments
        i = arguments.index('--dir')
        direc = arguments[i + 1]

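        # Import parseAndPopulate.populate dynamically and run it with the
        # assembled script configuration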
        script_name = 'populate'
        module = __import__('parseAndPopulate', fromlist=[script_name])
        submodule = getattr(module, script_name)
        script_conf = submodule.ScriptConfig()
        # Set populate script arguments
        script_conf.args.__setattr__('sdo', sdo)
        script_conf.args.__setattr__('api', api)
        script_conf.args.__setattr__('dir', direc)
        if self._notify_indexing:
            script_conf.args.__setattr__('notify_indexing', True)
            script_conf.args.__setattr__('force_indexing', True)

        self.LOGGER.info(
            'Running populate.py script with the following configuration:\n{}'.format(
                script_conf.args.__dict__))
        try:
            submodule.main(scriptConf=script_conf)
        except Exception:
            self.LOGGER.exception('Problem while running populate script')
            return StatusMessage.FAIL, 'Server error while running populate script'

        return StatusMessage.SUCCESS, ''

    def process_vendor_deletion(self, arguments: t.List[str]) -> StatusMessage:
        """Deleting vendors metadata. It deletes all the module in vendor branch of the yang-catalog.yang 
        module on given path. If the module was added by vendor and it doesn't contain any other implementations 
        it will delete the whole module in modules branch of the yang-catalog.yang module. 
        It will also call indexing script to update Elasticsearch searching.

        Argument:
            :param arguments    (list) list of arguments sent from API sender
            :return (__response_type) one of the response types which
                is either 'Finished successfully' or 'In progress'
        """
        vendor, platform, software_version, software_flavor = arguments[3:7]
        # confd_suffix = arguments[-1]

        path = '{}search'.format(self._yangcatalog_api_prefix)
        redis_vendor_key = ''
        data_key = 'vendor'
        if vendor != 'None':
            path += '/vendors/vendor/{}'.format(vendor)
            redis_vendor_key += vendor
            data_key = 'yang-catalog:vendor'
        if platform != 'None':
            path += '/platforms/platform/{}'.format(platform)
            redis_vendor_key += '/{}'.format(platform)
            data_key = 'yang-catalog:platform'
        if software_version != 'None':
            path += '/software-versions/software-version/{}'.format(
                software_version)
            redis_vendor_key += '/{}'.format(software_version)
            data_key = 'yang-catalog:software-version'
        if software_flavor != 'None':
            path += '/software-flavors/software-flavor/{}'.format(
                software_flavor)
            redis_vendor_key += '/{}'.format(software_flavor)
            data_key = 'yang-catalog:software-flavor'

        redis_vendor_data = self.redisConnection.create_vendors_data_dict(
            redis_vendor_key)
        vendor_data = {data_key: redis_vendor_data}

        modules_keys = set()
        deleted_modules = []
        self.iterate_in_depth(vendor_data, modules_keys)

        # Delete implementation
        for mod_key in modules_keys:
            try:
                name, revision, organization = mod_key.split(',')
                redis_key = '{}@{}/{}'.format(name, revision, organization)
                raw_data = self.redisConnection.get_module(redis_key)
                modules_data = json.loads(raw_data)
                implementations = modules_data.get('implementations', {}).get(
                    'implementation', [])

                count_of_implementations = len(implementations)
                count_deleted = 0
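                # Build a comma-separated implementation key; skip
                # implementations that don't match the requested
                # vendor/platform/software-version/software-flavor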
                for implementation in implementations:
                    imp_key = ''
                    if vendor != 'None' and vendor != implementation['vendor']:
                        continue
                    else:
                        imp_key += implementation['vendor']
                    if platform != 'None' and platform != implementation['platform']:
                        continue
                    else:
                        imp_key += ',{}'.format(implementation['platform'])
                    if software_version != 'None' and software_version != implementation['software-version']:
                        continue
                    else:
                        imp_key += ',{}'.format(implementation['software-version'])
                    if software_flavor != 'None' and software_flavor != implementation['software-flavor']:
                        continue
                    else:
                        imp_key += ',{}'.format(implementation['software-flavor'])

                    # Delete module implementation from Redis
                    response = self.redisConnection.delete_implementation(
                        redis_key, imp_key)
                    if response:
                        self.LOGGER.info(
                            'Implementation {} deleted from module {} successfully'
                            .format(imp_key, mod_key))
                    elif response == 0:
                        self.LOGGER.debug(
                            'Implementation {} already deleted from module {}'.
                            format(imp_key, mod_key))
                    count_deleted += 1

                if (count_deleted == count_of_implementations
                        and count_of_implementations != 0):
                    if organization == vendor:
                        deletion_problem = False

                        # Delete module from Redis
                        response = self.redisConnection.delete_modules(
                            [redis_key])
                        if response == 1:
                            self.LOGGER.info(
                                'Module {} deleted successfully'.format(
                                    redis_key))
                        elif response == 0:
                            self.LOGGER.debug(
                                'Module {} already deleted'.format(redis_key))

                        if not deletion_problem:
                            deleted_modules.append(redis_key)
            except Exception:
                self.LOGGER.exception(
                    'YANG file {} doesn\'t exist although it should exist'.
                    format(mod_key))
        raw_all_modules = self.redisConnection.get_all_modules()
        all_modules = json.loads(raw_all_modules)

        # Delete dependents
        for module_key in modules_keys:
            name, revision, organization = module_key.split(',')
            redis_key = '{}@{}/{}'.format(name, revision, organization)
            for existing_module in all_modules.values():
                if existing_module.get('dependents') is not None:
                    dependents = existing_module['dependents']
                    for dep in dependents:
                        if dep['name'] == name and dep['revision'] == revision:
                            mod_key_redis = '{}@{}/{}'.format(
                                existing_module['name'],
                                existing_module['revision'],
                                existing_module['organization'])
                            # Delete module's dependent from Redis
                            self.redisConnection.delete_dependent(
                                mod_key_redis, dep['name'])

        # Delete vendor branch from Redis
        redis_vendor_key = redis_vendor_key.replace(' ', '#')
        response = self.redisConnection.delete_vendor(redis_vendor_key)

        if self._notify_indexing:
            body_to_send = prepare_for_es_removal(self._yangcatalog_api_prefix,
                                                  deleted_modules,
                                                  self._save_file_dir,
                                                  self.LOGGER)
            if body_to_send.get('modules-to-delete'):
                send_for_es_indexing(body_to_send, self.LOGGER,
                                     self._changes_cache_path,
                                     self._delete_cache_path, self._lock_file)
        return StatusMessage.SUCCESS

    def iterate_in_depth(self, value: dict, modules_keys: t.Set[str]):
        """Iterates through the branch to get to the level with modules.

        Arguments:
            :param value            (dict) data through which we will need to iterate
            :param modules_keys     (set) set that will contain all the modules that need to be deleted
        """
        for key, val in value.items():
            if key == 'protocols':
                continue
            if isinstance(val, list):
                for v in val:
                    self.iterate_in_depth(v, modules_keys)
            if isinstance(val, dict):
                if key == 'modules':
                    for mod in val['module']:
                        mod_key = '{},{},{}'.format(mod['name'],
                                                    mod['revision'],
                                                    mod['organization'])
                        modules_keys.add(mod_key)
                else:
                    self.iterate_in_depth(val, modules_keys)

    def make_cache(self, credentials: t.List[str]) -> requests.Response:
        """After we delete or add modules we need to reload all the modules to the file
        for quicker search. This module is then loaded to the memory.

        Argument:
            :param credentials      (list) Basic authorization credentials - username and password
            :return 'work' string if everything went through fine otherwise send back the reason why
                it failed.
        """
        path = self._yangcatalog_api_prefix + 'load-cache'
        response = requests.post(path,
                                 auth=(credentials[0], credentials[1]),
                                 headers=json_headers)
        code = response.status_code

        if code not in (200, 201, 204):
            self.LOGGER.error(
                'Could not load json to memory-cache. Error: {} {}'.format(
                    response.text, code))
        return response

    def process_module_deletion(
            self, arguments: t.List[str]) -> t.Tuple[StatusMessage, str]:
        """Deleting one or more modules. It sends the delete request to ConfD to delete module on
        given path. This will delete whole module in modules branch of the
        yang-catalog:yang module. It will also call indexing script to update searching.

        Argument:
            :param arguments    (list) list of arguments sent from API sender
            :return (__response_type) one of the response types which
                is either 'Finished successfully' or 'Partially done'
        """
        try:
            path_to_delete = arguments[3]
            modules = json.loads(path_to_delete)['modules']
            all_modules_raw = self.redisConnection.get_all_modules()
            all_modules = json.loads(all_modules_raw)
        except Exception:
            self.LOGGER.exception('Problem while processing arguments')
            return StatusMessage.FAIL, 'Server error -> Unable to parse arguments'

        def module_in(name: str, revision: str, modules: list) -> bool:
            for module in modules:
                if module['name'] == name and module.get(
                        'revision') == revision:
                    return True
            return False

        @functools.lru_cache(maxsize=None)
        def can_delete(name: str, revision: str) -> bool:
            """ Check whether module with given 'name' and 'revison' which should be removed
            is or is not depedency/submodule of some other existing module.
            If module-to-be-deleted has reference in another existing module, it cannot be deleted.
            However, it can be deleted if also referenced existing module will be deleted too.
            """
            for redis_key, existing_module in all_modules.items():
                for dep_type in ['dependencies', 'submodule']:
                    is_dep = module_in(name, revision,
                                       existing_module.get(dep_type, []))
                    if is_dep:
                        if module_in(existing_module['name'],
                                     existing_module['revision'], modules):
                            if can_delete(existing_module['name'],
                                          existing_module['revision']):
                                continue
                        else:
                            self.LOGGER.error(
                                '{}@{} module has reference in another module\'s {}: {}'
                                .format(name, revision, dep_type, redis_key))
                            return False
            return True

        modules_not_deleted = []
        modules_to_delete = []
        mod_keys_to_delete = []
        redis_keys_to_delete = []
        for module in modules:
            mod_key = '{},{},{}'.format(module['name'], module['revision'],
                                        module['organization'])
            redis_key = '{}@{}/{}'.format(module['name'], module['revision'],
                                          module['organization'])
            if can_delete(module.get('name'), module.get('revision')):
                mod_keys_to_delete.append(mod_key)
                redis_keys_to_delete.append(redis_key)
                modules_to_delete.append(module)
            else:
                modules_not_deleted.append(mod_key)

        for mod in modules_to_delete:
            for redis_key, existing_module in all_modules.items():
                if existing_module.get('dependents') is not None:
                    dependents = existing_module['dependents']
                    dependents_keys = [
                        '{}@{}'.format(m['name'], m.get('revision', ''))
                        for m in dependents
                    ]
                    searched_dependent = '{}@{}'.format(
                        mod['name'], mod.get('revision', ''))
                    if searched_dependent in dependents_keys:
                        self.redisConnection.delete_dependent(
                            redis_key, mod['name'])
        modules_to_index = []
        for mod_key, redis_key in zip(mod_keys_to_delete,
                                      redis_keys_to_delete):

            response = self.redisConnection.delete_modules([redis_key])
            if response == 1:
                self.LOGGER.info(
                    'Module {} deleted successfully'.format(redis_key))
            elif response == 0:
                self.LOGGER.debug(
                    'Module {} already deleted'.format(redis_key))
            modules_to_index.append(redis_key)

        if self._notify_indexing:
            body_to_send = prepare_for_es_removal(self._yangcatalog_api_prefix,
                                                  modules_to_index,
                                                  self._save_file_dir,
                                                  self.LOGGER)

            if len(body_to_send) > 0:
                send_for_es_indexing(body_to_send, self.LOGGER,
                                     self._changes_cache_path,
                                     self._delete_cache_path, self._lock_file)
        if len(modules_not_deleted) == 0:
            return StatusMessage.SUCCESS, ''
        else:
            reason = 'modules-not-deleted:{}'.format(
                ':'.join(modules_not_deleted))
            return StatusMessage.IN_PROGRESS, reason

    def run_ietf(self) -> t.Tuple[StatusMessage, str]:
        """
        Runs the ietf and openconfig scripts that update all the new ietf
        and openconfig modules
        :return: response, either success or failed
        """
        try:
            # Run draftPullLocal.py script
            script_name = 'draftPullLocal'
            module = __import__('ietfYangDraftPull', fromlist=[script_name])
            submodule = getattr(module, script_name)
            script_conf = submodule.ScriptConfig()

            self.LOGGER.info('Running draftPullLocal.py script')
            try:
                submodule.main(scriptConf=script_conf)
            except Exception:
                self.LOGGER.exception(
                    'Problem while running draftPullLocal script')
                return StatusMessage.FAIL, 'Server error while running draftPullLocal script'
            # Run openconfigPullLocal.py script
            script_name = 'openconfigPullLocal'
            module = __import__('ietfYangDraftPull', fromlist=[script_name])
            submodule = getattr(module, script_name)
            script_conf = submodule.ScriptConfig()

            self.LOGGER.info('Running openconfigPullLocal.py script')
            try:
                submodule.main(scriptConf=script_conf)
            except Exception:
                self.LOGGER.exception(
                    'Problem while running openconfigPullLocal script')
                return StatusMessage.FAIL, 'Server error while running openconfigPullLocal script'

            return StatusMessage.SUCCESS, ''
        except Exception:
            self.LOGGER.exception('Server error while running scripts')
            return StatusMessage.FAIL, ''

    def load_config(self) -> StatusMessage:
        config = create_config(self._config_path)
        self._log_directory = config.get('Directory-Section', 'logs')
        self.LOGGER = log.get_logger(
            'receiver', os.path.join(self._log_directory, 'receiver.log'))
        self.LOGGER.info('Loading config')
        logging.getLogger('pika').setLevel(logging.INFO)
        self._api_ip = config.get('Web-Section', 'ip')
        self._api_port = int(config.get('Web-Section', 'api-port'))
        self._api_protocol = config.get('General-Section', 'protocol-api')
        self._notify_indexing = config.get('General-Section', 'notify-index')
        self._save_file_dir = config.get('Directory-Section', 'save-file-dir')
        self._yang_models = config.get('Directory-Section', 'yang-models-dir')
        self._is_uwsgi = config.get('General-Section', 'uwsgi')
        self._rabbitmq_host = config.get('RabbitMQ-Section',
                                         'host',
                                         fallback='127.0.0.1')
        self._rabbitmq_port = int(
            config.get('RabbitMQ-Section', 'port', fallback='5672'))
        self._changes_cache_path = config.get('Directory-Section',
                                              'changes-cache')
        self._delete_cache_path = config.get('Directory-Section',
                                             'delete-cache')
        self._lock_file = config.get('Directory-Section', 'lock')
        rabbitmq_username = config.get('RabbitMQ-Section',
                                       'username',
                                       fallback='guest')
        rabbitmq_password = config.get('Secrets-Section',
                                       'rabbitMq-password',
                                       fallback='guest')
        self.temp_dir = config.get('Directory-Section', 'temp')
        self.json_ytree = config.get('Directory-Section', 'json-ytree')

        self._notify_indexing = self._notify_indexing == 'True'
        separator = ':'
        suffix = self._api_port
        if self._is_uwsgi == 'True':
            separator = '/'
            suffix = 'api'
        self._yangcatalog_api_prefix = '{}://{}{}{}/'.format(
            self._api_protocol, self._api_ip, separator, suffix)
        self._rabbitmq_credentials = pika.PlainCredentials(
            username=rabbitmq_username, password=rabbitmq_password)
        self.LOGGER.info('Config loaded successfully')
        return StatusMessage.SUCCESS

    def on_request(self, channel, method, properties, body):
        process_reload_cache = multiprocessing.Process(
            target=self.on_request_thread_safe, args=(properties, body))
        process_reload_cache.start()

    def on_request_thread_safe(self, properties, body_raw: bytes):
        """Function called when something was sent from API sender. This function
        will process all the requests that would take too long to process for API.
        When the processing is done we will sent back the result of the request
        which can be either 'Failed' or 'Finished successfully' with corespondent
        correlation ID. If the request 'Failed' it will sent back also a reason why
        it failed.
                Arguments:
                    :param body: (str) String of arguments that need to be processed
                    separated by '#'.
        """
        config_reloaded = False
        status: StatusMessage
        details: str = ''

        try:
            body = body_raw.decode()
            arguments = body.split('#')
            if body == 'run_ietf':
                self.LOGGER.info('Running all ietf and openconfig modules')
                status, details = self.run_ietf()
            elif body == 'reload_config':
                status = self.load_config()
                config_reloaded = True
            elif 'run_ping' == arguments[0]:
                status = self.run_ping(arguments[1])
            elif 'run_script' == arguments[0]:
                status = self.run_script(arguments[1:])
            elif 'github' == arguments[-1]:
                self.LOGGER.info(
                    'GitHub automated message, starting to populate')
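                # paths_plus = ['repoLocalDir', <repo paths...>, <local clone dir>, 'github']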
                paths_plus = arguments[arguments.index('repoLocalDir'):]
                self.LOGGER.info('paths plus {}'.format(paths_plus))
                arguments = arguments[:arguments.index('repoLocalDir')]
                self.LOGGER.info('arguments {}'.format(arguments))
                paths = paths_plus[1:-2]
                self.LOGGER.info('paths {}'.format(paths))
                try:
                    for path in paths:
                        with open(self.temp_dir + '/log_trigger.txt',
                                  'w') as f:
                            local_dir = paths_plus[-2]
                            # Build per-path arguments so '--dir' does not
                            # accumulate across iterations
                            script_args = arguments + [
                                '--dir', local_dir + '/' + path
                            ]
                            if self._notify_indexing:
                                script_args.append('--notify-indexing')
                            subprocess.check_call(script_args, stderr=f)
                    status = StatusMessage.SUCCESS
                except subprocess.CalledProcessError as e:
                    status = StatusMessage.FAIL
                    mf = messageFactory.MessageFactory()
                    mf.send_automated_procedure_failed(
                        arguments, self.temp_dir + '/log_no_sdo_api.txt')
                    self.LOGGER.error(
                        'Check log_trigger.txt. Error calling populate.py process: {}\n\nwith error {}'
                        .format(e.output, e.stderr))
                except Exception:
                    status = StatusMessage.FAIL
                    self.LOGGER.exception(
                        'Check log_trigger.txt: failed to process the GitHub message')
            else:
                direc = ''
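                # Illustrative layouts of the remaining request types (assumed):
                #   'DELETE-VENDORS#<user>#<password>#...'
                #   'DELETE-MODULES#<user>#<password>#...'
                #   'POPULATE-MODULES#...#--credentials#<user>#<password>#...#--dir#<dir>#...'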
                if arguments[0] == 'DELETE-VENDORS':
                    status = self.process_vendor_deletion(arguments)
                    credentials = arguments[1:3]
                elif arguments[0] == 'DELETE-MODULES':
                    status, details = self.process_module_deletion(arguments)
                    credentials = arguments[1:3]
                elif arguments[0] == 'POPULATE-MODULES':
                    status, details = self.process(arguments)
                    i = arguments.index('--credentials')
                    credentials = arguments[i + 1:i + 3]
                    i = arguments.index('--dir')
                    direc = arguments[i + 1]
                    shutil.rmtree(direc)
                elif arguments[0] == 'POPULATE-VENDORS':
                    status, details = self.process(arguments)
                    i = arguments.index('--credentials')
                    credentials = arguments[i + 1:i + 3]
                    i = arguments.index('--dir')
                    direc = arguments[i + 1]
                    shutil.rmtree(direc)
                else:
                    assert False, 'Invalid request type'

                if status == StatusMessage.SUCCESS:
                    response = self.make_cache(credentials)
                    code = response.status_code
                    if code != 200 and code != 201 and code != 204:
                        status = StatusMessage.FAIL
                        details = 'Server error: could not reload cache'
        except Exception:
            status = StatusMessage.FAIL
            self.LOGGER.exception('receiver.py failed')
        final_response = status.value if not details else '{}#split#{}'.format(
            status.value, details)
        self.LOGGER.info(
            'Receiver is done with id - {} and message = {}'.format(
                properties.correlation_id, final_response))

        # Rewrite the matching entry in the correlation_ids file with a
        # timestamp and the final response for this request
        with open('{}/correlation_ids'.format(self.temp_dir), 'r') as f:
            lines = f.readlines()
        with open('{}/correlation_ids'.format(self.temp_dir), 'w') as f:
            for line in lines:
                if properties.correlation_id in line:
                    new_line = '{} -- {} - {}\n'.format(
                        datetime.now().ctime(), properties.correlation_id,
                        str(final_response))
                    f.write(new_line)
                else:
                    f.write(line)
        if config_reloaded:
            assert self.channel, 'Should only be called from self.channel.start_consuming()'
            self.channel.stop_consuming()

    def start_receiving(self):
        while True:
            try:
                self.connection = pika.BlockingConnection(
                    pika.ConnectionParameters(
                        host=self._rabbitmq_host,
                        port=self._rabbitmq_port,
                        heartbeat=10,
                        credentials=self._rabbitmq_credentials))
                self.channel = self.connection.channel()
                self.channel.queue_declare(queue='module_queue')

                self.channel.basic_qos(prefetch_count=1)
                self.channel.basic_consume('module_queue',
                                           self.on_request,
                                           auto_ack=True)

                self.LOGGER.info('Awaiting RPC request')

                self.channel.start_consuming()
            except Exception as e:
                self.LOGGER.exception('Exception: {}'.format(str(e)))
            else:
                self.LOGGER.info('Restarting connection after config reload')
            finally:
                time.sleep(10)
                try:
                    if self.channel:
                        self.channel.stop_consuming()
                except Exception:
                    pass
                try:
                    if self.connection:
                        self.connection.close()
                except Exception:
                    pass

    def run_script(self, arguments: t.List[str]) -> StatusMessage:
        module_name = arguments[0]
        script_name = arguments[1]
        body_input = json.loads(arguments[2])
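        # Illustrative inputs: module_name='<package>', script_name='<script>',
        # body_input={'credentials': [...], '<arg>': '<value>'} - overrides of
        # the script's default argument values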
        try:
            # Load submodule and its config
            module = __import__(module_name, fromlist=[script_name])
            submodule = getattr(module, script_name)
            script_conf = submodule.ScriptConfig()
            script_args_list = script_conf.get_args_list()

            for key in body_input:
                if (key != 'credentials' and
                        body_input[key] != script_args_list[key]['default']):
                    script_conf.args.__setattr__(key, body_input[key])

            self.LOGGER.info(
                'Running {}.py script with the following configuration:\n{}'.
                format(script_name, script_conf.args.__dict__))
            submodule.main(scriptConf=script_conf)
            return StatusMessage.SUCCESS
        except Exception:
            self.LOGGER.exception(
                'Server error while running {} script'.format(script_name))
            return StatusMessage.FAIL

    def run_ping(self, message: str) -> StatusMessage:
        if message == 'ping':
            return StatusMessage.SUCCESS
        else:
            return StatusMessage.FAIL
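
For context, here is a minimal sketch of the sending side, assuming the same
'module_queue' queue and pika as in the receiver above; the host and the
'run_ping#ping' body are illustrative:

import uuid

import pika

connection = pika.BlockingConnection(pika.ConnectionParameters(host='127.0.0.1'))
channel = connection.channel()
channel.queue_declare(queue='module_queue')
# Tag the request with a correlation ID so the receiver can record the result
channel.basic_publish(exchange='',
                      routing_key='module_queue',
                      properties=pika.BasicProperties(correlation_id=str(uuid.uuid4())),
                      body='run_ping#ping'.encode())
connection.close()

The receiver does not reply on a queue; it records the result next to the
matching correlation ID in the correlation_ids file, as shown above.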
Example #15
0
def main(scriptConf=None):
    start_time = int(time.time())
    if scriptConf is None:
        scriptConf = ScriptConfig()
    args = scriptConf.args
    cache_directory = scriptConf.cache_directory
    log_directory = scriptConf.log_directory
    temp_dir = scriptConf.temp_dir
    var_yang = scriptConf.var_yang
    confdService = ConfdService()

    confd_backups = os.path.join(cache_directory, 'confd')
    redis_backups = os.path.join(cache_directory, 'redis')
    redis_json_backup = os.path.join(cache_directory, 'redis-json')

    LOGGER = log.get_logger('recovery', os.path.join(log_directory, 'yang.log'))
    LOGGER.info('Starting {} process of Redis database'.format(args.type))

    if 'save' == args.type:
        # Redis dump.rdb file backup
        redis_backup_file = '{}/redis/dump.rdb'.format(var_yang)
        if not os.path.exists(redis_backups):
            os.mkdir(redis_backups)
        if os.path.exists(redis_backup_file):
            redis_copy_file = os.path.join(redis_backups, '{}.rdb.gz'.format(args.name_save))
            with gzip.open(redis_copy_file, 'w') as save_file:
                with open(redis_backup_file, 'rb') as original:
                    save_file.write(original.read())
            LOGGER.info('Backup of Redis dump.rdb file created')
        else:
            LOGGER.warning('Redis dump.rdb file does not exist')

        # Backup content of Redis into JSON file
        redisConnection = RedisConnection()
        redis_modules_raw = redisConnection.get_all_modules()
        redis_vendors_raw = redisConnection.get_all_vendors()
        redis_modules_dict = json.loads(redis_modules_raw)
        redis_modules = list(redis_modules_dict.values())
        redis_vendors = json.loads(redis_vendors_raw)
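        # redis_modules is a flat list of module dicts; redis_vendors is
        # expected to be a dict of the form {'vendor': [...]}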

        if not os.path.exists(redis_json_backup):
            os.mkdir(redis_json_backup)
        with open(os.path.join(redis_json_backup, 'backup.json'), 'w') as f:
            data = {
                'yang-catalog:catalog': {
                    # Wrap the module list so the structure matches what the
                    # load branch below reads back ('modules'/'module')
                    'modules': {'module': redis_modules},
                    'vendors': redis_vendors
                }
            }
            json.dump(data, f)

        num_of_modules = len(redis_modules)
        num_of_vendors = len(redis_vendors.get('vendor', []))
        messages = [
            {'label': 'Saved modules', 'message': num_of_modules},
            {'label': 'Saved vendors', 'message': num_of_vendors}
        ]
        LOGGER.info('Save completed successfully')
        filename = '{} - save'.format(os.path.basename(__file__).split('.py')[0])
        job_log(start_time, temp_dir, messages=messages, status='Success', filename=filename)
    else:
        file_name = ''
        if args.name_load:
            file_name = os.path.join(confd_backups, args.name_load)
        else:
            list_of_backups = get_list_of_backups(confd_backups)
            file_name = os.path.join(confd_backups, list_of_backups[-1])

        redisConnection = RedisConnection()
        redis_modules = redisConnection.get_all_modules()
        yang_catalog_module = redisConnection.get_module('yang-catalog@2018-04-03/ietf')

        if '{}' in (redis_modules, yang_catalog_module):
            # Redis database is empty (no dump.rdb was loaded) - restore from the JSON backup
            backup_path = os.path.join(redis_json_backup, 'backup.json')
            modules = []
            vendors = []
            if os.path.exists(backup_path):
                with open(backup_path, 'r') as file_load:
                    catalog_data = json.load(file_load)
                    modules = catalog_data.get('yang-catalog:catalog', {}).get('modules', {}).get('module', [])
                    vendors = catalog_data.get('yang-catalog:catalog', {}).get('vendors', {}).get('vendor', [])
            else:
                if file_name.endswith('.gz'):
                    with gzip.open(file_name, 'r') as file_load:
                        LOGGER.info('Loading file {}'.format(file_load.name))
                        catalog_data = json.loads(file_load.read().decode())
                        modules = catalog_data.get('yang-catalog:catalog', {}).get('modules', {}).get('module', [])
                        vendors = catalog_data.get('yang-catalog:catalog', {}).get('vendors', {}).get('vendor', [])
                elif file_name.endswith('.json'):
                    with open(file_name, 'r') as file_load:
                        LOGGER.info('Loading file {}'.format(file_load.name))
                        catalog_data = json.load(file_load)
                        modules = catalog_data.get('yang-catalog:catalog', {}).get('modules', {}).get('module', [])
                        vendors = catalog_data.get('yang-catalog:catalog', {}).get('vendors', {}).get('vendor', [])
                else:
                    LOGGER.error('Unable to load modules - unsupported backup file format')

            redisConnection.populate_modules(modules)
            redisConnection.populate_implementation(vendors)
            redisConnection.reload_modules_cache()
            redisConnection.reload_vendors_cache()
            LOGGER.info('All the modules data set to Redis successfully')

        # Retry the ConfD connection a few times (roughly 5 minutes in total)
        # instead of giving up after a single failed attempt
        tries = 4
        while True:
            try:
                response = confdService.head_confd()
                LOGGER.info('Status code for HEAD request {}'.format(response.status_code))
                if response.status_code == 200:
                    yang_catalog_module = redisConnection.get_module('yang-catalog@2018-04-03/ietf')
                    error = feed_confd_modules([json.loads(yang_catalog_module)], confdService)
                    if error:
                        LOGGER.error('Error occurred while patching yang-catalog@2018-04-03/ietf module')
                    else:
                        LOGGER.info('yang-catalog@2018-04-03/ietf patched successfully')
                break
            except ConnectionError:
                if tries == 0:
                    LOGGER.exception('Unable to connect to ConfD for over 5 minutes')
                    break
                tries -= 1
                sleep(60)

    LOGGER.info('Job finished successfully')
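
A plausible invocation, assuming the argparse flag names mirror the args
attributes used above (--type, --name_save, --name_load; verify against the
actual ScriptConfig definition):

python recovery.py --type save --name_save 2022-01-01
python recovery.py --type load --name_load 2022-01-01.json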
Example #16
0
def main(scriptConf=None):
    if scriptConf is None:
        scriptConf = ScriptConfig()
    args = scriptConf.args
    log_directory = scriptConf.log_directory
    is_uwsgi = scriptConf.is_uwsgi
    yang_models = scriptConf.yang_models
    temp_dir = scriptConf.temp_dir
    cache_dir = scriptConf.cache_dir
    json_ytree = scriptConf.json_ytree
    global LOGGER
    LOGGER = log.get_logger('populate', '{}/parseAndPopulate.log'.format(log_directory))

    separator = ':'
    suffix = args.api_port
    if is_uwsgi == 'True':
        separator = '/'
        suffix = 'api'
    yangcatalog_api_prefix = '{}://{}{}{}/'.format(args.api_protocol, args.api_ip, separator, suffix)
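    # e.g. 'https://yangcatalog.org/api/' under uwsgi, or
    # 'http://127.0.0.1:8008/' otherwise (values illustrative)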
    confdService = ConfdService()
    redisConnection = RedisConnection()
    LOGGER.info('Starting the populate script')
    start = time.time()
    if args.api:
        json_dir = args.dir
    else:
        json_dir = create_dir_name(temp_dir)
        os.makedirs(json_dir)
    LOGGER.info('Calling runCapabilities script')
    try:
        runCapabilities = import_module('parseAndPopulate.runCapabilities')
        script_conf = configure_runCapabilities(runCapabilities, args, json_dir)
        runCapabilities.main(scriptConf=script_conf)
    except Exception as e:
        LOGGER.exception('runCapabilities error:\n{}'.format(e))
        raise e

    body_to_send = {}
    if args.notify_indexing:
        LOGGER.info('Sending files for indexing')
        body_to_send = prepare_for_es_indexing(yangcatalog_api_prefix, os.path.join(json_dir, 'prepare.json'),
                                               LOGGER, args.save_file_dir, force_indexing=args.force_indexing)

    LOGGER.info('Populating yang catalog with data. Starting to add modules')
    with open(os.path.join(json_dir, 'prepare.json')) as data_file:
        data = data_file.read()
    modules = json.loads(data).get('module', [])
    errors = confdService.patch_modules(modules)
    redisConnection.populate_modules(modules)

    # Add vendor implementation data if it was parsed (normal.json present)
    if os.path.exists(os.path.join(json_dir, 'normal.json')):
        LOGGER.info('Starting to add vendors')
        with open(os.path.join(json_dir, 'normal.json')) as data:
            try:
                vendors = json.loads(data.read())['vendors']['vendor']
            except KeyError as e:
                LOGGER.error('No files were parsed. This probably means the directory is missing capability xml files')
                raise e
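        # Note the short-circuit below: if module patching already reported
        # errors, patch_vendors() is skipped and only Redis is populated.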
        errors = errors or confdService.patch_vendors(vendors)
        redisConnection.populate_implementation(vendors)
    if body_to_send:
        LOGGER.info('Sending files for indexing')
        send_for_es_indexing(body_to_send, LOGGER, scriptConf.changes_cache_path, scriptConf.delete_cache_path,
                             scriptConf.lock_file)
    if modules:
        process_reload_cache = multiprocessing.Process(target=reload_cache_in_parallel,
                                                       args=(args.credentials, yangcatalog_api_prefix,))
        process_reload_cache.start()
        LOGGER.info('Running ModulesComplicatedAlgorithms from populate.py script')
        recursion_limit = sys.getrecursionlimit()
        sys.setrecursionlimit(50000)
        complicatedAlgorithms = ModulesComplicatedAlgorithms(log_directory, yangcatalog_api_prefix,
                                                             args.credentials, args.save_file_dir, json_dir, None,
                                                             yang_models, temp_dir, json_ytree)
        complicatedAlgorithms.parse_non_requests()
        LOGGER.info('Waiting for cache reload to finish')
        process_reload_cache.join()
        complicatedAlgorithms.parse_requests()
        sys.setrecursionlimit(recursion_limit)
        LOGGER.info('Populating with new data of complicated algorithms')
        complicatedAlgorithms.populate()
        end = time.time()
        LOGGER.info('Populate took {} seconds with the main and complicated algorithm'.format(int(end - start)))

        # Keep new hashes only if the ConfD was patched successfully
        if not errors:
            path = os.path.join(json_dir, 'temp_hashes.json')
            fileHasher = FileHasher('backend_files_modification_hashes', cache_dir, not args.force_parsing, log_directory)
            updated_hashes = fileHasher.load_hashed_files_list(path)
            if updated_hashes:
                fileHasher.merge_and_dump_hashed_files_list(updated_hashes)

    LOGGER.info('Populate script finished successfully')
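
A plausible invocation under the same caveat (flag names inferred from the
args attributes above, not verified against ScriptConfig):

python populate.py --dir /var/yang/tmp/sdo --credentials <user> <password> --notify-indexing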