def __init__(self):
    """Load configuration for the tree-type resolution daily cronjob.

    Reads API endpoint, working directories and ConfD credentials from the
    yangcatalog config file, falling back to sane defaults where possible.
    """
    help_message = 'Resolve the tree-type for modules that are no longer the latest revision. ' \
                   'Runs as a daily cronjob.'
    config = create_config()
    super().__init__(help_message, None, [])
    self.api_protocol = config.get('General-Section', 'protocol-api', fallback='http')
    self.ip = config.get('Web-Section', 'ip', fallback='localhost')
    self.api_port = int(config.get('Web-Section', 'api-port', fallback=5000))
    # NOTE: the codebase compares this value against the string 'True'
    # (see load_config/other ScriptConfigs), so the fallback must be the
    # string 'True', not the boolean True as before.
    self.is_uwsgi = config.get('General-Section', 'uwsgi', fallback='True')
    self.temp_dir = config.get('Directory-Section', 'temp', fallback='/var/yang/tmp')
    self.log_directory = config.get('Directory-Section', 'logs', fallback='/var/yang/logs')
    self.save_file_dir = config.get('Directory-Section', 'save-file-dir', fallback='/var/yang/all_modules')
    self.yang_models = config.get(
        'Directory-Section', 'yang-models-dir', fallback='/var/yang/nonietf/yangmodels/yang')
    # No fallback on purpose: a missing credential should fail loudly.
    self.credentials = config.get('Secrets-Section', 'confd-credentials').strip('"').split(' ')
    self.json_ytree = config.get('Directory-Section', 'json-ytree', fallback='/var/yang/ytrees')
def setUp(self):
    """Prepare a clone of the test repository plus logging and GitHub credentials."""
    repourl = 'https://github.com/yang-catalog/test'
    self.repo_owner = 'yang-catalog'
    logger = logging.getLogger(__name__)
    # setUp() runs before every test; without this guard a fresh FileHandler
    # is stacked onto the shared module logger each time, duplicating records.
    if not any(isinstance(handler, logging.FileHandler) for handler in logger.handlers):
        f_handler = logging.FileHandler('test_repoutil.log')
        f_handler.setLevel(logging.ERROR)
        f_format = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        f_handler.setFormatter(f_format)
        logger.addHandler(f_handler)
    self.myname = 'yang-catalog'
    self.myemail = '*****@*****.**'
    self.repo = ru.ModifiableRepoUtil(
        repourl,
        clone_options={
            'config_username': self.myname,
            'config_user_email': self.myemail
        }
    )
    self.repo.logger = logger
    # In CI the token comes from the environment; locally from the config file.
    if os.environ.get('GITHUB_ACTIONS'):
        self.token = os.environ['TOKEN']
    else:
        self.token = create_config().get('Secrets-Section', 'yang-catalog-token')
def __init__(self):
    """Load configuration for the module-parsing/metadata-generation script."""
    help_message = 'Parse modules on given directory and generate json with module metadata that can be populated' \
                   ' to confd directory'
    config = create_config()
    super().__init__(help_message, None, [])
    self.api_protocol = config.get('General-Section', 'protocol-api', fallback='http')
    self.ip = config.get('Web-Section', 'ip', fallback='localhost')
    self.api_port = int(config.get('Web-Section', 'api-port', fallback=5000))
    # NOTE: the codebase compares this value against the string 'True',
    # so the fallback must be the string 'True', not the boolean True.
    self.is_uwsgi = config.get('General-Section', 'uwsgi', fallback='True')
    self.temp_dir = config.get('Directory-Section', 'temp', fallback='/var/yang/tmp')
    self.log_directory = config.get('Directory-Section', 'logs', fallback='/var/yang/logs')
    self.save_file_dir = config.get('Directory-Section', 'save-file-dir', fallback='/var/yang/all_modules')
    self.yang_models = config.get(
        'Directory-Section', 'yang-models-dir', fallback='/var/yang/nonietf/yangmodels/yang')
    self.credentials = config.get(
        'Secrets-Section', 'confd-credentials', fallback='test test').strip('"').split(' ')
    self.json_ytree = config.get('Directory-Section', 'json-ytree', fallback='/var/yang/ytrees')
def __init__(self):
    """Declare the CLI arguments for the integrity-checking script."""
    config = create_config()
    self.yang_models = config.get('Directory-Section', 'yang-models-dir')
    help_message = ''
    arguments: t.List[Arg] = [
        {
            'flag': '--sdo',
            'help': 'If we are processing sdo or vendor yang modules',
            'action': 'store_true',
            'default': False,
        },
        {
            'flag': '--dir',
            'help': 'Set directory where to look for hello message xml files',
            'type': str,
            'default': '/var/yang/nonietf/yangmodels/yang/standard/ietf/RFC',
        },
        {
            'flag': '--output',
            'help': 'Output json file',
            'type': str,
            'default': 'integrity.json',
        },
    ]
    # When imported as a library the argument list is replaced by [].
    super().__init__(help_message, arguments, None if __name__ == '__main__' else [])
def __init__(self, *args, **kwargs):
    """Shared fixture setup for receiver tests: config, Redis handles, mock ConfD and test data."""
    super(TestReceiverBaseClass, self).__init__(*args, **kwargs)
    config = create_config()
    self.log_directory = config.get('Directory-Section', 'logs')
    self.temp_dir = config.get('Directory-Section', 'temp')
    self.credentials = config.get('Secrets-Section', 'confd-credentials').strip('"').split(' ')
    self.nonietf_dir = config.get('Directory-Section', 'non-ietf-directory')
    self.yang_models = config.get('Directory-Section', 'yang-models-dir')
    self._redis_host = config.get('DB-Section', 'redis-host')
    self._redis_port = config.get('DB-Section', 'redis-port')
    self.redisConnection = RedisConnection(modules_db=6, vendors_db=9)
    # Receiver under test talks to the test databases and a mocked ConfD.
    self.receiver = Receiver(os.environ['YANGCATALOG_CONFIG_PATH'])
    self.receiver.redisConnection = self.redisConnection
    self.receiver.confdService = MockConfdService()  # pyright: ignore
    self.modulesDB = Redis(host=self._redis_host, port=self._redis_port, db=6)  # pyright: ignore
    self.vendorsDB = Redis(host=self._redis_host, port=self._redis_port, db=9)  # pyright: ignore
    self.huawei_dir = f'{self.yang_models}/vendor/huawei/network-router/8.20.0/ne5000e'
    self.direc = f'{self.temp_dir}/receiver_test'
    self.resources_path = os.path.join(os.environ['BACKEND'], 'tests/resources')
    self.private_dir = os.path.join(self.resources_path, 'html/private')
    with open(os.path.join(self.resources_path, 'receiver_tests_data.json'), 'r') as data_file:
        self.test_data = json.load(data_file)
def __init__(self):
    """Read the ConfD endpoint and credentials from config and build the request prefix."""
    config = create_config()
    self.__confd_ip = config.get('Web-Section', 'confd-ip')
    self.__confd_port = config.get('Web-Section', 'confd-port')
    self.__confd_protocol = config.get('General-Section', 'protocol-confd')
    self.credentials = config.get('Secrets-Section', 'confd-credentials').strip('"').split(' ')
    self.log_directory = config.get('Directory-Section', 'logs')
    self.LOGGER = log.get_logger('confdService', f'{self.log_directory}/confdService.log')
    # Base URL used by every request this service makes.
    self.confd_prefix = f'{self.__confd_protocol}://{self.__confd_ip}:{self.__confd_port}'
def __init__(self) -> None:
    """Open an Elasticsearch client; HTTPS with credentials when es-aws is enabled."""
    config = create_config()
    es_aws = config.get('DB-Section', 'es-aws')
    elk_credentials = config.get('Secrets-Section', 'elk-secret').strip('"').split(' ')
    self.elk_repo_name = config.get('General-Section', 'elk-repo-name')
    hosts = [{
        'host': config.get('DB-Section', 'es-host', fallback='localhost'),
        'port': config.get('DB-Section', 'es-port', fallback='9200'),
    }]
    # Config values are strings, hence the comparison against 'True'.
    if es_aws != 'True':
        self.es = Elasticsearch(hosts=hosts)
    else:
        self.es = Elasticsearch(hosts=hosts,
                                http_auth=(elk_credentials[0], elk_credentials[1]),
                                scheme='https')
def __init__(self, db: t.Optional[t.Union[int, str]] = None):
    """Open a Redis connection to the users database.

    :param db: database number; when None it is taken from config
               ('redis-users-db', default 2).
    """
    config = create_config()
    self._redis_host = config.get('DB-Section', 'redis-host')
    self._redis_port = config.get('DB-Section', 'redis-port')
    db = config.get('DB-Section', 'redis-users-db', fallback=2) if db is None else db
    self.redis = Redis(host=self._redis_host, port=self._redis_port, db=db)  # pyright: ignore
    self.log_directory = config.get('Directory-Section', 'logs')
    self.LOGGER = log.get_logger('redisUsersConnection',
                                 f'{self.log_directory}/redisUsersConnection.log')
def __init__(self, *args, **kwargs):
    """Fixture setup: Redis handles on the test module (db=6) and vendor (db=9) databases."""
    super(TestRedisModulesConnectionClass, self).__init__(*args, **kwargs)
    config = create_config()
    self.resources_path = os.path.join(os.environ['BACKEND'], 'redisConnections/tests/resources')
    self._redis_host = config.get('DB-Section', 'redis-host')
    self._redis_port = config.get('DB-Section', 'redis-port')
    self.redisConnection = RedisConnection(modules_db=6, vendors_db=9)
    self.modulesDB = Redis(host=self._redis_host, port=self._redis_port, db=6)  # pyright: ignore
    self.vendorsDB = Redis(host=self._redis_host, port=self._redis_port, db=9)  # pyright: ignore
def __init__(self, modules_db: t.Optional[t.Union[int, str]] = None,
             vendors_db: t.Optional[t.Union[int, str]] = None):
    """Open Redis connections for module and vendor data.

    :param modules_db: database number; from config ('redis-modules-db', default 1) when None
    :param vendors_db: database number; from config ('redis-vendors-db', default 4) when None
    """
    config = create_config()
    self.log_directory = config.get('Directory-Section', 'logs')
    self._redis_host = config.get('DB-Section', 'redis-host')
    self._redis_port = config.get('DB-Section', 'redis-port')
    modules_db = config.get('DB-Section', 'redis-modules-db', fallback=1) if modules_db is None else modules_db
    vendors_db = config.get('DB-Section', 'redis-vendors-db', fallback=4) if vendors_db is None else vendors_db
    self.modulesDB = Redis(host=self._redis_host, port=self._redis_port, db=modules_db)  # pyright: ignore
    self.vendorsDB = Redis(host=self._redis_host, port=self._redis_port, db=vendors_db)  # pyright: ignore
    self.LOGGER = log.get_logger('redisModules', os.path.join(self.log_directory, 'redisModulesConnection.log'))
def __init__(self, path: t.Optional[str]):
    """Record *path* in var/unparsable-modules.json as a module that failed to parse.

    Arguments:
        :param path: (str) path of the yang file that failed to parse;
                     None disables the recording entirely
    """
    if path is None:
        return
    config = create_config()
    var_path = config.get('Directory-Section', 'var')
    self.msg = 'Failed to parse module on path {}'.format(path)
    registry_path = '{}/unparsable-modules.json'.format(var_path)
    try:
        with open(registry_path, 'r') as f:
            modules = json.load(f)
    # Narrowed from a bare `except:`, which also swallowed KeyboardInterrupt
    # and SystemExit. Missing or corrupted registry -> start a fresh list.
    except (OSError, json.JSONDecodeError):
        modules = []
    module = path.split('/')[-1]
    if module not in modules:
        modules.append(module)
    with open(registry_path, 'w') as f:
        json.dump(modules, f)
def __init__(self):
    """Declare CLI arguments for the elk save/load script."""
    config = create_config()
    api_protocol = config.get('General-Section', 'protocol-api')
    api_port = config.get('Web-Section', 'api-port')
    api_host = config.get('Web-Section', 'ip')
    save_file_dir = config.get('Directory-Section', 'save-file-dir')
    temp = config.get('Directory-Section', 'temp')
    help_message = (
        'This serves to save or load all information in yangcatalog.org in elk.'
        'in case the server will go down and we would lose all the information we'
        ' have got. We have two options in here.'
    )
    arguments: t.List[Arg] = [
        {
            'flag': '--api-ip',
            'help': f'Set host where the API is started. Default: {api_host}',
            'type': str,
            'default': api_host,
        },
        {
            'flag': '--api-port',
            'help': f'Set port where the API is started. Default: {api_port}',
            'type': int,
            'default': api_port,
        },
        {
            'flag': '--api-protocol',
            'help': f'Whether API runs on http or https. Default: {api_protocol}',
            'type': str,
            'default': api_protocol,
        },
        {
            'flag': '--save-file-dir',
            'help': f'Directory for all yang modules lookup. Default: {save_file_dir}',
            'type': str,
            'default': save_file_dir,
        },
        {
            'flag': '--temp',
            'help': f'Path to yangcatalog temporary directory. Default: {temp}',
            'type': str,
            'default': temp,
        },
    ]
    super().__init__(help_message, arguments, None if __name__ == '__main__' else [])
    self.is_uwsgi = config.get('General-Section', 'uwsgi')
def load_config(self) -> StatusMessage:
    """(Re)load all receiver settings from the yangcatalog config file.

    Builds the logger, the yangcatalog API prefix and the RabbitMQ
    credentials used by the receiver loop.

    :return: StatusMessage.SUCCESS on completion
    """
    config = create_config(self._config_path)
    self._log_directory = config.get('Directory-Section', 'logs')
    self.LOGGER = log.get_logger(
        'receiver', os.path.join(self._log_directory, 'receiver.log'))
    self.LOGGER.info('Loading config')
    # pika is very verbose below INFO; keep its logger quiet.
    logging.getLogger('pika').setLevel(logging.INFO)
    self._api_ip = config.get('Web-Section', 'ip')
    self._api_port = int(config.get('Web-Section', 'api-port'))
    self._api_protocol = config.get('General-Section', 'protocol-api')
    self._notify_indexing = config.get('General-Section', 'notify-index')
    self._save_file_dir = config.get('Directory-Section', 'save-file-dir')
    self._yang_models = config.get('Directory-Section', 'yang-models-dir')
    self._is_uwsgi = config.get('General-Section', 'uwsgi')
    self._rabbitmq_host = config.get('RabbitMQ-Section', 'host', fallback='127.0.0.1')
    self._rabbitmq_port = int(
        config.get('RabbitMQ-Section', 'port', fallback='5672'))
    self._changes_cache_path = config.get('Directory-Section', 'changes-cache')
    self._delete_cache_path = config.get('Directory-Section', 'delete-cache')
    self._lock_file = config.get('Directory-Section', 'lock')
    rabbitmq_username = config.get('RabbitMQ-Section', 'username', fallback='guest')
    rabbitmq_password = config.get('Secrets-Section', 'rabbitMq-password', fallback='guest')
    self.temp_dir = config.get('Directory-Section', 'temp')
    self.json_ytree = config.get('Directory-Section', 'json-ytree')
    # Config values are strings; convert the flag to a real boolean.
    self._notify_indexing = self._notify_indexing == 'True'
    # Behind uwsgi the API lives under '/api'; standalone it listens on a port.
    separator = ':'
    suffix = self._api_port
    if self._is_uwsgi == 'True':
        separator = '/'
        suffix = 'api'
    self._yangcatalog_api_prefix = '{}://{}{}{}/'.format(
        self._api_protocol, self._api_ip, separator, suffix)
    self._rabbitmq_credentials = pika.PlainCredentials(
        username=rabbitmq_username, password=rabbitmq_password)
    # Fixed typo in log message ('succesfully' -> 'successfully').
    self.LOGGER.info('Config loaded successfully')
    return StatusMessage.SUCCESS
def load_catalog_data():
    """Seed Redis with module/vendor catalog data from tests/resources/cache_data.json."""
    config = create_config()
    redis_host = config.get('DB-Section', 'redis-host')
    redis_port = config.get('DB-Section', 'redis-port')
    redis_cache = redis.Redis(host=redis_host, port=redis_port)  # pyright: ignore
    redisConnection = RedisConnection()
    resources_path = os.path.join(os.environ['BACKEND'], 'tests/resources')
    try:
        print('Loading cache file from path {}'.format(resources_path))
        with open(os.path.join(resources_path, 'cache_data.json'), 'r') as file_load:
            catalog_data = json.load(file_load, object_pairs_hook=OrderedDict)
        print('Content of cache file loaded successfully.')
    # Narrowed from a bare `except:` (which also caught KeyboardInterrupt).
    except (OSError, json.JSONDecodeError):
        print('Failed to load data from .json file')
        sys.exit(1)
    catalog = catalog_data.get('yang-catalog:catalog')
    modules = catalog['modules']['module']
    # The yang-catalog module itself must be present for the API to work.
    for module in modules:
        if module['name'] == 'yang-catalog' and module['revision'] == '2018-04-03':
            redis_cache.set('yang-catalog@2018-04-03/ietf', json.dumps(module))
            redisConnection.populate_modules([module])
            print('yang-catalog@2018-04-03 module set in Redis')
            break
    # Deep copy via a json round-trip, preserving key order.
    catalog_data_json = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(
        json.dumps(catalog_data))['yang-catalog:catalog']
    modules = catalog_data_json['modules']
    vendors = catalog_data_json.get('vendors', {})
    # Fill Redis db=1 with modules data
    modules_data = {
        create_module_key(module): module
        for module in modules.get('module', [])
    }
    redisConnection.set_redis_module(modules_data, 'modules-data')
    print('{} modules set in Redis.'.format(len(modules.get('module', []))))
    redisConnection.populate_implementation(vendors.get('vendor', []))
    redisConnection.reload_vendors_cache()
    print('{} vendors set in Redis.'.format(len(vendors.get('vendor', []))))
def __init__(self, config_path=os.environ['YANGCATALOG_CONFIG_PATH']):
    """Setup Webex teams rooms and smtp

    Arguments:
        :param config_path: (str) path to a yangcatalog.conf file
    """
    # NOTE(review): the default config_path is read from the environment at
    # import time, not at call time.
    def list_matching_rooms(a, title_match):
        # All Webex rooms whose title contains the given substring.
        return [r for r in a.rooms.list() if title_match in r.title]

    config = create_config(config_path)
    log_directory = config.get('Directory-Section', 'logs')
    token = config.get('Secrets-Section', 'webex-access-token')
    self.__email_from = config.get('Message-Section', 'email-from')
    self.__is_production = config.get('General-Section', 'is-prod')
    # Config values are strings; convert the flag to a real boolean.
    self.__is_production = self.__is_production == 'True'
    self.__email_to = config.get('Message-Section', 'email-to').split()
    self.__developers_email = config.get('Message-Section', 'developers-email').split()
    self._temp_dir = config.get('Directory-Section', 'temp')
    self.__me = config.get('Web-Section', 'my-uri')
    self.__api = CiscoSparkAPI(access_token=token)
    rooms = list_matching_rooms(self.__api, 'YANG Catalog admin')
    # Keep only the last path segment of my-uri.
    self.__me = self.__me.split('/')[-1]
    self._message_log_file = os.path.join(self._temp_dir, 'message-log.txt')
    self.LOGGER = log.get_logger(__name__, os.path.join(log_directory, 'yang.log'))
    self.LOGGER.info('Initialising Message Factory')
    # Exactly one admin room must exist; anything else is a configuration
    # error and aborts the process.
    if len(rooms) == 0:
        self.LOGGER.error('Need at least one room')
        sys.exit(1)
    if len(rooms) != 1:
        self.LOGGER.error('Too many rooms! Refine the name:')
        for r in rooms:
            self.LOGGER.info('{}'.format(r.title))
        sys.exit(1)
    # Ok, we should have just one room if we get here
    self.__room = rooms[0]
    self.__smtp = smtplib.SMTP('localhost')
def __init__(self, name: str, path: str, jsons: LoadFiles, dir_paths: DirPaths,
             git_commit_hash: str, yang_modules: dict, schema_base: str,
             aditional_info: t.Optional[t.Dict[str, str]],
             submodule_name: t.Optional[str]):
    """
    Initialize and parse everything out of a module.

    Arguments:
        :param name: (str) name of the module (not parsed out of the module)
        :param path: (str) path to yang file being parsed
        :param jsons: (obj) LoadFiles class containing all the json
                      and html files with parsed results
        :param dir_paths: (dict) paths to various needed directories according to configuration
        :param git_commit_hash: (str) name of the git commit hash where we can find the module
        :param yang_modules: (dict) yang modules we've already parsed
        :param schema_base: (str) url to a raw module on github up to and not including
                            the path of the file in the repo
        :param aditional_info: (dict) some aditional information about module given from client
        :param submodule_name: (str) name of the git submodule the yang module belongs to
    """
    global LOGGER
    # The module-level logger is rebound here so parsing helpers share it.
    LOGGER = log.get_logger(
        'modules', '{}/parseAndPopulate.log'.format(dir_paths['log']))
    config = create_config()
    self._web_uri = config.get('Web-Section', 'my-uri', fallback='https://yangcatalog.org')
    self.html_result_dir = dir_paths['result']
    self._jsons = jsons
    self._path = path
    self.yang_models = dir_paths['yang_models']
    # NOTE(review): yangParser.parse presumably raises on an unparsable
    # file — confirm how callers handle that.
    self._parsed_yang = yangParser.parse(self._path)
    self.implementations: t.List[Implementation] = []
    self._parse_all(name, git_commit_hash, yang_modules, schema_base,
                    dir_paths['save'], aditional_info, submodule_name)
    # The parsed json/html inputs are only needed during _parse_all; free them.
    del self._jsons
def __init__(self):
    """Declare CLI arguments and configuration for the Redis backup/restore cronjob."""
    config = create_config()
    help_message = (
        'This serves to save or load all the data in yangcatalog.org to JSON file in'
        ' case the server will go down and we would lose all the data we'
        ' have got. We have two options in here. Saving create backup of Redis .rdb file and also dumps'
        ' data from Redis into JSON file. Load will first load data to Redis either from saved JSON file or'
        ' from snapshot of Redis. Then it will make PATCH request to write yang-catalog@2018-04-03 module'
        ' to the ConfD. This runs as a daily cronjob to save latest state of Redis database.'
    )
    arguments: t.List[Arg] = [
        {
            'flag': '--name_save',
            'help': 'Set name of the file to save. Default name is date and time in UTC',
            'type': str,
            'default': datetime.datetime.utcnow().strftime(backup_date_format),
        },
        {
            'flag': '--name_load',
            'help': 'Set name of the file to load. Default will take a last saved file',
            'type': str,
            'default': '',
        },
        {
            'flag': '--type',
            'help': 'Set whether you want to save a file or load a file. Default is save',
            'type': str,
            'choices': ['save', 'load'],
            'default': 'save',
        },
    ]
    super().__init__(help_message, arguments, None if __name__ == '__main__' else [])
    self.log_directory = config.get('Directory-Section', 'logs')
    self.temp_dir = config.get('Directory-Section', 'temp')
    self.cache_directory = config.get('Directory-Section', 'cache')
    self.redis_host = config.get('DB-Section', 'redis-host')
    self.redis_port = config.get('DB-Section', 'redis-port')
    self.var_yang = config.get('Directory-Section', 'var')
def __init__(self):
    """Declare CLI arguments for the module-expiration resolver cronjob."""
    config = create_config()
    api_protocol = config.get('General-Section', 'protocol-api', fallback='http')
    api_port = config.get('Web-Section', 'api-port', fallback=5000)
    api_host = config.get('Web-Section', 'ip', fallback='localhost')
    credentials = config.get('Secrets-Section', 'confd-credentials', fallback='user password').strip('"').split()
    help_message = (
        'Resolve expiration metadata for each module and set it to Redis if changed. This runs as a daily'
        ' cronjob'
    )
    arguments: t.List[Arg] = [
        {
            'flag': '--credentials',
            'help': f'Set authorization parameters username password respectively. Default parameters are {str(credentials)}',
            'nargs': 2,
            'default': credentials,
            'type': str,
        },
        {
            'flag': '--api-ip',
            'help': f'Set host address where the API is started. Default: {api_host}',
            'default': api_host,
            'type': str,
        },
        {
            'flag': '--api-port',
            'help': f'Set port where the API is started. Default: {api_port}',
            'type': int,
            'default': api_port,
        },
        {
            'flag': '--api-protocol',
            'help': f'Whether API runs on http or https. Default: {api_protocol}',
            'type': str,
            'default': api_protocol,
        },
    ]
    super().__init__(help_message, arguments, None if __name__ == '__main__' else [])
    self.log_directory = config.get('Directory-Section', 'logs', fallback='/var/yang/logs')
    self.temp_dir = config.get('Directory-Section', 'temp', fallback='/var/yang/tmp')
    self.is_uwsgi = config.get('General-Section', 'uwsgi', fallback='True')
# ConfD full-check healthcheck script.
# NOTE(review): this view of the script appears truncated — `messages`,
# `credentials` and `check_module_name` are set up but not yet used here;
# presumably the rest of the check follows. Confirm against the full file.
import json
import os
import random
import string
import time

from redisConnections.redisConnection import RedisConnection
import utility.log as log
from utility import confdService
from utility.create_config import create_config
from utility.util import job_log

if __name__ == '__main__':
    start_time = int(time.time())
    config = create_config()
    credentials = config.get('Secrets-Section', 'confd-credentials').strip('"').split(' ')
    logs_dir = config.get('Directory-Section', 'logs')
    temp_dir = config.get('Directory-Section', 'temp')
    LOGGER = log.get_logger('healthcheck', os.path.join(logs_dir, 'healthcheck.log'))
    messages = []
    # Random suffix so concurrent/repeated runs use a unique check module name.
    letters = string.ascii_letters
    suffix = ''.join(random.choice(letters) for i in range(6))
    check_module_name = 'confd-full-check-{}'.format(suffix)
    # NOTE: this rebinds the imported module name `confdService` to a
    # ConfdService instance for the remainder of the script.
    confdService = confdService.ConfdService()
    # Destructive: wipes all modules and vendors from ConfD before the check.
    confdService.delete_modules()
    confdService.delete_vendors()
def main(scriptConf: t.Optional[ScriptConfig] = None):
    """Produce the yangcatalog statistics page and stats.json.

    Fetches all modules from the yangcatalog API, computes per-vendor and
    per-SDO module statistics, renders them to HTML and JSON, and moves the
    result into the public web directory. Logs a job result via job_log.
    """
    start_time = int(time.time())
    if scriptConf is None:
        scriptConf = ScriptConfig()
    args = scriptConf.args
    config_path = args.config_path
    config = create_config(config_path)
    protocol = config.get('General-Section', 'protocol-api')
    api_ip = config.get('Web-Section', 'ip')
    api_port = config.get('Web-Section', 'api-port')
    config_name = config.get('General-Section', 'repo-config-name')
    config_email = config.get('General-Section', 'repo-config-email')
    move_to = '{}/.'.format(config.get('Web-Section', 'public-directory'))
    is_uwsgi = config.get('General-Section', 'uwsgi')
    yang_models = config.get('Directory-Section', 'yang-models-dir')
    log_directory = config.get('Directory-Section', 'logs')
    temp_dir = config.get('Directory-Section', 'temp')
    private_dir = config.get('Web-Section', 'private-directory')
    global LOGGER
    LOGGER = log.get_logger('statistics', '{}/statistics/yang.log'.format(log_directory))
    # Behind uwsgi the API lives under '/api'; standalone it listens on a port.
    if is_uwsgi == 'True':
        separator = '/'
        suffix = 'api'
    else:
        separator = ':'
        suffix = api_port
    global yangcatalog_api_prefix
    yangcatalog_api_prefix = '{}://{}{}{}/'.format(protocol, api_ip, separator, suffix)
    LOGGER.info('Starting statistics')
    repo = None
    # Fetch the list of all modules known by YangCatalog
    url = os.path.join(yangcatalog_api_prefix, 'search/modules')
    try:
        response = requests.get(url, headers=json_headers)
        if response.status_code != 200:
            LOGGER.error('Cannot access {}, response code: {}'.format(
                url, response.status_code))
            sys.exit(1)
        else:
            all_modules_data = response.json()
    except requests.exceptions.RequestException as e:
        # One retry after a transport-level failure.
        LOGGER.error(
            'Cannot access {}, response code: {}\nRetrying in 120s'.format(
                url, e.response))
        time.sleep(120)
        response = requests.get(url, headers=json_headers)
        if response.status_code != 200:
            LOGGER.error('Cannot access {}, response code: {}'.format(
                url, response.status_code))
            sys.exit(1)
        else:
            all_modules_data = response.json()
            LOGGER.error('Success after retry on {}'.format(url))
    # Map os-type -> software-version -> set of platforms, for cisco
    # implementations only. Version strings are normalized per OS family.
    vendor_data = {}
    for module in all_modules_data['module']:
        for implementation in module.get('implementations', {}).get('implementation', []):
            if implementation['vendor'] == 'cisco':
                if implementation['os-type'] not in vendor_data:
                    vendor_data[implementation['os-type']] = {}
                version = implementation['software-version']
                if implementation['os-type'] in ('IOS-XE', 'IOS-XR'):
                    version = version.replace('.', '')
                elif implementation['os-type'] == 'NX-OS':
                    version = version.replace('(', '-').replace(')', '-').rstrip('-')
                if version not in vendor_data[implementation['os-type']]:
                    vendor_data[implementation['os-type']][version] = set()
                vendor_data[implementation['os-type']][version].add(
                    implementation['platform'])
    try:
        # pull(yang_models) no need to pull https://github.com/YangModels/yang as it is daily done via SDO_analysis module

        # function needs to be renamed to something more descriptive (I don't quite understand it's purpose)
        def process_platforms(versions: t.List[str], module_platforms,
                              os_type: str, os_type_name: str) -> t.Tuple[list, dict]:
            # Cross-check each (version, platform) pair between the catalog
            # data (vendor_data) and the platform-metadata.json on disk.
            platform_values = []
            json_output = {}
            for version in versions:
                path = '{}/vendor/cisco/{}/{}/platform-metadata.json'.format(
                    yang_models, os_type, version)
                try:
                    with open(path, 'r') as f:
                        data = json.load(f)
                        metadata_platforms = data['platforms']['platform']
                except Exception:
                    LOGGER.exception('Problem with opening {}'.format(path))
                    metadata_platforms = []
                values = [version]
                json_output[version] = {}
                for module_platform in module_platforms:
                    exist = '<i class="fa fa-times"></i>'
                    exist_json = False
                    if os_type_name in vendor_data:
                        if version in vendor_data[os_type_name]:
                            if module_platform in vendor_data[os_type_name][
                                    version]:
                                exist = '<i class="fa fa-check"></i>'
                                exist_json = True
                    # for/else: the else branch runs only when no metadata
                    # platform matched (no break).
                    for metadata_platform in metadata_platforms:
                        if (metadata_platform['name'] == module_platform
                                and metadata_platform['software-version'] == version):
                            values.append(
                                '<i class="fa fa-check"></i>/{}'.format(exist))
                            json_output[version][module_platform] = {
                                'yangcatalog': True,
                                'github': exist_json
                            }
                            break
                    else:
                        values.append(
                            '<i class="fa fa-times"></i>/{}'.format(exist))
                        json_output[version][module_platform] = {
                            'yangcatalog': False,
                            'github': exist_json
                        }
                platform_values.append(values)
            return platform_values, json_output

        os_types = (('xr', 'IOS-XR'), ('xe', 'IOS-XE'), ('nx', 'NX-OS'))
        platforms = {}
        for os_type, _ in os_types:
            platforms[os_type] = solve_platforms('{}/vendor/cisco/{}'.format(
                yang_models, os_type))
        # Version directories found on disk for each cisco OS family.
        versions = {}
        for os_type, _ in os_types:
            os_type_dir = os.path.join(yang_models, 'vendor/cisco', os_type)
            dirs = (dir for dir in os.listdir(os_type_dir)
                    if os.path.isdir(os.path.join(os_type_dir, dir)))
            versions[os_type] = sorted(dirs)
        values = {}
        json_output = {}
        for os_type, name in os_types:
            values[os_type], json_output[os_type] = process_platforms(
                versions[os_type], platforms[os_type], os_type, name)
        global all_modules_data_unique
        all_modules_data_unique = {}
        for mod in all_modules_data['module']:
            name = mod['name']
            revision = mod['revision']
            org = mod['organization']
            all_modules_data_unique['{}@{}_{}'.format(name, revision, org)] = mod
        # From here on, all_modules_data holds only the module COUNT.
        all_modules_data = len(all_modules_data['module'])

        # Vendors separately
        vendor_list = []

        def get_output(**kwargs) -> str:
            """run runYANGallstats with the provided kwargs as command line arguments.
            removedup is set to True by default.
            """
            kwargs.setdefault('removedup', True)
            script_conf = all_stats.ScriptConfig()
            for key, value in kwargs.items():
                setattr(script_conf.args, key, value)
            with redirect_stdout(io.StringIO()) as f:
                all_stats.main(script_conf=script_conf)
            return f.getvalue()

        for direc in next(os.walk(os.path.join(yang_models, 'vendor')))[1]:
            vendor_direc = os.path.join(yang_models, 'vendor', direc)
            if os.path.isdir(vendor_direc):
                LOGGER.info(
                    'Running runYANGallstats.py for directory {}'.format(
                        vendor_direc))
                out = get_output(rootdir=vendor_direc)
                process_data(out, vendor_list, vendor_direc, direc)
        # Vendors all together
        out = get_output(rootdir=os.path.join(yang_models, 'vendor'))
        vendor_modules = out.split(
            '{}/vendor : '.format(yang_models))[1].splitlines()[0]
        vendor_modules_ndp = out.split(
            '{}/vendor (duplicates removed): '.format(
                yang_models))[1].splitlines()[0]
        # Standard all together
        out = get_output(rootdir=os.path.join(yang_models, 'standard'))
        standard_modules = out.split(
            '{}/standard : '.format(yang_models))[1].splitlines()[0]
        standard_modules_ndp = out.split(
            '{}/standard (duplicates removed): '.format(
                yang_models))[1].splitlines()[0]
        # Standard separately
        sdo_list = []

        def process_sdo_dir(dir: str, name: str):
            # Collect stats for one SDO subdirectory into sdo_list.
            out = get_output(rootdir=os.path.join(yang_models, dir))
            process_data(out, sdo_list, os.path.join(yang_models, dir), name)

        process_sdo_dir('standard/ietf/RFC', 'IETF RFCs')
        process_sdo_dir('standard/ietf/DRAFT', 'IETF drafts')
        process_sdo_dir('experimental/ietf-extracted-YANG-modules',
                        'IETF experimental drafts')
        process_sdo_dir('standard/iana', 'IANA standard')
        process_sdo_dir('standard/bbf/standard', 'BBF standard')
        process_sdo_dir('standard/etsi', 'ETSI standard')
        for direc in next(
                os.walk(os.path.join(yang_models, 'standard/ieee/published')))[1]:
            ieee_direc = os.path.join(yang_models, 'standard/ieee/published', direc)
            if os.path.isdir(ieee_direc):
                process_sdo_dir(os.path.join('standard/ieee/published', direc),
                                'IEEE {} with par'.format(direc))
        for direc in next(
                os.walk(os.path.join(yang_models, 'standard/ieee/draft')))[1]:
            ieee_direc = os.path.join(yang_models, 'standard/ieee/draft', direc)
            if os.path.isdir(ieee_direc):
                process_sdo_dir(os.path.join('standard/ieee/draft', direc),
                                'IEEE draft {} with par'.format(direc))
        for direc in next(
                os.walk(os.path.join(yang_models, 'experimental/ieee')))[1]:
            ieee_direc = os.path.join(yang_models, 'experimental/ieee', direc)
            if os.path.isdir(ieee_direc):
                process_sdo_dir(os.path.join('experimental/ieee', direc),
                                'IEEE {} no par'.format(direc))
        process_sdo_dir('standard/mef/src/model/standard', 'MEF standard')
        process_sdo_dir('standard/mef/src/model/draft', 'MEF draft')

        # Openconfig is from different repo that s why we need models in github zero
        LOGGER.info('Cloning the repo')
        repo = repoutil.ModifiableRepoUtil(os.path.join(
            github_url, 'openconfig/public'),
            clone_options={
                'config_username': config_name,
                'config_user_email': config_email
            })
        out = get_output(
            rootdir=os.path.join(repo.local_dir, 'release/models'))
        process_data(out, sdo_list,
                     os.path.join(repo.local_dir, 'release/models'), 'openconfig')
        context = {
            'table_sdo': sdo_list,
            'table_vendor': vendor_list,
            'num_yang_files_vendor': vendor_modules,
            'num_yang_files_vendor_ndp': vendor_modules_ndp,
            'num_yang_files_standard': standard_modules,
            'num_yang_files_standard_ndp': standard_modules_ndp,
            'num_parsed_files': all_modules_data,
            'num_unique_parsed_files': len(all_modules_data_unique),
            'xr': platforms['xr'],
            'xe': platforms['xe'],
            'nx': platforms['nx'],
            'xr_values': values['xr'],
            'xe_values': values['xe'],
            'nx_values': values['nx'],
            'current_date': time.strftime('%d/%m/%y')
        }
        LOGGER.info('Rendering data')
        with open('{}/stats/stats.json'.format(private_dir), 'w') as f:
            # Mutates the entries in place: percentage strings -> floats
            # before serializing to stats.json.
            for sdo in sdo_list:
                sdo['percentage_compile'] = float(
                    sdo['percentage_compile'].split(' ')[0])
                sdo['percentage_extra'] = float(
                    sdo['percentage_extra'].split(' ')[0])
            for vendor in vendor_list:
                vendor['percentage_compile'] = float(
                    vendor['percentage_compile'].split(' ')[0])
                vendor['percentage_extra'] = float(
                    vendor['percentage_extra'].split(' ')[0])
            output = {
                'table_sdo': sdo_list,
                'table_vendor': vendor_list,
                'num_yang_files_vendor': int(vendor_modules),
                'num_yang_files_vendor_ndp': int(vendor_modules_ndp),
                'num_yang_files_standard': int(standard_modules),
                'num_yang_files_standard_ndp': int(standard_modules_ndp),
                'num_parsed_files': all_modules_data,
                'num_unique_parsed_files': len(all_modules_data_unique),
                'xr': json_output['xr'],
                'xe': json_output['xe'],
                'nx': json_output['nx'],
                'current_date': time.strftime('%d/%m/%y')
            }
            json.dump(output, f)
        result = render(
            os.path.join(os.environ['BACKEND'], 'statistic/template/stats.html'),
            context)
        with open(
                os.path.join(os.environ['BACKEND'], 'statistic/statistics.html'),
                'w+') as f:
            f.write(result)
        # Publish the rendered page into the public web directory.
        file_from = os.path.abspath(
            os.path.join(os.environ['BACKEND'], 'statistic/statistics.html'))
        file_to = os.path.join(os.path.abspath(move_to), 'statistics.html')
        resolved_path_file_to = os.path.realpath(file_to)
        if move_to != './':
            if os.path.exists(resolved_path_file_to):
                os.remove(resolved_path_file_to)
            shutil.move(file_from, resolved_path_file_to)
        end_time = int(time.time())
        total_time = end_time - start_time
        LOGGER.info('Final time in seconds to produce statistics {}'.format(
            total_time))
    except Exception as e:
        LOGGER.exception('Exception found while running statistics script')
        job_log(start_time, temp_dir, error=str(e), status='Fail',
                filename=os.path.basename(__file__))
        raise Exception(e)
    job_log(start_time, temp_dir, status='Success',
            filename=os.path.basename(__file__))
    LOGGER.info('Job finished successfully')
def __init__(self, *args, **kwargs):
    """Fixture setup: resolve the integrity test resources and yang models directories."""
    config = create_config()
    self.module_dir = os.path.join(os.environ['BACKEND'], 'tests/resources/integrity')
    self.yang_models = config.get('Directory-Section', 'yang-models-dir')
    super().__init__(*args, **kwargs)
def main(scriptConf=None):
    """Daily cron job: pull IETF RFC and draft YANG modules into the forked 'yang' repo.

    Downloads the RFC tarball, reports new/changed RFC modules by e-mail,
    refreshes extracted draft modules, then commits and (in production)
    pushes everything to the fork. Writes the commit hash to ``commit-dir``
    and records the outcome via ``job_log``.
    """
    start_time = int(time.time())
    if scriptConf is None:
        scriptConf = ScriptConfig()
    args = scriptConf.args
    config_path = args.config_path
    config = create_config(config_path)
    token = config.get('Secrets-Section', 'yang-catalog-token')
    username = config.get('General-Section', 'repository-username')
    commit_dir = config.get('Directory-Section', 'commit-dir')
    config_name = config.get('General-Section', 'repo-config-name')
    config_email = config.get('General-Section', 'repo-config-email')
    log_directory = config.get('Directory-Section', 'logs')
    temp_dir = config.get('Directory-Section', 'temp')
    exceptions = config.get('Directory-Section', 'exceptions')
    yang_models = config.get('Directory-Section', 'yang-models-dir')
    ietf_draft_url = config.get('Web-Section', 'ietf-draft-private-url')
    ietf_rfc_url = config.get('Web-Section', 'ietf-RFC-tar-private-url')
    is_production = config.get('General-Section', 'is-prod')
    is_production = is_production == 'True'
    LOGGER = log.get_logger('draftPull', '{}/jobs/draft-pull.log'.format(log_directory))
    LOGGER.info('Starting Cron job IETF pull request')
    repo_name = 'yang'
    # Token embedded in the URL authenticates the push to the fork.
    repourl = 'https://{}@github.com/{}/{}.git'.format(token, username, repo_name)
    commit_author = {'name': config_name, 'email': config_email}
    draftPullUtility.update_forked_repository(yang_models, LOGGER)
    repo = draftPullUtility.clone_forked_repository(repourl, commit_author, LOGGER)
    if not repo:
        # Without a clone there is nothing to do; log failure and stop.
        error_message = 'Failed to clone repository {}/{}'.format(username, repo_name)
        job_log(start_time, temp_dir, error=error_message, status='Fail',
                filename=os.path.basename(__file__))
        sys.exit()
    try:
        # Get rfc.tgz file and unpack it next to the existing RFC directory.
        response = requests.get(ietf_rfc_url)
        tgz_path = '{}/rfc.tgz'.format(repo.local_dir)
        extract_to = '{}/standard/ietf/RFCtemp'.format(repo.local_dir)
        with open(tgz_path, 'wb') as zfile:
            zfile.write(response.content)
        tar_opened = draftPullUtility.extract_rfc_tgz(tgz_path, extract_to, LOGGER)
        if tar_opened:
            diff_files = []
            new_files = []
            # Compare the freshly extracted modules (RFCtemp) against the
            # committed ones (RFC) to find brand-new and modified files.
            temp_rfc_yang_files = glob.glob(
                '{}/standard/ietf/RFCtemp/*.yang'.format(repo.local_dir))
            for temp_rfc_yang_file in temp_rfc_yang_files:
                file_name = os.path.basename(temp_rfc_yang_file)
                rfc_yang_file = temp_rfc_yang_file.replace('RFCtemp', 'RFC')
                if not os.path.exists(rfc_yang_file):
                    new_files.append(file_name)
                    continue
                same = filecmp.cmp(rfc_yang_file, temp_rfc_yang_file)
                if not same:
                    diff_files.append(file_name)
            shutil.rmtree('{}/standard/ietf/RFCtemp'.format(repo.local_dir))
            # Files listed in the exceptions file are deliberately not reported.
            with open(exceptions, 'r') as exceptions_file:
                remove_from_new = exceptions_file.read().split('\n')
            new_files = [
                file_name for file_name in new_files
                if file_name not in remove_from_new
            ]
            if args.send_message:
                if new_files or diff_files:
                    LOGGER.info('new or modified RFC files found. Sending an E-mail')
                    mf = messageFactory.MessageFactory()
                    mf.send_new_rfc_message(new_files, diff_files)
        # Experimental draft modules
        try:
            os.makedirs('{}/experimental/ietf-extracted-YANG-modules/'.format(
                repo.local_dir))
        except OSError as e:
            # be happy if someone already created the path
            if e.errno != errno.EEXIST:
                raise
        experimental_path = '{}/experimental/ietf-extracted-YANG-modules'.format(
            repo.local_dir)
        LOGGER.info('Updating IETF drafts download links')
        draftPullUtility.get_draft_module_content(ietf_draft_url, experimental_path, LOGGER)
        LOGGER.info('Checking module filenames without revision in {}'.format(
            experimental_path))
        draftPullUtility.check_name_no_revision_exist(experimental_path, LOGGER)
        LOGGER.info('Checking for early revision in {}'.format(experimental_path))
        draftPullUtility.check_early_revisions(experimental_path, LOGGER)
        messages = []
        try:
            # Add commit and push to the forked repository
            LOGGER.info('Adding all untracked files locally')
            untracked_files = repo.repo.untracked_files
            repo.add_untracked_remove_deleted()
            LOGGER.info('Committing all files locally')
            repo.commit_all('Cronjob - every day pull of ietf draft yang files.')
            LOGGER.info('Pushing files to forked repository')
            commit_hash = repo.repo.head.commit
            LOGGER.info('Commit hash {}'.format(commit_hash))
            # Persist the commit hash so a follow-up job can reference it.
            with open(commit_dir, 'w+') as f:
                f.write('{}\n'.format(commit_hash))
            if is_production:
                LOGGER.info('Pushing untracked and modified files to remote repository')
                repo.push()
            else:
                LOGGER.info('DEV environment - not pushing changes into remote repository')
                LOGGER.debug('List of all untracked and modified files:\n{}'.format(
                    '\n'.join(untracked_files)))
        except GitCommandError as e:
            message = 'Error while pushing procedure - git command error: \n {} \n git command out: \n {}'.format(
                e.stderr, e.stdout)
            if 'Your branch is up to date' in e.stdout:
                # Nothing changed since the last run - treated as a benign outcome.
                LOGGER.warning(message)
                messages = [{
                    'label': 'Pull request created',
                    'message': 'False - branch is up to date'
                }]
            else:
                LOGGER.exception('Error while pushing procedure - Git command error')
                raise e
        except Exception as e:
            LOGGER.exception('Error while pushing procedure {}'.format(sys.exc_info()[0]))
            raise type(e)('Error while pushing procedure')
    except Exception as e:
        LOGGER.exception('Exception found while running draftPull script')
        job_log(start_time, temp_dir, error=str(e), status='Fail',
                filename=os.path.basename(__file__))
        raise e
    if len(messages) == 0:
        # Success path: commit_hash was assigned in the push block above.
        messages = [{
            'label': 'Pull request created',
            'message': 'True - {}'.format(commit_hash)
        }  # pyright: ignore
        ]
    job_log(start_time, temp_dir, messages=messages, status='Success',
            filename=os.path.basename(__file__))
    LOGGER.info('Job finished successfully')
def _record_populate_result(execution_result, label, messages):
    """Append a job-log entry for one populate-script run.

    :param execution_result: truthy when the populate script succeeded.
    :param label: job-log label identifying the module group.
    :param messages: list of job-log message dicts, mutated in place.
    :return: True when the run failed, False otherwise.
    """
    failed = not execution_result
    messages.append({
        'label': label,
        'message': ('Error while calling populate script' if failed
                    else 'populate script finished successfully')
    })
    return failed


def main(scriptConf=None):
    """Cron job: clone YangModels/yang locally and populate the catalog from it.

    Refreshes standard RFC, experimental draft and IANA modules inside a fresh
    clone, runs the populate script for each group, and records per-group
    outcomes via ``job_log``.
    """
    start_time = int(time.time())
    if scriptConf is None:
        scriptConf = ScriptConfig()
    args = scriptConf.args
    config_path = args.config_path
    config = create_config(config_path)
    notify_indexing = config.get('General-Section', 'notify-index')
    config_name = config.get('General-Section', 'repo-config-name')
    config_email = config.get('General-Section', 'repo-config-email')
    log_directory = config.get('Directory-Section', 'logs')
    ietf_draft_url = config.get('Web-Section', 'ietf-draft-private-url')
    ietf_rfc_url = config.get('Web-Section', 'ietf-RFC-tar-private-url')
    temp_dir = config.get('Directory-Section', 'temp')
    LOGGER = log.get_logger(
        'draftPullLocal', '{}/jobs/draft-pull-local.log'.format(log_directory))
    LOGGER.info('Starting cron job IETF pull request local')
    messages = []
    notify_indexing = notify_indexing == 'True'
    populate_error = False
    repo = None
    try:
        # Clone YangModels/yang repository into a clean working directory.
        clone_dir = '{}/draftpulllocal'.format(temp_dir)
        if os.path.exists(clone_dir):
            shutil.rmtree(clone_dir)
        repo = repoutil.ModifiableRepoUtil(
            os.path.join(github_url, 'YangModels/yang.git'),
            clone_options={
                'config_username': config_name,
                'config_user_email': config_email,
                'local_dir': clone_dir
            })
        LOGGER.info('YangModels/yang repo cloned to local directory {}'.format(
            repo.local_dir))
        # Download and unpack the RFC modules tarball into the clone.
        response = requests.get(ietf_rfc_url)
        tgz_path = '{}/rfc.tgz'.format(repo.local_dir)
        extract_to = '{}/standard/ietf/RFC'.format(repo.local_dir)
        with open(tgz_path, 'wb') as zfile:
            zfile.write(response.content)
        tar_opened = draftPullUtility.extract_rfc_tgz(tgz_path, extract_to, LOGGER)
        if tar_opened:
            # Standard RFC modules
            direc = '{}/standard/ietf/RFC'.format(repo.local_dir)
            LOGGER.info('Checking module filenames without revision in {}'.format(direc))
            draftPullUtility.check_name_no_revision_exist(direc, LOGGER)
            LOGGER.info('Checking for early revision in {}'.format(direc))
            draftPullUtility.check_early_revisions(direc, LOGGER)
            execution_result = run_populate_script(direc, notify_indexing, LOGGER)
            if _record_populate_result(execution_result, 'Standard RFC modules', messages):
                populate_error = True
        # Experimental modules
        experimental_path = '{}/experimental/ietf-extracted-YANG-modules'.format(
            repo.local_dir)
        LOGGER.info('Updating IETF drafts download links')
        draftPullUtility.get_draft_module_content(ietf_draft_url, experimental_path, LOGGER)
        LOGGER.info('Checking module filenames without revision in {}'.format(
            experimental_path))
        draftPullUtility.check_name_no_revision_exist(experimental_path, LOGGER)
        LOGGER.info('Checking for early revision in {}'.format(experimental_path))
        draftPullUtility.check_early_revisions(experimental_path, LOGGER)
        execution_result = run_populate_script(experimental_path, notify_indexing, LOGGER)
        if _record_populate_result(execution_result, 'Experimental modules', messages):
            populate_error = True
        # IANA modules (directory may be absent in older snapshots of the repo).
        iana_path = '{}/standard/iana'.format(repo.local_dir)
        if os.path.exists(iana_path):
            LOGGER.info('Checking module filenames without revision in {}'.format(iana_path))
            draftPullUtility.check_name_no_revision_exist(iana_path, LOGGER)
            LOGGER.info('Checking for early revision in {}'.format(iana_path))
            draftPullUtility.check_early_revisions(iana_path, LOGGER)
            execution_result = run_populate_script(iana_path, notify_indexing, LOGGER)
            if _record_populate_result(execution_result, 'IANA modules', messages):
                populate_error = True
    except Exception as e:
        LOGGER.exception('Exception found while running draftPullLocal script')
        job_log(start_time, temp_dir, error=str(e), status='Fail',
                filename=os.path.basename(__file__))
        raise e
    if not populate_error:
        LOGGER.info('Job finished successfully')
    else:
        LOGGER.info('Job finished, but errors found while calling populate script')
    job_log(start_time, temp_dir, messages=messages, status='Success',
            filename=os.path.basename(__file__))
def main(scriptConf=None):
    """Parse YANG modules from an SDO or vendor directory and dump their metadata.

    Depending on ``args.sdo``, either parses an SDO directory (IANA-maintained
    when ``yang-parameters.xml`` is present) or walks vendor capability /
    yang-library XML files. Parsed metadata is written as JSON via ``Dumper``;
    changed-file hashes are saved for incremental re-parsing.
    """
    if scriptConf is None:
        scriptConf = ScriptConfig()
    args = scriptConf.args
    config_path = args.config_path
    config = create_config(config_path)
    # Directory layout shared by all grouping classes below.
    dir_paths: DirPaths = {
        'log': config.get('Directory-Section', 'logs', fallback='/var/yang/logs'),
        'private': config.get('Web-Section', 'private-directory',
                              fallback='tests/resources/html/private'),
        'yang_models': config.get('Directory-Section', 'yang-models-dir',
                                  fallback='tests/resources/yangmodels/yang'),
        'cache': config.get('Directory-Section', 'cache',
                            fallback='tests/resources/cache'),
        'json': args.json_dir,
        'result': args.result_html_dir,
        'save': args.save_file_dir
    }
    LOGGER = log.get_logger('runCapabilities',
                            '{}/parseAndPopulate.log'.format(dir_paths['log']))
    is_uwsgi = config.get('General-Section', 'uwsgi', fallback='True')
    # Under uwsgi the API lives at <host>/api, otherwise at <host>:<port>.
    separator = ':'
    suffix = args.api_port
    if is_uwsgi == 'True':
        separator = '/'
        suffix = 'api'
    yangcatalog_api_prefix = '{}://{}{}{}/'.format(args.api_protocol, args.api_ip,
                                                   separator, suffix)
    start = time.time()
    dumper = Dumper(dir_paths['log'], 'prepare', yangcatalog_api_prefix)
    fileHasher = FileHasher('backend_files_modification_hashes', dir_paths['cache'],
                            args.save_file_hash, dir_paths['log'])
    LOGGER.info('Starting to iterate through files')
    if args.sdo:
        LOGGER.info('Found directory for sdo {}'.format(args.dir))
        # If yang-parameters.xml exists -> parsing IANA-maintained modules
        if os.path.isfile(os.path.join(args.dir, 'yang-parameters.xml')):
            LOGGER.info('yang-parameters.xml file found')
            grouping = IanaDirectory(args.dir, dumper, fileHasher, args.api, dir_paths)
            grouping.parse_and_load()
        else:
            LOGGER.info('Starting to parse files in sdo directory')
            grouping = SdoDirectory(args.dir, dumper, fileHasher, args.api, dir_paths)
            grouping.parse_and_load()
        dumper.dump_modules(dir_paths['json'])
    else:
        # Vendor directories: the filename pattern decides which parser to use.
        for pattern in ['*capabilit*.xml', '*ietf-yang-library*.xml']:
            for root, basename in find_files(args.dir, pattern):
                filename = os.path.join(root, basename)
                LOGGER.info('Found xml source {}'.format(filename))
                if pattern == '*capabilit*.xml':
                    grouping = VendorCapabilities(root, filename, dumper, fileHasher,
                                                  args.api, dir_paths)
                else:
                    grouping = VendorYangLibrary(root, filename, dumper, fileHasher,
                                                 args.api, dir_paths)
                try:
                    grouping.parse_and_load()
                except Exception as e:
                    # One broken vendor file must not abort the whole run.
                    LOGGER.exception('Skipping {}, error while parsing'.format(filename))
        dumper.dump_modules(dir_paths['json'])
        dumper.dump_vendors(dir_paths['json'])
    end = time.time()
    LOGGER.info('Time taken to parse all the files {} seconds'.format(int(end - start)))
    # Dump updated hashes into temporary directory
    if len(fileHasher.updated_hashes) > 0:
        fileHasher.dump_tmp_hashed_files_list(fileHasher.updated_hashes, dir_paths['json'])
def load_config(self):
    """(Re)load all runtime configuration and reconnect backing services.

    Reads every section of the yangcatalog config into instance attributes,
    then rebuilds the Elasticsearch client, the logger, the API URL prefix
    and the Redis connection, blocking until Redis is reachable.
    """
    config = create_config()
    # --- secrets ---
    self.search_key = config.get('Secrets-Section', 'update-signature', fallback='')
    self.secret_key = config.get('Secrets-Section', 'flask-secret-key', fallback='S3CR3T!')
    self.nginx_dir = config.get('Directory-Section', 'nginx-conf', fallback='')
    self.result_dir = config.get('Web-Section', 'result-html-dir',
                                 fallback='tests/resources/html/results')
    self.private_dir = config.get('Web-Section', 'private-directory',
                                  fallback='tests/resources/html/private')
    self.register_user_email = config.get('Message-Section', 'email-to', fallback='')
    # Credentials are stored as a quoted "user pass" pair -> [user, pass].
    self.credentials = config.get('Secrets-Section', 'confd-credentials',
                                  fallback='test test').strip('"').split(' ')
    self.elk_credentials = config.get('Secrets-Section', 'elk-secret',
                                      fallback='').strip('"').split(' ')
    self.confd_ip = config.get('Web-Section', 'confd-ip', fallback='yangcatalog.org')
    self.confdPort = int(config.get('Web-Section', 'confd-port', fallback=8008))
    self.protocol = config.get('General-Section', 'protocol-confd', fallback='http')
    # --- directories ---
    self.cache_dir = config.get('Directory-Section', 'cache',
                                fallback='tests/resources/cache')
    self.save_requests = config.get('Directory-Section', 'save-requests',
                                    fallback='/var/yang/test-requests')
    self.save_file_dir = config.get('Directory-Section', 'save-file-dir',
                                    fallback='/var/yang/all_modules')
    self.var_yang = config.get('Directory-Section', 'var', fallback='/var/yang')
    self.logs_dir = config.get('Directory-Section', 'logs', fallback='/var/yang/logs')
    self.token = config.get('Secrets-Section', 'yang-catalog-token', fallback='')
    self.admin_token = config.get('Secrets-Section', 'admin-token', fallback='')
    self.oidc_client_secret = config.get('Secrets-Section', 'client-secret', fallback='')
    self.oidc_client_id = config.get('Secrets-Section', 'client-id', fallback='')
    self.commit_msg_file = config.get('Directory-Section', 'commit-dir', fallback='')
    self.temp_dir = config.get('Directory-Section', 'temp',
                               fallback='tests/resources/tmp')
    self.diff_file_dir = config.get('Web-Section', 'save-diff-dir',
                                    fallback='tests/resources/html')
    # --- web / OIDC ---
    self.ip = config.get('Web-Section', 'ip', fallback='localhost')
    self.oidc_redirects = config.get('Web-Section', 'redirect-oidc',
                                     fallback='').split(' ')
    self.oidc_issuer = config.get('Web-Section', 'issuer', fallback='')
    self.api_port = int(config.get('Web-Section', 'api-port', fallback=5000))
    self.api_protocol = config.get('General-Section', 'protocol-api', fallback='https')
    self.is_prod = config.get('General-Section', 'is-prod', fallback='False')
    # NOTE(review): fallback is the bool True, but the value is later compared
    # to the string 'True' - confirm the config always provides a string.
    self.is_uwsgi = config.get('General-Section', 'uwsgi', fallback=True)
    self.ys_users_dir = config.get('Directory-Section', 'ys-users', fallback='')
    self.my_uri = config.get('Web-Section', 'my-uri', fallback='http://localhost')
    self.yang_models = config.get('Directory-Section', 'yang-models-dir',
                                  fallback='tests/resources/yangmodels/yang')
    # --- databases ---
    self.es_host = config.get('DB-Section', 'es-host', fallback='localhost')
    self.es_port = config.get('DB-Section', 'es-port', fallback='9200')
    # NOTE(review): bool fallback compared against 'True' below - same caveat.
    self.es_aws = config.get('DB-Section', 'es-aws', fallback=False)
    self.redis_host = config.get('DB-Section', 'redis-host', fallback='localhost')
    self.redis_port = config.get('DB-Section', 'redis-port', fallback='6379')
    self.json_ytree = config.get('Directory-Section', 'json-ytree',
                                 fallback='/var/yang/ytrees')
    self.es_aws = self.es_aws == 'True'
    es_host_config = {'host': self.es_host, 'port': self.es_port}
    if self.es_aws:
        # AWS-hosted cluster requires HTTPS with basic auth.
        self.es = Elasticsearch(hosts=[es_host_config],
                                http_auth=(self.elk_credentials[0],
                                           self.elk_credentials[1]),
                                scheme='https')
    else:
        self.es = Elasticsearch(hosts=[es_host_config])
    self.LOGGER = log.get_logger('api.yc_gc', '{}/yang.log'.format(self.logs_dir))
    # Under uwsgi the API is served at <host>/api, otherwise at <host>:<port>.
    separator = ':'
    suffix = self.api_port
    if self.is_uwsgi == 'True':
        separator = '/'
        suffix = 'api'
    self.yangcatalog_api_prefix = '{}://{}{}{}/'.format(
        self.api_protocol, self.ip, separator, suffix)
    self.LOGGER.info('yangcatalog configuration reloaded')
    self.redis = redis.Redis(host=self.redis_host, port=self.redis_port)  # pyright: ignore
    # Block until Redis answers so later calls can assume a live connection.
    self.check_wait_redis_connected()
def main():
    """Cron job: delete stale temporary data, caches, ES snapshots and old backups.

    Removes day-old numeric tmp dirs, temporary YANG Suite users, expired
    correlation ids, old yangvalidator caches, surplus Elasticsearch
    snapshots (keeping the newest five), and thins out cache backups by age.
    """
    start_time = int(time.time())
    parser = argparse.ArgumentParser()
    parser.add_argument('--config-path', type=str,
                        default=os.environ['YANGCATALOG_CONFIG_PATH'],
                        help='Set path to config file')
    # NOTE(review): store_true with default=True means this flag can never be
    # turned off from the command line - confirm whether store_false was meant.
    parser.add_argument('--compress', action='store_true', default=True,
                        help='Set whether to compress snapshot files. Default is True')
    args = parser.parse_args()
    config_path = args.config_path
    config = create_config(config_path)
    log_directory = config.get('Directory-Section', 'logs')
    temp_dir = config.get('Directory-Section', 'temp')
    ys_users = config.get('Directory-Section', 'ys-users')
    cache_directory = config.get('Directory-Section', 'cache')
    es_aws = config.get('DB-Section', 'es-aws')
    log_file_path = os.path.join(log_directory, 'jobs', 'removeUnused.log')
    LOGGER = log.get_logger('removeUnused', log_file_path)
    LOGGER.info('Starting Cron job remove unused files')
    current_time = time.time()
    cutoff = current_time - DAY
    try:
        LOGGER.info('Removing old tmp directory representing int folders')
        # Numeric directory names in temp_dir are per-job scratch areas;
        # drop those older than one day. ('dir' shadows the builtin - legacy.)
        for dir in next(os.walk(temp_dir))[1]:
            if represents_int(dir):
                creation_time = os.path.getctime(os.path.join(temp_dir, dir))
                if creation_time < cutoff:
                    shutil.rmtree(os.path.join(temp_dir, dir))
        LOGGER.info('Removing old ys temporary users')
        dirs = os.listdir(ys_users)
        for dir in dirs:
            # ('abs' shadows the builtin - legacy naming kept as-is.)
            abs = os.path.abspath('{}/{}'.format(ys_users, dir))
            if not abs.endswith('yangcat') and not abs.endswith('yang'):
                try:
                    shutil.rmtree(abs)
                except Exception:
                    # Best-effort cleanup; a busy/locked dir is not fatal.
                    pass
        LOGGER.info('Removing old correlation ids')
        # removing correlation ids from file that are older than a day
        # Be lenient to missing files
        try:
            filename = open('{}/correlation_ids'.format(temp_dir), 'r')
            lines = filename.readlines()
            filename.close()
        except IOError:
            lines = []
        # Rewrite the file keeping only entries from the current day.
        with open('{}/correlation_ids'.format(temp_dir), 'w') as filename:
            for line in lines:
                line_datetime = line.split(' -')[0]
                t = dt.strptime(line_datetime, '%a %b %d %H:%M:%S %Y')
                diff = dt.now() - t
                if diff.days == 0:
                    filename.write(line)
        LOGGER.info('Removing old yangvalidator cache dirs')
        yang_validator_cache = os.path.join(temp_dir, 'yangvalidator')
        # Validator caches get a longer grace period: two days.
        cutoff = current_time - 2 * DAY
        dirs = os.listdir(yang_validator_cache)
        for dir in dirs:
            if dir.startswith('yangvalidator-v2-cache-'):
                creation_time = os.path.getctime(
                    os.path.join(yang_validator_cache, dir))
                if creation_time < cutoff:
                    try:
                        shutil.rmtree(os.path.join(yang_validator_cache, dir))
                    except PermissionError:
                        LOGGER.exception('Problem while deleting {}'.format(dir))
                        continue
        if es_aws != 'True':
            # Self-hosted ES only: AWS manages its own snapshot retention.
            LOGGER.info('Removing old elasticsearch snapshots')
            es_manager = ESManager()
            es_manager.create_snapshot_repository(args.compress)
            sorted_snapshots = es_manager.get_sorted_snapshots()
            # Keep the five most recent snapshots.
            for snapshot in sorted_snapshots[:-5]:
                es_manager.delete_snapshot(snapshot['snapshot'])

        def hash_file(path: str) -> bytes:
            """Return the SHA-1 digest of a file, read in BLOCK_SIZE chunks."""
            sha1 = hashlib.sha1()
            with open(path, 'rb') as byte_file:
                while True:
                    data = byte_file.read(BLOCK_SIZE)
                    if not data:
                        break
                    sha1.update(data)
            return sha1.digest()

        def hash_node(path: str) -> bytes:
            """Return a content digest for a file, or a combined digest for a dir tree."""
            if os.path.isfile(path):
                return hash_file(path)
            elif os.path.isdir(path):
                sha1 = hashlib.sha1()
                for root, _, filenames in os.walk(path):
                    for filename in filenames:
                        file_path = os.path.join(root, filename)
                        # we only want to compare the contents, not the top directory name
                        relative_path = file_path[len(path):]
                        file_signature = relative_path.encode() + hash_file(file_path)
                        sha1.update(file_signature)
                return sha1.digest()
            else:
                # Neither file nor directory should ever be passed here.
                assert False

        # Retention policy: within the last two months keep every backup with
        # distinct content (dedup by hash, keeping the newest of duplicates);
        # for months 3-6 keep only the newest backup per month; drop anything
        # older than six months. The most recent backup is always preserved.
        def remove_old_backups(subdir: str):
            backup_directory = os.path.join(cache_directory, subdir)
            list_of_backups = get_list_of_backups(backup_directory)
            backup_name_latest = os.path.join(backup_directory, list_of_backups[-1])

            def diff_month(later_datetime, earlier_datetime):
                # Whole-month difference between two datetimes.
                return (later_datetime.year - earlier_datetime.year
                        ) * 12 + later_datetime.month - earlier_datetime.month

            to_remove = []
            last_six_months = {}
            last_two_months = {}
            today = dt.now()
            for backup in list_of_backups:
                # Backup names start with a timestamp; lexical order == age order.
                backup_dt = dt.strptime(backup[:backup.index('.')],
                                        backup_date_format)
                month_difference = diff_month(today, backup_dt)
                if month_difference > 6:
                    to_remove.append(backup)
                elif month_difference > 2:
                    month = backup_dt.month
                    if month in last_six_months:
                        if last_six_months[month] > backup:
                            to_remove.append(backup)
                        else:
                            to_remove.append(last_six_months[month])
                            last_six_months[month] = backup
                    else:
                        last_six_months[month] = backup
                else:
                    backup_path = os.path.join(backup_directory, backup)
                    currently_processed_backup_hash = hash_node(backup_path)
                    if currently_processed_backup_hash in last_two_months:
                        if last_two_months[currently_processed_backup_hash] > backup:
                            to_remove.append(backup)
                        else:
                            to_remove.append(
                                last_two_months[currently_processed_backup_hash])
                    last_two_months[currently_processed_backup_hash] = backup
            for backup in to_remove:
                backup_path = os.path.join(backup_directory, backup)
                if backup_path != backup_name_latest:
                    if os.path.isdir(backup_path):
                        shutil.rmtree(backup_path)
                    elif os.path.isfile(backup_path):
                        os.unlink(backup_path)

        LOGGER.info('Removing old cache json files')
        remove_old_backups('confd')
    except Exception as e:
        LOGGER.exception('Exception found while running removeUnused script')
        job_log(start_time, temp_dir, error=str(e), status='Fail',
                filename=os.path.basename(__file__))
        raise e
    job_log(start_time, temp_dir, status='Success',
            filename=os.path.basename(__file__))
    LOGGER.info('Job finished successfully')
def main(scriptConf=None):
    """Cron job: harvest openconfig modules from the public repo and PUT them
    to the yangcatalog API.

    Clones openconfig/public, builds catalog metadata for every ``*.yang``
    file under ``release/models``, and submits the batch to ``/api/modules``.
    The outcome is recorded via ``job_log``.
    """
    start_time = int(time.time())
    if scriptConf is None:
        scriptConf = ScriptConfig()
    args = scriptConf.args
    config_path = args.config_path
    config = create_config(config_path)
    api_ip = config.get('Web-Section', 'ip')
    api_port = int(config.get('Web-Section', 'api-port'))
    credentials = config.get('Secrets-Section',
                             'confd-credentials').strip('"').split(' ')
    api_protocol = config.get('General-Section', 'protocol-api')
    is_uwsgi = config.get('General-Section', 'uwsgi')
    config_name = config.get('General-Section', 'repo-config-name')
    config_email = config.get('General-Section', 'repo-config-email')
    log_directory = config.get('Directory-Section', 'logs')
    temp_dir = config.get('Directory-Section', 'temp')
    openconfig_repo_url = config.get('Web-Section', 'openconfig-models-repo-url')
    LOGGER = log.get_logger(
        'openconfigPullLocal',
        '{}/jobs/openconfig-pull.log'.format(log_directory))
    LOGGER.info('Starting Cron job openconfig pull request local')
    # Under uwsgi the API is served at <host>/api, otherwise at <host>:<port>.
    separator = ':'
    suffix = api_port
    if is_uwsgi == 'True':
        separator = '/'
        suffix = 'api'
    yangcatalog_api_prefix = '{}://{}{}{}/'.format(api_protocol, api_ip,
                                                   separator, suffix)
    commit_author = {'name': config_name, 'email': config_email}
    repo = draftPullUtility.clone_forked_repository(openconfig_repo_url,
                                                    commit_author, LOGGER)
    assert repo
    modules = []
    try:
        yang_files = glob('{}/release/models/**/*.yang'.format(repo.local_dir),
                          recursive=True)
        for yang_file in yang_files:
            # 'name@revision.yang' or 'name.yang' -> module name.
            basename = os.path.basename(yang_file)
            name = basename.split('.')[0].split('@')[0]
            revision = resolve_revision(yang_file)
            # Path relative to the repository root.
            path = yang_file.split('{}/'.format(repo.local_dir))[-1]
            module = {
                'generated-from': 'not-applicable',
                'module-classification': 'unknown',
                'name': name,
                'revision': revision,
                'organization': 'openconfig',
                'source-file': {
                    'owner': 'openconfig',
                    'path': path,
                    'repository': 'public'
                }
            }
            modules.append(module)
        data = json.dumps({'modules': {'module': modules}})
    except Exception as e:
        LOGGER.exception('Exception found while running openconfigPullLocal script')
        job_log(start_time, temp_dir, error=str(e), status='Fail',
                filename=os.path.basename(__file__))
        raise e
    LOGGER.debug(data)
    api_path = '{}modules'.format(yangcatalog_api_prefix)
    response = requests.put(api_path, data,
                            auth=(credentials[0], credentials[1]),
                            headers=json_headers)
    status_code = response.status_code
    if status_code < 200 or status_code > 299:
        # Fix: was 'responsed'; also do NOT json-parse an error body here -
        # error responses are not guaranteed to be JSON and parsing them
        # would raise and mask the real failure.
        e = 'PUT /api/modules responded with status code {}'.format(status_code)
        job_log(start_time, temp_dir, error=str(e), status='Fail',
                filename=os.path.basename(__file__))
        LOGGER.info('Job finished, but an error occurred while sending PUT to /api/modules')
    else:
        # Success responses carry a JSON body with the async job id.
        payload = json.loads(response.text)
        messages = [{'label': 'Job ID', 'message': payload['job-id']}]
        job_log(start_time, temp_dir, messages=messages, status='Success',
                filename=os.path.basename(__file__))
        LOGGER.info('Job finished successfully')
def main():
    """Rebuild the 'autocomplete' Elasticsearch index from the 'modules' index.

    Ensures all catalog indices exist, scrolls through every document in the
    'modules' index, then re-indexes each collected entry into the
    'autocomplete' index (deleting any stale copy first).
    """
    config = create_config()
    es_aws = config.get('DB-Section', 'es-aws')
    elk_credentials = config.get('Secrets-Section',
                                 'elk-secret').strip('"').split(' ')
    # INIT ES CONNECTION
    es_host_config = {
        'host': config.get('DB-Section', 'es-host', fallback='localhost'),
        'port': config.get('DB-Section', 'es-port', fallback='9200')
    }
    if es_aws == 'True':
        # AWS-hosted cluster requires HTTPS with basic auth.
        es = Elasticsearch(hosts=[es_host_config],
                           http_auth=(elk_credentials[0], elk_credentials[1]),
                           scheme='https')
    else:
        es = Elasticsearch(hosts=[es_host_config])
    # INIT ALL INDICES
    es_manager = ESManager()
    for index in ESIndices:
        if not es_manager.index_exists(index):
            create_result = es_manager.create_index(index)
            print(create_result)
    # GET ALL MODULES FROM 'modules' INDEX
    # Scroll API pages through the whole index 250 documents at a time.
    all_results = {}
    match_all_query = {'query': {'match_all': {}}}
    total_index_docs = 0
    es_result = es.search(index=ESIndices.MODULES.value, body=match_all_query,
                          scroll='10s', size=250)
    scroll_id = es_result.get('_scroll_id')
    hits = es_result['hits']['hits']
    _store_hits(hits, all_results)
    total_index_docs += len(hits)
    while es_result['hits']['hits']:
        es_result = es.scroll(scroll_id=scroll_id, scroll='10s')
        scroll_id = es_result.get('_scroll_id')
        hits = es_result['hits']['hits']
        _store_hits(hits, all_results)
        total_index_docs += len(hits)
    # Scroll context may already be expired - ignore 404 on cleanup.
    es.clear_scroll(scroll_id=scroll_id, ignore=(404, ))
    # Fix: message typo was 'retreived'.
    print('Total number of modules retrieved from "modules" index: {}'.format(
        total_index_docs))
    # FILL 'autocomplete' INDEX
    for query in all_results.values():
        # Remove any stale copy before re-indexing the entry.
        es_manager.delete_from_index(ESIndices.AUTOCOMPLETE, query)
        index_result = es_manager.index_module(ESIndices.AUTOCOMPLETE, query)
        if index_result['result'] != 'created':
            print(index_result)
def main(scriptConf=None):
    """Daily job: rsync IANA-maintained YANG modules into the forked 'yang' repo.

    Mirrors rsync.iana.org's yang-parameters directory, copies the modules
    flagged as IANA-maintained ('iana' == 'Y') into ``standard/iana``, runs
    revision sanity checks, then commits and (in production) pushes the
    result. The outcome is recorded via ``job_log``.
    """
    start_time = int(time.time())
    if scriptConf is None:
        scriptConf = ScriptConfig()
    args = scriptConf.args
    config_path = args.config_path
    config = create_config(config_path)
    yang_models = config.get('Directory-Section', 'yang-models-dir')
    token = config.get('Secrets-Section', 'yang-catalog-token')
    username = config.get('General-Section', 'repository-username')
    commit_dir = config.get('Directory-Section', 'commit-dir')
    config_name = config.get('General-Section', 'repo-config-name')
    config_email = config.get('General-Section', 'repo-config-email')
    log_directory = config.get('Directory-Section', 'logs')
    temp_dir = config.get('Directory-Section', 'temp')
    is_production = config.get('General-Section', 'is-prod')
    is_production = is_production == 'True'
    LOGGER = log.get_logger('ianaPull',
                            '{}/jobs/iana-pull.log'.format(log_directory))
    LOGGER.info('Starting job to pull IANA-maintained modules')
    repo_name = 'yang'
    # Token embedded in the URL authenticates the push to the fork.
    repourl = 'https://{}@github.com/{}/{}.git'.format(token, username, repo_name)
    commit_author = {'name': config_name, 'email': config_email}
    draftPullUtility.update_forked_repository(yang_models, LOGGER)
    repo = draftPullUtility.clone_forked_repository(repourl, commit_author, LOGGER)
    if not repo:
        error_message = 'Failed to clone repository {}/{}'.format(username, repo_name)
        job_log(start_time, temp_dir, error=error_message, status='Fail',
                filename=os.path.basename(__file__))
        sys.exit()
    try:
        iana_temp_path = os.path.join(temp_dir, 'iana')
        if os.path.exists(iana_temp_path):
            shutil.rmtree(iana_temp_path)
        # call rsync to sync with rsync.iana.org::assignments/yang-parameters/
        subprocess.call([
            'rsync', '-avzq', '--delete',
            'rsync.iana.org::assignments/yang-parameters/', iana_temp_path
        ])
        draftPullUtility.set_permissions(iana_temp_path)
        iana_standard_path = os.path.join(repo.local_dir, 'standard/iana')
        if not os.path.exists(iana_standard_path):
            os.makedirs(iana_standard_path)
        xml_path = os.path.join(iana_temp_path, 'yang-parameters.xml')
        copy2(xml_path, '{}/standard/iana/yang-parameters.xml'.format(repo.local_dir))
        # Parse yang-parameters.xml file
        root = ET.parse(xml_path).getroot()
        tag = root.tag
        # Root tag looks like '{<xmlns>}registry'; keep the namespace prefix.
        namespace = tag.split('registry')[0]
        modules = root.iter('{}record'.format(namespace))
        for module in modules:
            # Flatten each <record>'s child elements into its attribute dict.
            data = module.attrib
            for attributes in module:
                prop = attributes.tag.split(namespace)[-1]
                assert attributes.text is not None
                data[prop] = attributes.text
            # Copy only modules explicitly marked as IANA-maintained.
            if data.get('iana') == 'Y' and data.get('file'):
                src = '{}/{}'.format(iana_temp_path, data.get('file'))
                dst = '{}/standard/iana/{}'.format(repo.local_dir, data.get('file'))
                copy2(src, dst)
        LOGGER.info('Checking module filenames without revision in {}'.format(
            iana_standard_path))
        draftPullUtility.check_name_no_revision_exist(iana_standard_path, LOGGER)
        LOGGER.info('Checking for early revision in {}'.format(iana_standard_path))
        draftPullUtility.check_early_revisions(iana_standard_path, LOGGER)
        messages = []
        try:
            # Add commit and push to the forked repository
            LOGGER.info('Adding all untracked files locally')
            untracked_files = repo.repo.untracked_files
            repo.add_untracked_remove_deleted()
            LOGGER.info('Committing all files locally')
            repo.commit_all('Cronjob - every day pull of iana yang files')
            LOGGER.info('Pushing files to forked repository')
            commit_hash = repo.repo.head.commit
            LOGGER.info('Commit hash {}'.format(commit_hash))
            # Persist the commit hash so a follow-up job can reference it.
            with open(commit_dir, 'w+') as f:
                f.write('{}\n'.format(commit_hash))
            if is_production:
                LOGGER.info('Pushing untracked and modified files to remote repository')
                repo.push()
            else:
                LOGGER.info('DEV environment - not pushing changes into remote repository')
                LOGGER.debug('List of all untracked and modified files:\n{}'.format(
                    '\n'.join(untracked_files)))
        except GitCommandError as e:
            message = 'Error while pushing procedure - git command error: \n {} \n git command out: \n {}'.format(
                e.stderr, e.stdout)
            if 'Your branch is up to date' in e.stdout:
                # Nothing changed since the last run - treated as a benign outcome.
                LOGGER.warning(message)
                messages = [{
                    'label': 'Pull request created',
                    'message': 'False - branch is up to date'
                }]
            else:
                LOGGER.exception('Error while pushing procedure - Git command error')
                raise e
        except Exception as e:
            LOGGER.exception('Error while pushing procedure {}'.format(
                sys.exc_info()[0]))
            raise type(e)('Error while pushing procedure')
    except Exception as e:
        LOGGER.exception('Exception found while running draftPull script')
        job_log(start_time, temp_dir, error=str(e), status='Fail',
                filename=os.path.basename(__file__))
        raise e
    # Remove tmp folder
    LOGGER.info('Removing tmp directory')
    if len(messages) == 0:
        # Success path: commit_hash was assigned in the push block above.
        messages = [{
            'label': 'Pull request created',
            'message': 'True - {}'.format(commit_hash)
        }  # pyright: ignore
        ]
    job_log(start_time, temp_dir, messages=messages, status='Success',
            filename=os.path.basename(__file__))
    LOGGER.info('Job finished successfully')
from utility.yangParser import create_context if __name__ == '__main__': parser = argparse.ArgumentParser( description='Process changed modules in a git repo') parser.add_argument('--time', type=str, help='Modified time argument to find(1)', required=False) parser.add_argument('--config-path', type=str, default=os.environ['YANGCATALOG_CONFIG_PATH'], help='Set path to config file') args = parser.parse_args() config_path = args.config_path config = create_config(config_path) save_file_dir = config.get('Directory-Section', 'save-file-dir') json_ytree = config.get('Directory-Section', 'json-ytree') jsons = glob.glob('{}/*.json'.format(json_ytree)) num_of_jsons = len(jsons) i = 0 for i, jsn in enumerate(jsons): print('tree {} {} out of {}'.format(jsn, i + 1, num_of_jsons)) file_stat = Path(jsn).stat() if file_stat.st_size != 0: continue plugin.init([]) ctx = create_context(save_file_dir) ctx.opts.lint_namespace_prefixes = [] ctx.opts.lint_modulename_prefixes = []