from __future__ import print_function

import ConfigParser
import argparse
import fnmatch
import os
import time

import capability as cap
import prepare
import statistics
import tools.utility.log as log
from tools.utility.util import get_curr_dir

LOGGER = log.get_logger('runCapabilities', '/home/miroslav/log/populate/yang.log')


def find_missing_hello(directory, pattern):
    """Yield the directory path (once per matching file) for every directory
    under `directory` that contains a file matching the fnmatch `pattern`
    but no file name containing ".xml" (i.e. no hello-message file).

    :param directory: (str) root directory walked recursively
    :param pattern: (str) fnmatch-style pattern for the files of interest
    """
    for root, dirs, files in os.walk(directory):
        for basename in files:
            if fnmatch.fnmatch(basename, pattern):
                # Substring test, not a suffix test -- NOTE(review):
                # a name like "a.xmlx" also counts; confirm intended.
                if not any(".xml" in name for name in files):
                    yield root


def find_files(directory, pattern):
    """Walk `directory` looking for files matching `pattern`.

    NOTE(review): this definition is cut off in this view -- the trailing
    ``yield filename`` present in sibling copies of this helper is not
    visible here.
    """
    for root, dirs, files in os.walk(directory):
        for basename in files:
            if fnmatch.fnmatch(basename, pattern):
                filename = os.path.join(root, basename)
import fileinput import fnmatch import json import os import unicodedata import xml.etree.ElementTree as ET from click.exceptions import FileError import tools.utility.log as log from tools.parseAndPopulate.loadJsonFiles import LoadFiles from tools.parseAndPopulate.modules import Modules from tools.utility.util import get_curr_dir LOGGER = log.get_logger(__name__, '/home/miroslav/log/populate/yang.log') github_raw = 'https://raw.githubusercontent.com/' # searching for file based on pattern or pattern_with_revision def find_first_file(directory, pattern, pattern_with_revision): for root, dirs, files in os.walk(directory): for basename in files: if fnmatch.fnmatch(basename, pattern_with_revision): filename = os.path.join(root, basename) return filename for root, dirs, files in os.walk(directory): for basename in files: if fnmatch.fnmatch(basename, pattern): filename = os.path.join(root, basename)
from tools.utility import log from tools.utility.util import load_json_from_url, resolve_results LOGGER = log.get_logger('modules') class LoadFiles: def __init__(self, path): LOGGER.debug('Loading Benoit\'s compilation statuses and results') self.ietf_rfc_json = {} self.ietf_draft_json = {} self.ietf_draft_example_json = {} self.bbf_json = {} self.ieee_standard_json = {} self.ieee_experimental_json = {} self.ietf_rfc_standard_json = {} self.mef_experimental_json = {} self.openconfig_json = {} self.ietf_rfc_standard_json['json'] = load_json_from_url( 'http://www.claise.be/RFCStandard.json') self.ietf_rfc_standard_json['ths'] = resolve_results( 'http://www.claise.be/RFCStandardYANGPageCompilation.html') self.ietf_rfc_json['json'] = load_json_from_url( 'http://www.claise.be/IETFYANGRFC.json') self.ietf_rfc_json['ths'] = resolve_results( 'http://www.claise.be/IETFYANGOutOfRFC.html') self.bbf_json['json'] = load_json_from_url( 'http://www.claise.be/BBF.json') self.bbf_json['ths'] = resolve_results(
import datetime
import uuid

import pika

import tools.utility.log as log

LOGGER = log.get_logger(__name__)


class Sender:
    """Publishes jobs to the local RabbitMQ ``module_queue`` and looks up
    job responses by correlation id."""

    def __init__(self):
        LOGGER.debug('Initializing sender')
        # Closed vocabulary of states a job response may report.
        self.__response_type = ['Failed', 'In progress',
                                'Finished successfully', 'does not exist']
        # Blocking AMQP connection to a RabbitMQ broker on localhost.
        self.connection = pika.BlockingConnection(
            pika.ConnectionParameters('127.0.0.1'))
        self.channel = self.connection.channel()
        self.channel.queue_declare(queue='module_queue')
        # File that records correlation ids of submitted jobs.
        self.__response_file = 'correlation_ids'

    def get_response(self, correlation_id):
        """Get response according to job_id. It can be either
        'Failed', 'In progress', 'Finished successfully' or 'does not exist'

        Arguments:
            :param correlation_id: (str) job_id searched between responses
            :return one of the following - 'Failed', 'In progress',
                'Finished successfully' or 'does not exist'

        (method body truncated in this view)
        """
import subprocess import sys import urllib2 from Crypto.Hash import SHA, HMAC from datetime import datetime from urllib2 import URLError import pika import requests import tools.utility.log as log from tools.parseAndPopulate.modulesComplicatedAlgorithms import ModulesComplicatedAlgorithms from tools.utility import messageFactory from tools.utility.util import get_curr_dir LOGGER = log.get_logger('receiver', '/home/miroslav/log/api/yang.log') # Make a http request on path with json_data def http_request(path, method, json_data, http_credentials, header, indexing=None, return_code=False): """Create HTTP request Arguments: :param indexing: (str) Whether there need to be added a X-YC-Signature. This is because of the verification with indexing script
import argparse
import base64
import errno
import fnmatch
import json
import os
import shutil
import subprocess
import unicodedata
import urllib2

import tools.utility.log as log

LOGGER = log.get_logger('populate')


def find_files(directory, pattern):
    """Recursively walk `directory` and yield the full path of every file
    whose basename matches the fnmatch `pattern`."""
    for root, dirs, files in os.walk(directory):
        for basename in files:
            if fnmatch.fnmatch(basename, pattern):
                filename = os.path.join(root, basename)
                yield filename


# Unicode to string
def unicode_normalize(variable):
    """Return an ASCII byte string for `variable`, NFKD-normalizing first
    and silently dropping characters with no ASCII decomposition."""
    return unicodedata.normalize('NFKD', variable).encode('ascii', 'ignore')


# Make a http request on path with json_data
def http_request(path, method, json_data, credentials):
    """Issue an HTTP request against `path`.

    NOTE(review): the body of this function is cut off in this view.
    """
import json import tools.utility.log as lo import requests LOGGER = lo.get_logger('missing-element') URL = 'http://localhost:8009/api/config/catalog/modules' HEADERS = 'application/vnd.yang.data+json' API_URL = 'http://localhost:5000/search/name/' DELETE_URL = 'http://yangcatalog.org:8008/api/config/catalog/vendors/vendor/{}/platforms/platform/{}/software-versions/software-version/{}/software-flavors/software-flavor/{}/modules/module/{},{},{}' def schema_does_not_exist(): org = mod['organization'] if org == 'independent': if mod.get('schema') is None: LOGGER.info('removing {} {}'.format(mod['name'], mod['revision'])) if mod.get('implementations'): imp = mod['implementations']['implementation'] for im in imp: r = requests.delete(DELETE_URL.format( im['vendor'], im['platform'], im['software-version'], im['software-flavor'], mod['name'], mod['revision'], 'missing element'), auth=('admin', 'admin'), headers={ 'Accept': HEADERS, 'Content-type': HEADERS }) pass r = requests.delete('{}/module/{},{},{}'.format( URL, mod['name'], mod['revision'], 'independent'),
import json
import subprocess
from datetime import datetime

import requests

from tools.utility import log
from tools.utility.util import get_curr_dir, find_first_file

LOGGER = log.get_logger('')


class ModulesComplicatedAlgorithms:
    """Post-processes a set of parsed modules, loaded either from the
    ``prepare.json`` produced by parseAndPopulate or passed in directly."""

    def __init__(self, yangcatalog_api_prefix, credentials, protocol, ip,
                 port, save_file_dir, direc, all_modules):
        # When no modules are handed over, fall back to the prepare.json
        # written by the parseAndPopulate step into directory `direc`.
        if all_modules is None:
            with open('../parseAndPopulate/' + direc + '/prepare.json', 'r') as f:
                self.__all_modules = json.load(f)
        else:
            self.__all_modules = all_modules
        self.__yangcatalog_api_prefix = yangcatalog_api_prefix
        self.__new_modules = []
        self.__credentials = credentials
        self.__protocol = protocol
        self.__ip = ip
        self.__port = port
        self.__save_file_dir = save_file_dir
        self.__path = None
        # Base URL of the backend endpoint, e.g. "https://host:port".
        self.__prefix = '{}://{}:{}'.format(protocol, ip, port)
import errno
import fnmatch
import json
import os
import shutil
import subprocess
import unicodedata
from datetime import datetime

import requests

import tools.utility.log as log
from tools.api.receiver import send_to_indexing
from tools.utility.util import get_curr_dir

LOGGER = log.get_logger('populate')


def find_files(directory, pattern):
    """Recursively yield the path of each file under `directory` whose
    basename matches the fnmatch `pattern`."""
    for root, dirs, files in os.walk(directory):
        for basename in files:
            if fnmatch.fnmatch(basename, pattern):
                filename = os.path.join(root, basename)
                yield filename


# Unicode to string
def unicode_normalize(variable):
    """NFKD-normalize `variable` and encode to ASCII, silently dropping
    characters with no ASCII decomposition."""
    return unicodedata.normalize('NFKD', variable).encode('ascii', 'ignore')


if __name__ == "__main__":  # NOTE(review): script body continues beyond this view
import argparse import json import os import subprocess import tarfile import urllib import urllib2 from datetime import datetime from numpy.f2py.auxfuncs import throw_error import tools.utility.log as log from tools.utility import yangParser from tools.utility.util import get_curr_dir LOGGER = log.get_logger('draftPullLocal') def load_json_from_url(url): failed = True loaded_json = None tries = 10 while failed: try: response = urllib2.urlopen(url).read() loaded_json = json.loads(response) failed = False except: tries -= 1 if tries == 0: failed = False
import json import tools.utility.log as lo import requests LOGGER = lo.get_logger('missing-element') URL = 'http://localhost:8009/api/config/catalog/modules' HEADERS = 'application/vnd.yang.data+json' API_URL = 'http://localhost:5000/search/name/' DELETE_URL = 'http://yangcatalog.org:8008/api/config/catalog/vendors/vendor/{}/platforms/platform/{}/software-versions/software-version/{}/software-flavors/software-flavor/{}/modules/module/{},{},{}' def schema_does_not_exist(): org = mod['organization'] if org == 'independent': if mod.get('schema') is None: LOGGER.info('removing {} {}'.format(mod['name'], mod['revision'])) if mod.get('implementations'): imp = mod['implementations']['implementation'] for im in imp: r = requests.delete( DELETE_URL.format(im['vendor'], im['platform'], im['software-version'], im['software-flavor'], mod['name'], mod['revision'], 'missing element'), auth=('admin', 'admin'), headers={'Accept': HEADERS, 'Content-type': HEADERS}) pass r = requests.delete('{}/module/{},{},{}'.format(URL, mod['name'], mod['revision'],
import ConfigParser import os import smtplib import sys from email.mime.text import MIMEText from ciscosparkapi import CiscoSparkAPI import tools.utility.log as lo LOGGER = lo.get_logger('Messaging', '/home/miroslav/log/messaging/yang.log') GREETINGS = 'Hello from yang-catalog' class MessageFactory: def __init__(self, config_path='../utility/config.ini'): def list_matching_rooms(a, title_match): return [r for r in a.rooms.list() if title_match in r.title] LOGGER.info('Initialising Message') config_path = os.path.abspath('.') + '/' + config_path config = ConfigParser.ConfigParser() config.read(config_path) token = config.get('Message-Section', 'access-token') self.__api = CiscoSparkAPI(access_token=token) rooms = list_matching_rooms(self.__api, 'YANG Catalog admin') if len(rooms) == 0: LOGGER.error('Need at least one room') sys.exit(1) if len(rooms) != 1:
import argparse import json import requests import tools.utility.log as log LOGGER = log.get_logger('ResolveExpiration') def __resolve_expiration(reference, module, args): update = False if reference is not None and 'datatracker.ietf.org' in reference: ref = reference.split('/')[-1] url = ('https://datatracker.ietf.org/api/v1/doc/document/' + ref + '/?format=json') response = requests.get(url) if response.status_code == 200: data = json.loads(response.content) if '/api/v1/doc/state/2/' in data['states']: expired = module.get('expired') if expired is None or not expired: update = True module['expired'] = True if module.get('expires') is not None: if module['expires'] != data['expires']: update = True module['expires'] = data['expires'] else: expired = module.get('expired') if expired is None or expired:
import unicodedata
import urllib2
from urllib2 import URLError

import MySQLdb
import requests
import sys
from flask import Flask, jsonify, abort, make_response, request, Response
from flask_httpauth import HTTPBasicAuth

import repoutil
import tools.utility.log as lo
from tools.api.sender import Sender
from tools.parseAndPopulate import yangParser

LOGGER = lo.get_logger('api')

# GitHub endpoints for the YangModels/yang repository.
url = 'https://github.com/'
github_api_url = 'https://api.github.com'
github_repos_url = github_api_url + '/repos'
yang_models_url = github_repos_url + '/YangModels/yang'

# HTTP basic-auth handler and the Flask application object.
auth = HTTPBasicAuth()
app = Flask(__name__)

# Maps a YANG namespace URI prefix to its short vendor/organization name.
NS_MAP = {
    "http://cisco.com/ns/yang/": "cisco",
    "http://www.huawei.com/netconf": "huawei",
    "http://openconfig.net/yang/": "openconfig",
    "http://tail-f.com/": "tail-f"
}
import argparse import math import os import shutil import time import datetime import tools.utility.log as lo from tools.utility import messageFactory LOGGER = lo.get_logger('removeUnused') if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--remove-dir', type=str, default='.', help='Set path to config file') parser.add_argument('--remove-dir2', type=str, default='.', help='Set path to yangsuite users') parser.add_argument('--remove-dir3', type=str, default='/home/miroslav/yangsuite-users/', help='Set path to yangsuite saved users') parser.add_argument('--logs-path', type=str, default='.', help='Set path to config file')
import json

import tools.utility.log as log

LOGGER = log.get_logger(__name__)


class Prepare:
    """Accumulator for per-module metadata gathered while parsing; each
    attribute below holds one field of every module seen so far.

    NOTE(review): dict keys are presumably "name@revision" strings (see
    ``name_revision``) -- confirm against the methods that fill them.
    """

    def __init__(self, file_name):
        # Name of the output file this collection is eventually dumped to.
        self.file_name = file_name
        # Every module identifier registered so far.
        self.name_revision = set()
        # One dict per metadata field, filled as modules are parsed.
        self.conformance_type = {}
        self.namespace = {}
        self.implementations = {}
        self.reference = {}
        self.prefix = {}
        self.yang_version = {}
        self.organization = {}
        self.description = {}
        self.contact = {}
        self.compilation_status = {}
        self.author_email = {}
        self.schema = {}
        self.feature = {}
        self.maturity_level = {}
        self.compilation_result = {}
        self.deviations = {}
        self.json_submodules = {}
        self.module_or_submodule = {}
        self.document_name = {}
        self.generated_from = {}
        self.working_group = {}
import ConfigParser
import argparse
import json
import os

import requests

import tools.utility.log as log
from tools.utility import repoutil, yangParser

LOGGER = log.get_logger('openconfigPullLocal', '/home/miroslav/log/jobs/yang.log')


def resolve_revision(yang_file):
    """Return the first ``revision`` statement of a YANG file.

    Falls back to '1970-01-01' when the file cannot be parsed or declares
    no revision statement.

    :param yang_file: (str) path to the YANG module file
    :return: (str) revision date
    """
    try:
        parsed_yang = yangParser.parse(os.path.abspath(yang_file))
        revision = parsed_yang.search('revision')[0].arg
    except Exception:
        # Narrowed from a bare ``except:``, which also swallowed
        # SystemExit/KeyboardInterrupt; any parse failure still yields
        # the epoch fallback date exactly as before.
        revision = '1970-01-01'
    return revision


if __name__ == "__main__":
    LOGGER.info('Starting Cron job openconfig pull request local')
    parser = argparse.ArgumentParser()
    parser.add_argument('--config-path', type=str,
                        default='../utility/config.ini',
                        help='Set path to config file')
    args = parser.parse_args()
    # NOTE(review): script continues beyond this view.
import datetime import errno import json import os import shutil import subprocess import sys import urllib2 from Crypto.Hash import SHA, HMAC from urllib2 import URLError import pika import tools.utility.log as log LOGGER = log.get_logger('receiver') # Make a http request on path with json_data def http_request(path, method, json_data, http_credentials, header, indexing=None): """Create HTTP request Arguments: :param indexing: (str) Whether there need to be added a X-YC-Signature. This is because of the verification with indexing script :param header: (str) to set Content-type and Accept headers to this variable :param http_credentials: (list) Basic authorization credentials - username, password
import fnmatch import os from tools.utility import yangParser import tools.utility.log as log from tools.utility.util import get_curr_dir LOGGER = log.get_logger('dependencies') def search_dependencies(base): for root, dirs, files in os.walk(base): for basename in files: if fnmatch.fnmatch(basename, '*.yang'): filename = os.path.join(root, basename) LOGGER.info('Parsing file {}'.format(filename)) try: yangFile = yangParser.parse(os.path.abspath(filename)) try: revision = yangFile.search('revision')[0].arg except: revision = '1970-01-01' name = filename.split('/')[-1].split('.')[0].split('@')[0] key = '{}@{}.yang'.format(name, revision) if key not in dependencies: dependencies[key] = set() yangImport = yangFile.search('import') yangInclude = yangFile.search('include') for imp in yangImport: impName = imp.arg impRev = None
from __future__ import print_function

import ConfigParser
import argparse
import fnmatch
import os
import time

import capability as cap
import prepare
import statistics
import tools.utility.log as log
from tools.utility.util import get_curr_dir

LOGGER = log.get_logger('runCapabilities')


def find_missing_hello(directory, pattern):
    """Yield a directory path once for every `pattern`-matching file it
    holds, but only when no file name in that directory contains ".xml"
    (i.e. the hello-message file is missing)."""
    for folder, _subdirs, names in os.walk(directory):
        has_xml = any(".xml" in name for name in names)
        for name in names:
            if fnmatch.fnmatch(name, pattern) and not has_xml:
                yield folder


def find_files(directory, pattern):
    """Recursively yield the full path of every file under `directory`
    whose basename matches the fnmatch `pattern`."""
    for folder, _subdirs, names in os.walk(directory):
        for name in names:
            if fnmatch.fnmatch(name, pattern):
                yield os.path.join(folder, name)
import datetime
import uuid

import pika

import tools.utility.log as log

LOGGER = log.get_logger(__name__, '/home/miroslav/log/api/yang.log')


class Sender:
    """Publishes jobs to the local RabbitMQ ``module_queue`` and resolves
    job responses by correlation id."""

    def __init__(self):
        LOGGER.debug('Initializing sender')
        # Closed set of states a job response may report.
        self.__response_type = [
            'Failed', 'In progress',
            'Finished successfully', 'does not exist'
        ]
        # Blocking AMQP connection to the broker on localhost.
        self.connection = pika.BlockingConnection(
            pika.ConnectionParameters('127.0.0.1'))
        self.channel = self.connection.channel()
        self.channel.queue_declare(queue='module_queue')
        # File that records correlation ids of submitted jobs.
        self.__response_file = 'correlation_ids'

    def get_response(self, correlation_id):
        """Get response according to job_id. It can be either
        'Failed', 'In progress', 'Finished successfully' or 'does not exist'

        Arguments:
            :param correlation_id: (str) job_id searched between responses
            :return one of the following - 'Failed', 'In progress',

        (docstring and method body truncated in this view)
        """
import json import os import shutil import subprocess import urllib2 import requests from urllib2 import URLError import jinja2 import time import tools.utility.log as log from tools.utility import yangParser from tools.utility.util import get_curr_dir LOGGER = log.get_logger('statistics') NS_MAP = { "http://cisco.com/ns/yang/": "cisco", "http://www.huawei.com/netconf": "huawei", "http://openconfig.net/yang/": "openconfig", "http://tail-f.com/": "tail-f" } MISSING_ELEMENT = 'missing%20element' def find_first_file(directory, pattern, pattern_with_revision): """Search for yang file on path Arguments: :param directory: (str) directory which should be search recursively for specified file.
import errno
import fnmatch
import json
import os
import shutil
import subprocess
import threading
import unicodedata

import requests

import tools.utility.log as log
from tools.api.receiver import send_to_indexing
from tools.parseAndPopulate.modulesComplicatedAlgorithms import ModulesComplicatedAlgorithms

LOGGER = log.get_logger('populate', '/home/miroslav/log/populate/yang.log')


def run_complicated_algorithms():
    """Instantiate ModulesComplicatedAlgorithms from this script's globals
    and execute its parse() and populate() phases.

    NOTE(review): depends on module-level names ``yangcatalog_api_prefix``,
    ``args`` and ``direc`` that are defined elsewhere in this script.
    """
    complicatedAlgorithms = ModulesComplicatedAlgorithms(yangcatalog_api_prefix,
                                                         args.credentials,
                                                         args.protocol,
                                                         args.ip,
                                                         args.port,
                                                         args.save_file_dir,
                                                         direc,
                                                         None)
    complicatedAlgorithms.parse()
    complicatedAlgorithms.populate()


def find_files(directory, pattern):
    """Walk `directory` for files matching `pattern`.

    NOTE(review): truncated in this view -- the trailing ``yield filename``
    seen in sibling copies of this helper is not visible here.
    """
    for root, dirs, files in os.walk(directory):
        for basename in files:
            if fnmatch.fnmatch(basename, pattern):
                filename = os.path.join(root, basename)
import MySQLdb
import MySQLdb.cursors
import json
import re

import tools.utility.log as lo

LOGGER = lo.get_logger('sql')

# DBF = '/var/yang/yang.db'

# Module-level MySQL connection handle, opened elsewhere.
conn = None
# you must create a Cursor object. It will let
# you execute all the queries you need
cur = None

# Use all the SQL you like
# cur.execute("SELECT * FROM YOUR_TABLE_NAME")

# print all the first cell of all the rows
# for row in cur.fetchall():
#     print(row[0])
# db.close()


def __mysql_regexp(pattern, buf, modifiers=re.I | re.S):
    """Case-insensitive, DOTALL regular-expression matcher registered as
    the SQL REGEXP user function.

    Returns True/False for a real match attempt, and None (SQL NULL)
    whenever either operand is NULL.
    """
    if pattern is None or buf is None:
        # Mirror SQL semantics: any comparison with NULL yields NULL.
        return None
    compiled = re.compile(pattern, modifiers)
    return compiled.search(buf) is not None
from __future__ import print_function

import argparse
import fnmatch
import json
import tools.utility.log as log
import os
import shutil
import time

import capability as cap
import prepare
import statistics
import statisticsInCatalog

LOGGER = log.get_logger('runCapabilities')


def find_missing_hello(directory, pattern):
    """Yield the directory path (once per matching file) for every
    directory that holds a file matching `pattern` but no file name
    containing ".xml" (i.e. no hello-message file)."""
    for root, dirs, files in os.walk(directory):
        for basename in files:
            if fnmatch.fnmatch(basename, pattern):
                # Substring check, not an extension check -- NOTE(review):
                # "a.xmlx" would also count; confirm intended.
                if not any(".xml" in name for name in files):
                    yield root


def find_files(directory, pattern):
    """Search `directory` for files matching `pattern`.

    NOTE(review): truncated in this view -- no ``yield`` is visible here,
    unlike the sibling copies of this helper.
    """
    for root, dirs, files in os.walk(directory):
        for basename in files:
            if fnmatch.fnmatch(basename, pattern):
                filename = os.path.join(root, basename)
import ConfigParser import os import smtplib import sys from email.mime.text import MIMEText from ciscosparkapi import CiscoSparkAPI import tools.utility.log as lo LOGGER = lo.get_logger('Messaging') GREETINGS = 'Hello from yang-catalog' class MessageFactory: def __init__(self, config_path='../utility/config.ini'): def list_matching_rooms(a, title_match): return [r for r in a.rooms.list() if title_match in r.title] LOGGER.info('Initialising Message') config_path = os.path.abspath('.') + '/' + config_path config = ConfigParser.ConfigParser() config.read(config_path) token = config.get('Message-Section', 'access-token') self.__api = CiscoSparkAPI(access_token=token) rooms = list_matching_rooms(self.__api, 'YANG Catalog admin') if len(rooms) == 0: LOGGER.error('Need at least one room') sys.exit(1) if len(rooms) != 1:
# searching for file based on pattern or pattern_with_revision import fnmatch import json import os import urllib2 from numpy.f2py.auxfuncs import throw_error import tools.utility.log as lo from tools.utility import yangParser LOGGER = lo.get_logger('util') def get_curr_dir(f): LOGGER.debug('{}'.format(os.getcwd())) return os.getcwd() def resolve_results(url): failed = True html = None tries = 10 results = [] while failed: try: html = urllib2.urlopen(url).read() failed = False except: tries -= 1 if tries == 0:
import ConfigParser
import argparse
import json
import os

import requests

import tools.utility.log as log
from tools.utility import repoutil, yangParser

LOGGER = log.get_logger('openconfigPullLocal')


def resolve_revision(yang_file):
    """Return the first ``revision`` statement of a YANG file, or
    '1970-01-01' if the file cannot be parsed or lacks a revision.

    :param yang_file: (str) path to the YANG module file
    :return: (str) revision date
    """
    try:
        parsed_yang = yangParser.parse(os.path.abspath(yang_file))
        revision = parsed_yang.search('revision')[0].arg
    except Exception:
        # Narrowed from a bare ``except:``, which also trapped
        # SystemExit/KeyboardInterrupt; parse failures still fall back
        # to the epoch date exactly as before.
        revision = '1970-01-01'
    return revision


if __name__ == "__main__":
    LOGGER.info('Starting Cron job openconfig pull request local')
    parser = argparse.ArgumentParser()
    parser.add_argument('--config-path', type=str,
                        default='../utility/config.ini',
                        help='Set path to config file')
    args = parser.parse_args()
    config_path = os.path.abspath('.') + '/' + args.config_path
    # NOTE(review): script continues beyond this view.
import sys import tarfile import urllib import urllib2 import requests from numpy.f2py.auxfuncs import throw_error from travispy import TravisPy from travispy.errors import TravisError import tools.utility.log as log from tools.ietfYangDraftPull.draftPullLocal import check_name_no_revision_exist, \ check_early_revisions from tools.utility import repoutil, messageFactory LOGGER = log.get_logger('draftPull', '/home/miroslav/log/jobs/yang.log') def load_json_from_url(url): failed = True loaded_json = None tries = 10 while failed: try: response = urllib2.urlopen(url).read() loaded_json = json.loads(response) failed = False except: tries -= 1 if tries == 0: failed = False
import fnmatch import os from tools.utility import yangParser import tools.utility.log as log LOGGER = log.get_logger('dependencies') def search_dependencies(base): for root, dirs, files in os.walk(base): for basename in files: if fnmatch.fnmatch(basename, '*.yang'): filename = os.path.join(root, basename) LOGGER.info('Parsing file {}'.format(filename)) try: yangFile = yangParser.parse(os.path.abspath(filename)) try: revision = yangFile.search('revision')[0].arg except: revision = '1970-01-01' name = filename.split('/')[-1].split('.')[0].split('@')[0] key = '{}@{}.yang'.format(name, revision) if key not in dependencies: dependencies[key] = set() yangImport = yangFile.search('import') yangInclude = yangFile.search('include') for imp in yangImport: impName = imp.arg impRev = None for sub in imp.substmts:
import fnmatch import os from tools.utility import yangParser import tools.utility.log as log from tools.utility.util import get_curr_dir LOGGER = log.get_logger('dependencies', '/home/miroslav/log/populate/yang.log') def search_dependencies(base): for root, dirs, files in os.walk(base): for basename in files: if fnmatch.fnmatch(basename, '*.yang'): filename = os.path.join(root, basename) LOGGER.info('Parsing file {}'.format(filename)) try: yangFile = yangParser.parse(os.path.abspath(filename)) try: revision = yangFile.search('revision')[0].arg except: revision = '1970-01-01' name = filename.split('/')[-1].split('.')[0].split('@')[0] key = '{}@{}.yang'.format(name, revision) if key not in dependencies: dependencies[key] = set() yangImport = yangFile.search('import') yangInclude = yangFile.search('include') for imp in yangImport: impName = imp.arg impRev = None
import fnmatch import json import os import shutil import subprocess import urllib2 import requests from urllib2 import URLError import jinja2 import time import tools.utility.log as log from tools.utility import yangParser, repoutil LOGGER = log.get_logger('statistics', '/home/miroslav/log/jobs/yang.log') NS_MAP = { "http://cisco.com/ns/yang/": "cisco", "http://www.huawei.com/netconf": "huawei", "http://openconfig.net/yang/": "openconfig", "http://tail-f.com/": "tail-f" } MISSING_ELEMENT = 'missing%20element' def find_first_file(directory, pattern, pattern_with_revision): """Search for yang file on path Arguments: :param directory: (str) directory which should be search recursively for specified file.