Пример #1
0
def importprojectdir(dir_project, file_type):
    """Open every ``*.<file_type>`` and ``*.json`` file in *dir_project*
    and hand the opened files to ``importprojectfile``.
    """
    patterns = ('*.' + file_type, '*.json')
    opened = [
        Util().openfile(file_name)
        for pattern in patterns
        for file_name in glob.glob(os.path.join(dir_project, pattern))
    ]
    return importprojectfile(opened)
Пример #2
0
 def load(self):
     """Scan self.folder for .dep files whose original file no longer
     exists and record them via self.append; returns self for chaining.
     """
     print('load 1')
     for fn in Util().getFileList(self.folder, ".dep"):
         original = self.getOriginalName(fn)
         if not Util().file_exists(self.folder, original):
             # Original artifact is gone -> remember this .dep entry.
             self.append(fn)
         else:
             print('found skip ', fn)
     return self
Пример #3
0
    def get_add_element(self, request):
        """Add a new node of the POSTed element_type to the project's TOSCA model.

        Reads group_id / element_id / element_type from request.POST, builds a
        node populated with placeholder values for every required property
        (walking up the type hierarchy via 'derived_from'), stores it in
        self.data_project and calls self.update().  Returns True on completion.
        """

        result = False

        group_id = request.POST.get('group_id')
        element_id = request.POST.get('element_id')
        element_type = request.POST.get('element_type')
        current_data = json.loads(self.data_project)
        # NFV node type definitions loaded from the TOSCA NFV definition file.
        tosca_nfv_definition = Util().loadyamlfile(PATH_TO_TOSCA_NFV_DEFINITION)
        node_types = {}
        node_types.update(tosca_nfv_definition['node_types'])
        new_element = {}
        new_element['type'] = element_type
        type_definition = node_types[element_type]
        # Walk the type hierarchy: collect defaults for every required property
        # at each level, then continue with the parent ('derived_from') type.
        while element_type in node_types:
            type_definition = node_types[element_type]
            if 'properties' in type_definition:
                for propriety in type_definition['properties']:
                    # A property counts as required when 'required' is absent
                    # (the TOSCA default) or explicitly truthy.
                    if 'required' not in type_definition['properties'][propriety] or \
                            type_definition['properties'][propriety]['required']:
                        if 'properties' not in new_element:
                            new_element['properties'] = {}
                        if propriety == 'version':
                            new_element['properties'][propriety] = 1.0
                        else:
                            # Placeholder default ('prova' is Italian for 'test').
                            new_element['properties'][propriety] = 'prova'
            element_type = type_definition['derived_from'] if 'derived_from' in type_definition else None
        if new_element['type'] == 'tosca.nodes.nfv.VNF':
            # A VNF gets its own sub-template, imported by the current group.
            if 'imports' not in current_data['toscayaml'][group_id] or current_data['toscayaml'][group_id][
                'imports'] is None:
                current_data['toscayaml'][group_id]['imports'] = []
            current_data['toscayaml'][group_id]['imports'].append(element_id + '.yaml')
            vnf_template = Util().loadyamlfile(PATH_TO_DESCRIPTORS_TEMPLATES + 'vnf.yaml')
            # NOTE(review): 'subsititution_mappings' looks like a typo of the TOSCA
            # key 'substitution_mappings' — confirm against the template consumers.
            vnf_template['topology_template']['subsititution_mappings'] = 'tosca.nodes.nfv.VNF.' + element_id
            vnf_template['topology_template']['node_templates'] = {}
            vnf_template['imports'] = []
            vnf_template['node_types']['tosca.nodes.nfv.VNF.' + element_id] = {}
            vnf_template['node_types']['tosca.nodes.nfv.VNF.' + element_id]['derived_from'] = 'tosca.nodes.nfv.VNF'
            current_data['toscayaml'][element_id] = vnf_template
        # Ensure the group has a node_templates dict, then register the new node.
        if 'node_templates' not in current_data['toscayaml'][group_id]['topology_template'] or current_data['toscayaml'][group_id]['topology_template']['node_templates'] is None:
            current_data['toscayaml'][group_id]['topology_template']['node_templates'] = {}
        current_data['toscayaml'][group_id]['topology_template']['node_templates'][element_id] = new_element

        # NOTE(review): current_data is a dict here although self.data_project was
        # a JSON string above — confirm self.update() expects the parsed form.
        self.data_project = current_data
        # self.validated = validate #TODO(stefano) not clear if this is the validation for the whole project
        self.update()
        result = True
        return result
Пример #4
0
    def __requestAuthToken(self):
        """Request a fresh auth token from the Getui REST API.

        Returns the token string, or None when no push config exists or the
        HTTP call (or its response handling) fails.
        """
        helper = Util()
        config = helper.getPushConfig()
        if config is None:
            return None

        # Millisecond timestamp, signed together with the app credentials.
        timestamp = str(int(round(time.time() * 1000)))
        digest = hashlib.sha256(
            (config['AppKey'] + timestamp + config['MasterSecret']).encode("utf8")
        ).hexdigest()
        payload = {
            "sign": digest,
            "timestamp": timestamp,
            "appkey": config['AppKey'],
        }

        try:
            url = "https://restapi.getui.com/v1/" + config['AppId'] + "/auth_sign"
            ret = helper.httpsPost(url, params=payload)
            if ret['result'] == 'ok':
                self.__setAuthToken(ret)
            return ret['auth_token']
        except Exception as err:
            print(err)
            return None
Пример #5
0
def main():
    """Render tree-shape plots for every YAML file in opt.yaml_dir.

    Builds one TreeShape from all parsed YAML files, writes the combined
    plot (with an optional slider row) to opt.html_file.  Returns -1 when
    no file produced a plottable graph, otherwise None.
    """
    opt = parse_cmdline()
    flist = os.listdir(opt.yaml_dir)

    ts = TreeShape(fanout=opt.fanout, width=900, height=900)
    u = Util()
    for f in sorted(flist):
        ybuf = u.file_to_yaml(f"{opt.yaml_dir}/{f}")
        d = ts.process_yaml(ybuf)
        # Identity check (was `!= None`): skip files with no tree data.
        if d is not None:
            ts.add_tree_data(d)

    p, slider = ts.plot_graph(title="cN Tree Shape")
    if p is None:
        print("Empty List")
        return -1

    # The optional slider gets its own row below the main plot.
    composite = [[p], [slider]] if slider else [[p]]

    output_file(opt.html_file)
    plot = layout(composite)
    save(plot)
    print(f"Plot: {opt.html_file}")
Пример #6
0
 def setUp(self):
     """Initialise the Volume test: primary node handle, Util wrapper and
     workload sizing knobs.
     """
     super(Volume, self).setUp()
     node = self.input["node"]
     self.primary_node = node
     self.util = Util(node)
     self.data_load_flag = False
     self.number_of_cycles = 300  # ToDO: scale it up later
     self.number_of_kv_pairs = 100000  # ToDO: scale it up later
Пример #7
0
 def import_kubernetes_from_dir_project(cls, dir_project):
     """Load every YAML file under <dir_project>/K8S into a dict keyed by
     the file's basename without extension.
     """
     result = {}
     pattern = os.path.join(dir_project, 'K8S', '*.yaml')
     for k8s_filename in glob.glob(pattern):
         log.info(k8s_filename)
         # YAML -> JSON text -> plain (byteified) python object.
         json_object = Util.json_loads_byteified(
             Util.yaml2json(Util().loadyamlfile(k8s_filename)))
         stem = os.path.splitext(os.path.basename(str(k8s_filename)))[0]
         result[stem] = json_object
     return result
Пример #8
0
    def get_add_element(self, request):
        """Add a new node of the POSTed element_type to the TOSCA topology.

        Besides building the node (with placeholder values for every required
        property, walking the type hierarchy via 'derived_from'), it records
        the node's (x, y) canvas position under current_data['positions'].
        Returns True on completion.
        """
        result = False

        group_id = request.POST.get('group_id')
        element_id = request.POST.get('element_id')
        element_type = request.POST.get('element_type')
        # Canvas coordinates of the new node, as POSTed (strings).
        x = request.POST.get('x')
        y = request.POST.get('y')
        current_data = json.loads(self.data_project)
        tosca_definition = Util().loadyamlfile(PATH_TO_TOSCA_DEFINITION)
        node_types = {}
        node_types.update(tosca_definition['node_types'])
        new_element = {}
        new_element['type'] = element_type
        type_definition = node_types[element_type]
        # Walk the type hierarchy, collecting defaults for required properties.
        while element_type in node_types:
            type_definition = node_types[element_type]
            if 'properties' in type_definition:
                for propriety in type_definition['properties']:
                    # Required when 'required' is absent (TOSCA default) or truthy.
                    if 'required' not in type_definition['properties'][
                            propriety] or type_definition['properties'][
                                propriety]['required']:
                        if 'properties' not in new_element:
                            new_element['properties'] = {}
                        if propriety == 'version':
                            new_element['properties'][propriety] = 1.0
                        else:
                            # Size-typed properties get a concrete default unit.
                            if type_definition['properties'][propriety][
                                    'type'] == 'scalar-unit.size':
                                new_element['properties'][propriety] = '1 MB'
                            else:
                                # Placeholder ('prova' is Italian for 'test').
                                new_element['properties'][propriety] = 'prova'
            element_type = type_definition[
                'derived_from'] if 'derived_from' in type_definition else None
        # Ensure the group has a node_templates dict, then register the node.
        if 'node_templates' not in current_data['toscayaml'][group_id][
                'topology_template'] or current_data['toscayaml'][group_id][
                    'topology_template']['node_templates'] is None:
            current_data['toscayaml'][group_id]['topology_template'][
                'node_templates'] = {}
        current_data['toscayaml'][group_id]['topology_template'][
            'node_templates'][element_id] = new_element
        # Build the positions -> vertices -> element_id chain on demand.
        if 'positions' not in current_data:
            current_data['positions'] = {}
        if 'vertices' not in current_data['positions']:
            current_data['positions']['vertices'] = {}
        if element_id not in current_data['positions']['vertices']:
            current_data['positions']['vertices'][element_id] = {}
        current_data['positions']['vertices'][element_id]['x'] = x
        current_data['positions']['vertices'][element_id]['y'] = y
        # NOTE(review): current_data is a dict here although self.data_project
        # was parsed from JSON above — confirm update() expects the parsed form.
        self.data_project = current_data
        # self.validated = validate #TODO(stefano) not clear if this is the validation for the whole project
        self.update()
        result = True
        return result
Пример #9
0
    def get_descriptor_template(cls, type_descriptor):
        """Returns a descriptor template for a given descriptor type.

        NOTE: type_descriptor is currently ignored — a fixed hello-world NFV
        TOSCA file is loaded instead.  Returns the parsed template as a dict,
        or False on any error while loading/converting it.
        """
        try:
            # FIXME: a real per-type template still needs to be created
            # (original note: "bisogna creare un template").
            yaml_object = Util().loadyamlfile(
                'toscaparser/extensions/nfv/tests/data/tosca_helloworld_nfv.yaml')
            toscajson = json.loads(Util.yaml2json(yaml_object))
            return toscajson
        except Exception:
            # Was a Python 2 print statement; fixed to the print() function.
            # log.error(...) #TODO(stefano) add logging
            print('Exception in get descriptor template')
            return False
Пример #10
0
    def __requestAccessToken(self):
        """Fetch a WeChat Work access token for this agent and cache it via
        __setAccessToken.  Returns the token, or None when no configuration
        exists for the agent or the request fails.
        """
        helper = Util()
        config = helper.getConfigByAgentId(self.agentId)
        if config is None:
            print('No configuration for '+ str(self.agentId) +' was found!')
            return None

        requestTime = str(int(time.time()))
        try:
            url = ('https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid='
                   + config['CorpId'] + '&corpsecret=' + config['Secret'])
            res = helper.httpsGet(url)
            token_record = {
                'expires_in': res['expires_in'],
                'access_token': res['access_token'],
                'request_time': requestTime,
            }
            self.__setAccessToken(token_record)
            return token_record['access_token']
        except Exception as err:
            print(err)
            return None
Пример #11
0
    def get_descriptor_template(cls, type_descriptor):
        """Returns a descriptor template for a given descriptor type.

        NOTE: type_descriptor is currently ignored — a fixed one-server /
        three-networks TOSCA example is loaded instead.  Returns the parsed
        template as a dict, or False on any error while loading/converting it.
        """
        try:
            # FIXME: a real per-type template still needs to be created
            # (original note: "bisogna creare un template").
            yaml_object = Util().loadyamlfile(
                'usecases/TOSCA/One-Server-Three-Networks/YAML/tosca_one_server_three_networks.yaml'
            )
            toscajson = json.loads(Util.yaml2json(yaml_object))
            return toscajson
        except Exception:
            # Was a Python 2 print statement; fixed to the print() function.
            # log.error(...) #TODO(stefano) add logging
            print('Exception in get descriptor template')
            return False
Пример #12
0
    def importprojectfiles(cls, file_dict):
        """Imports descriptors (extracted from the new project POST).

        file_dict maps a descriptor type ('nsd', 'vnfd', 'click', 'k8s') to a
        list of uploaded files; k8s files are YAML, the rest are JSON.
        """
        project = {'nsd': {}, 'vnfd': {}, 'click': {}, 'k8s': {}}
        for desc_type in project:
            for file in file_dict.get(desc_type, []):
                if desc_type == 'k8s':
                    # YAML -> JSON text -> byteified python object.
                    yaml_object = Util().loadyamlfile(file)
                    parsed = Util.json_loads_byteified(
                        Util.yaml2json(yaml_object))
                    key = os.path.splitext(os.path.basename(str(file)))[0]
                else:
                    parsed = json.loads(file.read())
                    key = os.path.splitext(file.name)[0]
                project[desc_type][key] = parsed

        return project
Пример #13
0
def make_plots(arg_dict):
    """Build heading, tree-shape and crossover plots for one configuration.

    Expects vlen, nkey, tree_path and crossover_path in arg_dict and writes
    the assembled layout back to arg_dict["plots"].
    """
    u = Util()

    vlen = arg_dict["vlen"]
    nkey = arg_dict["nkey"]
    tree_path = arg_dict["tree_path"]
    crossover_path = arg_dict["crossover_path"]

    title = f"vlen: {vlen} nkey: {u.humanize(nkey)} size: {u.humanize(nkey * vlen)}"
    heading = Div(
        text=f"""
    <hr style="width:100%">
    <h2>{title}</h2>
    """,
        width=1000,
    )

    ts = TreeShape(fanout=16, width=500, height=500)
    c = Crossover(width=1500, height=500)

    # Tree-shape plot, titled with the average kvset counts.
    ybuf = u.file_to_yaml(tree_path)
    ts.add_tree_data(ts.process_yaml(ybuf))
    tot_kvsets, c_kvsets_avg, g_kvsets_avg = ts.avg_nkvset(ybuf, pivot=2)
    tree_plot, slider = ts.plot_graph(
        title=f"nkvsets Get: {g_kvsets_avg} Cursor: {c_kvsets_avg} Total: {tot_kvsets}")

    # Crossover plot driven by the same average kvset counts.
    crossover_plot = c.plot(df=c.df_for_nkeys(nkey_dir=crossover_path,
                                              c_avg_nkvset=c_kvsets_avg,
                                              g_avg_nkvset=g_kvsets_avg))

    # Output
    arg_dict["plots"] = column(heading, row(tree_plot, crossover_plot))
Пример #14
0
    def create_teams(self, client, message):
        """Split the author's voice-channel members into randomized teams and
        return the formatted result (or an error string on failure).
        """
        vc = Util(client).GetAuthorVChannel(message)
        if vc is None:
            return "error:投稿者はボイスチャンネルに接続していません"

        names = self.modify_user_list(
            [member.display_name for member in vc.members], message)
        shuffled = random.sample(names, len(names))

        team_num = self.set_team_num(message)
        team_size = self.set_team_size(message)

        if "-lol" in message.content:
            # LoL mode needs at least two full teams of five.
            if len(shuffled) < 10:
                return "error: 人数が10人未満です。"
            mes = self.lol_result(self.make_lol_team(shuffled))
        else:
            mes = self.normal_result(self.split_list(shuffled, team_num))

        return self.create_team_headder(team_num, team_size) + mes
Пример #15
0
 def setUp(self):
     """Wire up the primary node and its Util helper for the simple tests."""
     super(SimpleTests, self).setUp()
     node = self.input["node"]
     self.primary_node = node
     self.util = Util(node)
Пример #16
0
                        db='one_db').setup()
print('gitDev', gitDev)
#system_folder='~/LyttleBit/projects/lb-_documents'

#in_folder = '../one_db/sql'
#out_folder = '../'
in_folder = gitDev.getDbFolder('sql')
#in_folder = '{}/{}'.format(system_folder,project_name)
#out_folder = '{}/{}'.format(system_folder,project_name)
out_folder = gitDev.getDbFolder()
#print('in_folder', in_folder)
#print('out_folder', out_folder)

outname = 'README.sql.md'
## READ: Make list of file in a folder
files = Util().getFileList(in_folder, ext='sql')
files.sort()

#print('files: ', files)
## PROCESS: Read all files

readme = []
#print('# Changes')
readme.append('\n')
readme.append('# Changes')
readme.append('\n')
readme.append('| file | type | detail |')
readme.append('| ---- | ------- | ------ |')

for f in files:
Пример #17
0
from random import randint
from threading import Timer
from lib.util import Util
from lib.wallet import Wallet
from lib.transaction import Transaction

utils = Util()


class Node():
    def __init__(self, seed, *args):
        """Create a node: derive its wallet from the seed and seed the DAG
        with the root (genesis) transaction.
        """
        self.seed = seed
        self.wallet = Wallet(utils.hash(str(self.seed)))
        self.neighbours = set()
        self.DAG = {}

        # Genesis transaction every DAG starts from.
        genesis = Transaction('0000', 'root', 25000000000)
        self.attach_transaction(genesis, ['root'])

    def proof_of_work(self, str_input):
        """Brute-force the smallest nonce that makes str_input a valid proof.

        Increments an integer nonce until is_valid_proof accepts it and
        returns that nonce.  (Replaced the `... is False` comparison with
        the idiomatic `not ...`; is_valid_proof returns a bool.)
        """
        proof = 0
        while not self.is_valid_proof(str_input, proof):
            proof += 1
        return proof

    def is_valid_proof(self, str_input, proof):
        """A proof is valid when the hash of str_input+proof contains at least
        two '00' pairs and ends with '0000'.
        """
        digest = utils.hash(str_input + str(proof))
        return digest.endswith("0000") and digest.count("00") >= 2

    def select_tips(self):
        available_transactions = []
Пример #18
0
def main():
    combined_sql_list = []  # eventually is written to a file
    # [* Generate Heroku Scripts]
    print('Generate Heroku Scripts')
    print('  - Scripts to load, restart, and drop heroku applicatons')

    # [* All configuration files are stored in /config folder]
    config_folder = '{}'.format(os.getcwd().replace('_tasks', 'config'))

    # [1. Select APP Source ]
    sourceConfiguration = open_api(config_folder, file_type="source")
    if not sourceConfiguration:
        print('cancel')
        exit(0)

    # [2. Select APP Target ]
    targetConfiguration = open_api(config_folder, file_type="target")
    if not targetConfiguration:
        print('cancel')
        exit(0)

    # [* Merge Source and Target]
    sourceConfiguration.update(targetConfiguration)
    apiConfiguration = sourceConfiguration

    # setup default environment
    homeDev = HomeDevelopment().setup()

    # [* Create missing folders]
    sourceDev = HomeDevelopment().setup()
    targetDev = HomeDevelopment().setup()

    # [* Scan configuration for home, source, and target environment configurations]
    for apiName in apiConfiguration:
        if apiName == 'source':
            # [* Find Source configuration]
            sourceDev = get_environment(apiConfiguration[apiName])

        elif apiName == 'target':
            # [* Find Target configuration]
            targetDev = get_environment(apiConfiguration[apiName])

    pprint(targetDev)
    #exit(0)

    print('=================')
    report(homeDev, sourceDev, targetDev)
    print('=================')
    ##########
    # [## Heroku Scripts]
    # [* generate a deployment script]
    # [* generate a destroy script]
    # [* generate a restart script]
    #####
    # configuration eg {"lb-a": {"heroku": {"operations":"C"}}}
    apiNameList = [nm for nm in apiConfiguration
                        if 'heroku' in  apiConfiguration[nm]
                            and 'operations' in apiConfiguration[nm]['heroku']
                            and 'C' in apiConfiguration[nm]['heroku']['operations']]

    #apiNameList = [nm for nm in apiConfiguration
    #                    if 'operations' in  apiConfiguration[nm]['heroku']
    #                        and 'C' in apiConfiguration[nm]['heroku']['operations'] ]

    #apiNameList = [nm for nm in apiConfiguration if 'deploy' in  apiConfiguration and apiConfiguration[nm]['deploy']]

    print('apiNameList',apiNameList)
    for appName in apiNameList:
        print('=====',appName,'=====')
        #pprint(apiConfiguration.flatten())
        pprint(apiConfiguration.flatten(appName, parent_key='app'))

        # [* Deployment script]
        # [ * Dont overwrite scripts when they exist]

        scriptFolder = targetDev.getFolder('sh-scripts')
        # [* Create Heroku deployment script]
        #scriptName = '{}.CREATE.sh'.format(appName)
        scriptName = 'heroku.CREATE.sh'

        overwrite = False
        if Util().file_exists(scriptFolder, scriptName):
            if Util().confirm('Overwrite {}'.format(scriptName), default='N') != 'N':
                overwrite = True
        if overwrite or not Util().file_exists(scriptFolder, scriptName):
            #print('====Flatten=====')
            tmpl = ScriptTemplate(appName,folder='../templates', filename='heroku.create.app.template') \
                .apply(apiConfiguration.flatten()) \
                .apply(apiConfiguration.flatten(appName, parent_key='app'))

            tmpl.saveAs(scriptFolder, scriptName)
            cmd = 'chmod 755 {}/{}'.format(scriptFolder, scriptName)
            returned_value = subprocess.call(cmd, shell=True)  # returns the exit code in unix
            print('returned value:', returned_value)
        overwrite = False

        # [* Create Heroku destroy script]
        #scriptName = '{}.DELETE.sh'.format(appName)
        scriptName = 'heroku.DELETE.sh'

        if Util().file_exists(scriptFolder, scriptName):
            if Util().confirm('Overwrite {}'.format(scriptName), default='N') != 'N':
                overwrite = True
        if overwrite or not Util().file_exists(scriptFolder, scriptName):
            #print('====Flatten=====')
            tmpl = ScriptTemplate(appName,folder='../templates', filename='heroku.delete.app.template') \
                .apply(apiConfiguration.flatten()) \
                .apply(apiConfiguration.flatten(appName, parent_key='app'))
            print('output to ', scriptFolder)
            tmpl.saveAs(scriptFolder, scriptName)
            cmd = 'chmod 755 {}/{}'.format(scriptFolder, scriptName)
            returned_value = subprocess.call(cmd, shell=True)  # returns the exit code in unix
            print('returned value:', returned_value)
        overwrite = False
Пример #19
0
import time
from bs4 import BeautifulSoup
from lib.dispatchUrl import DispatchUrl
from lib.util import Util

dispatchUrl = DispatchUrl()
# Scraper helper bound to the target site.
caijiUtil = Util(host='http://www.cnedu.cn/')
beginUrls = [
    'http://www.cnedu.cn/examination/courses/page1.shtm',
]
# All paginated listing pages discovered so far.
pageList = []
# Collected set of every article URL.
urlList = []
index = 0
# Append-mode logs of URLs that failed at level 1 / level 2.
failLvl2Urls = open('./data/failLvl2Urls.txt', 'a')
failLvl1Urls = open('./data/failLvl1Urls.txt', 'a')

for beginUrl in beginUrls:
    html = caijiUtil.getUrlContent(beginUrl)
    if not html:
        # Runtime message kept verbatim ("failed to fetch the index listing").
        print("获取首页列表失败:: " + beginUrl)
        failLvl1Urls.write(beginUrl)
        continue
    #soup = BeautifulSoup(html,"html5lib")
    # Hard-coded total page count — presumably observed manually; TODO confirm.
    totoal = 1655
    # Collect every listing page by substituting the page number into the URL.
    #pageList.append(beginUrl)
    for i in range(796, totoal):
        url_ = beginUrl.replace('1.shtm', '%s.shtm' % (i))
        pageList.append(url_)
Пример #20
0
#coding=utf-8
import time
from bs4 import BeautifulSoup
from lib.dispatchUrl import DispatchUrl
from lib.util import  Util

dispatchUrl = DispatchUrl()
caijiUtil = Util(host='http://zhenti.kaoyan.eol.cn/')
beginUrls  =[
           'http://kaoyan.eol.cn/shiti/zhengzhi/index.shtml',
           'http://kaoyan.eol.cn/shiti/yingyu/index.shtml',
           'http://kaoyan.eol.cn/shiti/shuxue/index.shtml',
         ]
# 获取所有的的列表页
pageList = []
# 所有的url集合
urlList = []
index = 0
for beginUrl in beginUrls:

    html = caijiUtil.getUrlContent(beginUrl)
    if not html:
        print("获取首页列表失败:: "+beginUrl)
        continue

    soup = BeautifulSoup(html,"html5lib")
    pageList_ = soup.select('.page_left #pagenav')
    totoal = caijiUtil.getTotalPage(str(pageList_[0]))

    #获取所有列表页
    pageList.append(beginUrl)
Пример #21
0
def main():
    combined_sql_list = []  # eventually is written to a file
    # [Generate API sql file]
    print('Generate API')
    print('  - load api configuration, generate funcPattern key and values')
    # get configuration file name {folder: "", name: ""}
    # get list of files of type .json in folder ./config

    # [Use a configuration file]
    config_folder = '{}'.format(os.getcwd().replace('_tasks', 'config'))

    # [Select API Source ]
    sourceConfiguration = open_api(config_folder, file_type="source")
    if not sourceConfiguration:
        print('cancel')
        exit(0)

    # [Select API Target ]
    targetConfiguration = open_api(config_folder, file_type="target")
    if not targetConfiguration:
        print('cancel')
        exit(0)

    # [Merge Source and Target]
    sourceConfiguration.update(targetConfiguration)
    apiConfiguration = sourceConfiguration

    # setup default environment
    homeDev = HomeDevelopment().setup()

    # [Create missing folders]
    sourceDev = HomeDevelopment().setup()
    targetDev = HomeDevelopment().setup()

    # [Scan configuration for home, source, and target environment configurations]
    for apiName in apiConfiguration:
        if apiName == 'source':
            # [Configure input sources from GIT repositories]
            sourceDev = get_environment(apiConfiguration[apiName])

        elif apiName == 'target':
            # [Configure output targets from GIT repositories]
            targetDev = get_environment(apiConfiguration[apiName])

    print('=================')
    report(homeDev, sourceDev, targetDev)
    print('=================')

    ##############
    # [Process Postgres Extentions]
    ##############
    if targetDev.getFolder('db'):
        if not Util().folder_exists(sourceDev.getFolder('db')):

            # [Copy first set of _tasks code and config files]
            Util().copyFolder(sourceDev.getFolder('db'),
                              targetDev.getFolder('db'))

        else:
            if not Util().confirm(
                    '* Install/Overwrite postgres code and extentions?', 'N'):
                print("  Overwriting postgres configuration and extentions")
                print('  - source ', sourceDev.getFolder('db'))
                print('  - target ', targetDev.getFolder('db'))
                # [Copy all files in extention/db folder]
                for fn in Util().getFileList(sourceDev.getFolder('db')):
                    if Util().file_exists(targetDev.getFolder('db'), fn):
                        if not Util().confirm('  -- Overwrite {}?'.format(fn),
                                              'N'):
                            Util().copy(
                                '{}/{}'.format(sourceDev.getFolder('db'), fn),
                                '{}/{}'.format(targetDev.getFolder('db'), fn))
                    else:
                        print('  - copy', fn)
                        Util().copy(
                            '{}/{}'.format(sourceDev.getFolder('db'), fn),
                            '{}/{}'.format(targetDev.getFolder('db'), fn))

    #############
    # [## Initalize docker-component]
    #############
    # [Define list of words to replace in docker-compose file]
    replace_ = ['one_db', 'hapi-api']
    replace_with = [targetDev.getName('db'), targetDev.getName('db_api')]
    if not Util().file_exists(targetDev.getFolder('project'),
                              'docker-compose.test.yml'):
        # [Copy docker-compose.yml to target when it doesnt exist]
        #print('Create new docker-compose.yml')
        dcDoc = Document(sourceDev.getFolder('project'),
                         'docker-compose.yml').load()
        dcDoc.replace(replace_, replace_with)
        dcDoc.saveAs(targetDev.getFolder('project'), 'docker-compose.test.yml')

    else:
        # [Ask to overwrite when docker-compose exists]
        if not Util().confirm('* Overwrite docker-compose.test.yml?', 'N'):

            dcDoc = Document(sourceDev.getFolder('project'),
                             'docker-compose.yml').load()

            backup = Util().makeBackupFile(targetDev.getFolder('project'),
                                           'docker-compose.test.yml')

            print('backup', backup)
            dcDoc = Document(sourceDev.getFolder('project'),
                             'docker-compose.yml').load()

            dcDoc.replace(replace_, replace_with)
            dcDoc.saveAs(targetDev.getFolder('project'),
                         'docker-compose.test.yml')

    ##############
    # [## .env         # if .env doesnt exist then create one]
    #############
    srcDoc = EnvironmentDocument(sourceDev.getFolder('env'), '.env').load()
    srcDoc.replace(replace_, replace_with)
    if not Util().file_exists(targetDev.getFolder('env'), '.env'):
        # [Copy .env to target when it doesnt exist]
        print('Create .env')
        srcDoc.saveAs(targetDev.getFolder('env'), '.env')
    else:  # [if .env exists then update and add new variables]
        if not Util().confirm('* Update .env?', 'N'):
            # [Ask to update when .e nv exists]
            print('Update .env')
            trgtDoc = EnvironmentDocument(targetDev.getFolder('env'),
                                          '.env').load()
            #pprint(trgtDoc)
            #print('--------')
            trgtDoc.backup()
            trgtDoc.update(srcDoc)
            trgtDoc.save()
            #trgtDoc.saveAs(targetDev.getFolder('env'), '.env')
            #pprint(trgtDoc)

    #############
    # [## Process Static Scripts]
    #############
    # [Static scripts end with .static.sql]
    if targetDev.getFolder('db'):
        if not Util().confirm('* Install/Overwrite static scripts?', 'N'):
            print("writing static scripts")

            #############
            # [Process Static Db-Api]
            #############

            process_documents_to(apiConfiguration, sourceDev, targetDev,
                                 'db.sql', '00.db.sql')

            #############
            # [Process Static Database Scripts]
            #############
            process_documents_to(apiConfiguration, sourceDev, targetDev,
                                 'table.sql', '10.base.table.sql')

            #############
            # [Process Base Function Scripts]
            #############
            # process_to_one(apiConfiguration, sourceDev, targetDev, 'base.function.sql','12.base.function.sql')
            process_documents_to(apiConfiguration, sourceDev, targetDev,
                                 'base.function.sql', '20.base.function.sql')

            #############
            # [Process Api Function Scripts]
            #############
            # process_to_one(apiConfiguration, sourceDev, targetDev, 'api.function.sql','20.api.function.sql')
            # combine_documents(apiConfiguration, sourceDev, targetDev, 'api.function.sql', '30.api.function.sql')
            process_documents_to(apiConfiguration, sourceDev, targetDev,
                                 'api.usage.sql', '30.api.usage.sql')

            #############
            # [Process Static Test Scripts]
            #############
            #combine_documents(apiConfiguration, sourceDev, targetDev, 'base.test.sql', '90.base.test.sql')
            #combine_documents(apiConfiguration, sourceDev, targetDev, 'api.test.sql', '92.api.test.sql')

            #############
            # [Process Static Data Scripts]
            # retired the data scripts. data is now encpsulated in tests
            #############
            # process_to_one(apiConfiguration, sourceDev, targetDev, 'data.sql','80.data.sql')

            #############
            # [Process Static Cleaup Scripts]
            # retired the data scripts. data is now encpsulated in tests
            #############
            # process_to_one(apiConfiguration, sourceDev, targetDev, 'cleanup.sql','98.test.cleanup.sql')

    #############
    # [Process DbApi]
    #############

    #Util().copyFolder(sourceDev.getFolder('db_api'), targetDev.getFolder('db_api'),ignore=shutil.ignore_patterns('node_modules'))

    #
    #############
    # [## Process multiple API SQL Definitions]
    # skip api-static, _tasks, source and target
    #############
    # [Set target _tasks name]
    if targetDev.getFolder('db'):
        apiNameList = [
            nm for nm in apiConfiguration
            if apiConfiguration[nm]['kind'] == 'api-definition'
        ]

        for apiName in apiNameList:

            apiScriptFilename = '{}.{}.api.function.sql'.format(
                apiConfiguration[apiName]['prefix'],
                apiConfiguration[apiName]['name'].upper())

            # [Combine all API functions into one script]
            combined_sql_list = []
            combined_sql_list.append('-- api      : {}'.format(
                apiName.upper()))
            combined_sql_list.append('-- schema   : {}'.format(
                apiConfiguration[apiName]['schema']))
            combined_sql_list.append('-- generated on: {}'.format(
                datetime.now()))
            combined_sql_list.append('-- source project: {} '.format(
                sourceDev.getName('project')))

            #combined_sql_list.append('\c {}'.format(targetDev.getName('db')))
            #combined_sql_list.append('SET search_path TO {};'.format(', '.join(apiConfiguration['_tasks']['schema'])))

            # [Generate POST Function]
            combined_sql_list.append('-- POST')
            combined_sql_list.extend(PostTemplate(apiName, folder='../templates', filename='post.sql.template') \
                                     .validate(apiConfiguration)\
                                     .apply(apiConfiguration))

            # [Generate GET Function]
            combined_sql_list.append('-- GET')
            combined_sql_list.extend(GetTemplate(apiName, folder='../templates', filename='get.sql.template') \
                                     .validate(apiConfiguration) \
                                     .apply(apiConfiguration))

            # [Generate DELETE Function]
            combined_sql_list.append('-- DELETE')
            combined_sql_list.extend(DeleteTemplate(apiName, folder='../templates', filename='delete.sql.template') \
                                     .validate(apiConfiguration) \
                                     .apply(apiConfiguration))

            # [Generate PUT Function]
            combined_sql_list.append('-- PUT')
            combined_sql_list.extend(PutTemplate(apiName, folder='../templates', filename='put.sql.template') \
                                     .validate(apiConfiguration) \
                                     .apply(apiConfiguration))

            # [Assemble API (POST, GET, PUT, and Delete) Functions into single script]
            newDoc = Document(targetDev.getFolder('scripts'),
                              apiScriptFilename).load(combined_sql_list)

            # [Confirm overwrite of existing API files]
            if not Util().file_exists(targetDev.getFolder('scripts'),
                                      apiScriptFilename):
                print('    - Writing API {} script'.format(apiScriptFilename))
                # [Create Api when Api doesnt exist]
                newDoc.write()
            else:
                if not Util().confirm(
                        '* Overwrite API {} script?'.format(apiScriptFilename),
                        'N'):
                    # [Confirm the overwrite of existing Api script]
                    # [Backup Api script before overwriting]
                    backup = Util().makeBackupFile(
                        targetDev.getFolder('scripts'), apiScriptFilename)
                    print("    - Overwriting API {} script".format(
                        apiScriptFilename))
                    newDoc.write()

        apiStaticNameList = [
            nm for nm in apiConfiguration
            if apiConfiguration[nm]['kind'] == 'api-static'
        ]
        #############
        # [## Process static API scripts]
        # Static scripts are copied, combined and renamed to the target folder
        #############
    if targetDev.getFolder('db'):

        for apiName in apiStaticNameList:

            sourceFilename = '{}.{}.{}.api.function.sql'.format(
                '24', apiConfiguration[apiName]['schema'].replace('_', '.'),
                apiConfiguration[apiName]['name'])
            targetFilename = '{}.{}.api.function.sql'.format(
                apiConfiguration[apiName]['prefix'],
                apiConfiguration[apiName]['name'].upper())

            combined_sql_list = []
            combined_sql_list.append('-- api      : {}'.format(
                apiName.upper()))
            combined_sql_list.append('-- schema   : {}'.format(
                apiConfiguration[apiName]['schema']))
            combined_sql_list.append('-- copied on: {}'.format(datetime.now()))
            combined_sql_list.append('-- source project: {}'.format(
                sourceDev.getName('project')))

            combined_sql_list.append('\c {}'.format(targetDev.getName('db')))
            combined_sql_list.append('SET search_path TO {};'.format(', '.join(
                apiConfiguration['_tasks']['schema'])))

            # [Define list of words to replace in docker-compose file]
            replace_ = ['one_db', 'hapi-api']
            replace_with = [
                targetDev.getName('db'),
                targetDev.getName('db_api')
            ]

            if not Util().file_exists(targetDev.getFolder('scripts'),
                                      targetFilename):

                # [Copy docker-compose.yml to target when it doesnt exist]

                print('* Create new {}'.format(targetFilename))
                print('    -- load {}'.format(apiName))
                print('    -- swap out values ')
                print('    -- write api to target ')
                dcDoc = Document(sourceDev.getFolder('scripts'),
                                 sourceFilename).load()
                dcDoc.replace(replace_, replace_with)
                dcDoc.saveAs(targetDev.getFolder('scripts'), targetFilename)

            else:
                # [Ask to overwrite when docker-compose exists]

                if not Util().confirm('* Overwrite {}?'.format(targetFilename),
                                      'N'):
                    #print('* Overwrite api-static')
                    print('    -- load {}'.format(apiName))
                    print('    -- replace values ')
                    print('    -- save api to target ')
                    dcDoc = Document(sourceDev.getFolder('scripts'),
                                     sourceFilename).load()

                    backup = Util().makeBackupFile(
                        targetDev.getFolder('scripts'), targetFilename)

                    print('    -- backup', backup)
                    dcDoc = Document(sourceDev.getFolder('scripts'),
                                     sourceFilename).load()
                    dcDoc.replace(replace_, replace_with)
                    dcDoc.saveAs(targetDev.getFolder('scripts'),
                                 targetFilename)

    ###################
    # [Write API tests]
    ##################
    '''
Пример #22
0
from lib.util import Util
from bs4 import BeautifulSoup
import re
import time

# Common variables shared by the scraping helpers below.
# (Comments translated from Chinese.)
totalPagePattern = re.compile(r'_PAGE_COUNT="(\d+)"')  # total page count embedded in the page markup
totalPagePattern1 = re.compile(r'_(\d+)\.shtml')       # page number encoded in the URL suffix
articleFull = ''                                       # accumulator for assembled article text
caijiUtil = Util(rootPath='D:/www/data',
                 host='http://kaoyan.eol.cn',
                 downloadPath='/files/kaoyan1')


# Fetch the scraped data from the URL and return it packed as a dict.
def caijiUrl(url):
    """Scrape one article page and return the collected data as a dict.

    Returns None when ``url`` is empty or the page download fails.
    NOTE(review): this definition is truncated in this chunk of the file;
    the dict assembly and return continue beyond view.
    """
    if not url:
        return None

    content = caijiUtil.getUrlContent(url)
    if not content:
        return None

    # html5lib tolerates the malformed markup common on this site
    soup = BeautifulSoup(content, 'html5lib')

    # article body lives in the CMS-specific .TRS_Editor container
    article = soup.select('.TRS_Editor')
    article = article[0] if article else None

    title = str(soup.select('.page_title')[0].text)
    tags = soup.select('.n_left a')
    tag = ''
Пример #23
0
#coding=utf-8
from bs4 import BeautifulSoup
import re, time, os, sys

sys.path.append("..")

from lib.dispatchUrl import DispatchUrl
from dal.articleDal import insertArticle
from lib.util import Util

# Common variables shared by the scraping helpers below.
# (Comments translated from Chinese.)
totalPagePattern = re.compile(r'_PAGE_COUNT="(\d+)"')  # total page count embedded in the page markup
totalPagePattern1 = re.compile(r'_(\d+)\.shtml')       # page number encoded in the URL suffix
articleFull = ''                                       # accumulator for assembled article text
caijiUtil = Util(rootPath='D:/www/data',
                 host='http://blog.csdn.net',
                 downloadPath='/files/blog0',
                 defaultExt='png')


# Fetch the scraped data from the URL and return it packed as a dict.
def caijiUrl(url):
    """Scrape one CSDN blog article page and return the collected data as a dict.

    Returns None when ``url`` is empty or the page download fails.
    NOTE(review): this definition is truncated in this chunk of the file.
    """
    if not url:
        return None
    content = caijiUtil.getUrlContent(url)

    if not content:
        return None

    # html5lib tolerates malformed markup
    soup = BeautifulSoup(content, 'html5lib')

    # CSDN article body container
    article = soup.select('#article_content')
Пример #24
0
#coding=utf-8
import time, re
from bs4 import BeautifulSoup
from lib.dispatchUrl import DispatchUrl
from lib.util import Util

# URL dispatcher and scraping utility for CSDN blog listings.
dispatchUrl = DispatchUrl('csdn')
caijiUtil = Util(host='http://blog.csdn.net/')

# First page of the blogger's article list.
# (Comments translated from Chinese.)
beginUrls = [
    'http://blog.csdn.net/wcyoot/article/list/1',
]

# All listing pages discovered so far.
pageList = []
# Collected set of article URLs.
urlList = []
index = 0
for beginUrl in beginUrls:
    html = caijiUtil.getUrlContent(beginUrl)
    if not html:
        # runtime message intentionally left in Chinese: "failed to fetch index page"
        print("获取首页列表失败:: " + beginUrl)
        continue

    soup = BeautifulSoup(html, "html5lib")
    # pagination links live in the #papelist container
    pageList_ = soup.select('#papelist a')
    #print(pageList_[-1])
    #exit()
    # NOTE(review): loop body is truncated in this chunk; page-count parsing continues beyond view.
    if pageList_:
        totalPagePattern = re.compile(r'(\d+)$')
Пример #25
0
    def get_remove_link(self, request):
        """Remove the requirement link between two nodes of a TOSCA topology.

        Reads ``source``/``target`` node ids, their types, and the ``group``
        from ``request.POST``, deletes the matching entry from the source
        node's ``requirements`` list (falling back to the destination node's
        list when the link is declared in the opposite direction), persists
        the project via ``self.update()``, and returns True.

        The scan direction mirrors the TOSCA type definitions: a requirement
        is declared on exactly one side of the link, so the destination side
        is only consulted when nothing was removed from the source side.
        """
        parameters = request.POST.dict()
        source_type = parameters['source_type']
        destination_type = parameters['target_type']
        source_id = parameters['source']
        destination_id = parameters['target']
        group = parameters['group']
        current_data = json.loads(self.data_project)
        tosca_definition = Util().loadyamlfile(PATH_TO_TOSCA_DEFINITION)
        node_types = tosca_definition['node_types']
        node_templates = current_data['toscayaml'][group]['topology_template']['node_templates']

        def _remove_requirement(node_id, node_type, other_type):
            # For every requirement key the node's type declares toward
            # other_type, drop the first matching entry from the node's
            # requirement list. Returns True when anything was removed.
            removed = False
            if 'requirements' not in node_types[node_type]:
                return removed
            for req in node_types[node_type]['requirements']:
                for key in req:
                    if req[key]['node'] != other_type:
                        continue
                    node = node_templates[node_id]
                    # normalize a missing/None requirements list to []
                    if 'requirements' not in node or node['requirements'] is None:
                        node['requirements'] = []
                    element = next(
                        (x for x in node['requirements'] if key in x), None)
                    if element is not None:
                        removed = True
                        node['requirements'].remove(element)
            return removed

        removed = _remove_requirement(source_id, source_type, destination_type)
        if not removed:
            # link may be declared in the opposite direction
            _remove_requirement(destination_id, destination_type, source_type)

        self.data_project = current_data
        # TODO(stefano): not clear if this is the validation for the whole project
        self.update()
        return True
Пример #26
0
from lib.util import Util
from bs4 import BeautifulSoup
import re, time
from dal.articleDal import insertArticle
from lib.dispatchUrl import DispatchUrl

# Common variables shared by the scraping helpers below.
# (Comments translated from Chinese.)
totalPagePattern = re.compile(r'_PAGE_COUNT="(\d+)"')  # total page count embedded in the page markup
totalPagePattern1 = re.compile(r'_(\d+)\.shtml')       # page number encoded in the URL suffix
articleFull = ''                                       # accumulator for assembled article text
caijiUtil = Util(rootPath='D:/www/data',
                 host='http://www.cnedu.cn',
                 downloadPath='/files/kaoyan0')


# Fetch the scraped data from the URL and return it packed as a dict.
def caijiUrl(url):
    """Scrape one article page and return the collected data as a dict.

    Returns None when ``url`` is empty or the page download fails.
    NOTE(review): this definition is truncated in this chunk of the file.
    """
    if not url:
        return None

    content = caijiUtil.getUrlContent(url)
    if not content:
        return None

    soup = BeautifulSoup(content, 'html5lib')
    # article body lives in the #fontzoom container
    article = soup.select('#fontzoom')
    article = article[0] if article else None

    title = soup.select('.list-left h1')[0].text
    # year strings used to filter titles -- filtering logic continues beyond this chunk
    filters = ['2016', '2017', '2015', '2014', '2013', '2012', '2011', '2010']
    flag = False
Пример #27
0
def main():
    """Generate README.<script>.md docs for every .sh script in the source group folder.

    Loads the source and target API configurations from ./config, resolves
    the source and target development environments from them, then renders a
    README document for each shell script found in the source 'group' folder
    and saves it into the target 'group' folder.
    """
    combined_sql_list = []  # eventually is written to a file
    # [Generate API sql file]
    print('Generate README.md')
    print('  - load api configuration, generate funcPattern key and values')

    # [Use a configuration file] -- the config dir sits beside this one
    cfg_folder = os.getcwd().replace('_documents', 'config')
    print('config_folder', cfg_folder)

    # [Select API Source ]
    source_cfg = open_api(cfg_folder, file_type="source")
    if not source_cfg:
        print('cancel')
        exit(0)

    # [Select API Target ]
    target_cfg = open_api(cfg_folder, file_type="target")
    if not target_cfg:
        print('cancel')
        exit(0)

    # [Merge Source and Target] -- target keys win on collision
    source_cfg.update(target_cfg)
    api_cfg = source_cfg

    # setup default environment
    home_env = HomeDevelopment().setup()

    # [Create missing folders]
    src_env = HomeDevelopment().setup()
    trg_env = HomeDevelopment().setup()

    # [Scan configuration for home, source, and target environment configurations]
    pprint(api_cfg)
    for name in api_cfg:
        if name == 'source':
            # [Configure input sources from GIT repositories]
            src_env = get_environment(api_cfg[name])
        elif name == 'target':
            # [Configure output targets from GIT repositories]
            trg_env = get_environment(api_cfg[name])

    print('=================')
    #report(home_env, src_env, trg_env)
    print('=================')
    pprint(src_env)
    print(src_env.getFolder('group'))

    # pair every shell script with its README target name
    script_list = []
    for fn in Util().getFileList(src_env.getFolder('group'), ext='.sh'):
        script_list.append({
            "source": fn,
            "target": "README.{}.md".format(fn)
        })
    print('script_list', script_list)

    for pair in script_list:
        print('script', src_env.getFolder('group'), pair['source'])
        print('script', trg_env.getFolder('group'), pair['target'])

        readme_doc = ScriptReadmeDocument(src_env.getFolder('group'),
                                          pair['source']).load()
        pprint(readme_doc)
        readme_doc.saveAs(trg_env.getFolder('group'), pair['target'])
    '''
Пример #28
0
from lib.hdfs import Hdfs
from lib.spark import SparkAutomate
from lib.hive import HiveAutomate
from lib.oozie import OozieAutomate
from lib.data import Dataset
from lib.util import Util
from conf.config import *
import subprocess
import time

if __name__ == '__main__':
    # Entry point: discover the cluster, provision the datasets, then run
    # the pipeline stages (Spark stage visible here; the rest is truncated
    # in this chunk of the file).
    util = Util()
    util.get_cluster_details()  # presumably populates cluster info used by the stages -- verify in lib.util
    hdfs = Hdfs()

    # wall-clock start time; reporting presumably happens further down (truncated)
    start_time = time.time()

    try:
        print("Setting up the datasets .. ")
        data = Dataset( URL_LIST, util)
        data.automate()
    except Exception as e:
        # best-effort: report the failure and skip the dependent stages below
        print("Failed to set up the datasets !")
        print("Following exception has occured " + str(e))
    else :
        # datasets are in place; run the Spark stage
        try : 
            print("Running spark application..")
            spark = SparkAutomate(SPARK_DIR_LIST, util, hdfs)
            spark.automate()
        except Exception as e:
            print("Failed to run spark application !")
Пример #29
0
def main():
    """Generate API test SQL scripts from the source project into the target.

    Pipeline: load and merge the source/target API configurations from
    ./config, resolve the source and target environments, combine all
    ``base.test.sql`` files into ``90.base.test.sql`` in the target scripts
    folder (with project-specific name substitution), then for each API
    either generate per-clause (insert/query/update/delete) test scripts
    from templates or copy existing ones from the source project.

    NOTE(review): the trailing comments suggest this function may continue
    beyond this chunk of the file.
    """
    # maps SQL clause names to the HTTP method used in generated test names
    clause2method = {'insert':'POST', 'update':'PUT', 'query':'GET', 'delete':'DELETE'}
    combined_sql_list = [] # eventually is written to a file
    # [Generate API sql file]
    print('Generate API')
    print('  - load api configuration, generate funcPattern key and values')
    # get configuration file name {folder: "", name: ""}
    # get list of files of type .json in folder ./config

    # [Use a configuration file]
    config_folder = '{}'.format(os.getcwd().replace('_tasks','config'))

    # [Select API Source ]
    sourceConfiguration = open_api(config_folder,file_type="source")
    if not sourceConfiguration:
        print('cancel')
        exit(0)

    # [Select API Target ]
    targetConfiguration = open_api(config_folder,file_type="target")
    if not targetConfiguration:
        print('cancel')
        exit(0)

    # [Merge Source and Target] -- target keys override source keys
    sourceConfiguration.update(targetConfiguration)
    apiConfiguration = sourceConfiguration

    # setup default environment
    homeDev = HomeDevelopment().setup()

    # [Create missing folders]
    sourceDev = HomeDevelopment().setup()
    targetDev = HomeDevelopment().setup()

    # [Scan configuration for home, source, and target environment configurations]
    for apiName in apiConfiguration:
        if apiName == 'source':
            # [Configure input sources from GIT repositories]
            sourceDev = get_environment(apiConfiguration[apiName])

        elif apiName == 'target':
            # [Configure output targets from GIT repositories]
            targetDev = get_environment(apiConfiguration[apiName])

    print('=================')
    report(homeDev, sourceDev, targetDev)
    print('=================')
    print('=================')
    print('Base Tests Combined')
    print('=================')
    #fileList = Util().getFileList(sourceDev.getDbFolder('sql'),'static.sql')
    fileList = Util().getFileList(sourceDev.getFolder('scripts'),'base.test.sql')
    fileList.sort()
    # [Replace project specific values]
    replace_ = ['one_db', 'hapi-api']
    replace_with = [targetDev.getName('db'), targetDev.getName('db_api')]

    targetName = '90.base.test.sql'
    combinedDocument = Document(targetDev.getFolder('scripts'), targetName)


    # [Backup the target file]
    backup = Util().makeBackupFile(targetDev.getFolder('scripts'), targetName)
    print('  - backup : {}'.format(backup))

    # [Move Static Files to Target Folder]
    for fileName in fileList:
        # [Combine base tests into one file]
        # [Copy from a source folder to a target folder]

        staticDocument = Document(sourceDev.getFolder('scripts'), fileName) \
            .load() \
            .replace(replace_, replace_with)

        combinedDocument.extend(staticDocument)

    combinedDocument.save()

    print('=================')
    print('API Tests')
    print('=================')

    apiNameList = [nm for nm in apiConfiguration if apiConfiguration[nm]['kind'] == 'api-definition' or apiConfiguration[nm]['kind'] == 'api-static']

    # [Move Static Files to Target Folder]
    #targetNames = []
    #sourceNames = []
    names = []
    src_folder = sourceDev.getFolder('scripts')
    tmpl_folder = '../templates'
    trg_folder = targetDev.getFolder('scripts')
    clauses = ['insert','query','update','delete']
    for apiName in apiNameList:
        schema = apiConfiguration[apiName]['schema'].replace('_','.')
        prefix = apiConfiguration[apiName]['prefix-test']
        # make list of existing test files
        names = [{"source":'{}.test.{}.{}.{}.api.test.sql'.format(prefix,schema, apiName.upper(), clause),
                  "target":'{}.{}.{}.api.test.sql'.format(prefix,apiName.upper(), clause),
                  "template": '{}.test.sql.template'.format(clause),
                  "clause": '{}'.format(clause),
                  "method": '{}'.format(clause2method[clause])} for clause in clauses]
        # keep only clauses whose source test file actually exists
        names = [pair for pair in names if Util().file_exists(src_folder, pair['source'])]
        #
        for pair in names:
            backup = None
            #print('pair', pair, 'kind', apiConfiguration[apiName]['kind'])
            if apiConfiguration[apiName]['kind'] == 'api-definition':
                # handle backup before any overwrite of an existing target
                if Util().file_exists(trg_folder, pair['target']):
                    backup = Util().makeBackupFile(targetDev.getFolder('scripts'), pair['target'])

                # Use Template
                if Util().file_exists(tmpl_folder, pair['template']): # Use Template
                    # Use Template
                    print('    - generate test {} FROM {}'.format(pair['target'], pair['template']))
                    if backup: print('       -- backup ', backup)
                    templateDoc = TestTemplate(apiName, pair['method'], pair['template']) \
                        .apply(apiConfiguration)\
                        .saveAs(trg_folder, pair['target'])

                elif Util().file_exists(src_folder, pair['source']): # Copy from existing file
                    # Copy from source
                    print('    - copy test {} FROM {} '.format(pair['target'],pair['source']))
                    # load
                    # replace
                    # save
                    doc = Document(src_folder, pair['source']) \
                        .load() \
                        .replace(replace_, replace_with)\
                        .saveAs(trg_folder,pair['target'])

                else: # No test available
                    print('No Test Available for ', pair['source'])

            elif apiConfiguration[apiName]['kind'] == 'api-static':
                #print('template', tmpl_folder)
                print('    - copy test for api-static ', pair['source'])

        # [Backup the target file]

        #print('  - backup : {}'.format(fileName))

    apiNameList = [nm for nm in apiConfiguration if apiConfiguration[nm]['kind'] == 'api-static']

    # [Move Static Files to Target Folder]
    '''