Example #1
    def get_snaphotid_doc_old(self, sid, container):
        doc = None
        json_dir = get_test_json_dir()
        if exists_dir(json_dir):
            fname = '%s/%s/snapshots/%s' % (json_dir, container, sid)
            if exists_file(fname):
                json_data = json_from_file(fname)
                if json_data and 'json' in json_data:
                    doc = json_data['json']
                    snapshot = {
                        'id': json_data['snapshotId'],
                        'structure': json_data['structure'],
                        'reference': json_data['reference'],
                        'source': json_data['source'],
                        'collection': json_data['collection'],
                        'type': json_data.get("node", {}).get('type'),
                        'region' : json_data.get('region', "")
                    }
                    if 'paths' in json_data:
                        snapshot['paths'] = json_data['paths']
                    else:
                        snapshot['path'] = json_data['path']

                    self.snapshots.append(snapshot)
        return doc
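
For reference, the loader above expects each on-disk snapshot file to carry the metadata keys next to the raw `json` payload. A minimal file that would satisfy it might look like this (all values hypothetical):

    {
        "snapshotId": "SNAPSHOT_1",
        "structure": "filesystem",
        "reference": "master",
        "source": "gitConnector",
        "collection": "security_groups",
        "node": {"type": "json"},
        "region": "",
        "path": "container1/snapshots/SNAPSHOT_1",
        "json": {}
    }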
Example #2
    def populate_all_template_snapshot(self):
        """
        crawler function for populate all the files located at given paths and generate returns the updated `snapshot_data`
        """
        root_dir_path = get_field_value(self.connector_data, 'folderPath')
        if not root_dir_path:
            root_dir_path = self.repopath

        self.paths = get_field_value(self.node, 'paths')

        if self.paths and isinstance(self.paths, list):
            count = 0
            for path in self.paths:
                self.dir_path = str('%s/%s' % (root_dir_path, path)).replace(
                    '//', '/')
                if exists_dir(self.dir_path):
                    path = path.rstrip("/")
                    count = self.populate_sub_directory_snapshot(
                        path, self.dir_path, "", self.snapshot, self.dbname,
                        self.node, self.snapshot_data, count)
                    # list_of_file = os.listdir(self.dir_path)
                    # for entry in list_of_file:
                    #     count = self.populate_sub_directory_snapshot(path, self.dir_path, entry, self.snapshot, self.dbname, self.node, self.snapshot_data, count)
                else:
                    logger.error("Invalid path : directory does not exist : " +
                                 self.dir_path)
        else:
            logger.error(
                "\t\tERROR: Invalid json : `paths` is not a list or does not exist in the master snapshot"
            )

        return self.snapshot_data
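
A master snapshot node that would pass the `paths` check above might look like this (field names taken from the code, values made up):

    node = {
        'masterSnapshotId': 'TEMPLATE_SNAPSHOT1',
        'paths': ['deployments/dev', 'deployments/prod']
    }

Each entry in `paths` is joined onto `folderPath` (or the repo path) and crawled recursively by `populate_sub_directory_snapshot`.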
Example #3
def make_snapshots_dir(container):
    snapshot_dir = None
    json_dir = '%s%s' % (get_test_json_dir(), container)
    if exists_dir(json_dir):
        snapshot_dir = '%s/snapshots' % json_dir
        mkdir_path(snapshot_dir)
    return snapshot_dir
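Example #4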
def populate_all_arm_snapshot(snapshot, dbname, sub_data, node, repopath,
                              snapshot_data):
    """
    Populate all snapshot by running arm command
    """
    root_dir_path = get_field_value(sub_data, 'folderPath')
    if not root_dir_path:
        root_dir_path = repopath

    location = get_field_value(node, 'location')
    paths = get_field_value(node, 'paths')
    if paths and isinstance(paths, list):
        count = 0
        for path in paths:
            count += 1
            dir_path = str('%s/%s' % (root_dir_path, path)).replace('//', '/')
            if exists_dir(dir_path):
                list_of_file = os.listdir(dir_path)
                for entry in list_of_file:
                    populate_sub_directory_snapshot(dir_path, entry, snapshot,
                                                    dbname, node,
                                                    snapshot_data)
            else:
                logger.error("Invalid json : directory does not exist : " +
                             dir_path)
    else:
        logger.error(
            "Invalid json : `paths` field is missing for 'arm' node type or it is not a list"
        )

    return
Example #5
def search_config_ini():
    """Need the config.ini file to read initial configuration data"""
    error = False
    config_ini = None
    fwdir = os.getenv('FRAMEWORKDIR', None)
    if fwdir:
        if exists_dir(fwdir):
            config_ini = '%s/%s' % (fwdir, CFGFILE)
            error_msg = valid_config_ini(config_ini)
            if error_msg:
                console_log(
                    "FRAMEWORKDIR: %s, env variable directory exists, checking...."
                    % fwdir, currentframe())
                console_log(error_msg, currentframe())
                error = True
        else:
            console_log(
                "FRAMEWORKDIR: %s, env variable set to non-existent directory, exiting....."
                % fwdir, currentframe())
            error = True
    else:
        config_ini = '%s/%s' % (os.getcwd(), CFGFILE)
        error_msg = valid_config_ini(config_ini)
        if error_msg:
            console_log(
                "FRAMEWORDIR environment variable NOT SET, searching in current directory.",
                currentframe())
            console_log(error_msg, currentframe())
            error = True
    return error, config_ini
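Example #6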
def get_rego_rule_filename(rego_file, container):
    rego_file_name = None
    json_dir = get_test_json_dir()
    if exists_dir(json_dir):
        rego_file_name = '%s/%s/%s' % (json_dir, container, rego_file)
        if not exists_file(rego_file_name):
            rego_file_name = None
    return rego_file_name
Example #7
def framework_dir():
    """Return top level framework directory"""
    global FRAMEWORKDIR
    if FRAMEWORKDIR:
        return FRAMEWORKDIR
    fwdir = os.getenv('FRAMEWORKDIR', None)
    if fwdir and exists_dir(fwdir):
        FRAMEWORKDIR = fwdir
    else:
        FRAMEWORKDIR = os.getcwd()
    return FRAMEWORKDIR
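
A quick sketch of the caching behavior (paths hypothetical):

    os.environ['FRAMEWORKDIR'] = '/opt/framework'  # must name an existing directory
    print(framework_dir())  # '/opt/framework' when the directory exists, else os.getcwd()
    print(framework_dir())  # served from the FRAMEWORKDIR global; the env var is not re-read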
Example #8
    def get_snaphotid_doc(self, sid):
        doc = None
        isdb_fetch = get_dbtests()
        if isdb_fetch:
            dbname = self.dbname
            coll = self.collection_data[sid] if sid in self.collection_data else COLLECTION
            docs = get_documents(coll, {'snapshotId': sid}, dbname,
                                 sort=[('timestamp', pymongo.DESCENDING)], limit=1)
            logger.debug('Number of Snapshot Documents: %s', len(docs))
            if docs and len(docs):
                doc = docs[0]['json']
                snapshot = {
                    'id': docs[0]['snapshotId'],
                    'structure': docs[0]['structure'],
                    'reference': docs[0]['reference'],
                    'source': docs[0]['source'],
                    'collection': docs[0]['collection'],
                    'type': docs[0].get("node", {}).get('type'),
                    'region' : docs[0].get('region', "")
                }
                if 'paths' in docs[0]:
                    snapshot['paths'] = docs[0]['paths']
                else:
                    snapshot['path'] = docs[0]['path']
                self.snapshots.append(snapshot)
        else:
            json_dir = '%s%s' % (get_test_json_dir(), self.container)
            if exists_dir(json_dir):
                fname = '%s/snapshots/%s' % (json_dir, sid)
                if exists_file(fname):
                    json_data = json_from_file(fname)
                    if json_data and 'json' in json_data:
                        doc = json_data['json']
                        snapshot_val = {
                            'id': json_data['snapshotId'],
                            'structure': json_data['structure'],
                            'reference': json_data['reference'],
                            'source': json_data['source'],
                            'collection': json_data['collection'],
                            'type': json_data.get("node", {}).get('type'),
                            'region' : json_data.get('region', "")
                        }
                        if 'paths' in json_data:
                            snapshot_val['paths'] = json_data['paths']
                        else:
                            snapshot_val['path'] = json_data['path']

                        singletest = get_from_currentdata(SINGLETEST)
                        if singletest:
                            snapshot_val['json'] = doc
                        self.snapshots.append(snapshot_val)
        return doc
Example #9
    def validate(self, file_path):
        helm_source = file_path.rpartition("/")[0]
        chart_file_path = "%s/Chart.yaml" % helm_source
        values_file_path = "%s/values.yaml" % helm_source
        template_dir_path = "%s/templates" % helm_source

        return all([
            exists_file(chart_file_path),
            exists_file(values_file_path),
            exists_dir(template_dir_path)
        ])
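
The three checks encode the conventional Helm chart layout: Chart.yaml and values.yaml next to a templates/ directory. A hedged usage sketch, with a made-up chart path:

    # file_path must point at a file directly inside the chart root,
    # since rpartition('/') strips only the last path component.
    is_chart = self.validate('mychart/Chart.yaml')  # True when mychart/ also holds values.yaml and templates/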
Example #10
def _local_file_directory(connector, snapshot):
    final_path, repopath = None, None
    connector_user = get_field_value(connector, 'username')
    snapshot_user = get_field_value(snapshot, 'testUser')
    if snapshot_user == connector_user:
        folder_path = get_field_value(connector, 'folderPath')
        logger.info("Folder path: %s", folder_path)
        if exists_dir(folder_path):
            final_path = folder_path
        else:
            logger.error("Given folder path is not a directory")
    else:
        logger.error("Connector and snapshot user do not match.")
    return repopath, final_path
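Example #11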
def init_currentdata():
    """ Initialises data structure to store runtime data. """
    started = int(time.time() * 1000)
    runctx = framework_currentdata()
    run_dir = os.path.dirname(runctx)
    if not exists_dir(run_dir):
        mkdir_path(run_dir)
    run_data = {
        'start': started,
        'end': started,
        'errors': [],
        'host': socket.gethostname(),
        'timestamp': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    }
    save_currentdata(run_data)
Example #12
    def get_snaphotid_doc(self, sid):
        doc = None
        isdb_fetch = get_dbtests()
        if isdb_fetch:
            dbname = self.kwargs['dbname']
            coll = self.kwargs['snapshots'][sid] if sid in self.kwargs[
                'snapshots'] else COLLECTION
            docs = get_documents(coll, {'snapshotId': sid},
                                 dbname,
                                 sort=[('timestamp', pymongo.DESCENDING)],
                                 limit=1)
            logger.debug('Number of Snapshot Documents: %s', len(docs))
            if docs and len(docs):
                doc = docs[0]['json']
                self.snapshots.append({
                    'id': docs[0]['snapshotId'],
                    'path': docs[0]['path'],
                    'structure': docs[0]['structure'],
                    'reference': docs[0]['reference'],
                    'source': docs[0]['source']
                })
        else:
            json_dir = '%s%s' % (get_test_json_dir(), self.kwargs['container'])
            if exists_dir(json_dir):
                fname = '%s/snapshots/%s' % (json_dir, sid)
                if exists_file(fname):
                    json_data = json_from_file(fname)
                    if json_data and 'json' in json_data:
                        doc = json_data['json']
                        snapshot_val = {
                            'id': json_data['snapshotId'],
                            'path': json_data['path'],
                            'structure': json_data['structure'],
                            'reference': json_data['reference'],
                            'source': json_data['source']
                        }
                        singletest = get_from_currentdata(SINGLETEST)
                        if singletest:
                            snapshot_val['json'] = doc
                        self.snapshots.append(snapshot_val)
        return doc
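Example #13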
def get_all_nodes(repopath, node, snapshot, ref, connector):
    """ Fetch all the nodes from the cloned git repository in the given path."""
    db_records = []
    collection = node['collection'] if 'collection' in node else COLLECTION
    given_type = get_field_value(connector, "type")
    base_path = get_field_value_with_default(connector, "folderPath", "")
    snapshot_source = get_field_value(snapshot, 'source')
    parts = snapshot_source.split('.')
    d_record = {
        "structure": given_type,
        "reference": ref if not base_path else "",
        "source": parts[0],
        "path": '',
        "timestamp": int(time.time() * 1000),
        "queryuser": "",
        "checksum": hashlib.md5("{}".encode('utf-8')).hexdigest(),
        "node": node,
        "snapshotId": None,
        "masterSnapshotId": node['masterSnapshotId'],
        "collection": collection.replace('.', '').lower(),
        "json": {}
    }
    node_type = node['type'] if 'type' in node and node['type'] else 'json'
    json_path = '%s/%s' % (repopath, node['path'])
    file_path = json_path.replace('//', '/')
    logger.info('Dir: %s', file_path)
    if exists_dir(file_path):
        count = 0
        for filename in glob.glob('%s/*.json' % file_path.replace('//', '/')):
            parts = filename.rsplit('/', 1)
            path = '%s/%s' % (node['path'], parts[-1])
            json_data = convert_to_json(filename, node_type)
            logger.info('type: %s, json:%s', node_type, json_data)
            if json_data:
                db_record = copy.deepcopy(d_record)
                db_record['snapshotId'] = '%s%s' % (node['masterSnapshotId'],
                                                    str(count))
                db_record['path'] = path.replace('//', '/')
                db_record['json'] = json_data
                data_str = json.dumps(json_data)
                db_record['checksum'] = hashlib.md5(
                    data_str.encode('utf-8')).hexdigest()
                db_records.append(db_record)
                count += 1
    else:
        logger.info('Get requires a valid directory; snapshot directory not present!')
    return db_records
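Example #14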
    def get_snaphotid_doc_old(self, sid, container):
        doc = None
        json_dir = get_test_json_dir()
        if exists_dir(json_dir):
            fname = '%s/%s/snapshots/%s' % (json_dir, container, sid)
            if exists_file(fname):
                json_data = json_from_file(fname)
                if json_data and 'json' in json_data:
                    doc = json_data['json']
                    self.snapshots.append({
                        'id': json_data['snapshotId'],
                        'path': json_data['path'],
                        'structure': json_data['structure'],
                        'reference': json_data['reference'],
                        'source': json_data['source']
                    })
        return doc
Example #15
    def rego_rule_filename(self, rego_file, container):
        rego_file_name = None
        if 'dirpath' in self.testcase and self.testcase['dirpath']:
            rego_file_name = '%s/%s' % (self.testcase['dirpath'], rego_file)
            if not exists_file(rego_file_name):
                rego_file_name = None
            return rego_file_name
        isdb_fetch = get_dbtests()
        # DB and SNAPSHOT modes report the same value here, so for SNAPSHOT we check
        # the DB first and, if the file isn't there, fall back to the file path.
        
        if isdb_fetch:
            dbname = self.dbname
            coll = 'structures'
            docs = get_documents(coll, { 'type': 'others', "container" : container}, dbname,
                                 sort=[('timestamp', pymongo.DESCENDING)], limit=1)
            # print('Number of other Documents: %s' % len(docs))
            logger.debug('Number of other Documents: %s', len(docs))
            if docs and len(docs):
                doc = docs[0]['json']
                if doc and 'file' in doc and isinstance(doc['file'], list):
                    for file_doc in doc['file']:
                        name = get_field_value(file_doc, 'name')
                        # print(name, rego_file)
                        if name == rego_file:
                            content = get_field_value(file_doc, 'container_file')
                            if content:
                                rego_file_name = '/tmp/%s' % rego_file
                                with open(rego_file_name, 'w', encoding="utf-8") as out:
                                    out.write(content)
                                return rego_file_name
                # print(doc)

        json_dir = get_test_json_dir()
        if exists_dir(json_dir):
            rego_file_name = '%s/%s/%s' % (json_dir, container, rego_file)
            if not exists_file(rego_file_name):
                rego_file_name = None
        return rego_file_name
Example #16
def test_exists_dir(monkeypatch):
    monkeypatch.setattr(os.path, 'exists', mock_exists_dir_check)
    monkeypatch.setattr(os.path, 'isdir', mock_is_dir_check)
    assert True == exists_dir('~/tmp')
    assert True == exists_dir('~/abc')
    assert False == exists_dir('/xyz')
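
The tests pin down the contract of `exists_dir` without showing its body. A minimal implementation consistent with them, and with `exists_dir(None)` being False in Example #22, might be (a sketch, not the framework's actual code):

    import os

    def exists_dir(dirname):
        """True only for a non-empty path that exists and is a directory."""
        if dirname and os.path.exists(dirname) and os.path.isdir(dirname):
            return True
        return False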
Example #17
def git_clone_dir(connector):
    clonedir = None
    repopath = tempfile.mkdtemp()
    subdir = False
    if connector and isinstance(connector, dict):
        giturl = get_field_value(connector, 'gitProvider')
        if not giturl:
            logger.error("Git connector does not have valid git provider URL")
            return repopath, clonedir
        
        branch = get_from_currentdata('branch')
        if not branch:
            branch = get_field_value_with_default(connector, 'branchName', 'master')

        isprivate = get_field_value(connector, 'private')
        isprivate = True if isprivate is None or not isinstance(isprivate, bool) else isprivate
        # logger.info("Repopath: %s", repopath)
        logger.info("\t\t\tRepopath: %s", repopath)
        http_match = re.match(r'^http(s)?://', giturl, re.I)
        if http_match:
            logger.info("\t\t\tHttp (private:%s) giturl: %s", "YES" if isprivate else "NO", giturl)

            accessToken = get_field_value(connector, 'httpsAccessToken')
            username = get_field_value(connector, 'httpsUser')
            if accessToken:
                logger.info("AccessToken: %s" % accessToken)
                pwd = get_field_value(connector, 'httpsPassword')
                pwd = pwd if pwd else get_git_pwd(key=accessToken)
                if not pwd:
                    pwd = get_pwd_from_vault(accessToken)
                    if pwd:
                        logger.info("Git access token from vault: %s", '*' * len(pwd))
                if pwd:
                    gh = GithubFunctions()
                    gh.set_access_token(pwd)
                    _ = gh.populate_user()
                    rpo = gh.clone_repo(giturl, repopath, branch)
                    if rpo:
                        logger.info('Successfully cloned in %s dir' % repopath)
                        checkdir = '%s/tmpclone' % repopath if subdir else repopath
                        clonedir = checkdir if exists_dir('%s/.git' % checkdir) else None
                        if not exists_dir(clonedir):
                            logger.error("No valid data provided for connect to git : %s", giturl)
                        return repopath, clonedir
                    elif isprivate:
                        logger.error("Please provide password for connect to git repository.")
                        return repopath, clonedir
                    else:
                        git_cmd = 'git clone %s %s' % (giturl, repopath)
            elif username:
                pwd = get_field_value(connector, 'httpsPassword')
                schema = giturl[:http_match.span()[-1]]
                other_part = giturl[http_match.span()[-1]:]
                # pwd = pwd if (pwd and not json_source()) else (get_git_pwd() if not json_source() else get_pwd_from_vault(pwd))
                pwd = pwd if pwd else get_git_pwd(key=username)

                # populate the password from vault
                if not pwd:
                    pwd = get_pwd_from_vault(username)
                    if pwd:
                        logger.info("Git password from vault: %s", '*' * len(pwd))
                if pwd:
                    git_cmd = 'git clone --depth 1 %s%s:%s@%s %s' % (schema, urllib.parse.quote_plus(username),
                                                        urllib.parse.quote_plus(pwd), other_part, repopath)
                elif isprivate:
                    logger.error("Please provide password for connect to git repository.")
                    return repopath, clonedir
                else:
                    git_cmd = 'git clone --depth 1 %s%s:%s@%s %s' % (schema, urllib.parse.quote_plus(username), "",
                                                     other_part, repopath)
            else:
                git_cmd = 'git clone --depth 1 %s %s' % (giturl, repopath)
        else:
            logger.info("SSH (private:%s) giturl: %s, Repopath: %s", "YES" if isprivate else "NO",
                        giturl, repopath)
            if isprivate:
                ssh_key_file = get_field_value(connector, 'sshKeyfile')
                ssh_key_name = get_field_value(connector, 'sshKeyName')
                ssh_key_file_data = None
                if ssh_key_file:
                    if not exists_file(ssh_key_file):
                        logger.error("Git connector points to a non-existent ssh keyfile!")
                        return repopath, clonedir
                elif ssh_key_name:
                    ssh_key_file_data = get_vault_data(ssh_key_name)
                    if not ssh_key_file_data:
                        logger.info('Git connector points to a non-existent ssh keyName in the vault!')
                        return repopath, clonedir
                ssh_host = get_field_value(connector, 'sshHost')
                ssh_user = get_field_value_with_default(connector, 'sshUser', 'git')
                if not ssh_host:
                    logger.error("SSH host not set, could be like github.com, gitlab.com, 192.168.1.45 etc")
                    return repopath, clonedir
                ssh_dir = '%s/.ssh' % repopath
                if exists_dir(ssh_dir):
                    logger.error("Git ssh dir: %s already exists, cannot recreate it!", ssh_dir)
                    return repopath, clonedir
                os.mkdir('%s/.ssh' % repopath, 0o700)
                if not ssh_key_file and ssh_key_name and ssh_key_file_data:
                    ssh_key_file = create_ssh_file_vault_data(ssh_dir, ssh_key_file_data, ssh_key_name)
                    if not ssh_key_file:
                        logger.info('Git connector points to a non-existent ssh keyName in the vault!')
                        return repopath, clonedir
                ssh_cfg = create_ssh_config(ssh_dir, ssh_key_file, ssh_user)
                if not ssh_cfg:
                    logger.error("Creation of Git ssh config in dir: %s failed!", ssh_dir)
                    return repopath, clonedir
                git_ssh_cmd = 'ssh -o "StrictHostKeyChecking=no" -F %s' % ssh_cfg
                git_cmd = 'git clone %s %s/tmpclone' % (giturl, repopath)
                subdir = True
            else:
                git_ssh_cmd = 'ssh -o "StrictHostKeyChecking=no"'
                git_cmd = 'git clone %s %s' % (giturl, repopath)
            os.environ['GIT_SSH_COMMAND'] = git_ssh_cmd
        git_cmd = '%s --branch %s' % (git_cmd, branch)
        if git_cmd:
            error_result, result = run_subprocess_cmd(git_cmd)
            checkdir = '%s/tmpclone' % repopath if subdir else repopath
            clonedir = checkdir if exists_dir('%s/.git' % checkdir) else None
            if not exists_dir(clonedir):
                logger.error("No valid data provided for connect to git : %s", error_result)
        if 'GIT_SSH_COMMAND' in os.environ:
            os.environ.pop('GIT_SSH_COMMAND')
    return repopath, clonedir
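
A minimal connector that would drive the public-HTTPS path above might look like this (field names taken from the code, values hypothetical):

    connector = {
        'gitProvider': 'https://github.com/org/repo.git',  # made-up repository URL
        'branchName': 'master',
        'private': False
    }
    repopath, clonedir = git_clone_dir(connector)  # clonedir is None when the clone failed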
Example #18
    def populate_sub_directory_snapshot(self, file_path, base_dir_path,
                                        sub_dir_path, snapshot, dbname, node,
                                        snapshot_data, count):
        """
        Iterate through the subdirectories and process each file inside the directory.
        """
        logger.info("Finding files in : %s" % sub_dir_path)
        dir_path = str('%s/%s' % (base_dir_path, sub_dir_path)).replace(
            '//', '/')
        logger.info("dir_path   %s   : ", dir_path)

        if sub_dir_path in self.exclude_directories:
            logger.info("Excluded directory : %s", sub_dir_path)
            return count

        template_file_list = []
        parameter_file_list = []
        path_is_file, path_is_dir = False, False

        if exists_file(dir_path):
            path_is_file = True
        if exists_dir(dir_path):
            path_is_dir = True

        if any([path_is_dir, path_is_file]):
            if path_is_dir:
                list_of_file = os.listdir(dir_path)
                for entry in list_of_file:
                    new_dir_path = ('%s/%s' % (dir_path, entry)).replace(
                        '//', '/')
                    new_sub_directory_path = ('%s/%s' %
                                              (sub_dir_path, entry)).replace(
                                                  '//', '/')
                    if exists_dir(new_dir_path):
                        count = self.populate_sub_directory_snapshot(
                            file_path, base_dir_path, new_sub_directory_path,
                            snapshot, dbname, node, snapshot_data, count)
                    if self.is_helm_chart_dir(new_dir_path):
                        paths = self.process_helm_chart(dir_path)
                        template_file_list += paths
                    elif is_multiple_yaml_file(new_dir_path):
                        paths = self.break_multiple_yaml_file(new_dir_path)
                        template_file_list += paths
                    elif exists_file(new_dir_path):
                        if self.is_template_file(new_dir_path):
                            template_file_list.append(new_sub_directory_path)
                        elif self.is_parameter_file(new_dir_path):
                            parameter_file_list.append(new_sub_directory_path)

            if path_is_file:
                template_file_list.append(sub_dir_path.replace('//', '/'))

            logger.info("parameter_file_list   %s   : ",
                        str(parameter_file_list))
            logger.info("template_file_list   %s   : ",
                        str(template_file_list))
            generated_template_file_list = []
            if template_file_list:
                for template_file in template_file_list:
                    # template_file_path = str('%s/%s' % (base_dir_path, template_file)).replace('//', '/')
                    if parameter_file_list:
                        self.generate_template_and_parameter_file_list(
                            file_path, template_file, parameter_file_list,
                            generated_template_file_list)
                    else:
                        paths = [template_file]
                        template_json = self.process_template(paths)
                        if template_json:
                            generated_template_file_list.append({
                                "paths": [("%s/%s" %
                                           (file_path, template_file)).replace(
                                               "//", "/")],
                                "status":
                                "active",
                                "validate":
                                node['validate']
                                if 'validate' in node else True
                            })
                        else:
                            generated_template_file_list.append({
                                "paths": [("%s/%s" %
                                           (file_path, template_file)).replace(
                                               "//", "/")],
                                "status":
                                "inactive",
                                "validate":
                                node['validate']
                                if 'validate' in node else True
                            })

                nodes, count = self.create_node_record(
                    generated_template_file_list, count)
                if self.node[
                        'masterSnapshotId'] not in self.snapshot_data or not isinstance(
                            self.snapshot_data[self.node['masterSnapshotId']],
                            list):
                    self.snapshot_data[self.node['masterSnapshotId']] = []
                self.snapshot_data[
                    self.node['masterSnapshotId']] = self.snapshot_data[
                        self.node['masterSnapshotId']] + nodes
        return count
Example #19
def container_exists(container):
    """ Check if the container directory exists"""
    container_dir = '%s/%s' % (get_test_json_dir(), container)
    return exists_dir(container_dir)
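
Hedged usage, with a made-up container name:

    if not container_exists('container1'):
        logger.error("Container directory not found: %s", 'container1')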
Example #20
    def generate_template_json(self):
        """
        Generate the template JSON from the template and parameter files.
        """
        template_json = self.get_template()
        parameter_jsons = self.get_paramter_json_list()
        gen_template_json = None
        if template_json:
            gen_template_json = copy.deepcopy(template_json)
            for parameter_json in parameter_jsons:
                for file_type, variable_json in parameter_json.items():
                    if file_type in ["tfvars", "tfvars.json"]:
                        for key, value in variable_json.items():
                            if isinstance(
                                    value,
                                    list) and len(value) == 1 and isinstance(
                                        value[0], str):
                                value[0] = self.parse_string(value[0])
                            key = self.parse_string(key)
                            self.gparams[key] = value
                    else:
                        if "variable" in variable_json:
                            for key, value in variable_json["variable"].items(
                            ):
                                if "default" in value:
                                    self.gparams[key] = value["default"]

            if "variable" in template_json:
                for key, value in template_json["variable"].items():
                    if "default" in value:
                        self.gparams[key] = value["default"]

            new_resources = {}
            if "module" in template_json:
                for key, value in template_json["module"].items():
                    if "source" in value:
                        default_gparams = {}
                        self.module_params["module"][key] = {}
                        for k, v in value.items():
                            if k != "source":
                                processed_data = self.process_resource(v)
                                default_gparams[k] = processed_data
                                self.module_params["module"][key][
                                    k] = processed_data

                        full_path_list = self.template_file.split("/")[:-1]
                        full_path = ("/".join(full_path_list)).replace(
                            "//", "/")
                        module_file_path = (
                            "%s/%s" % (full_path, value["source"])).replace(
                                "//", "/")

                        logger.info("Finding module : %s", value["source"])
                        if exists_dir(module_file_path):
                            list_of_file = os.listdir(module_file_path)

                            template_file_path = ""
                            parameter_file_list = []
                            for entry in list_of_file:
                                new_file_path = (
                                    '%s/%s' %
                                    (module_file_path, entry)).replace(
                                        '//', '/')
                                if exists_file(new_file_path):
                                    if self.is_template_file(new_file_path):
                                        template_file_path = new_file_path
                                    elif self.is_parameter_file(new_file_path):
                                        parameter_file_list.append(
                                            new_file_path)

                            if template_file_path and parameter_file_list:
                                terraform_template_parser = TerraformTemplateParser(
                                    template_file_path,
                                    parameter_file=parameter_file_list,
                                    **{
                                        "default_gparams": default_gparams,
                                        "process_module": True
                                    })
                                new_template_json = terraform_template_parser.parse(
                                )

                                self.template_file_list = self.template_file_list + terraform_template_parser.template_file_list
                                self.parameter_file_list = self.parameter_file_list + terraform_template_parser.parameter_file_list

                                if new_template_json:
                                    for resource, resource_item in new_template_json.items(
                                    ):
                                        # set parameters from modules files to main resource file
                                        if resource == "resource":
                                            for resource_key, resource_value in resource_item.items(
                                            ):
                                                for resource_name, resource_properties in resource_value.items(
                                                ):
                                                    if isinstance(
                                                            resource_properties,
                                                            dict):
                                                        for default_key, default_value in default_gparams.items(
                                                        ):
                                                            if default_key not in resource_properties:
                                                                resource_properties[
                                                                    default_key] = default_value
                                                    if isinstance(
                                                            resource_properties,
                                                            list):
                                                        for resource_property in resource_properties:
                                                            for default_key, default_value in default_gparams.items(
                                                            ):
                                                                if default_key not in resource_property:
                                                                    resource_property[
                                                                        default_key] = default_value
                                        if resource not in new_resources:
                                            new_resources[resource] = [
                                                resource_item
                                            ]
                                        else:
                                            new_resources[resource].append(
                                                resource_item)
                        else:
                            logger.error("module does not exist : %s ",
                                         value["source"])

                if "module" in gen_template_json:
                    del gen_template_json["module"]

            if 'data' in template_json:
                data_resource = {}
                for data_key, data_value in template_json['data'].items():
                    processed_data = self.process_resource(data_value)
                    self.gdata[data_key] = processed_data
                    data_resource[data_key] = processed_data
                gen_template_json['data'] = data_resource

            self.resource = {}
            resources = []
            if "resource" in new_resources and isinstance(
                    new_resources["resource"], list):
                for resource in new_resources["resource"]:
                    for resource_name, properties in resource.items():
                        processed_resource = self.process_resource(properties)
                        if not self.process_module:
                            if resource_name in self.resource:
                                self.resource[resource_name].append(
                                    processed_resource)
                            else:
                                self.resource[resource_name] = [
                                    processed_resource
                                ]
                        else:
                            self.resource[resource_name] = processed_resource

            if 'resource' in template_json:
                for resource_name, properties in template_json[
                        'resource'].items():
                    processed_resource = self.process_resource(properties)
                    if not self.process_module:
                        if resource_name in self.resource:
                            self.resource[resource_name].append(
                                processed_resource)
                        else:
                            self.resource[resource_name] = [processed_resource]
                    else:
                        self.resource[resource_name] = processed_resource

            if not self.process_module:
                for resource_name, processed_resource_list in self.resource.items(
                ):
                    for processed_resource in processed_resource_list:
                        if isinstance(processed_resource, dict):
                            for name, properties in processed_resource.items():
                                if isinstance(properties, list):
                                    for property in properties:
                                        resources.append({
                                            "type": resource_name,
                                            "name": name,
                                            "properties": property
                                        })
                                else:
                                    resources.append({
                                        "type": resource_name,
                                        "name": name,
                                        "properties": properties
                                    })
                    gen_template_json['resources'] = resources

                    if 'resource' in gen_template_json:
                        del gen_template_json['resource']
            else:
                gen_template_json['resource'] = self.resource

        return gen_template_json
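
To make the variable-resolution step concrete, a micro-example (values made up): a template containing

    {'variable': {'region': {'default': 'eastus'}}}

leaves `self.gparams == {'region': 'eastus'}` after the `"variable"` loop, and `process_resource` later substitutes that value into resource properties.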
Example #21
def store_snapshot(snapshot_dir, data):
    if exists_dir(snapshot_dir):
        snapshot_file = '%s/%s' % (snapshot_dir, data['snapshotId'])
        save_json_to_file(data, snapshot_file)
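
Combined with `make_snapshots_dir` from Example #3, a hedged end-to-end sketch (container name and payload made up):

    snapshot_dir = make_snapshots_dir('container1')
    if snapshot_dir:
        store_snapshot(snapshot_dir, {'snapshotId': 'SNAPSHOT_1', 'json': {}})
        # writes <json_dir>/container1/snapshots/SNAPSHOT_1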
Example #22
def test_none_directory():
    assert False == exists_dir(None)
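Example #23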
def git_clone_dir(connector):
    clonedir = None
    repopath = tempfile.mkdtemp()
    subdir = False
    if connector and isinstance(connector, dict):
        giturl = get_field_value(connector, 'gitProvider')
        if not giturl:
            logger.error("Git connector does not have valid git provider URL")
            return repopath, clonedir
        branch = get_field_value_with_default(connector, 'branchName', 'master')
        isprivate = get_field_value(connector, 'private')
        isprivate = True if isprivate is None or not isinstance(
            isprivate, bool) else isprivate
        logger.info("Repopath: %s", repopath)
        http_match = re.match(r'^http(s)?://', giturl, re.I)
        if http_match:
            logger.info("Http (private:%s) giturl: %s, Repopath: %s",
                        "YES" if isprivate else "NO", giturl, repopath)
            username = get_field_value(connector, 'httpsUser')
            if username:
                pwd = get_field_value(connector, 'httpsPassword')
                schema = giturl[:http_match.span()[-1]]
                other_part = giturl[http_match.span()[-1]:]
                pwd = pwd if pwd else get_git_pwd()
                if pwd:
                    git_cmd = 'git clone %s%s:%s@%s %s' % (
                        schema, urllib.parse.quote_plus(username),
                        urllib.parse.quote_plus(pwd), other_part, repopath)
                else:
                    git_cmd = 'git clone %s%s@%s %s' % (
                        schema, urllib.parse.quote_plus(username), other_part,
                        repopath)
            else:
                git_cmd = 'git clone %s %s' % (giturl, repopath)
        else:
            logger.info("SSH (private:%s) giturl: %s, Repopath: %s",
                        "YES" if isprivate else "NO", giturl, repopath)
            if isprivate:
                ssh_key_file = get_field_value(connector, 'sshKeyfile')
                if not exists_file(ssh_key_file):
                    logger.error(
                        "Git connector points to a non-existent ssh keyfile!")
                    return repopath, clonedir
                ssh_host = get_field_value(connector, 'sshHost')
                ssh_user = get_field_value_with_default(
                    connector, 'sshUser', 'git')
                if not ssh_host:
                    logger.error(
                        "SSH host not set, could be like github.com, gitlab.com, 192.168.1.45 etc"
                    )
                    return repopath, clonedir
                ssh_dir = '%s/.ssh' % repopath
                if exists_dir(ssh_dir):
                    logger.error(
                        "Git ssh dir: %s already exists, cannot recreate it!",
                        ssh_dir)
                    return repopath, clonedir
                os.mkdir('%s/.ssh' % repopath, 0o700)
                ssh_cfg = create_ssh_config(ssh_dir, ssh_key_file, ssh_user)
                if not ssh_cfg:
                    logger.error(
                        "Creation of Git ssh config in dir: %s failed!",
                        ssh_dir)
                    return repopath, clonedir
                git_ssh_cmd = 'ssh -o "StrictHostKeyChecking=no" -F %s' % ssh_cfg
                git_cmd = 'git clone %s %s/tmpclone' % (giturl, repopath)
                subdir = True
            else:
                git_ssh_cmd = 'ssh -o "StrictHostKeyChecking=no"'
                git_cmd = 'git clone %s %s' % (giturl, repopath)
            os.environ['GIT_SSH_COMMAND'] = git_ssh_cmd
            logger.info("GIT_SSH_COMMAND=%s", git_ssh_cmd)
        git_cmd = '%s --branch %s' % (git_cmd, branch)
        logger.info("os.system(%s)", git_cmd)
        if git_cmd:
            run_subprocess_cmd(git_cmd)
            checkdir = '%s/tmpclone' % repopath if subdir else repopath
            clonedir = checkdir if exists_dir('%s/.git' % checkdir) else None
        if 'GIT_SSH_COMMAND' in os.environ:
            os.environ.pop('GIT_SSH_COMMAND')
    return repopath, clonedir
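Example #24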
def populate_sub_directory_snapshot(base_dir_path, sub_dir_path, snapshot,
                                    dbname, node, snapshot_data):
    dir_path = str('%s/%s' % (base_dir_path, sub_dir_path)).replace('//', '/')
    if exists_dir(dir_path):
        list_of_file = os.listdir(dir_path)
        template_file_path = ""
        deployment_file_path_list = []

        for entry in list_of_file:
            new_dir_path = ('%s/%s' % (dir_path, entry)).replace('//', '/')
            new_sub_directory_path = ('%s/%s' % (sub_dir_path, entry)).replace(
                '//', '/')
            if exists_dir(new_dir_path):
                populate_sub_directory_snapshot(base_dir_path,
                                                new_sub_directory_path,
                                                snapshot, dbname, node,
                                                snapshot_data)
            elif exists_file(new_dir_path):
                if len(entry.split(".")) > 0 and "json" in entry.split(
                        ".")[-1]:
                    json_data = json_from_file(new_dir_path)
                    if json_data and "$schema" in json_data:
                        if "deploymentTemplate.json" in json_data[
                                '$schema'].split("/")[-1]:
                            template_file_path = new_sub_directory_path
                        elif "deploymentParameters.json" in json_data[
                                '$schema'].split("/")[-1]:
                            deployment_file_path_list.append(
                                new_sub_directory_path)

        if template_file_path and deployment_file_path_list:

            location = get_field_value(node, 'location')
            new_deployment_file_path_list = []

            template_file_json_path = str(
                '%s/%s' % (base_dir_path, template_file_path)).replace(
                    '//', '/')
            for deployment_file_path in deployment_file_path_list:
                deployment_file_json_path = str(
                    '%s/%s' % (base_dir_path, deployment_file_path)).replace(
                        '//', '/')

                response = invoke_az_cli("deployment validate --location " +
                                         location + " --template-file " +
                                         template_file_json_path +
                                         " --parameters @" +
                                         deployment_file_json_path)

                if not response['error']:
                    new_deployment_file_path_list.append({
                        "path": deployment_file_path,
                        "status": "active"
                    })
                else:
                    new_deployment_file_path_list.append({
                        "path": deployment_file_path,
                        "status": "inactive"
                    })

            data_record = create_snapshot_record(
                snapshot, new_sub_directory_path, node, template_file_path,
                new_deployment_file_path_list)
            if node['masterSnapshotId'] not in snapshot_data or not isinstance(
                    snapshot_data[node['masterSnapshotId']], list):
                snapshot_data[node['masterSnapshotId']] = []

            snapshot_data[node['masterSnapshotId']] = snapshot_data[node[
                'masterSnapshotId']] + data_record['snapshots'][0]['nodes']
            if get_dbtests():
                insert_one_document(data_record, node['collection'], dbname)
            else:
                snapshot_file = '%s/%s' % (dir_path, "snapshot.json")
                save_json_to_file(data_record, snapshot_file)