Example 1
def _get_new_testcases(testcases, mastersnapshots):
    newcases = []
    for testcase in testcases:
        test_parser_type = testcase.get('type', None)
        if test_parser_type == 'rego':
            new_cases = _get_rego_testcase(testcase, mastersnapshots)
            newcases.extend(new_cases)
        else:
            rule_str = get_field_value_with_default(testcase, 'rule', '')
            ms_ids = re.findall(r'\{(.*)\}', rule_str)
            # detail_method = get_field_value(testcase, 'detailMethod')
            for ms_id in ms_ids:
                for s_id in mastersnapshots[ms_id]:
                    # new_rule_str = re.sub('{%s}' % ms_id, '{%s}' % s_id, rule_str)
                    # if not detail_method or detail_method == snapshots_details_map[s_id]:
                    new_rule_str = rule_str.replace('{%s}' % ms_id, '{%s}' % s_id)
                    new_testcase = {
                        'title': testcase.get('title') or "",
                        'description': testcase.get('description') or "",
                        'rule': new_rule_str,
                        'testId': testcase['masterTestId'],
                        'status': get_field_value_with_default(testcase, 'status', "enable")
                    }
                    newcases.append(new_testcase)
    return newcases
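A minimal sketch of the placeholder expansion performed above, assuming a rule string with one {masterSnapshotId} placeholder; the ids are illustrative, not part of the framework:

import re

rule_str = "{MS_01}.location = 'eastus'"              # illustrative rule
mastersnapshots = {'MS_01': ['SNP_01', 'SNP_02']}     # illustrative ids

for ms_id in re.findall(r'\{(.*)\}', rule_str):
    for s_id in mastersnapshots[ms_id]:
        # one concrete testcase rule per generated snapshot id
        print(rule_str.replace('{%s}' % ms_id, '{%s}' % s_id))
# {SNP_01}.location = 'eastus'
# {SNP_02}.location = 'eastus'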
Example 2
def get_web_client_data(snapshot_type, snapshot_source, snapshot_user):
    client_id = None
    client_secret = None
    sub_id = None
    sub_name = None
    tenant_id = None
    found = False
    if snapshot_type == 'azure':
        sub_data = get_azure_data(snapshot_source)
        if sub_data:
            accounts = get_field_value_with_default(sub_data, 'accounts', [])
            for account in accounts:
                subscriptions = get_field_value_with_default(account, 'subscription', [])
                for subscription in subscriptions:
                    users = get_field_value_with_default(subscription, 'users', [])
                    if users:
                        for user in users:
                            name = get_field_value(user, 'name')
                            if name and name == snapshot_user:
                                client_id = get_field_value(user, 'client_id')
                                client_secret = get_field_value(user, 'client_secret')
                                sub_id = get_field_value(subscription, 'subscription_id')
                                sub_name = get_field_value(subscription, 'subscription_name')
                                tenant_id = get_field_value(sub_data, 'tenant_id')
                                found = True
                            if found:
                                break
                    if found:
                        break
                if found:
                    break
    if not found:
        logger.error("No connector data found, check the connector configuration and snapshot configuration files.")
    return client_id, client_secret, sub_name, sub_id, tenant_id
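The nested lookup above implies an Azure connector document shaped roughly as follows; the field names come from the accessors in the code, and every value is a placeholder:

sub_data = {
    "tenant_id": "<tenant-uuid>",
    "accounts": [{
        "subscription": [{
            "subscription_id": "<subscription-uuid>",
            "subscription_name": "<subscription-name>",
            "users": [{
                "name": "<snapshot_user>",
                "client_id": "<app-client-id>",
                "client_secret": "<app-client-secret>"
            }]
        }]
    }]
}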
Example 3
def get_node(credentials, node, snapshot_source, snapshot):
    """
    Fetch a node from Google over the connection, in this case via the
    Google client API functions.
    """
    collection = node['collection'] if 'collection' in node else COLLECTION
    parts = snapshot_source.split('.')
    project_id = get_field_value_with_default(snapshot, 'project-id',"")
    path = get_field_value_with_default(node, 'path',"")
    zone = re.findall(r"(?<=zones\/)[a-zA-Z0-9\-]*(?=\/)", path)
    db_record = {
        "structure": "google",
        "error": None,
        "reference": project_id,
        "source": parts[0],
        "path": path,
        "timestamp": int(time.time() * 1000),
        "queryuser": get_field_value(snapshot, 'testUser'),
        "checksum": hashlib.md5("{}".encode('utf-8')).hexdigest(),
        "node": node,
        "snapshotId": node['snapshotId'],
        "collection": collection.replace('.', '').lower(),
        "region" : zone[0] if zone else "",
        "json": {}  # Refactor when node is absent it should None, when empty object put it as {}
    }

    try:
        access_token = credentials.get_access_token().access_token
        header = {
            "Authorization" : ("Bearer %s" % access_token)
        }

        node_type = node['type'] if node and 'type' in node else ""
        base_node_type_list = node_type.split("/")
        if len(base_node_type_list) > 1:
            base_node_type = base_node_type_list[0]
        else:
            logger.error("Invalid node type %s", node_type)
            return db_record
        
        base_url = "%s%s" % (base_node_type, ".googleapis.com")
        request_url = "https://%s/%s" % (base_url, path)
        logger.info("Invoke request for get snapshot: %s", request_url)
        status, data = http_get_request(request_url, header)
        logger.info('Get snapshot status: %s', status)
        
        if status and isinstance(status, int) and status == 200:
            if data:
                db_record['json'] = data
                checksum = get_checksum(data)
                if checksum:
                    db_record['checksum'] = checksum
        else:
            logger.error("Get snapshot returned invalid status: %s", status)
            db_record['error'] = ("Get snapshot returned invalid status: %s" % status)
    except Exception as ex:
        logger.error('Failed to populate the snapshot : %s', ex)
        db_record['error'] = 'Failed to populate the snapshot: %s' % ex
    
    return db_record
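A small sketch of how the request URL and region are derived above, using an illustrative node; only re is required:

import re

node_type = "compute/v1/instances"                                  # illustrative node['type']
path = "compute/v1/projects/demo/zones/us-east1-b/instances/vm-1"   # illustrative node['path']

base_node_type = node_type.split("/")[0]                  # 'compute'
base_url = "%s%s" % (base_node_type, ".googleapis.com")   # 'compute.googleapis.com'
print("https://%s/%s" % (base_url, path))
# https://compute.googleapis.com/compute/v1/projects/demo/zones/us-east1-b/instances/vm-1

zone = re.findall(r"(?<=zones\/)[a-zA-Z0-9\-]*(?=\/)", path)
print(zone[0] if zone else "")                            # 'us-east1-b'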
def get_node(compute_fn, node, snapshot_source, snapshot):
    """
    Fetch a node from Google over the connection, in this case via the
    Google client API functions.
    """
    collection = node['collection'] if 'collection' in node else COLLECTION
    parts = snapshot_source.split('.')
    project_id = get_field_value_with_default(snapshot, 'project-id', "")
    path = get_field_value_with_default(node, 'path', "")
    db_record = {
        "structure": "google",
        "error": None,
        "reference": project_id,
        "source": parts[0],
        "path": path,
        "timestamp": int(time.time() * 1000),
        "queryuser": "",
        "checksum": hashlib.md5("{}".encode('utf-8')).hexdigest(),
        "node": node,
        "snapshotId": node['snapshotId'],
        "collection": collection.replace('.', '').lower(),
        "json": {
        }  # Refactor when node is absent it should None, when empty object put it as {}
    }
    fn_str_list, kwargs = get_google_call_function(node)

    if fn_str_list:
        for i in range(0, len(fn_str_list)):
            compute_fn = getattr(compute_fn, fn_str_list[i], None)
            if compute_fn and i != len(fn_str_list) - 1:
                compute_fn = compute_fn()

        response_param = ""
        if fn_str_list and len(fn_str_list) > 1:
            response_param = fn_str_list[-2]
        elif fn_str_list and len(fn_str_list) == 1:
            response_param = fn_str_list[0]

        if compute_fn and callable(compute_fn):
            try:
                data = compute_fn(**kwargs["params"]).execute()
                if data:
                    db_record['json'] = data
                    checksum = get_checksum(data)
                    if checksum:
                        db_record['checksum'] = checksum
            except Exception as ex:
                logger.info('Compute function exception: %s', ex)
                db_record['error'] = 'Compute function exception: %s' % ex
        else:
            logger.info('Invalid Compute function exception: %s',
                        str(fn_str_list))
            db_record[
                'error'] = 'Invalid Compute function exception: %s' % str(
                    fn_str_list)
    else:
        logger.info('Missing Compute function')
        db_record['error'] = 'Missing Compute function'
    return db_record
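A toy illustration of the getattr chain the variant above walks, assuming a discovery-style client and fn_str_list parsed as ['instances', 'list']; the classes are stand-ins and the final .execute() call of the real code is omitted:

class _Instances:                      # stand-in for a discovery resource
    def list(self, project, zone):
        return {"items": [{"name": "vm-1", "zone": zone}]}

class _Compute:                        # stand-in for the top-level client
    def instances(self):
        return _Instances()

fn_str_list = ['instances', 'list']    # e.g. parsed from node['type']
kwargs = {"params": {"project": "demo", "zone": "us-east1-b"}}

fn = _Compute()
for i in range(0, len(fn_str_list)):
    fn = getattr(fn, fn_str_list[i], None)
    if fn and i != len(fn_str_list) - 1:
        fn = fn()                      # call every level except the last
print(fn(**kwargs["params"]))          # the real code chains .execute() here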
def container_snapshots_filesystem(container):
    """
    Get the snapshot and mastersnapshot lists used in all test/mastertest files of a container from the filesystem.
    This gets the list of all the snapshots/mastersnapshots used in the container.
    The list is used to avoid populating the snapshots/mastersnapshots multiple times when the same
    snapshots/mastersnapshots are used in different test/mastertest files of a container.
    The default path is configured in config.ini.
    """
    snapshots = []
    logger.info("Starting to get list of snapshots")
    reporting_path = config_value('REPORTING', 'reportOutputFolder')
    json_dir = '%s/%s/%s' % (framework_dir(), reporting_path, container)
    logger.info(json_dir)
    singletest = get_from_currentdata(SINGLETEST)
    test_files = get_json_files(json_dir, JSONTEST)
    logger.info('\n'.join(test_files))
    for test_file in test_files:
        test_json_data = json_from_file(test_file)
        if test_json_data:
            snapshot = test_json_data['snapshot'] if 'snapshot' in test_json_data else ''
            if snapshot:
                file_name = snapshot if snapshot.endswith('.json') else '%s.json' % snapshot
                if singletest:
                    testsets = get_field_value_with_default(test_json_data, 'testSet', [])
                    for testset in testsets:
                        for testcase in testset['cases']:
                            if ('testId' in testcase and testcase['testId'] == singletest) or \
                                    ('masterTestId' in testcase and testcase['masterTestId'] == singletest):
                                if file_name not in snapshots:
                                    snapshots.append(file_name)
                else:
                    snapshots.append(file_name)

    test_files = get_json_files(json_dir, MASTERTEST)
    logger.info('\n'.join(test_files))
    for test_file in test_files:
        test_json_data = json_from_file(test_file)
        if test_json_data:
            snapshot = test_json_data['masterSnapshot'] if 'masterSnapshot' in test_json_data else ''
            if snapshot:
                file_name = snapshot if snapshot.endswith('.json') else '%s.json' % snapshot
                parts = file_name.split('.')
                file_name = '%s_gen.%s' % (parts[0], parts[-1])
                if singletest:
                    testsets = get_field_value_with_default(test_json_data, 'testSet', [])
                    for testset in testsets:
                        for testcase in testset['cases']:
                            if ('testId' in testcase and testcase['testId'] == singletest) or \
                                    ('masterTestId' in testcase and testcase['masterTestId'] == singletest):
                                if file_name not in snapshots:
                                    snapshots.append(file_name)
                else:
                    snapshots.append(file_name)
    return list(set(snapshots))
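A one-liner sketch of the '_gen' rename applied to masterSnapshot file names above; the file name is illustrative:

file_name = 'master-snapshot.json'
parts = file_name.split('.')
print('%s_gen.%s' % (parts[0], parts[-1]))   # master-snapshot_gen.json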
Example 6
def process_files(self, test_files, doctype, replace=False):
    """ Process Test or masterTest json files."""
    snapshots = []
    for test_file in test_files:
        test_json_data = json_from_file(test_file)
        if test_json_data:
            snapshot = test_json_data[doctype] if doctype in test_json_data else ''
            if snapshot:
                file_name = snapshot if snapshot.endswith('.json') else '%s.json' % snapshot
                if replace:
                    file_name = file_name.replace('.json', '_gen.json')
                if self.singleTest:
                    testsets = get_field_value_with_default(test_json_data, 'testSet', [])
                    for testset in testsets:
                        for testcase in testset['cases']:
                            if ('testId' in testcase and testcase['testId'] == self.singleTest) or \
                                    ('masterTestId' in testcase and testcase['masterTestId'] == self.singleTest):
                                if file_name not in snapshots:
                                    snapshots.append(file_name)
                else:
                    snapshots.append(file_name)
    return snapshots
def get_all_nodes(repopath, node, snapshot, ref, connector):
    """ Fetch all the nodes from the cloned git repository in the given path."""
    db_records = []
    collection = node['collection'] if 'collection' in node else COLLECTION
    given_type = get_field_value(connector, "type")
    base_path = get_field_value_with_default(connector, "folderPath", "")
    snapshot_source = get_field_value(snapshot, 'source')
    parts = snapshot_source.split('.')
    d_record = {
        "structure": given_type,
        "reference": ref if not base_path else "",
        "source": parts[0],
        "path": '',
        "timestamp": int(time.time() * 1000),
        "queryuser": "",
        "checksum": hashlib.md5("{}".encode('utf-8')).hexdigest(),
        "node": node,
        "snapshotId": None,
        "masterSnapshotId": node['masterSnapshotId'],
        "collection": collection.replace('.', '').lower(),
        "json": {}
    }
    node_type = node['type'] if 'type' in node and node['type'] else 'json'
    json_path = '%s/%s' % (repopath, node['path'])
    file_path = json_path.replace('//', '/')
    logger.info('Dir: %s', file_path)
    if exists_dir(file_path):
        count = 0
        for filename in glob.glob('%s/*.json' % file_path.replace('//', '/')):
            parts = filename.rsplit('/', 1)
            path = '%s/%s' % (node['path'], parts[-1])
            json_data = convert_to_json(filename, node_type)
            logger.info('type: %s, json:%s', node_type, json_data)
            if json_data:
                db_record = copy.deepcopy(d_record)
                db_record['snapshotId'] = '%s%s' % (node['masterSnapshotId'],
                                                    str(count))
                db_record['path'] = path.replace('//', '/')
                db_record['json'] = json_data
                data_str = json.dumps(json_data)
                db_record['checksum'] = hashlib.md5(
                    data_str.encode('utf-8')).hexdigest()
                db_records.append(db_record)
                count += 1
    else:
        logger.info('Get requires a valid directory for the snapshot, which is not present!')
    return db_records
Example 8
def generate_crawler_snapshot(snapshot, node, snapshot_data):
    node_type = get_field_value_with_default(node, 'type', "")
    resource_items = get_lits(snapshot=snapshot, node=node)

    snapshot_data[node['masterSnapshotId']] = []
    for index, resource_namespaced_item in enumerate(resource_items):
        snapshot_data[node['masterSnapshotId']].append({
            "masterSnapshotId": [node['masterSnapshotId']],
            "snapshotId": '%s%s_%s' % (node['masterSnapshotId'], resource_namespaced_item['namespace'], str(index)),
            "type": node_type,
            "collection": node['collection'],
            "paths": resource_namespaced_item['paths'],
            "status": "active",
            "validate": node['validate'] if 'validate' in node else True
        })
    return snapshot_data
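For an illustrative master snapshot id and a single namespaced resource, the function above would leave an entry like this in snapshot_data (all values are placeholders):

snapshot_data = {
    "K8S_MS_01": [{
        "masterSnapshotId": ["K8S_MS_01"],
        "snapshotId": "K8S_MS_01default_0",    # id + namespace + index
        "type": "pod",
        "collection": "k8spods",
        "paths": ["/api/v1/namespaces/default/pods/nginx"],
        "status": "active",
        "validate": True
    }]
}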
Example 9
def run_file_validation_tests(test_file, container, filesystem=True, snapshot_status=None):
    # logger.info("*" * 50)
    logger.info("\tTEST: %s", test_file)
    dirpath = None
    test_json_data = json_from_file(test_file)
    if not test_json_data:
        logger.info("\t\tTest file %s looks to be empty, next!...", test_file)
        return {}

    if test_json_data and "connector" in test_json_data and "remoteFile" in test_json_data and test_json_data["connector"] and test_json_data["remoteFile"]:
        dirpath, pull_response = pull_json_data(test_json_data)
        if not pull_response:
            return {}

    singletest = get_from_currentdata(SINGLETEST)
    if singletest:
        testsets = get_field_value_with_default(test_json_data, 'testSet', [])
        for testset in testsets:
            newtestcases = []
            for testcase in testset['cases']:
                if ('testId' in testcase and testcase['testId'] == singletest) or \
                        ('masterTestId' in testcase and testcase['masterTestId'] == singletest):
                    newtestcases.append(testcase)
            testset['cases'] = newtestcases
    resultset = run_json_validation_tests(test_json_data, container, filesystem, snapshot_status, dirpath=dirpath)
    finalresult = True
    if resultset:
        snapshot = test_json_data['snapshot'] if 'snapshot' in test_json_data else ''
        if singletest:
            print(json.dumps(resultset, indent=2))
        else:
            dump_output_results(resultset, container, test_file, snapshot, filesystem)
        for result in resultset:
            if 'result' in result:
                if not re.match(r'passed', result['result'], re.I):
                    finalresult = False
                    break
    else:
        # TODO: NO test cases in this file.
        # LOG HERE that no test cases are present in this file.
        finalresult = False
    return finalresult
def get_node(repopath, node, snapshot, ref, connector):
    """ Fetch node from the cloned git repository."""
    collection = node['collection'] if 'collection' in node else COLLECTION
    given_type = get_field_value(connector, "type")
    base_path = get_field_value_with_default(connector, "folderPath", "")
    snapshot_source = get_field_value(snapshot, 'source')
    parts = snapshot_source.split('.')
    db_record = {
        "structure": given_type,
        "reference": ref if not base_path else "",
        "source": parts[0],
        "path": base_path + node['path'],
        "timestamp": int(time.time() * 1000),
        "queryuser": "",
        "checksum": hashlib.md5("{}".encode('utf-8')).hexdigest(),
        "node": node,
        "snapshotId": node['snapshotId'],
        "collection": collection.replace('.', '').lower(),
        "json": {}
    }
    json_path = '%s/%s' % (repopath, node['path'])
    file_path = json_path.replace('//', '/')
    logger.info('File: %s', file_path)
    if exists_file(file_path):
        node_type = node['type'] if 'type' in node and node['type'] else 'json'
        json_data = convert_to_json(file_path, node_type)
        logger.info('type: %s, json:%s', node_type, json_data)
        # json_data = json_from_file(file_path)
        if json_data:
            db_record['json'] = json_data
            data_str = json.dumps(json_data)
            db_record['checksum'] = hashlib.md5(
                data_str.encode('utf-8')).hexdigest()
    else:
        logger.info('Get requires a valid file for the snapshot, which is not present!')
    logger.debug('DB: %s', db_record)
    return db_record
def git_clone_dir(connector):
    clonedir = None
    repopath = tempfile.mkdtemp()
    subdir = False
    if connector and isinstance(connector, dict):
        giturl = get_field_value(connector, 'gitProvider')
        if not giturl:
            logger.error("Git connector does not have valid git provider URL")
            return repopath, clonedir
        brnch = get_field_value_with_default(connector, 'branchName', 'master')
        isprivate = get_field_value(connector, 'private')
        isprivate = True if isprivate is None or not isinstance(
            isprivate, bool) else isprivate
        logger.info("Repopath: %s", repopath)
        http_match = re.match(r'^http(s)?://', giturl, re.I)
        if http_match:
            logger.info("Http (private:%s) giturl: %s, Repopath: %s",
                        "YES" if isprivate else "NO", giturl, repopath)
            username = get_field_value(connector, 'httpsUser')
            if username:
                pwd = get_field_value(connector, 'httpsPassword')
                schema = giturl[:http_match.span()[-1]]
                other_part = giturl[http_match.span()[-1]:]
                pwd = pwd if pwd else get_git_pwd()
                if pwd:
                    git_cmd = 'git clone %s%s:%s@%s %s' % (
                        schema, urllib.parse.quote_plus(username),
                        urllib.parse.quote_plus(pwd), other_part, repopath)
                else:
                    git_cmd = 'git clone %s%s@%s %s' % (
                        schema, urllib.parse.quote_plus(username), other_part,
                        repopath)
            else:
                git_cmd = 'git clone %s %s' % (giturl, repopath)
        else:
            logger.info("SSH (private:%s) giturl: %s, Repopath: %s",
                        "YES" if isprivate else "NO", giturl, repopath)
            if isprivate:
                ssh_key_file = get_field_value(connector, 'sshKeyfile')
                if not exists_file(ssh_key_file):
                    logger.error(
                        "Git connector points to a non-existent ssh keyfile!")
                    return repopath, clonedir
                ssh_host = get_field_value(connector, 'sshHost')
                ssh_user = get_field_value_with_default(
                    connector, 'sshUser', 'git')
                if not ssh_host:
                    logger.error(
                        "SSH host not set, could be like github.com, gitlab.com, 192.168.1.45 etc"
                    )
                    return repopath, clonedir
                ssh_dir = '%s/.ssh' % repopath
                if exists_dir(ssh_dir):
                    logger.error(
                        "Git ssh dir: %s already exists, cannot recreate it!",
                        ssh_dir)
                    return repopath, clonedir
                os.mkdir('%s/.ssh' % repopath, 0o700)
                ssh_cfg = create_ssh_config(ssh_dir, ssh_key_file, ssh_user)
                if not ssh_cfg:
                    logger.error(
                        "Creation of Git ssh config in dir: %s failed!",
                        ssh_dir)
                    return repopath, clonedir
                git_ssh_cmd = 'ssh -o "StrictHostKeyChecking=no" -F %s' % ssh_cfg
                git_cmd = 'git clone %s %s/tmpclone' % (giturl, repopath)
                subdir = True
            else:
                git_ssh_cmd = 'ssh -o "StrictHostKeyChecking=no"'
                git_cmd = 'git clone %s %s' % (giturl, repopath)
            os.environ['GIT_SSH_COMMAND'] = git_ssh_cmd
            logger.info("GIT_SSH_COMMAND=%s", git_ssh_cmd)
        git_cmd = '%s --branch %s' % (git_cmd, brnch)
        logger.info("os.system(%s)", git_cmd)
        if git_cmd:
            run_subprocess_cmd(git_cmd)
            checkdir = '%s/tmpclone' % repopath if subdir else repopath
            clonedir = checkdir if exists_dir('%s/.git' % checkdir) else None
        if 'GIT_SSH_COMMAND' in os.environ:
            os.environ.pop('GIT_SSH_COMMAND')
    return repopath, clonedir
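A minimal sketch of the credential-embedding step used in the HTTPS branch above, with placeholder credentials; note how quote_plus escapes characters that would otherwise break the URL:

import re
import urllib.parse

giturl = 'https://git.example.com/org/repo.git'   # placeholder
username, pwd = 'deploy-bot', 'p@ss/word'         # placeholders

http_match = re.match(r'^http(s)?://', giturl, re.I)
schema = giturl[:http_match.span()[-1]]           # 'https://'
other_part = giturl[http_match.span()[-1]:]       # host and path
print('git clone %s%s:%s@%s %s' % (
    schema, urllib.parse.quote_plus(username),
    urllib.parse.quote_plus(pwd), other_part, '/tmp/clone'))
# git clone https://deploy-bot:p%40ss%2Fword@git.example.com/org/repo.git /tmp/clone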
Example 12
def run_container_validation_tests_filesystem(container, snapshot_status=None):
    """Get test files from the filesystem."""
    # logger.info("Starting validation tests")
    logger.info("VALIDATION:")
    logger.info("\tCollection: %s,  Type: FILESYSTEM", container)
    reporting_path = config_value('REPORTING', 'reportOutputFolder')
    json_dir = '%s/%s/%s' % (framework_dir(), reporting_path, container)
    logger.info('\tLOCATION: %s', json_dir)
    test_files = get_json_files(json_dir, JSONTEST)
    # logger.info('\n'.join(test_files))
    result = True
    for test_file in test_files:
        logger.info('\tCOLLECTION: %s', test_file)
        val = run_file_validation_tests(test_file, container, True, snapshot_status)
        result = result and val
    if test_files:
        # return the result value if "test" file is processed collection
        logger.critical("VALIDATION COMPLETE:")
        return result

    # mastertest files
    test_files = get_json_files(json_dir, MASTERTEST)
    # logger.info('\n'.join(test_files))
    if not test_files:
        logger.error("ERROR: No `test` or `mastertest` file found. collection should contain either `test` or `mastertest` file")
        return False

    finalresult = result
    for test_file in test_files:
        logger.info('\tCOLLECTION: %s', test_file)
        # logger.info("*" * 50)
        # logger.info("validator tests: %s", test_file)
        dirpath = None
        test_json_data = json_from_file(test_file)
        if not test_json_data:
            logger.info("Test file %s looks to be empty, next!...", test_file)
            continue

        if "connector" in test_json_data and "remoteFile" in test_json_data and test_json_data["connector"] and test_json_data["remoteFile"]:
            dirpath, pull_response = pull_json_data(test_json_data)
            if not pull_response:
                return {}

        snapshot_key = '%s_gen' % test_json_data['masterSnapshot']
        mastersnapshots = defaultdict(list)
        snapshot_data = snapshot_status[snapshot_key] if snapshot_key in snapshot_status else {}
        for snapshot_id, mastersnapshot_id in snapshot_data.items():
            if isinstance(mastersnapshot_id, list):
                for master_snapshot_id in mastersnapshot_id:
                    mastersnapshots[master_snapshot_id].append(snapshot_id)
            elif isinstance(mastersnapshot_id, str):
                mastersnapshots[mastersnapshot_id].append(snapshot_id)
        if not mastersnapshots:
            logger.error("No generated snapshots found for validation.")
            continue
        test_json_data['snapshot'] = snapshot_key
        testsets = get_field_value_with_default(test_json_data, 'testSet', [])
        for testset in testsets:
            testcases = get_field_value_with_default(testset, 'cases', [])
            testset['cases'] = _get_new_testcases(testcases, mastersnapshots)
        # print(json.dumps(test_json_data, indent=2))
        singletest = get_from_currentdata(SINGLETEST)
        if singletest:
            for testset in testsets:
                newtestcases = []
                for testcase in testset['cases']:
                    if ('testId' in testcase and testcase['testId'] == singletest) or \
                            ('masterTestId' in testcase and testcase['masterTestId'] == singletest):
                        newtestcases.append(testcase)
                testset['cases'] = newtestcases
        resultset = run_json_validation_tests(test_json_data, container, True, snapshot_status, dirpath=dirpath)
        if test_json_data.get('testSet') and not resultset:
            logger.error('\tERROR: Testset does not contain any testcases, or all testcases are skipped due to invalid rules.')
        elif resultset:
            snapshot = test_json_data['snapshot'] if 'snapshot' in test_json_data else ''
            if singletest:
                print(json.dumps(resultset, indent=2))
            else:
                dump_output_results(resultset, container, test_file, snapshot, True)
            for result in resultset:
                if 'result' in result:
                    if not re.match(r'passed', result['result'], re.I):
                        finalresult = False
                        break
        else:
            logger.error('\tERROR: No mastertest Documents found!')
            finalresult = False
    logger.critical("VALIDATION COMPLETE:")
    return finalresult
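A short sketch of the snapshot_status inversion performed above: the per-container status maps each snapshotId to one or more masterSnapshotIds, and validation needs the reverse mapping. The ids are illustrative:

from collections import defaultdict

snapshot_data = {                       # illustrative snapshot_status entry
    'SNP_01': 'MS_01',
    'SNP_02': ['MS_01', 'MS_02'],
}

mastersnapshots = defaultdict(list)
for snapshot_id, mastersnapshot_id in snapshot_data.items():
    if isinstance(mastersnapshot_id, list):
        for master_snapshot_id in mastersnapshot_id:
            mastersnapshots[master_snapshot_id].append(snapshot_id)
    elif isinstance(mastersnapshot_id, str):
        mastersnapshots[mastersnapshot_id].append(snapshot_id)

print(dict(mastersnapshots))
# {'MS_01': ['SNP_01', 'SNP_02'], 'MS_02': ['SNP_02']}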
Example 13
def git_clone_dir(connector):
    clonedir = None
    repopath = tempfile.mkdtemp()
    subdir = False
    git_cmd = None
    if connector and isinstance(connector, dict):
        giturl = get_field_value(connector, 'gitProvider')
        if not giturl:
            logger.error("Git connector does not have valid git provider URL")
            return repopath, clonedir
        
        branch = get_from_currentdata('branch')
        if not branch:
            branch = get_field_value_with_default(connector, 'branchName', 'master')

        isprivate = get_field_value(connector, 'private')
        isprivate = True if isprivate is None or not isinstance(isprivate, bool) else isprivate
        # logger.info("Repopath: %s", repopath)
        logger.info("\t\t\tRepopath: %s", repopath)
        http_match = re.match(r'^http(s)?://', giturl, re.I)
        if http_match:
            logger.info("\t\t\tHttp (private:%s) giturl: %s", "YES" if isprivate else "NO", giturl)

            accessToken = get_field_value(connector, 'httpsAccessToken')
            username = get_field_value(connector, 'httpsUser')
            if accessToken:
                logger.info("AccessToken: %s" % accessToken)
                pwd = get_field_value(connector, 'httpsPassword')
                pwd = pwd if pwd else get_git_pwd(key=accessToken)
                if not pwd:
                    pwd = get_pwd_from_vault(accessToken)
                    if pwd:
                        logger.info("Git access token from vault: %s", '*' * len(pwd))
                if pwd:
                    gh = GithubFunctions()
                    gh.set_access_token(pwd)
                    _ = gh.populate_user()
                    rpo = gh.clone_repo(giturl, repopath, branch)
                    if rpo:
                        logger.info('Successfully cloned in %s dir' % repopath)
                        checkdir = '%s/tmpclone' % repopath if subdir else repopath
                        clonedir = checkdir if exists_dir('%s/.git' % checkdir) else None
                        if not clonedir:
                            logger.error("No valid data provided to connect to git: %s", giturl)
                        return repopath, clonedir
                    elif isprivate:
                        logger.error("Please provide password for connect to git repository.")
                        return repopath, clonedir
                    else:
                        git_cmd = 'git clone %s %s' % (giturl, repopath)
            elif username:
                pwd = get_field_value(connector, 'httpsPassword')
                schema = giturl[:http_match.span()[-1]]
                other_part = giturl[http_match.span()[-1]:]
                # pwd = pwd if (pwd and not json_source()) else (get_git_pwd() if not json_source() else get_pwd_from_vault(pwd))
                pwd = pwd if pwd else get_git_pwd(key=username)

                # populate the password from vault
                if not pwd:
                    pwd = get_pwd_from_vault(username)
                    if pwd:
                        logger.info("Git password from vault: %s", '*' * len(pwd))
                if pwd:
                    git_cmd = 'git clone --depth 1 %s%s:%s@%s %s' % (schema, urllib.parse.quote_plus(username),
                                                        urllib.parse.quote_plus(pwd), other_part, repopath)
                elif isprivate:
                    logger.error("Please provide password for connect to git repository.")
                    return repopath, clonedir
                else:
                    git_cmd = 'git clone --depth 1 %s%s:%s@%s %s' % (schema, urllib.parse.quote_plus(username), "",
                                                     other_part, repopath)
            else:
                git_cmd = 'git clone --depth 1 %s %s' % (giturl, repopath)
        else:
            logger.info("SSH (private:%s) giturl: %s, Repopath: %s", "YES" if isprivate else "NO",
                        giturl, repopath)
            if isprivate:
                ssh_key_file = get_field_value(connector, 'sshKeyfile')
                ssh_key_name = get_field_value(connector, 'sshKeyName')
                ssh_key_file_data = None
                if ssh_key_file:
                    if not exists_file(ssh_key_file):
                        logger.error("Git connector points to a non-existent ssh keyfile!")
                        return repopath, clonedir
                elif ssh_key_name:
                    ssh_key_file_data = get_vault_data(ssh_key_name)
                    if not ssh_key_file_data:
                        logger.info('Git connector points to a non-existent ssh keyName in the vault!')
                        return repopath, clonedir
                ssh_host = get_field_value(connector, 'sshHost')
                ssh_user = get_field_value_with_default(connector, 'sshUser', 'git')
                if not ssh_host:
                    logger.error("SSH host not set, could be like github.com, gitlab.com, 192.168.1.45 etc")
                    return repopath, clonedir
                ssh_dir = '%s/.ssh' % repopath
                if exists_dir(ssh_dir):
                    logger.error("Git ssh dir: %s already exists, cannot recreate it!", ssh_dir)
                    return repopath, clonedir
                os.mkdir('%s/.ssh' % repopath, 0o700)
                if not ssh_key_file and ssh_key_name and ssh_key_file_data:
                    ssh_key_file = create_ssh_file_vault_data(ssh_dir, ssh_key_file_data, ssh_key_name)
                    if not ssh_key_file:
                        logger.info('Git connector points to a non-existent ssh keyName in the vault!')
                        return repopath, clonedir
                ssh_cfg = create_ssh_config(ssh_dir, ssh_key_file, ssh_user)
                if not ssh_cfg:
                    logger.error("Creation of Git ssh config in dir: %s failed!", ssh_dir)
                    return repopath, clonedir
                git_ssh_cmd = 'ssh -o "StrictHostKeyChecking=no" -F %s' % ssh_cfg
                git_cmd = 'git clone %s %s/tmpclone' % (giturl, repopath)
                subdir = True
            else:
                git_ssh_cmd = 'ssh -o "StrictHostKeyChecking=no"'
                git_cmd = 'git clone %s %s' % (giturl, repopath)
            os.environ['GIT_SSH_COMMAND'] = git_ssh_cmd
        if git_cmd:
            git_cmd = '%s --branch %s' % (git_cmd, branch)
            error_result, result = run_subprocess_cmd(git_cmd)
            checkdir = '%s/tmpclone' % repopath if subdir else repopath
            clonedir = checkdir if exists_dir('%s/.git' % checkdir) else None
            if not clonedir:
                logger.error("No valid data provided to connect to git: %s", error_result)
        if 'GIT_SSH_COMMAND' in os.environ:
            os.environ.pop('GIT_SSH_COMMAND')
    return repopath, clonedir
Example 14
def populate_custom_snapshot(snapshot, container=None):
    """ Populates the resources from git."""
    dbname = config_value('MONGODB', 'dbname')
    snapshot_source = get_field_value(snapshot, 'source')
    connector_data = get_from_currentdata('connector')
    if connector_data:
        sub_data = get_custom_data(connector_data)
        if not sub_data:
            logger.error("No connector data found in '%s'", connector_data)
    else:
        sub_data = get_custom_data(snapshot_source)
    snapshot_nodes = get_field_value(snapshot, 'nodes')
    snapshot_data, valid_snapshotids = validate_snapshot_nodes(snapshot_nodes)
    if valid_snapshotids and sub_data and snapshot_nodes:
        baserepo, repopath = _get_repo_path(sub_data, snapshot)
        if repopath:
            brnch = get_field_value_with_default(sub_data, 'branchName', 'master')
            for node in snapshot_nodes:
                node_type = node['type'] if 'type' in node and node['type'] else ''
                if node_type in TEMPLATE_NODE_TYPES:
                    template_data = {
                        "container" : container,
                        "dbname" : dbname,
                        "snapshot_source" : snapshot_source,
                        "connector_data" : sub_data,
                        "snapshot_data" : snapshot_data,
                        "repopath" : repopath,
                        "snapshot" : snapshot
                    }
                    template_processor = TEMPLATE_NODE_TYPES[node_type](node, **template_data)
                    if 'snapshotId' in node:
                        snapshot_data = template_processor.populate_template_snapshot()
                    elif 'masterSnapshotId' in node:
                        snapshot_data = template_processor.populate_all_template_snapshot()
                elif 'paths' in node:
                    logger.error("ERROR: Invalid json : `%s` is not a valid node type." % (node_type))
                else:
                    # logger.debug(node)
                    # data = get_node(repopath, node, snapshot_source, brnch)
                    # if data:
                    #     insert_one_document(data, data['collection'], dbname)
                    #     snapshot_data[node['snapshotId']] = True
                    validate = node['validate'] if 'validate' in node else True
                    if 'snapshotId' in node:
                        logger.debug(node)
                        data = get_node(repopath, node, snapshot, brnch, sub_data)
                        if data:
                            if validate:
                                if get_dbtests():
                                    if get_collection_size(data['collection']) == 0:
                                        # Create indexes for the collection
                                        create_indexes(
                                            data['collection'],
                                            config_value(DATABASE, DBNAME),
                                            [('snapshotId', pymongo.ASCENDING),
                                             ('timestamp', pymongo.DESCENDING)])
                                        create_indexes(
                                            data['collection'],
                                            config_value(DATABASE, DBNAME),
                                            [('_id', pymongo.DESCENDING),
                                             ('timestamp', pymongo.DESCENDING),
                                             ('snapshotId', pymongo.ASCENDING)])
                                    insert_one_document(data, data['collection'], dbname)
                                else:
                                    snapshot_dir = make_snapshots_dir(container)
                                    if snapshot_dir:
                                        store_snapshot(snapshot_dir, data)
                                if 'masterSnapshotId' in node:
                                    snapshot_data[node['snapshotId']] = node['masterSnapshotId']
                                else:
                                    snapshot_data[node['snapshotId']] = True
                            # else:
                            #     snapshot_data[node['snapshotId']] = False
                            node['status'] = 'active'
                        else:
                            node['status'] = 'inactive'
                        logger.debug('Type: %s', type(data))
                    elif 'masterSnapshotId' in node:
                        alldata = get_all_nodes(repopath, node, snapshot, brnch, sub_data)
                        if alldata:
                            snapshot_data[node['masterSnapshotId']] = []
                            for data in alldata:
                                snapshot_data[node['masterSnapshotId']].append(
                                    {
                                        'snapshotId': data['snapshotId'],
                                        'path': data['path'],
                                        'validate': validate
                                    })
                        logger.debug('Type: %s', type(alldata))
        if baserepo and os.path.exists(baserepo):
            # logger.info('\t\tCLEANING Repo: %s', baserepo)
            shutil.rmtree(baserepo)
    return snapshot_data
Example 15
def get_all_nodes(credentials, node, snapshot_source, snapshot, snapshot_data):
    """
    Fetch all nodes from Google over the connection, via the Google client API functions.
    """
    collection = node['collection'] if 'collection' in node else COLLECTION
    parts = snapshot_source.split('.')
    project_id = get_field_value_with_default(snapshot, 'project-id',"")
    node_type = get_field_value_with_default(node, 'type',"")
    
    db_record = {
        "structure": "google",
        "error": None,
        "reference": project_id,
        "source": parts[0],
        "path": "",
        "timestamp": int(time.time() * 1000),
        "queryuser": get_field_value(snapshot, 'testUser'),
        "checksum": hashlib.md5("{}".encode('utf-8')).hexdigest(),
        "node": node,
        "snapshotId": None,
        "masterSnapshotId": [node['masterSnapshotId']],
        "collection": collection.replace('.', '').lower(),
        "json": {},  # Refactor when node is absent it should None, when empty object put it as {}
        "items": []
    }

    if node_type:
        access_token = credentials.get_access_token().access_token
        header = {
            "Authorization" : ("Bearer %s" % access_token)
        }

        base_node_type_list = node_type.split("/")
        if len(base_node_type_list) > 1:
            base_node_type = base_node_type_list[1]
        else:
            logger.error("Invalid node type '%s'", node_type)
            return db_record

        request_url = get_api_path(base_node_type)
        request_url = generate_request_url(request_url, project_id)
        logger.info("Invoke request for get snapshot: %s", request_url)
        
        status, data = http_get_request(request_url, header)
        logger.info('Get snapshot status: %s', status)

        fn_str_list = ""
        if node and 'type' in node and node['type']:
            fn_str_list = get_field_value(node, 'type').split(".")
        
        response_param = ""
        if fn_str_list and len(fn_str_list) > 1:
            response_param = fn_str_list[-2]
        elif fn_str_list and len(fn_str_list) == 1:
            response_param = fn_str_list[0]
        
        if data:
            check_node_type = node_type
            node_type_list = node_type.split(".")
            if len(node_type_list) > 1:
                del node_type_list[-1]
                check_node_type = ".".join(node_type_list)

            db_record['json'] = data
            if response_param in data:
                db_record['items'] = data[response_param]
            elif "items" in data:
                if isinstance(data['items'], dict):
                    # Aggregated responses key each scope by the resource
                    # name, so look the per-scope list up by response_param.
                    for name, scoped_dict in data['items'].items():
                        if response_param in scoped_dict:
                            db_record['items'] = db_record['items'] + scoped_dict[response_param]

                if not db_record['items']:
                    db_record['items'] = data['items']

            set_snapshot_data(node, db_record['items'], snapshot_data)

            checksum = get_checksum(data)
            if checksum:
                db_record['checksum'] = checksum

    return db_record
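A toy sketch of the items extraction above for an aggregated-style response, assuming response_param is 'instances'; data is a placeholder payload:

response_param = 'instances'
data = {                      # placeholder aggregated-style response
    "items": {
        "zones/us-east1-b": {"instances": [{"name": "vm-1"}]},
        "zones/us-east1-c": {"warning": {"code": "NO_RESULTS_ON_PAGE"}},
    }
}

items = []
if response_param in data:
    items = data[response_param]
elif "items" in data and isinstance(data['items'], dict):
    for name, scoped_dict in data['items'].items():
        if response_param in scoped_dict:
            items = items + scoped_dict[response_param]

print(items)                  # [{'name': 'vm-1'}]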
Example 16
def populate_google_snapshot(snapshot, container=None):
    """
    This is an entrypoint for populating a snapshot of type google.
    All snapshot connectors should take a snapshot object and, based on
    the 'source' field, create a method to connect to the service for
    the connector.
    The 'source' field could be used by more than one snapshot, so the
    'testUser' attribute should match a user defined for the 'source'.
    """
    dbname = config_value('MONGODB', 'dbname')
    snapshot_source = get_field_value(snapshot, 'source')
    snapshot_user = get_field_value(snapshot, 'testUser')
    project_id = get_field_value(snapshot, 'project-id')
    sub_data = get_google_data(snapshot_source)
    snapshot_nodes = get_field_value(snapshot, 'nodes')
    snapshot_data, valid_snapshotids = validate_snapshot_nodes(snapshot_nodes)
    if valid_snapshotids and sub_data and snapshot_nodes:
        logger.debug(sub_data)
        try:
            for node in snapshot['nodes']:
                validate = node['validate'] if 'validate' in node else True
                logger.info(node)
                node_type = get_field_value_with_default(node, 'type',"")
                credentials = get_google_client_data(sub_data, snapshot_user, node_type, project_id)
                if not credentials:
                    logger.info("No  GCE connection in the snapshot to access Google resource!...")
                    return snapshot_data
                if 'snapshotId' in node:
                    if validate:
                        data = get_node(credentials, node, snapshot_source, snapshot)
                        if data:
                            error_str = data.pop('error', None)
                            if get_dbtests():
                                if get_collection_size(data['collection']) == 0:
                                    # Create indexes for the collection
                                    create_indexes(
                                        data['collection'], 
                                        config_value(DATABASE, DBNAME), 
                                        [
                                            ('snapshotId', pymongo.ASCENDING),
                                            ('timestamp', pymongo.DESCENDING)
                                        ]
                                    )
                                    create_indexes(
                                        data['collection'], 
                                        config_value(DATABASE, DBNAME), 
                                        [
                                            ('_id', pymongo.DESCENDING),
                                            ('timestamp', pymongo.DESCENDING),
                                            ('snapshotId', pymongo.ASCENDING)
                                        ]
                                    )
                                insert_one_document(data, data['collection'], dbname)
                            else:
                                snapshot_dir = make_snapshots_dir(container)
                                if snapshot_dir:
                                    store_snapshot(snapshot_dir, data)
                    
                            if 'masterSnapshotId' in node:
                                snapshot_data[node['snapshotId']] = node['masterSnapshotId']
                            else:
                                snapshot_data[node['snapshotId']] = False if error_str else True
                        else:
                            node['status'] = 'inactive'
                elif 'masterSnapshotId' in node:
                    data = get_all_nodes(credentials, node, snapshot_source, snapshot, snapshot_data)
                    logger.debug('Type: %s', type(data))
        except Exception as ex:
            logger.info('Unable to create Google client: %s', ex)
            raise
    return snapshot_data
def populate_custom_snapshot(snapshot, container=None):
    """ Populates the resources from git."""
    dbname = config_value('MONGODB', 'dbname')
    snapshot_source = get_field_value(snapshot, 'source')
    sub_data = get_custom_data(snapshot_source)
    snapshot_nodes = get_field_value(snapshot, 'nodes')
    snapshot_data, valid_snapshotids = validate_snapshot_nodes(snapshot_nodes)
    if valid_snapshotids and sub_data and snapshot_nodes:
        baserepo, repopath = _get_repo_path(sub_data, snapshot)
        if repopath:
            brnch = get_field_value_with_default(sub_data, 'branchName',
                                                 'master')
            for node in snapshot_nodes:
                node_type = node[
                    'type'] if 'type' in node and node['type'] else 'json'
                if node_type == 'arm':
                    if 'snapshotId' in node:
                        populate_arm_snapshot(container, dbname,
                                              snapshot_source, sub_data,
                                              snapshot_data, node, repopath)
                    elif 'masterSnapshotId' in node:
                        populate_all_arm_snapshot(snapshot, dbname, sub_data,
                                                  node, repopath,
                                                  snapshot_data)
                else:
                    # logger.debug(node)
                    # data = get_node(repopath, node, snapshot_source, brnch)
                    # if data:
                    #     insert_one_document(data, data['collection'], dbname)
                    #     snapshot_data[node['snapshotId']] = True
                    validate = node['validate'] if 'validate' in node else True
                    if 'snapshotId' in node:
                        logger.debug(node)
                        data = get_node(repopath, node, snapshot, brnch,
                                        sub_data)
                        if data:
                            if validate:
                                if get_dbtests():
                                    if get_collection_size(
                                            data['collection']) == 0:
                                        # Create indexes for the collection
                                        create_indexes(
                                            data['collection'],
                                            config_value(DATABASE, DBNAME),
                                            [('snapshotId', pymongo.ASCENDING),
                                             ('timestamp', pymongo.DESCENDING)
                                             ])
                                    insert_one_document(
                                        data, data['collection'], dbname)
                                else:
                                    snapshot_dir = make_snapshots_dir(
                                        container)
                                    if snapshot_dir:
                                        store_snapshot(snapshot_dir, data)
                                if 'masterSnapshotId' in node:
                                    snapshot_data[node['snapshotId']] = node[
                                        'masterSnapshotId']
                                else:
                                    snapshot_data[node['snapshotId']] = True
                            else:
                                snapshot_data[node['snapshotId']] = False
                            node['status'] = 'active'
                        else:
                            node['status'] = 'inactive'
                        logger.debug('Type: %s', type(data))
                    elif 'masterSnapshotId' in node:
                        alldata = get_all_nodes(repopath, node, snapshot,
                                                brnch, sub_data)
                        if alldata:
                            snapshot_data[node['masterSnapshotId']] = []
                            for data in alldata:
                                snapshot_data[node['masterSnapshotId']].append(
                                    {
                                        'snapshotId': data['snapshotId'],
                                        'path': data['path'],
                                        'validate': True
                                    })
                        logger.debug('Type: %s', type(alldata))
        if baserepo and os.path.exists(baserepo):
            logger.info('Repo path: %s', baserepo)
            shutil.rmtree(baserepo)
    return snapshot_data
Example 18
def run_container_validation_tests_filesystem(container, snapshot_status=None):
    """Get test files from the filesystem."""
    logger.info("Starting validation tests")
    reporting_path = config_value('REPORTING', 'reportOutputFolder')
    json_dir = '%s/%s/%s' % (framework_dir(), reporting_path, container)
    logger.info(json_dir)
    test_files = get_json_files(json_dir, JSONTEST)
    logger.info('\n'.join(test_files))
    result = True
    for test_file in test_files:
        val = run_file_validation_tests(test_file, container, True,
                                        snapshot_status)
        result = result and val
    # mastertest files
    test_files = get_json_files(json_dir, MASTERTEST)
    logger.info('\n'.join(test_files))
    finalresult = True
    for test_file in test_files:
        logger.info("*" * 50)
        logger.info("validator tests: %s", test_file)
        test_json_data = json_from_file(test_file)
        if not test_json_data:
            logger.info("Test file %s looks to be empty, next!...", test_file)
            continue
        snapshot_key = '%s_gen' % test_json_data['masterSnapshot']
        mastersnapshots = defaultdict(list)
        snapshot_data = snapshot_status[
            snapshot_key] if snapshot_key in snapshot_status else {}
        for snapshot_id, mastersnapshot_id in snapshot_data.items():
            if isinstance(mastersnapshot_id, list):
                for master_snapshot_id in mastersnapshot_id:
                    mastersnapshots[master_snapshot_id].append(snapshot_id)
            elif isinstance(mastersnapshot_id, str):
                mastersnapshots[mastersnapshot_id].append(snapshot_id)
        test_json_data['snapshot'] = snapshot_key
        testsets = get_field_value_with_default(test_json_data, 'testSet', [])
        for testset in testsets:
            testcases = get_field_value_with_default(testset, 'cases', [])
            testset['cases'] = _get_new_testcases(testcases, mastersnapshots)
        # print(json.dumps(test_json_data, indent=2))
        singletest = get_from_currentdata(SINGLETEST)
        if singletest:
            for testset in testsets:
                newtestcases = []
                for testcase in testset['cases']:
                    if ('testId' in testcase and testcase['testId'] == singletest) or \
                            ('masterTestId' in testcase and testcase['masterTestId'] == singletest):
                        newtestcases.append(testcase)
                testset['cases'] = newtestcases
        resultset = run_json_validation_tests(test_json_data, container, False,
                                              snapshot_status)
        if resultset:
            snapshot = test_json_data[
                'snapshot'] if 'snapshot' in test_json_data else ''
            if singletest:
                print(json.dumps(resultset, indent=2))
            else:
                dump_output_results(resultset, container, test_file, snapshot,
                                    True)
            for result in resultset:
                if 'result' in result:
                    if not re.match(r'passed', result['result'], re.I):
                        finalresult = False
                        break
        else:
            logger.info('No mastertest Documents found!')
            finalresult = False
    return finalresult
Example 19
def run_container_validation_tests_database(container, snapshot_status=None):
    """ Get the test files from the database"""
    dbname = config_value(DATABASE, DBNAME)
    # For test files
    collection = config_value(DATABASE, collectiontypes[TEST])
    qry = {'container': container}
    sort = [sort_field('timestamp', False)]
    docs = get_documents(collection, dbname=dbname, sort=sort, query=qry)
    finalresult = True
    if docs and len(docs):
        logger.info('Number of test Documents: %s', len(docs))
        for doc in docs:
            if doc['json']:
                resultset = run_json_validation_tests(doc['json'], container,
                                                      False)
                if resultset:
                    snapshot = doc['json']['snapshot'] if 'snapshot' in doc[
                        'json'] else ''
                    test_file = doc['name'] if 'name' in doc else ''
                    dump_output_results(resultset, container, test_file,
                                        snapshot, False)
                    for result in resultset:
                        if 'result' in result:
                            if not re.match(r'passed', result['result'], re.I):
                                finalresult = False
                                break
    else:
        logger.info('No test Documents found!')
        finalresult = False
    # For mastertest files
    collection = config_value(DATABASE, collectiontypes[MASTERTEST])
    docs = get_documents(collection, dbname=dbname, sort=sort, query=qry)
    # snapshots_details_map = _get_snapshot_type_map(container)
    if docs and len(docs):
        logger.info('Number of mastertest Documents: %s', len(docs))
        for doc in docs:
            test_json_data = doc['json']
            if test_json_data:
                snapshot_key = '%s_gen' % test_json_data['masterSnapshot']
                mastersnapshots = defaultdict(list)
                snapshot_data = snapshot_status[snapshot_key] if snapshot_key in snapshot_status else {}
                for snapshot_id, mastersnapshot_id in snapshot_data.items():
                    if isinstance(mastersnapshot_id, list):
                        for msnp_id in mastersnapshot_id:
                            mastersnapshots[msnp_id].append(snapshot_id)
                    else:
                        mastersnapshots[mastersnapshot_id].append(snapshot_id)
                test_json_data['snapshot'] = snapshot_key
                testsets = get_field_value_with_default(
                    test_json_data, 'testSet', [])
                for testset in testsets:
                    testcases = get_field_value_with_default(
                        testset, 'cases', [])
                    testset['cases'] = _get_new_testcases(
                        testcases, mastersnapshots)
                # print(json.dumps(test_json_data, indent=2))
                resultset = run_json_validation_tests(test_json_data,
                                                      container, False,
                                                      snapshot_status)
                if resultset:
                    snapshot = doc['json']['snapshot'] if 'snapshot' in doc['json'] else ''
                    test_file = doc['name'] if 'name' in doc else ''
                    dump_output_results(resultset, container, test_file,
                                        snapshot, False)
                    for result in resultset:
                        if 'result' in result:
                            if not re.match(r'passed', result['result'], re.I):
                                finalresult = False
                                break
    else:
        logger.info('No mastertest Documents found!')
        finalresult = False
    return finalresult
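A usage sketch for the function above; the container name and the shape of snapshot_status are illustrative assumptions, not values taken from the source:

# Hypothetical driver: the '<masterSnapshot>_gen' key must match the
# masterSnapshot field of the stored mastertest documents.
snapshot_status = {
    'master_snapshot_gen': {'SNAPSHOT_1': 'MASTER_1'},
}
passed = run_container_validation_tests_database('gcp_container', snapshot_status)
print('all tests passed' if passed else 'at least one test failed')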
Example 20
def run_container_validation_tests_database(container, snapshot_status=None):
    """ Get the test files from the database"""
    dirpath = None
    dbname = config_value(DATABASE, DBNAME)
    test_files_found = True
    mastertest_files_found = True
    # For test files
    collection = config_value(DATABASE, collectiontypes[TEST])
    qry = {'container': container}
    sort = [sort_field('timestamp', False)]
    docs = get_documents(collection, dbname=dbname, sort=sort, query=qry)
    finalresult = True
    if docs and len(docs):
        logger.info('Number of test Documents: %s', len(docs))
        for doc in docs:
            if doc['json']:
                try:
                    snapshot = doc['json']['snapshot'] if 'snapshot' in doc['json'] else ''
                    if "connector" in doc['json'] and "remoteFile" in doc['json'] and doc['json']["connector"] and doc['json']["remoteFile"]:
                        dirpath, pull_response = pull_json_data(doc['json'])
                        if not pull_response:
                            return {}
                    resultset = run_json_validation_tests(doc['json'], container, False, dirpath=dirpath)
                    if resultset:
                        test_file = doc['name'] if 'name' in doc else ''
                        dump_output_results(resultset, container, test_file, snapshot, False)
                        for result in resultset:
                            if 'result' in result:
                                if not re.match(r'passed', result['result'], re.I):
                                    finalresult = False
                                    break
                except Exception as e:
                    dump_output_results([], container, "-", snapshot, False)
                    raise e
    else:
        logger.info('No test Documents found!')
        test_files_found = False
        finalresult = False
    # For mastertest files
    collection = config_value(DATABASE, collectiontypes[MASTERTEST])
    docs = get_documents(collection, dbname=dbname, sort=sort, query=qry)
    # snapshots_details_map = _get_snapshot_type_map(container)
    if docs and len(docs):
        logger.info('Number of mastertest Documents: %s', len(docs))
        for doc in docs:
            test_json_data = doc['json']
            if test_json_data:
                snapshot = doc['json']['snapshot'] if 'snapshot' in doc['json'] else ''
                test_file = doc['name'] if 'name' in doc else '-'
                try:
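                    # Mastertest content may live in a remote repository; pull
                    # it locally first and abort the run if the fetch fails.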
                    if "connector" in test_json_data and "remoteFile" in test_json_data and test_json_data["connector"] and test_json_data["remoteFile"]:
                        dirpath, pull_response = pull_json_data(test_json_data)
                        if not pull_response:
                            return {}
                    snapshot_key = '%s_gen' % test_json_data['masterSnapshot']
                    mastersnapshots = defaultdict(list)
                    snapshot_data = snapshot_status[snapshot_key] if snapshot_key in snapshot_status else {}
                    for snapshot_id, mastersnapshot_id in snapshot_data.items():
                        if isinstance(mastersnapshot_id, list):
                            for msnp_id in mastersnapshot_id:
                                mastersnapshots[msnp_id].append(snapshot_id)
                        else:
                            mastersnapshots[mastersnapshot_id].append(snapshot_id)
                    test_json_data['snapshot'] = snapshot_key
                    testsets = get_field_value_with_default(test_json_data, 'testSet', [])
                    for testset in testsets:
                        testcases = get_field_value_with_default(testset, 'cases', [])
                        testset['cases'] = _get_new_testcases(testcases, mastersnapshots)
                    # print(json.dumps(test_json_data, indent=2))
                    resultset = run_json_validation_tests(test_json_data, container, False, snapshot_status, dirpath=dirpath)
                    if resultset:
                        dump_output_results(resultset, container, test_file, snapshot, False)
                        for result in resultset:
                            if 'result' in result:
                                if not re.match(r'passed', result['result'], re.I):
                                    finalresult = False
                                    break
                except Exception as e:
                    dump_output_results([], container, test_file, snapshot, False)
                    raise e
    else:
        logger.info('No mastertest Documents found!')
        mastertest_files_found = False
        finalresult = False
    if not test_files_found and not mastertest_files_found:
        raise Exception("No complaince tests for this container: %s, add and run!", container)
    return finalresult
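Both document loops reduce a resultset to the single finalresult boolean with a case-insensitive prefix match on each result's 'result' field: anything that does not start with "passed" fails the whole run. The same reduction isolated as a sketch, with an invented resultset:

import re

resultset = [
    {'result': 'Passed'},
    {'result': 'failed'},  # any non-"passed" prefix flips the verdict
]
finalresult = all(
    re.match(r'passed', result['result'], re.I)
    for result in resultset if 'result' in result
)
# finalresult is False because of the second entry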
def get_all_nodes(compute_fn, node, snapshot_source, snapshot, snapshot_data):
    """
    Fetch all nodes from Google using the connection, via the Google client API functions.
    """
    collection = node['collection'] if 'collection' in node else COLLECTION
    parts = snapshot_source.split('.')
    project_id = get_field_value_with_default(snapshot, 'project-id', "")
    node_type = get_field_value_with_default(node, 'type', "")

    db_record = {
        "structure": "google",
        "error": None,
        "reference": project_id,
        "source": parts[0],
        "path": "",
        "timestamp": int(time.time() * 1000),
        "queryuser": "",
        "checksum": hashlib.md5("{}".encode('utf-8')).hexdigest(),
        "node": node,
        "snapshotId": None,
        "masterSnapshotId": [node['masterSnapshotId']],
        "collection": collection.replace('.', '').lower(),
        "json":
        {},  # Refactor when node is absent it should None, when empty object put it as {}
        "items": []
    }

    if node_type:
        fn_str_list, kwargs = get_google_call_function_for_crawler(
            node, project_id)

        if fn_str_list and kwargs:
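            # Walk the dotted call chain on the client object: every name but
            # the last is also invoked, so a hypothetical fn_str_list of
            # ['instances', 'aggregatedList'] resolves to
            # service.instances().aggregatedList, ready to be called with kwargs.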
            for i, fn_str in enumerate(fn_str_list):
                compute_fn = getattr(compute_fn, fn_str, None)
                if compute_fn and i != len(fn_str_list) - 1:
                    compute_fn = compute_fn()

            response_param = ""
            if fn_str_list and len(fn_str_list) > 1:
                response_param = fn_str_list[-2]
            elif fn_str_list and len(fn_str_list) == 1:
                response_param = fn_str_list[0]

            if compute_fn and callable(compute_fn):
                try:
                    data = compute_fn(**kwargs).execute()
                    if data:
                        # For dotted node types, the key holding the scoped
                        # results drops the trailing verb, e.g.
                        # "instances.aggregatedList" is looked up as "instances".
                        check_node_type = node_type
                        node_type_list = node_type.split(".")
                        if len(node_type_list) > 1:
                            del node_type_list[-1]
                            check_node_type = ".".join(node_type_list)

                        db_record['json'] = data
                        if response_param in data:
                            db_record['items'] = data[response_param]
                        elif "items" in data:
                            if isinstance(data['items'], dict):
                                # Aggregated responses scope items per zone or
                                # region; gather the lists stored under the
                                # node-type key in each scope.
                                for name, scoped_dict in data['items'].items():
                                    if check_node_type in scoped_dict:
                                        db_record['items'] = db_record['items'] + scoped_dict[check_node_type]

                            if not db_record['items']:
                                db_record['items'] = data['items']

                        set_snapshot_data(node, db_record['items'],
                                          snapshot_data)

                        checksum = get_checksum(data)
                        if checksum:
                            db_record['checksum'] = checksum
                    else:
                        put_in_currentdata('errors', data)
                        logger.info("Compute function returned no data: %s",
                                    str(fn_str_list))
                        db_record['error'] = "Compute function returned no data: %s" % str(fn_str_list)
                except Exception as ex:
                    logger.info('Compute function exception: %s', ex)
                    db_record['error'] = 'Compute function exception: %s' % ex
            else:
                logger.info('Invalid Compute function: %s', str(fn_str_list))
                db_record['error'] = 'Invalid Compute function: %s' % str(fn_str_list)
        else:
            logger.info('Missing Compute function')
            db_record['error'] = 'Missing Compute function'
    return db_record
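A hedged sketch of driving get_all_nodes with a Google API client; the node fields, structure file name, and project id are illustrative assumptions, and credential handling is elided:

from googleapiclient import discovery

# Hypothetical crawler node asking for all compute instances in a project.
node = {
    'masterSnapshotId': 'MASTER_1',
    'type': 'instances.aggregatedList',
    'collection': 'compute',
}
snapshot = {'project-id': 'my-gcp-project'}
snapshot_data = {}

service = discovery.build('compute', 'v1')  # assumes application default credentials
db_record = get_all_nodes(service, node, 'googleStructure.json', snapshot, snapshot_data)
print(len(db_record['items']), 'items crawled; error:', db_record['error'])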