Example #1
def get_google_data(snapshot_source):
    """
    The Google source object to be fetched from database or the filesystem
    The initial configuration for database is 'validator' and collection
    is 'structures', whereas for the filesystem the path to fetch the
    'structures' is  $SOLUTIONDIR/realm/<structure>.json
    """
    sub_data = {}
    if json_source():
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = snapshot_source.split('.')
        qry = {'name': parts[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection, dbname=dbname, sort=sort, query=qry, limit=1)
        logger.info('Number of Google structure Documents: %d', len(docs))
        if docs and len(docs):
            sub_data = docs[0]['json']
    else:
        json_test_dir = get_test_json_dir()
        file_name = '%s.json' % snapshot_source if snapshot_source and not \
            snapshot_source.endswith('.json') else snapshot_source
        google_source = '%s/../%s' % (json_test_dir, file_name)
        logger.info('Google source: %s', google_source)
        if exists_file(google_source):
            sub_data = json_from_file(google_source)

    if not sub_data:
        logger.error("Google connector file %s does not exist, or it does not contains the valid JSON.", snapshot_source)
    return sub_data
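A hedged usage sketch; 'google_connector' is an assumed connector name, not from the source. The same call resolves either to the latest 'structures' document in the database or to a JSON file relative to the test directory.

# Hypothetical usage; 'google_connector' is an assumed name.
connector_data = get_google_data('google_connector')
if connector_data:
    logger.info('Fetched Google connector with keys: %s', list(connector_data.keys()))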
Example #2
def dump_output_results(results,
                        container,
                        test_file,
                        snapshot,
                        filesystem=True):
    """ Dump the report in the json format for test execution results."""
    od = OrderedDict()
    od["$schema"] = ""
    od["contentVersion"] = "1.0.0.0"
    od["fileType"] = OUTPUT
    od["timestamp"] = int(time.time() * 1000)
    od["snapshot"] = snapshot
    od["container"] = container
    dblog = get_dblogger()
    od["log"] = dblog if dblog else ""
    if filesystem:
        test_file_parts = test_file.rsplit('/', 1)
        od["test"] = test_file_parts[-1]
        output_file = '%s/output-%s' % (test_file_parts[0],
                                        test_file_parts[-1])
        od["results"] = results
        save_json_to_file(od, output_file)
    else:
        od["test"] = test_file
        od["results"] = results
        del od["$schema"]
        doc = json_record(container, OUTPUT, test_file, od)
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[OUTPUT])
        insert_one_document(doc, collection, dbname)
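For reference, a sketch of the report shape this writes on the filesystem branch; the field values shown are illustrative assumptions, not real output.

# Illustrative shape of the saved report (values are assumptions):
# {
#     "$schema": "",
#     "contentVersion": "1.0.0.0",
#     "fileType": "output",
#     "timestamp": 1700000000000,
#     "snapshot": "snapshot",
#     "container": "mycontainer",
#     "log": "",
#     "test": "test1.json",
#     "results": [...]
# }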
Example #3
def get_call_kwargs(node):
    """Get argument names and their values in kwargs"""
    kwargs = {"params": {}}
    logger.info("Get node's kwargs")
    params_source = config_value('GOOGLE', 'params')
    paramsversions = None
    if json_source():
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = params_source.rsplit('/')
        name = parts[-1].split('.')
        qry = {'name': name[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection,
                             dbname=dbname,
                             sort=sort,
                             query=qry,
                             limit=1)
        logger.info('Number of Google Params versions: %s', len(docs))
        if docs and len(docs):
            paramsversions = docs[0]['json']
    else:
        paramsversions_file = '%s/%s' % (framework_dir(), params_source)
        logger.info(paramsversions_file)
        if exists_file(paramsversions_file):
            paramsversions = json_from_file(paramsversions_file)

    path = node['path']
    if paramsversions and "queryprameters" in paramsversions:
        if node['type'] in paramsversions["queryprameters"]:
            for param, parameter_type in paramsversions["queryprameters"][
                    node['type']].items():
                add_argument_parameter(path, kwargs, param, parameter_type)

    return kwargs
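A sketch of the params structure document this function expects; the node type 'compute' and the parameter names are assumptions. Note the key is spelled 'queryprameters' exactly as the code reads it.

# Assumed shape of the params structure consumed above (illustrative only):
# {
#     "queryprameters": {
#         "compute": {"project": "string", "zone": "string"}
#     }
# }
# For a node of type 'compute', add_argument_parameter would then be
# called once for 'project' and once for 'zone'.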
Example #4
def mastersnapshots_used_in_mastertests_database(container):
    """
    Get mastersnapshot list used in mastertest files of a container from the database.
    The mastersnapshots list are read from database. The default configuration of database and
    snapshot collections is configured in config.ini file.
    """
    snapshots = []
    logger.info("Starting to get list of mastersnapshots from database")
    dbname = config_value(DATABASE, DBNAME)
    collection = config_value(DATABASE, collectiontypes[MASTERTEST])
    qry = {'container': container}
    sort = [sort_field('timestamp', False)]
    docs = get_documents(collection, dbname=dbname, sort=sort, query=qry)
    logger.info('Number of mastertest Documents: %s', len(docs))
    if docs and len(docs):
        for doc in docs:
            if doc['json']:
                snapshot = doc['json'].get('masterSnapshot', '')
                if snapshot:
                    if snapshot.endswith('.json'):
                        parts = snapshot.split('.')
                        snapshots.append(parts[0])
                    else:
                        snapshots.append(snapshot)
    return list(set(snapshots))
Example #5
def get_version_for_type(node):
    """URL version of the resource."""
    version = None
    apiversions = None
    logger.info("Get type's version")
    api_source = config_value('AZURE', 'api')
    if json_source():
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = api_source.rsplit('/')
        name = parts[-1].split('.')
        qry = {'name': name[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection, dbname=dbname, sort=sort, query=qry, limit=1)
        logger.info('Number of Azure API versions: %s', len(docs))
        if docs and len(docs):
            apiversions = docs[0]['json']
    else:
        apiversions_file = '%s/%s' % (framework_dir(), api_source)
        logger.info(apiversions_file)
        if exists_file(apiversions_file):
            apiversions = json_from_file(apiversions_file)
    if apiversions:
        if node and 'type' in node and node['type'] in apiversions:
            version = apiversions[node['type']]['version']
    return version
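A sketch of the Azure API versions structure this reads; the resource types and version strings are illustrative assumptions.

# Assumed shape of the apiversions structure (illustrative values):
# {
#     "Microsoft.Compute/virtualMachines": {"version": "2019-07-01"},
#     "Microsoft.Storage/storageAccounts": {"version": "2019-06-01"}
# }
# get_version_for_type({'type': 'Microsoft.Compute/virtualMachines'})
# would then return '2019-07-01'.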
Example #6
def populate_container_snapshots_database(container):
    """
    Get the snapshot files from the container, with the database as the storage system.
    The collection and database are configured in config.ini; the default
    configuration is the "validator" database with "snapshots" as its collection.
    """
    snapshots_status = {}
    dbname = config_value(DATABASE, DBNAME)
    collection = config_value(DATABASE, collectiontypes[SNAPSHOT])
    qry = {'container': container}
    sort = [sort_field('timestamp', False)]
    docs = get_documents(collection, dbname=dbname, sort=sort, query=qry, _id=True)
    if docs and len(docs):
        logger.info('Number of Snapshot Documents: %s', len(docs))
        snapshots = container_snapshots_database(container)
        populated = []
        for doc in docs:
            if doc['json']:
                snapshot = doc['name']
                if snapshot in snapshots and snapshot not in populated:
                    # Take the snapshot and record whether it was successful or not,
                    # then pass that back to the validation tests, so that tests run
                    # only for snapshots that have been successfully fetched.
                    snapshot_file_data = populate_snapshots_from_json(doc['json'], container)
                    update_one_document(doc, collection, dbname)
                    populated.append(snapshot)
                    snapshots_status[snapshot] = snapshot_file_data
    return snapshots_status
Example #7
def get_azure_data(snapshot_source):
    """Fetch the Azure source object from the database or the filesystem."""
    sub_data = {}
    if json_source():
        dbname = config_value(DATABASE, DBNAME)
        collection = config_value(DATABASE, collectiontypes[STRUCTURE])
        parts = snapshot_source.split('.')
        qry = {'name': parts[0]}
        sort = [sort_field('timestamp', False)]
        docs = get_documents(collection,
                             dbname=dbname,
                             sort=sort,
                             query=qry,
                             limit=1)
        logger.info('Number of Snapshot Documents: %s', len(docs))
        if docs and len(docs):
            sub_data = docs[0]['json']
    else:
        json_test_dir = get_test_json_dir()
        file_name = '%s.json' % snapshot_source if snapshot_source and not \
            snapshot_source.endswith('.json') else snapshot_source
        azure_source = '%s/../%s' % (json_test_dir, file_name)
        logger.info('Azure source: %s', azure_source)
        if exists_file(azure_source):
            sub_data = json_from_file(azure_source)
    return sub_data
Example #8
def generate_crawler_run_output(container):
    """
    This creates a entry in the output collection, whenever a crawler runs
    to fetch data. 
    """
    timestamp = int(time.time() * 1000)
    sort = [sort_field('timestamp', False)]
    qry = {'container': container}
    output_collection = config_value(DATABASE, collectiontypes[OUTPUT])
    dbname = config_value(DATABASE, DBNAME)

    collection = config_value(DATABASE, collectiontypes[MASTERTEST])
    tests = get_documents(collection,
                          dbname=dbname,
                          sort=sort,
                          query=qry,
                          _id=True)
    master_tests = [{
        "id": str(test['_id']),
        "name": test['name']
    } for test in tests]

    mastersp_collection = config_value(DATABASE,
                                       collectiontypes[MASTERSNAPSHOT])
    snapshots = get_documents(mastersp_collection,
                              dbname=dbname,
                              sort=sort,
                              query=qry,
                              _id=True)
    master_snapshots = [{
        "id": str(snapshot['_id']),
        "name": snapshot['name']
    } for snapshot in snapshots]

    db_record = {
        "timestamp": timestamp,
        "checksum": hashlib.md5("{}".encode('utf-8')).hexdigest(),
        "collection": output_collection,
        "container": container,
        "name": "Crawlertest_%s" % (container),
        "type": "crawlerOutput",
        "json": {
            "container": container,
            "contentVersion": "",
            "fileType": "output",
            "snapshot": "",
            "test": "Crawlertest_%s" % (container),
            "log": get_dblog_handler().get_log_collection(),
            "timestamp": timestamp,
            "master_test_list": master_tests,
            "master_snapshot_list": master_snapshots,
            "output_type": "crawlerrun",
            "results": []
        }
    }
    insert_one_document(db_record, db_record['collection'], dbname, False)
Example #9
def _get_vault_token():
    """Fetches the bearer token for Azure Vault API calls"""
    if 'UAMI' in os.environ and os.environ['UAMI'] == 'true':
        vaulttoken = get_uami_vault_access_token()
    else:
        client_id = config_value('VAULT', 'client_id')
        client_secret = get_config_value('VAULT', 'client_secret', 'CLIENTKEY')
        tenant_id = config_value('VAULT', 'tenant_id')
        vaulttoken = get_vault_access_token(tenant_id, client_id,
                                            client_secret)
    return vaulttoken
Example #10
def _get_vault_token():
    """Fetches the bearer token for Azure Vault API calls"""
    if 'UAMI' in os.environ and os.environ['UAMI'] == 'true':
        vaulttoken = get_uami_vault_access_token()
    else:
        client_id = config_value('VAULT', 'client_id')
        client_secret = get_config_value(
            'VAULT', 'client_secret', 'CLIENTKEY',
            'Enter the client secret to access keyvault: ')
        # client_secret = config_value('VAULT', 'client_secret')
        tenant_id = config_value('VAULT', 'tenant_id')
        # logger.info('Id: %s, secret: %s, tenant: %s', client_id, client_secret, tenant_id)
        vaulttoken = get_vault_access_token(tenant_id, client_id,
                                            client_secret)
    return vaulttoken
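The only difference from Example #9 is the prompt argument to get_config_value, so a missing CLIENTKEY can be entered interactively. A minimal sketch of steering the branch via the environment; the variable handling shown is an assumption for illustration.

# Hypothetical branch selection: with UAMI unset (or not 'true'), the
# client-credential branch runs and may prompt for the secret.
import os
os.environ.pop('UAMI', None)   # force the client_id/client_secret branch
token = _get_vault_token()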
Example #11
def get_node_version(node, snapshot):
    """Url version of the resource."""
    version = None
    apiversions = None
    logger.info("Get type's version")
    api_source = config_value('AZURE', 'api')
    if snapshot.isDb:
        parts = api_source.rsplit('/')
        name = parts[-1].split('.')
        qry = {'name': name[0]}
        docs = get_documents(snapshot.collection(STRUCTURE),
                             dbname=snapshot.dbname,
                             sort=snapshot.sort,
                             query=qry,
                             limit=1)
        logger.info('Number of Azure API versions: %s', len(docs))
        if docs and len(docs):
            apiversions = docs[0]['json']
    else:
        apiversions_file = '%s/%s' % (framework_dir(), api_source)
        logger.info(apiversions_file)
        if exists_file(apiversions_file):
            apiversions = json_from_file(apiversions_file)
    if apiversions:
        if node and 'type' in node and node['type'] in apiversions:
            version = apiversions[node['type']]['version']
    return version
Example #12
def mastersnapshots_used_in_mastertests_filesystem(container):
    """
    Get mastersnapshot list used in all mastertest files of a container from the filesystem.
    This gets list of all the mastersnapshots used in the container.
    The list will be used to make sure the snapshots are not generated multiple times, if the same
    mastersnapshots are used in different mastertest files of a container.
    The configuration of the default path is configured in config.ini.
    """
    snapshots = []
    # logger.info("Starting to get list of mastersnapshots used in test files.")
    reporting_path = config_value('REPORTING', 'reportOutputFolder')
    json_dir = '%s/%s/%s' % (framework_dir(), reporting_path, container)
    # logger.info(json_dir)
    # Only get list of mastertest files.
    test_files = get_json_files(json_dir, MASTERTEST)
    # logger.info('\n'.join(test_files))
    for test_file in test_files:
        logger.info('\tMASTERTEST:%s', test_file)
        test_json_data = json_from_file(test_file)
        if test_json_data:
            snapshot = test_json_data.get('masterSnapshot', '')
            if snapshot:
                file_name = snapshot if snapshot.endswith(
                    '.json') else '%s.json' % snapshot
                snapshots.append(file_name)
    return list(set(snapshots))  # set so that a unique list of files is returned.
Example #13
def run_json_validation_tests(test_json_data,
                              container,
                              filesystem=True,
                              snapshot_status=None):
    resultset = []
    if not test_json_data:
        return resultset
    if not snapshot_status:
        snapshot_status = {}
    logger.debug(json.dumps(test_json_data, indent=2))
    testsets = get_field_value(test_json_data, 'testSet')
    if not testsets or not isinstance(testsets, list):
        logger.info("Test json does not contain testset, next!...")
        return resultset
    dbname = config_value(DATABASE, DBNAME)
    # Populate the snapshotId => collection for the snapshot.json in the test file.
    collection_data = get_snapshot_id_to_collection_dict(
        test_json_data['snapshot'], container, dbname, filesystem)
    if test_json_data['snapshot'] in snapshot_status:
        current_snapshot_status = snapshot_status[test_json_data['snapshot']]
    else:
        current_snapshot_status = {}
    for testset in testsets:
        version = get_field_value(testset, 'version')
        testcases = get_field_value(testset, 'cases')
        if not testcases or not isinstance(testcases, list):
            logger.info("No testcases in testSet!...")
            continue
        for testcase in testset['cases']:
            result_val = run_validation_test(version, container, dbname,
                                             collection_data, testcase)
            resultset.append(result_val)
    return resultset
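For orientation, a sketch of the test JSON this expects. Apart from the 'snapshot', 'testSet', 'version' and 'cases' keys the code reads, the values are assumptions.

# Assumed minimal shape of test_json_data (values are illustrative):
# {
#     "snapshot": "snapshot",
#     "testSet": [
#         {
#             "version": "0.1",
#             "cases": [
#                 {"testId": "1", "rule": "exist(snapshot1.location)"}
#             ]
#         }
#     ]
# }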
Example #14
def populate_json_main(arg_vals=None):
    """Main driver utility for running validator tests."""
    logger.info("Comand: '%s %s'",
                sys.executable.rsplit('/', 1)[-1], ' '.join(sys.argv))
    cmd_parser = argparse.ArgumentParser("Populate json files")
    cmd_parser.add_argument('container',
                            action='store',
                            help='Container name for the json files.')
    # cmd_parser.add_argument('--dir', action='store', default=None,
    #                         help='Populate all json files from this directory.')
    cmd_parser.add_argument('--file',
                            action='store',
                            default=None,
                            help='Populate only this file')
    # cmd_parser.add_argument('--type', action='store', default='structure',
    #                         choices=['test', 'structure', 'snapshot', 'output', 'notifications'])

    args = cmd_parser.parse_args(arg_vals)
    # Delete the rundata at the end of the script.
    atexit.register(delete_currentdata)
    logger.info(args)
    init_currentdata()
    dbname, db_init_res = init_db()
    if db_init_res:
        for _, collection in collectiontypes.items():
            create_indexes(config_value(DATABASE, collection), dbname,
                           [('timestamp', TEXT)])
        populate_json_files(args)
    else:
        logger.error("Error initializing DB, exiting....!")
    return 0
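A hedged usage sketch; the container and file names are assumptions.

# Hypothetical invocation, equivalent to:
#   python <script> mycontainer --file connector.json
populate_json_main(['mycontainer', '--file', 'connector.json'])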
Example #15
 def __init__(self, container, snapshot_refactored_fns, singleTest=None):
     """ Default isDb is false, singletest shall be set to the test that needs to be run."""
     super().__init__(container, snapshot_refactored_fns)
     self.singleTest = singleTest
     reporting_path = config_value('REPORTING', 'reportOutputFolder')
     self.container_dir = '%s/%s/%s' % (framework_dir(), reporting_path,
                                        container)
Example #16
    def store_data_node(self, data):
        """ Store to database"""
        if get_collection_size(data['collection']) == 0:
            # Creating indexes for collection
            create_indexes(data['collection'], config_value(DATABASE, DBNAME),
                           [('snapshotId', pymongo.ASCENDING),
                            ('timestamp', pymongo.DESCENDING)])

            create_indexes(data['collection'], config_value(DATABASE, DBNAME),
                           [('_id', pymongo.DESCENDING),
                            ('timestamp', pymongo.DESCENDING),
                            ('snapshotId', pymongo.ASCENDING)])
        insert_one_document(data,
                            data['collection'],
                            self.dbname,
                            check_keys=False)
Example #17
 def __init__(self, container, snapshot_refactored_fns):
     """"DB is true, will be usefule to make checks."""
     super().__init__(container)
     self.dbname = config_value(DATABASE, DBNAME)
     self.qry = {'container': container}
     self.sort = [sort_field('timestamp', False)]
     self.isDb = True
Example #18
def set_vault_data(key_name=None, value=None):
    """Update vault data"""
    vaulttype = config_value('VAULT', 'type')
    val = None
    if vaulttype:
        if vaulttype == 'azure':
            val = set_azure_vault_data(key_name, value)
    return val
Example #19
def get_all_vault_secrets():
    """Read all vault secrets"""
    vaulttype = config_value('VAULT', 'type')
    val = None
    if vaulttype:
        if vaulttype == 'azure':
            val = get_all_azure_secrets()
    return val
Example #20
def opa_binary():
    """Return the OPA executable path from the environment or the configuration."""
    opa_exe = None
    opa_enabled = parsebool(config_value("OPA", "opa"), False)
    if opa_enabled:
        opa_exe = os.getenv('OPAEXE', None)
        if not (opa_exe and exists_file(opa_exe)):
            opa_exe = config_value("OPA", "opaexe")
            if not (opa_exe and exists_file(opa_exe)):
                opa_exe = None
    return opa_exe
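A small sketch of the resolution order: the OPAEXE environment variable is checked first, then the 'opaexe' key of the [OPA] config section. The path below is an assumption for illustration.

# Hypothetical: OPAEXE wins over the config key when both are set.
import os
os.environ['OPAEXE'] = '/usr/local/bin/opa'   # assumed path, for illustration
exe = opa_binary()  # '/usr/local/bin/opa' if that file exists, else config value or None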
Example #21
def delete_vault_data(secret_key=None):
    """Delete vault data from config"""
    vaulttype = config_value('VAULT', 'type')
    val = None
    if vaulttype:
        if vaulttype == 'azure':
            val = delete_azure_vault_data(secret_key)
    return val
Example #22
def _get_snapshot_type_map(container):
    """Map snapshotId to node type for 'aws' snapshots of a container."""
    dbname = config_value(DATABASE, DBNAME)
    collection = config_value(DATABASE, collectiontypes[SNAPSHOT])
    qry = {'container': container}
    docs = get_documents(collection, dbname=dbname, query=qry)
    mappings = {}
    if docs and len(docs):
        for doc in docs:
            given_data = doc['json']
            if given_data:
                snapshots = given_data.get("snapshots", [])
                for snapshot in snapshots:
                    given_type = snapshot.get("type","")
                    if given_type == "aws":
                        nodes = snapshot.get("nodes",[])
                        for node in nodes:
                            mappings[node['snapshotId']] = node['type']
    return mappings
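A sketch of the snapshot document's 'json' payload this walks; the ids and types are illustrative assumptions.

# Assumed shape of doc['json'] (illustrative values):
# {
#     "snapshots": [
#         {
#             "type": "aws",
#             "nodes": [
#                 {"snapshotId": "AWS_EC2_01", "type": "ec2"}
#             ]
#         }
#     ]
# }
# The resulting mapping would be {'AWS_EC2_01': 'ec2'}.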
Example #23
def set_vault_data_with_response(key_name=None, value=None):
    """Update vault data"""
    vaulttype = config_value('VAULT', 'type')
    status, response = None, None
    if vaulttype:
        if vaulttype == 'azure':
            status, response = set_azure_vault_data_with_response(
                key_name, value)
    return status, response
Example #24
def get_azure_vault_data(secret_key=None):
    """Fetches the bearer token for Azure Vault API calls"""
    val = None
    vaulttoken = _get_vault_token()
    if vaulttoken and secret_key:
        keyvault = config_value('VAULT', 'keyvault')
        secret_data = get_keyvault_secret(keyvault, secret_key, vaulttoken)
        if secret_data and 'value' in secret_data:
            val = secret_data['value']
    return val
Example #25
def get_vault_data(secret_key=None):
    """Read vault data from config"""
    vaulttype = config_value('VAULT', 'type')
    val = None
    if vaulttype:
        if vaulttype == 'azure':
            val = get_azure_vault_data(secret_key)
        elif vaulttype == 'cyberark':
            val = get_cyberark_data(secret_key)
    return val
Example #26
def get_kubernetes_structure_path(snapshot_source):
    """
    Get the kubernetes connector file path from the configuration file.
    """
    folder = config_value('KUBERNETES', 'kubernetesStructureFolder')
    if folder:
        connector_path = '%s/%s/%s.json' % (framework_dir(), folder, snapshot_source)
    else:
        connector_path = '%s/%s.json' % (framework_dir(), snapshot_source)
    return connector_path
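A hedged usage sketch; 'k8s_connector' is an assumed source name.

# Hypothetical call: with kubernetesStructureFolder set to 'realm', this
# returns something like '<framework_dir>/realm/k8s_connector.json'.
connector_path = get_kubernetes_structure_path('k8s_connector')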
Example #27
def init_db():
    """Initialize the database connection and timestamp index; return (dbname, db_init)."""
    dbname = None
    try:
        dbconn = mongoconnection()
        _ = dbconn.list_database_names()
        dbname = config_value(DATABASE, DBNAME)
        create_indexes(COLLECTION, dbname, [('timestamp', TEXT)])
        db_init = True
    except ServerSelectionTimeoutError:
        db_init = False
    return dbname, db_init
Example #28
def delete_azure_vault_data(secret_key=None):
    """"Delete a key from vault"""
    success = None
    vaulttoken = _get_vault_token()
    logger.debug('Vault Token: %s', vaulttoken)
    if vaulttoken and secret_key:
        keyvault = config_value('VAULT', 'keyvault')
        logger.info('Keyvault: %s, key:%s', keyvault, secret_key)
        success = delete_keyvault_secret(keyvault, secret_key, vaulttoken)
    logger.info('Secret Deleted: %s', success)
    return success
Example #29
 def process_helm_chart(self, dir_path):
     """Render a helm chart into a single YAML template and split it into files."""
     helm_source_dir_name = dir_path.rpartition("/")[-1]
     helm_path = config_value('HELM', 'helmexe')
     result = os.system(
         '%s template %s > %s/%s_prancer_helm_template.yaml' %
         (helm_path, dir_path, dir_path, helm_source_dir_name))
     paths = self.break_multiple_yaml_file(
         '%s/%s_prancer_helm_template.yaml' %
         (dir_path, helm_source_dir_name))
     # os.remove('%s/Chart.yaml' % dir_path)
     return paths
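As a design note, a minimal alternative sketch using subprocess instead of os.system, so the helm executable and chart directory are passed as arguments rather than interpolated into a shell string; it assumes the same helm_path, dir_path, and helm_source_dir_name values as above.

# Minimal sketch, not the author's implementation: same helm-template step
# via subprocess, writing stdout straight to the template file.
import subprocess
out_file = '%s/%s_prancer_helm_template.yaml' % (dir_path, helm_source_dir_name)
with open(out_file, 'w') as out:
    subprocess.run([helm_path, 'template', dir_path], stdout=out, check=False)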
Example #30
def get_all_azure_secrets():
    """Read all secrets from the configured Azure keyvault."""
    vaulttoken = _get_vault_token()
    logger.debug('Vault Token: %s', vaulttoken)
    if vaulttoken:
        keyvault = config_value('VAULT', 'keyvault')
        logger.info('Keyvault: %s', keyvault)
        data = get_all_secrets(keyvault, vaulttoken)
        if data:
            return data
    return []