Example #1
def test_delete_run_config():
    runcfg = framework_currentdata()
    init_currentdata()
    assert os.path.exists(runcfg)
    put_in_currentdata('token', 'abcd')
    delete_currentdata()
    assert not os.path.exists(runcfg)
Example #2
def call_azure_api(self, url):
    hdrs = {'Authorization': 'Bearer %s' % self.token}
    status, data = http_get_request(url, hdrs, name='\tRESOURCE:')
    if status and isinstance(status, int) and status == 200:
        for resource in data.get("value", []):
            put_in_currentdata('resources', resource)
        self.resources += data.get("value", [])
Example #3
def get_snapshot_node(snapshot, token, sub_name, sub_id, node, user,
                      snapshot_source, connector_type):
    """ Fetch node from azure portal using rest API."""
    version = get_node_version(node, snapshot)
    if sub_id and token and node and node['path'] and version:
        db_record = get_data_record(sub_name, node, user, snapshot_source,
                                    connector_type)
        hdrs = {'Authorization': 'Bearer %s' % token}
        if node['path'].startswith('/subscriptions'):
            urlstr = 'https://management.azure.com%s?api-version=%s'
            url = urlstr % (node['path'], version)
        else:
            urlstr = 'https://management.azure.com/subscriptions/%s%s?api-version=%s'
            url = urlstr % (sub_id, node['path'], version)
        db_record['path'] = node['path']
        logger.info('Get Id REST API invoked!')
        status, data = http_get_request(url, hdrs)
        logger.info('Get Id status: %s', status)
        if status and isinstance(status, int) and status == 200:
            db_record['json'] = data
            data_str = json.dumps(data)
            db_record['checksum'] = hashlib.md5(
                data_str.encode('utf-8')).hexdigest()
        else:
            put_in_currentdata('errors', data)
            logger.info("Get Id returned invalid status: %s, response: %s",
                        status, data)
            logger.error(
                "Failed to get Azure resource with the given path: %s, please verify your Azure connector details and the path given in the snapshot.",
                node['path'])
    else:
        db_record = {}
        logger.info('Get requires a valid subscription, token and path!')
    return db_record
Example #4
def get_all_secrets(keyvault, vaulttoken):
    hdrs = {
        'Authorization': 'Bearer %s' % vaulttoken
    }
    logger.info('Get Id REST API invoked!')
    urlstr = 'https://%s.vault.azure.net/secrets?api-version=7.0'
    url = urlstr % (keyvault)
    keys_response = []
    keys = []
    while url is not None:
        status, data = http_get_request(url, hdrs)
        if status and isinstance(status, int) and status == 200:
            logger.debug('Data: %s', data)
            values = data.get("value", [])
            url = data.get("nextLink",None)
            keys_response.extend(values)
        else:
            put_in_currentdata('errors', data)
            url = None
            logger.info("Get Id returned invalid status: %s", status)
    for each_key in keys_response:
        key_url = each_key.get("id", None)
        if key_url:
            secret_key = key_url.split("secrets/",1)[1].split("/")[0]
            keys.append(secret_key)
    return keys
Example #5
def get_all_nodes(token, sub_name, sub_id, node, user, snapshot_source):
    """ Fetch all nodes from azure portal using rest API."""
    collection = node['collection'] if 'collection' in node else COLLECTION
    parts = snapshot_source.split('.')
    db_records = []
    d_record = {
        "structure": "azure",
        "reference": sub_name,
        "source": parts[0],
        "path": '',
        "timestamp": int(time.time() * 1000),
        "queryuser": user,
        "checksum": hashlib.md5("{}".encode('utf-8')).hexdigest(),
        "node": node,
        "snapshotId": None,
        "mastersnapshot": True,
        "masterSnapshotId": [node['masterSnapshotId']],
        "collection": collection.replace('.', '').lower(),
        "json": {}  # Refactor when node is absent it should None, when empty object put it as {}
    }
    # version = get_version_for_type(node)
    # if sub_id and token and node and version:
    nodetype = None
    if node and 'type' in node and node['type']:
        nodetype = node['type']
    if sub_id and token and nodetype:
        hdrs = {
            'Authorization': 'Bearer %s' % token
        }
        # urlstr = 'https://management.azure.com/subscriptions/%s/providers/%s?api-version=%s'
        # url = urlstr % (sub_id, node['type'], version)
        # db_record['path'] = node['path']
        resources = get_from_currentdata('resources')
        if not resources:
            urlstr = 'https://management.azure.com/subscriptions/%s/resources?api-version=2017-05-10'
            url = urlstr % sub_id
            logger.info('Get Id REST API invoked!')
            status, data = http_get_request(url, hdrs)
            logger.info('Get Id status: %s', status)
            if status and isinstance(status, int) and status == 200:
                resources = data['value']
                put_in_currentdata('resources', resources)
            else:
                put_in_currentdata('errors', data)
                logger.info("Get Id returned invalid status: %s", status)
        if resources:
            for idx, value in enumerate(resources):
                if nodetype in value['type']:
                    db_record = copy.deepcopy(d_record)
                    db_record['snapshotId'] = '%s%s' % (node['masterSnapshotId'], str(idx))
                    db_record['path'] = value['id']
                    db_record['json'] = value
                    data_str = json.dumps(value)
                    db_record['checksum'] = hashlib.md5(data_str.encode('utf-8')).hexdigest()
                    db_records.append(db_record)
    else:
        logger.info('Get requires a valid subscription, token and path!')
    return db_records
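A sketch of how the crawler variant above might be driven, assuming a hypothetical master-snapshot node; only the fields the code actually reads (type, masterSnapshotId, collection) are shown, and the type value is a placeholder Azure resource type.

# Hypothetical node for illustration; field names follow the accesses above.
node = {
    'masterSnapshotId': 'AZR_VM_',
    'type': 'Microsoft.Compute/virtualMachines',
    'collection': 'compute'
}
records = get_all_nodes(token, sub_name, sub_id, node, 'testuser', 'azureStructure.json')
for rec in records:
    print(rec['snapshotId'], rec['path'])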
Example #6
def test_delete_from_run_config(load_json_file):
    runcfg = framework_currentdata()
    init_currentdata()
    assert os.path.exists(runcfg)
    put_in_currentdata('a', 'val1')
    runconfig = load_json_file(runcfg)
    assert runconfig and runconfig.get('a') == 'val1'
    delete_from_currentdata('a')
    runconfig = load_json_file(runcfg)
    assert not (runconfig and 'a' in runconfig)
Example #7
def get_keyvault_secret(keyvault, secret_key, vaulttoken):
    hdrs = {'Authorization': 'Bearer %s' % vaulttoken}
    logger.info('Get Id REST API invoked!')
    urlstr = 'https://%s.vault.azure.net/secrets/%s?api-version=7.0'
    url = urlstr % (keyvault, secret_key)
    status, data = http_get_request(url, hdrs)
    logger.debug('Get Id status: %s', status)

    if status and isinstance(status, int) and status == 200:
        logger.debug('Data: %s', data)
    else:
        put_in_currentdata('errors', data)
        logger.info("Get Id returned invalid status: %s", status)
    return data
Example #8
def get_node(token, sub_name, sub_id, node, user, snapshot_source):
    """ Fetch node from azure portal using rest API."""
    collection = node['collection'] if 'collection' in node else COLLECTION
    parts = snapshot_source.split('.')
    db_records = []
    db_record = {
        "structure": "azure",
        "reference": sub_name,
        "source": parts[0],
        "path": '',
        "timestamp": int(time.time() * 1000),
        "queryuser": user,
        "checksum": hashlib.md5("{}".encode('utf-8')).hexdigest(),
        "node": node,
        "snapshotId": node['snapshotId'],
        "mastersnapshot": False,
        "masterSnapshotId": None,
        "collection": collection.replace('.', '').lower(),
        "region" : "",
        "json": {}  # Refactor when node is absent it should None, when empty object put it as {}
    }
    version = get_version_for_type(node)
    if sub_id and token and node and node['path'] and version:
        hdrs = {
            'Authorization': 'Bearer %s' % token
        }
        if node['path'].startswith('/subscriptions'):
            urlstr = 'https://management.azure.com%s?api-version=%s'
            url = urlstr % (node['path'], version)
        else:
            urlstr = 'https://management.azure.com/subscriptions/%s%s?api-version=%s'
            url = urlstr % (sub_id, node['path'], version)
        db_record['path'] = node['path']
        # logger.info('Get Id REST API invoked!')
        status, data = http_get_request(url, hdrs, name='\tRESOURCE:')
        # logger.info('Get Id status: %s', status)
        if status and isinstance(status, int) and status == 200:
            db_record['json'] = data
            db_record['region'] = db_record['json'].get("location")
            data_str = json.dumps(data)
            db_record['checksum'] = hashlib.md5(data_str.encode('utf-8')).hexdigest()
        else:
            put_in_currentdata('errors', data)
            logger.info("Get Id returned invalid status: %s, response: %s", status, data)
            logger.error("Failed to get Azure resourse with given path : %s, please verify your azure connector detail and path given in snapshot.", node['path'])
    else:
        db_record = {}
        logger.info('Get requires a valid subscription, token and path!')
    return db_record
Example #9
def get_config_value(section, key, env_var, prompt_str=None):
    """ Return the client secret used for the current run"""
    client_secret = config_value(section, key)
    if not client_secret and env_var:
        client_secret = os.getenv(env_var, None)
    if not client_secret and prompt_str:
        key_str = '%s_%s' % (section, key)
        client_secret = get_from_currentdata(key_str)
        if not client_secret:
            client_secret = input(prompt_str)
            if client_secret:
                put_in_currentdata(key_str, client_secret)
                logger.info('Key:%s, sec:%s', key_str, client_secret)
                add_to_exclude_list(key_str)
    return client_secret
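A minimal usage sketch for the lookup chain above; the section, key, environment-variable and prompt names are hypothetical. The value is resolved from the config file first, then the environment, then the current run data, and finally by prompting the user.

# Hypothetical names for illustration only.
secret = get_config_value('AZURE', 'client_secret', 'AZURE_CLIENT_SECRET',
                          prompt_str='Enter the Azure client secret: ')
# A prompted value is cached in the run data under 'AZURE_client_secret'
# and excluded from persisted output via add_to_exclude_list().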
Example #10
def set_keyvault_secret(keyvault, vaulttoken, secret_key, value):

    hdrs = {
        'Authorization': 'Bearer %s' % vaulttoken,
        'Content-Type': 'application/json'
    }
    logger.info('Put Id REST API invoked!')
    urlstr = 'https://%s.vault.azure.net/secrets/%s?api-version=7.0'
    url = urlstr % (keyvault, secret_key)
    request_data = {
        "value" : value
    }
    status, data = http_put_request(url, request_data, headers=hdrs, json_type=True)
    logger.info('Set Id status: %s', status)
    if status and isinstance(status, int) and status == 200:
        logger.debug('Data: %s', data)
        return True
    else:
        put_in_currentdata('errors', data)
        logger.info("Set Id returned invalid status: %s", status)
        return False
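Taken together, the Key Vault helpers in the examples above can be combined roughly as follows. This is a sketch only: 'myvault' and 'db-password' are placeholders, and the bearer token is assumed to come from get_vault_access_token / get_uami_vault_access_token shown in the next examples.

vaulttoken = get_vault_access_token(tenant_id, vault_client_id)   # see Example #12
if vaulttoken and set_keyvault_secret('myvault', vaulttoken, 'db-password', 's3cr3t'):
    names = get_all_secrets('myvault', vaulttoken)                 # e.g. ['db-password', ...]
    data = get_keyvault_secret('myvault', 'db-password', vaulttoken)
    # data is the raw REST response; the secret string is expected under data['value'].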
Example #11
def get_uami_vault_access_token():
    """
    Get the vault access token to get all the other passwords/secrets.
    """
    hdrs = {"Metadata": "true", "Cache-Control": "no-cache"}
    vaulttoken = get_from_currentdata(UAMIVAULTACCESSTOKEN)
    # print(vaulttoken)
    expiry_time = get_from_currentdata(UAMIVAULTOKENEXPIRY)
    is_token_valid = isinstance(expiry_time, str) and \
        datetime.now() < datetime.fromtimestamp(float(expiry_time))
    if (not vaulttoken) or (not is_token_valid):
        url = 'http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=https%3A%2F%2Fvault.azure.net'
        # logger.info('Get Azure UAMI token REST API invoked!')
        print('Get Azure UAMI token REST API invoked!')
        status, data = http_get_request(url, headers=hdrs)
        print(data)
        if status and isinstance(status, int) and status == 200:
            vaulttoken = data['access_token']
            expiry_time = data['expires_on']
            put_in_currentdata(UAMIVAULTACCESSTOKEN, vaulttoken)
            put_in_currentdata(UAMIVAULTOKENEXPIRY, expiry_time)
        else:
            put_in_currentdata('errors', data)
            # logger.info("Get Azure token returned invalid status: %s", status)
            print("Get Azure token returned invalid status: %s" % status)
    return vaulttoken
Example #12
def get_vault_access_token(tenant_id, vault_client_id, client_secret=None):
    """
    Get the vault access token to get all the other passwords/secrets.
    """
    vaulttoken = get_from_currentdata(VAULTACCESSTOKEN)
    expiry_time = get_from_currentdata(VAULTOKENEXPIRY)
    is_token_valid = isinstance(expiry_time, str) and \
        datetime.now() < datetime.fromtimestamp(float(expiry_time))
    if (not vaulttoken) or (not is_token_valid):
        vault_client_secret = client_secret if client_secret else get_vault_client_secret()
        data = {
            'grant_type': 'client_credentials',
            'client_id': vault_client_id,
            'client_secret': vault_client_secret,
            'resource': 'https://vault.azure.net'
        }
        hdrs = {'Cache-Control': "no-cache", "Accept": "application/json"}
        if tenant_id:
            url = 'https://login.microsoftonline.com/%s/oauth2/token' % tenant_id
            logger.info('Get Azure token REST API invoked!')
            status, data = http_post_request(url, data, headers=hdrs)
            if status and isinstance(status, int) and status == 200:
                vaulttoken = data['access_token']
                expiry_time = data['expires_on']
                put_in_currentdata(VAULTACCESSTOKEN, vaulttoken)
                put_in_currentdata(VAULTOKENEXPIRY, expiry_time)
            else:
                put_in_currentdata('errors', data)
                logger.info("Get Azure token returned invalid status: %s",
                            status)
    return vaulttoken
Example #13
def get_snapshot_nodes(snapshot, token, sub_name, sub_id, node, user,
                       snapshot_source, connector_type):
    """ Fetch all nodes from azure portal using rest API."""
    db_records = []
    d_record = get_data_record(sub_name, node, user, snapshot_source,
                               connector_type)
    nodetype = node['type'] if node and node.get('type') else None
    if sub_id and token and nodetype:
        hdrs = {'Authorization': 'Bearer %s' % token}
        resources = snapshot.get_value('resources')
        if not resources:
            urlstr = 'https://management.azure.com/subscriptions/%s/resources?api-version=2017-05-10'
            url = urlstr % sub_id
            logger.info('Get Id REST API invoked!')
            status, data = http_get_request(url, hdrs)
            logger.info('Get Id status: %s', status)
            if status and isinstance(status, int) and status == 200:
                resources = data['value']
                snapshot.store_value('resources', resources)
            else:
                put_in_currentdata('errors', data)
                logger.info("Get Id returned invalid status: %s", status)
        if resources:
            for idx, value in enumerate(resources):
                if nodetype in value['type']:
                    db_record = copy.deepcopy(d_record)
                    db_record['snapshotId'] = '%s%s' % (
                        node['masterSnapshotId'], str(idx))
                    db_record['path'] = value['id']
                    db_record['json'] = value
                    data_str = json.dumps(value)
                    db_record['checksum'] = hashlib.md5(
                        data_str.encode('utf-8')).hexdigest()
                    db_records.append(db_record)
    else:
        logger.info('Get requires a valid subscription, token and path!')
    return db_records
Example #14
def get_access_token():
    """
    Get the access token if stored in rundata, otherwise get the token from
    management.azure.com portal for the webapp.
    """
    token = get_from_currentdata(ACCESSTOKEN)
    if not token:
        tenant_id = get_tenant_id()
        client_id = get_client_id()
        if client_id:
            # client_secret = get_client_secret()
            client_secret = get_client_secret(key='CLIENTKEY',
                                              client_id=client_id)
        else:
            logger.info('client Id required for REST API access!')
            return None
        data = {
            'grant_type': 'client_credentials',
            'client_id': client_id,
            'client_secret': client_secret,
            'resource': 'https://management.azure.com'
        }
        hdrs = {'Cache-Control': "no-cache", "Accept": "application/json"}
        if tenant_id:
            url = 'https://login.microsoftonline.com/%s/oauth2/token' % tenant_id
            logger.info('Get Azure token REST API invoked!')
            status, data = http_post_request(url,
                                             data,
                                             headers=hdrs,
                                             json_type=True)
            if status and isinstance(status, int) and status == 200:
                token = data['access_token']
                put_in_currentdata(ACCESSTOKEN, token)
            else:
                put_in_currentdata('errors', data)
                logger.info("Get Azure token returned invalid status: %s",
                            status)
    return token
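A brief sketch of how the cached token above is typically consumed, mirroring the request pattern used throughout these examples; url is a placeholder management.azure.com resource URL.

token = get_access_token()   # cached in the run data under ACCESSTOKEN after the first call
if token:
    hdrs = {'Authorization': 'Bearer %s' % token}
    status, data = http_get_request(url, hdrs)   # url: placeholder resource URL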
Example #15
def test_add_to_run_config(load_json_file):
    runcfg = framework_currentdata()
    init_currentdata()
    assert os.path.exists(runcfg)
    put_in_currentdata('a', 'val1')
    runconfig = load_json_file(runcfg)
    assert runconfig and runconfig.get('a') == 'val1'
    put_in_currentdata('b', ['val1'])
    runconfig = load_json_file(runcfg)
    assert runconfig and runconfig.get('b') == ['val1']
    put_in_currentdata('b', 'val2')
    runconfig = load_json_file(runcfg)
    assert runconfig and runconfig.get('b') == ['val1', 'val2']
Example #16
def populate_azure_snapshot(snapshot, container=None, snapshot_type='azure'):
    """ Populates the resources from azure."""
    dbname = config_value('MONGODB', 'dbname')
    snapshot_source = get_field_value(snapshot, 'source')
    snapshot_user = get_field_value(snapshot, 'testUser')
    snapshot_nodes = get_field_value(snapshot, 'nodes')
    snapshot_data, valid_snapshotids = validate_snapshot_nodes(snapshot_nodes)
    client_id, client_secret, sub_name, sub_id, tenant_id = \
        get_web_client_data(snapshot_type, snapshot_source, snapshot_user)
    if not client_id:
        # logger.info("No client_id in the snapshot to access azure resource!...")
        raise Exception("No client id in the snapshot to access azure resource!...")

    # Read the client secret from the environment variable
    if not client_secret:
        client_secret = os.getenv(snapshot_user, None)
        if client_secret:
            logger.info('Client Secret from environment variable, Secret: %s', '*' * len(client_secret))
        
    # Read the client secret from the vault
    if not client_secret:
        client_secret = get_vault_data(client_id)
        if client_secret:
            logger.info('Client Secret from Vault, Secret: %s', '*' * len(client_secret))
        elif get_from_currentdata(CUSTOMER):
            logger.error("Client Secret key does not set in a vault")
            raise Exception("Client Secret key does not set in a vault")

    if not client_secret:
        raise Exception("No `client_secret` key in the connector file to access azure resource!...")

    logger.info('\t\tSubscription: %s', sub_id)
    logger.info('\t\tTenant: %s', tenant_id)
    logger.info('\t\tclient: %s', client_id)
    put_in_currentdata('clientId', client_id)
    put_in_currentdata('clientSecret', client_secret)
    put_in_currentdata('subscriptionId', sub_id)
    put_in_currentdata('tenant_id', tenant_id)
    token = get_access_token()
    logger.debug('TOKEN: %s', token)
    if not token:
        logger.info("Unable to get access token, will not run tests....")
        raise Exception("Unable to get access token, will not run tests....")
        # return {}

    # snapshot_nodes = get_field_value(snapshot, 'nodes')
    # snapshot_data, valid_snapshotids = validate_snapshot_nodes(snapshot_nodes)
    if valid_snapshotids and token and snapshot_nodes:
        for node in snapshot_nodes:
            validate = node['validate'] if 'validate' in node else True
            if 'path' in node:
                data = get_node(token, sub_name, sub_id, node, snapshot_user, snapshot_source)
                if data:
                    if validate:
                        if get_dbtests():
                            if get_collection_size(data['collection']) == 0:
                                # Creating indexes for collection
                                create_indexes(
                                    data['collection'], 
                                    config_value(DATABASE, DBNAME), 
                                    [
                                        ('snapshotId', pymongo.ASCENDING),
                                        ('timestamp', pymongo.DESCENDING)
                                    ]
                                )

                                create_indexes(
                                    data['collection'], 
                                    config_value(DATABASE, DBNAME), 
                                    [
                                        ('_id', pymongo.DESCENDING),
                                        ('timestamp', pymongo.DESCENDING),
                                        ('snapshotId', pymongo.ASCENDING)
                                    ]
                                )
                            insert_one_document(data, data['collection'], dbname, check_keys=False)
                        else:
                            snapshot_dir = make_snapshots_dir(container)
                            if snapshot_dir:
                                store_snapshot(snapshot_dir, data)
                        if 'masterSnapshotId' in node:
                            snapshot_data[node['snapshotId']] = node['masterSnapshotId']
                        else:
                            snapshot_data[node['snapshotId']] = True
                    # else:
                    #     snapshot_data[node['snapshotId']] = False
                    node['status'] = 'active'
                else:
                    # TODO alert if notification enabled or summary for inactive.
                    node['status'] = 'inactive'
                logger.debug('Type: %s', type(data))
            else:
                alldata = get_all_nodes(
                    token, sub_name, sub_id, node, snapshot_user, snapshot_source)
                if alldata:
                    snapshot_data[node['masterSnapshotId']] = []
                    for data in alldata:
                        # insert_one_document(data, data['collection'], dbname)
                        found_old_record = False
                        for masterSnapshotId, snapshot_list in snapshot_data.items():
                            old_record = None
                            if isinstance(snapshot_list, list):
                                for item in snapshot_list:
                                    if item["path"] == data['path']:
                                        old_record = item

                                if old_record:
                                    found_old_record = True
                                    if node['masterSnapshotId'] not in old_record['masterSnapshotId']:
                                        old_record['masterSnapshotId'].append(
                                            node['masterSnapshotId'])

                        if not found_old_record:
                            snapshot_data[node['masterSnapshotId']].append(
                                {
                                    'masterSnapshotId': [node['masterSnapshotId']],
                                    'snapshotId': data['snapshotId'],
                                    'path': data['path'],
                                    'validate': validate,
                                    'status': 'active'
                                })
                    # snapshot_data[node['masterSnapshotId']] = True
                logger.debug('Type: %s', type(alldata))
        delete_from_currentdata('resources')
        delete_from_currentdata('clientId')
        delete_from_currentdata('clientSecret')
        delete_from_currentdata('subscriptionId')
        delete_from_currentdata('tenant_id')
        delete_from_currentdata('token')
    return snapshot_data
Example #17
def get_node(awsclient, node, snapshot_source):
    """
    Fetch node from aws using connection. In this case using boto API's
    describe functions.
    """

    collection = node['collection'] if 'collection' in node else COLLECTION
    parts = snapshot_source.split('.')
    function_to_call = None
    db_record = {
        "structure": "aws",
        "error": None,
        "reference": "",
        "source": parts[0],
        "path": '',
        "timestamp": int(time.time() * 1000),
        "queryuser": "",
        "checksum": hashlib.md5("{}".encode('utf-8')).hexdigest(),
        "node": node,
        "snapshotId": node['snapshotId'],
        "collection": collection.replace('.', '').lower(),
        "json": {
        }  # Refactor when node is absent it should None, when empty object put it as {}
    }
    detail_methods = get_field_value(node, "detailMethods")
    if detail_methods is None:
        function_to_call = _get_aws_function(awsclient, node)
        if function_to_call and callable(function_to_call):
            queryval = get_field_value(node, 'id')
            try:
                data = function_to_call(**queryval)
                if data:
                    db_record['json'] = data
                    checksum = get_checksum(data)
                    if checksum:
                        db_record['checksum'] = checksum
                    else:
                        put_in_currentdata('errors', data)
                        logger.info("Describe function does not exist: %s",
                                    str(function_to_call))
                        db_record[
                            'error'] = "Describe function does not exist: %s" % str(
                                function_to_call)
            except Exception as ex:
                logger.info('Describe function exception: %s', ex)
                db_record['error'] = 'Describe function exception: %s' % ex
        else:
            logger.info('Invalid function exception: %s',
                        str(function_to_call))
            db_record['error'] = 'Invalid function exception: %s' % str(
                function_to_call)
    else:
        json_to_put = {}
        arn_str = get_field_value(node, "arn")
        arn_obj = arnparse(arn_str)
        client_str = arn_obj.service
        resourceid = arn_obj.resource
        for each_method_str in detail_methods:
            function_to_call = getattr(awsclient, each_method_str, None)
            if function_to_call and callable(function_to_call):
                params = _get_function_kwargs(arn_str, each_method_str,
                                              json_to_put)
                try:
                    data = function_to_call(**params)
                    if data:
                        json_to_put.update(data)
                except Exception as ex:
                    logger.info('Describe function exception: %s', ex)
                    db_record['error'] = 'Describe function exception: %s' % ex
            else:
                logger.info('Invalid function exception: %s',
                            str(function_to_call))
                db_record['error'] = 'Invalid function exception: %s' % str(
                    function_to_call)
        db_record['json'] = json_to_put
    return db_record
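A sketch of the inputs the AWS variant above works with, based only on the fields it reads; the boto3 client, ARN and method name are placeholders, and the surrounding examples do not show how awsclient is actually constructed.

import boto3   # assumption: awsclient is a boto3 service client

awsclient = boto3.client('ec2', region_name='us-east-1')
node = {
    'snapshotId': 'AWS_SNAPSHOT_1',
    'collection': 'ec2',
    'arn': 'arn:aws:ec2:us-east-1:123456789012:instance/i-0abcd1234',   # placeholder ARN
    'detailMethods': ['describe_instances']
}
record = get_node(awsclient, node, 'awsStructure.json')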
Example #18
def check_and_add_error(status, errmsg):
    """Add error based on the status code of the http response"""
    if status and isinstance(status, int) and status != 200:
        if status >= 400:
            put_in_currentdata('errors', errmsg)
        logger.info(errmsg)
Example #19
def get_all_nodes(compute_fn, node, snapshot_source, snapshot, snapshot_data):
    """
    Fetch all nodes from google using connection using google client API's functions.
    """
    collection = node['collection'] if 'collection' in node else COLLECTION
    parts = snapshot_source.split('.')
    project_id = get_field_value_with_default(snapshot, 'project-id', "")
    node_type = get_field_value_with_default(node, 'type', "")

    db_record = {
        "structure": "google",
        "error": None,
        "reference": project_id,
        "source": parts[0],
        "path": "",
        "timestamp": int(time.time() * 1000),
        "queryuser": "",
        "checksum": hashlib.md5("{}".encode('utf-8')).hexdigest(),
        "node": node,
        "snapshotId": None,
        "masterSnapshotId": [node['masterSnapshotId']],
        "collection": collection.replace('.', '').lower(),
        "json":
        {},  # Refactor when node is absent it should None, when empty object put it as {}
        "items": []
    }

    if node_type:
        fn_str_list, kwargs = get_google_call_function_for_crawler(
            node, project_id)

        if fn_str_list and kwargs:
            for i in range(0, len(fn_str_list)):
                compute_fn = getattr(compute_fn, fn_str_list[i], None)
                if compute_fn and i != len(fn_str_list) - 1:
                    compute_fn = compute_fn()

            response_param = ""
            if fn_str_list and len(fn_str_list) > 1:
                response_param = fn_str_list[-2]
            elif fn_str_list and len(fn_str_list) == 1:
                response_param = fn_str_list[0]

            if compute_fn and callable(compute_fn):
                try:
                    data = compute_fn(**kwargs).execute()
                    if data:
                        check_node_type = node_type
                        node_type_list = node_type.split(".")
                        if len(node_type_list) > 1:
                            del node_type_list[-1]
                            check_node_type = ".".join(node_type_list)

                        db_record['json'] = data
                        if response_param in data:
                            db_record['items'] = data[response_param]
                        elif "items" in data:
                            if isinstance(data['items'], dict):
                                for name, scoped_dict in data['items'].items():
                                    if response_param in scoped_dict:
                                        db_record['items'] = db_record[
                                            'items'] + scoped_dict[
                                                check_node_type]

                            if not db_record['items']:
                                db_record['items'] = data['items']

                        set_snapshot_data(node, db_record['items'],
                                          snapshot_data)

                        checksum = get_checksum(data)
                        if checksum:
                            db_record['checksum'] = checksum
                    else:
                        put_in_currentdata('errors', data)
                        logger.info("Compute function does not exist: %s",
                                    str(fn_str_list))
                        db_record[
                            'error'] = "Compute function does not exist: %s" % str(
                                fn_str_list)
                except Exception as ex:
                    logger.info('Compute function exception: %s', ex)
                    db_record['error'] = 'Compute function exception: %s' % ex
            else:
                logger.info('Invalid Compute function exception: %s',
                            str(fn_str_list))
                db_record[
                    'error'] = 'Invalid Compute function exception: %s' % str(
                        fn_str_list)
        else:
            logger.info('Missing Compute function')
            db_record['error'] = 'Missing Compute function'
    return db_record
Example #20
def validator_main(arg_vals=None, delete_rundata=True):
    """
    Main driver utility for running validator tests.
    The arg_vals, if passed, should be an array of strings. A couple of examples:
      1) arg_vals = ['container1'] - Use container1 to process test files from the filesystem.
      2) arg_vals = ['container1', '--db'] - Use container1 to process test documents from the database.
    When arg_vals is None, the argparse library parses the sys.argv array.
    Successful argument parsing initializes the system for the run.
    On exit, cleanup is run. The return values of this main entry function are:
       0 - Success, tests executed.
       1 - Failure, tests execution error.
       2 - Exception, missing config.ini, Mongo connection failure or http connection exception;
           the tests execution could not be started or completed.
    """
    cmd_parser = argparse.ArgumentParser(
        "prancer",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog='''\
Example: prancer collection1
Runs the prancer framework based on the configuration files available in collection1 folder
                                         ''')
    cmd_parser.add_argument('-v',
                            '--version',
                            action='version',
                            version=("Prancer %s" % __version__),
                            help='Show prancer version')
    cmd_parser.add_argument(
        'container',
        metavar='collection',
        action='store',
        help=
        'The name of the folder which contains the collection of files related to one scenario'
    )
    cmd_parser.add_argument(
        '--db',
        action='store',
        default=None,
        choices=['NONE', 'SNAPSHOT', 'FULL'],
        help=
        '''NONE - Database will not be used, all the files reside on file system,
                            SNAPSHOT - Resource snapshots will be stored in db, everything else will be on file system,
                            FULL - tests, configurations, outputs and snapshots will be stored in the database'''
    )
    cmd_parser.add_argument('--crawler',
                            action='store_true',
                            default=False,
                            help='Crawls and generates snapshot files only')
    cmd_parser.add_argument('--test',
                            action='store',
                            default=None,
                            help='Run a single test in NODB mode')
    cmd_parser.add_argument('--customer',
                            action='store',
                            default=None,
                            help='customer name for config')
    cmd_parser.add_argument(
        '--connector',
        action='store',
        default=None,
        help=
        'specify the name of the connector which you want to run from the collection'
    )
    cmd_parser.add_argument(
        '--branch',
        action='store',
        default=None,
        help=
        'specify the name of the branch to populate snapshots, for the filesystem connector'
    )
    args = cmd_parser.parse_args(arg_vals)

    retval = 2
    set_customer()
    cfg_error, config_ini = search_config_ini()
    if cfg_error:
        return retval

    if args.customer:
        set_customer(args.customer)
    if args.db:
        if args.db.upper() in DBVALUES:
            args.db = DBVALUES.index(args.db.upper())
        else:
            args.db = DBVALUES.index(SNAPSHOT)
    else:
        nodb = config_value(TESTS, DBTESTS)
        if nodb and nodb.upper() in DBVALUES:
            args.db = DBVALUES.index(nodb.upper())
        else:
            args.db = DBVALUES.index(SNAPSHOT)

    if args.test:
        args.db = DBVALUES.index(NONE)

    # Initialize the database connection unless running in NO DATABASE mode.
    if args.db:
        # returns the db connection handle and status, handle is ignored.
        from processor.database.database import init_db, TIMEOUT
        _, db_init_res = init_db()
        if not db_init_res:
            msg = "Mongo DB connection timed out after %d ms, check the mongo server, exiting!....." % TIMEOUT
            console_log(msg, currentframe())
            return retval

    # Check the log directory and also check if it is writeable.
    from processor.logging.log_handler import init_logger, get_logdir, default_logging, add_file_logging
    fw_cfg = get_config_data(framework_config())
    log_writeable, logdir = get_logdir(fw_cfg, framework_dir())
    if not log_writeable:
        console_log(
            'Logging directory(%s) is not writeable, exiting....' % logdir,
            currentframe())
        return retval

    # All is well from this point; check that the container exists in the configured directory.
    retval = 0
    logger = init_logger(args.db, framework_config())
    # logger = add_file_logging(config_ini)
    logger.info("START: Argument parsing and Run Initialization. Version %s",
                __version__)

    from processor.connector.snapshot import populate_container_snapshots
    from processor.connector.validation import run_container_validation_tests
    from processor.crawler.master_snapshot import generate_container_mastersnapshots
    try:
        from processor_enterprise.notifications.notification import check_send_notification
    except:
        check_send_notification = lambda container, db: None

    logger.info("Command: '%s %s'",
                sys.executable.rsplit('/', 1)[-1], ' '.join(sys.argv))
    try:
        from processor.helper.config.rundata_utils import init_currentdata, \
            delete_currentdata, put_in_currentdata
        # Delete the rundata at the end of the script as per caller, default is True.
        if delete_rundata:
            atexit.register(delete_currentdata)
        init_currentdata()

        logger.info("Using Framework dir: %s", framework_dir())
        logger.info("Args: %s", args)
        logger.debug("Running tests from %s.", DBVALUES[args.db])
        fs = True if args.db > DBVALUES.index(SNAPSHOT) else False
        put_in_currentdata('jsonsource', fs)
        put_in_currentdata(DBTESTS, args.db)
        put_in_currentdata('container', args.container)
        # if args.db == DBVALUES.index(FULL):
        #     from processor.logging.log_handler import get_dblogger
        #     log_name = get_dblogger()
        #     if log_name:
        #         pid = open('/tmp/pid_%s' % os.getpid(), 'w')
        #         pid.write(log_name)
        #         pid.close()
        if args.customer:
            put_in_currentdata(CUSTOMER, args.customer)
        if args.test:
            put_in_currentdata(SINGLETEST, args.test)
        else:
            put_in_currentdata(SINGLETEST, False)
        if args.connector:
            put_in_currentdata("connector", args.connector)
        if args.branch:
            put_in_currentdata("branch", args.branch)
        if not args.db:
            retval = 0 if container_exists(args.container) else 2
            if retval:
                logger.critical(
                    "Container(%s) is not present in Framework dir: %s",
                    args.container,
                    framework_dir(),
                    extra={"type": "critical"})
                # TODO: Log the path the framework looked for.
                return retval
        if args.crawler:
            # Generate snapshot files from here.
            generate_container_mastersnapshots(args.container, fs)
        else:
            # Normal flow
            snapshot_status = populate_container_snapshots(args.container, fs)
            logger.debug(json.dumps(snapshot_status, indent=2))
            if snapshot_status:
                status = run_container_validation_tests(
                    args.container, fs, snapshot_status)
                retval = 0 if status else 1
            else:
                retval = 1
            check_send_notification(args.container, args.db)
    except (Exception, KeyboardInterrupt) as ex:
        logger.error("Execution exception: %s", ex)
        print(traceback.format_exc())
        retval = 2
    return retval
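A minimal usage sketch for the entry point above, based on its docstring and argument parser; 'container1' is a placeholder, and a valid config.ini plus the container folder are assumed to exist.

# Placeholder invocation; '--db NONE' keeps everything on the filesystem.
rc = validator_main(['container1', '--db', 'NONE'])
if rc == 0:
    print('tests executed successfully')
elif rc == 1:
    print('tests execution error')
else:   # rc == 2
    print('setup error: missing config.ini, DB or HTTP connection failure')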