Example #1
    def train(self,
              targets,
              training_dict,
              total_epochs=10,
              learning_rate=0.1):
        """Create the training loop"""
        # Construct the optimizer
        params = [
            self.w_comb1, self.w_comb2, self.w_t, self.w_l, self.w_r,
            self.b_conv, self.w_hidden, self.b_hidden
        ]
        optimizer = torch.optim.SGD(params, lr=learning_rate)
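        # BCELoss expects probabilities in [0, 1] (e.g. sigmoid outputs) and float targets of the same shape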
        criterion = nn.BCELoss()
        print('The correct value of the files is: ', targets)

        for epoch in range(total_epochs):
            # Time
            start = time()
            outputs = self.forward(training_dict)

            try:
                loss = criterion(outputs, targets)
            except AttributeError:
                print(
                    f'The size of outputs is {len(outputs)} and is of type {type(outputs)}'
                )
                print('Check that the path is a folder and not a file')
                raise

            print('outputs: \n', outputs)
            # print('Matrix w_r_conv: \n', params[4])

            # zero the parameter gradients
            optimizer.zero_grad()

            # Backpropagate; retain_graph=True keeps the computation graph for a later backward pass
            loss.backward(retain_graph=True)

            # Update parameters
            optimizer.step()  #w_r = w_r - lr * w_r.grad

            #Time
            end = time()

            print('Epoch: ', epoch, ', Time: ', end - start, ', Loss: ', loss)

        message = f'''
The loss we have for the training network is: {loss}
        '''
        writer(message)
        self.save()
Example #2
def cartoonize(color, rep):
    """draw a cartoon representation of glycans"""
    stored.ResiduesNumber = []
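    # Collect the residue number of every atom named C1 (one anomeric carbon per sugar residue)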
    cmd.iterate('name c1', 'stored.ResiduesNumber.append((resi))')
    resn_list = [int(i) for i in stored.ResiduesNumber]
    bonds = get_glyco_bonds(resn_list[0], resn_list[-1]+1)
    con_matrix = writer(bonds)
    #con_matrix = writer2(bonds)
    rings = find_rings(resn_list)
    rings_coords = get_ring_coords(resn_list, rings)
    bonds_coords = get_bonds_coords(resn_list, con_matrix)
    colors = get_colors_c1(resn_list, color)
    bonds_colors = get_bonds_colors(resn_list, con_matrix, color)
    cmd.set('suspend_updates', 'on')
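    # Build one CGO object per ring set and load each as a consecutive state of 'cgo01'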
    for state, coords in enumerate(rings_coords):
        obj = []
        if rep == 'beads':
            radius_s = 1.8
            radius_b = 0.18
            obj = beads(obj, coords, colors[state], radius_s)
            obj = cylinder(obj, bonds_coords[state], bonds_colors[state], radius_b)
        else:
            if rep == 'cartoon':
                radius = 0.075
            else: 
                radius = 0.035
            obj = hexagon(obj, coords, colors[state], rep, radius)
            obj = cylinder(obj, bonds_coords[state], bonds_colors[state], radius)
        cmd.load_cgo(obj,'cgo01', state+1)

    cmd.select('glycan', 'byres name C1')
    cmd.delete('glycan')
    cmd.delete('tmp')
    cmd.set('two_sided_lighting', 1)
    cmd.set('suspend_updates', 'off')
Example #3
def faults(apic, **kwargs):
	if 'filename' in kwargs:
		wb = kwargs['filename']
	else:
		wb = 'discovery.xlsx'
	if 'writer' in kwargs:
		writer = kwargs['writer']
	else:
		writer = utils.writer(wb)
	faultsUri = '/api/node/class/faultSummary.json?query-target-filter=' \
	    'and()&order-by=faultSummary.severity'
	faultsUrl = apic.baseUrl + faultsUri
	faultsResp = apic.session.get(faultsUrl, verify=False)
	utils.responseCheck(faultsResp)
	faultsJson = json.loads(faultsResp.text)
	data = []
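	# Flatten each fault record down to its attributes dictionary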
	try:
		for fault in faultsJson['imdata']:
			data.append(fault['faultSummary']['attributes'])
	except Exception:
		logging.critical('Error iterating through faults. Exiting')
		apic.session.close()
		sys.exit()
	apic.session.close()
	utils.dictDumpTwo(writer,
			        data,
			        list(data[0].keys()),
			        'faultSummary')
Example #4
    def validation(self,
                   targets,
                   validation_dict,
                   learning_rate=0.3,
                   momentum=0,
                   l2_penalty=0,
                   epoch_first=45):
        """Create the validation loop"""
        print('########################################')
        print(
            '\n\n\nFinished training process. Entering validation process\n\n\n'
        )
        print("The correct value of the files is: ", targets)

        # We calculate the predictions
        predicts = self.prediction(validation_dict, learning_rate, momentum,
                                   l2_penalty, epoch_first)
        # print the predictions
        print('predictions: \n', predicts)

        # Loss function
        criterion = nn.BCELoss()
        loss = criterion(predicts, targets)

        # TODO Build the accuracy evaluation method for each file
        # Confusion matrix
        conf_matrix = self.conf_matrix(predicts, targets)
        print(conf_matrix)
        plot_confusion_matrix(conf_matrix, ['no generator', 'generator'])

        message = f'''

For the validation set we have the following results:
loss: {loss}
confusion_matrix:
{conf_matrix}
        '''
        writer(message)
        print('Loss validation: ', loss)
        # correct += (predicted == labels).sum()
        accuracy = self.accuracy(predicts, targets)
        print('accuracy: ', accuracy)
Example #5
    def validation(self, validation_path, validation_dict):
        """Create the validation loop"""
        print('########################################')
        print(
            '\n\n\nFinished training process. Entering validation process\n\n\n'
        )
        ### Validation set
        # this is to have all the information of each file in the folder contained in a dictionary
        #validation_dict = self.validation_dict_set_up(validation_path)
        # this is the tensor with all target values associated to the validation set
        targets = self.target_tensor_set_up(validation_path, validation_dict)

        # We calculate the predictions
        predicts = self.prediction(validation_dict)
        # print the predictions
        print('predictions: \n', predicts)

        # Loss function
        criterion = nn.BCELoss()
        loss = criterion(predicts, targets)

        # TODO Build the accuracy evaluation method for each file
        # Confusion matrix
        conf_matrix = self.conf_matrix(predicts, targets)
        print(conf_matrix)
        plot_confusion_matrix(conf_matrix, ['no generator', 'generator'])

        message = f'''

For the validation set we have the following results:
loss: {loss}
confusion_matrix:
{conf_matrix}
        '''
        writer(message)
        print('Loss validation: ', loss)
        # correct += (predicted == labels).sum()
        accuracy = self.accuracy(predicts, targets)
        print('accuracy: ', accuracy)
Example #6
    def __init__(self, config, resume: bool, model, loss_function, optimizer,
                 scheduler, lr_override, test):
        self.optimizer = optimizer
        self.scheduler = scheduler
        self.loss_function = loss_function
        self.lr_override = lr_override
        self.test = test

        self.gpu_ids = config['gpu_ids']
        self.model = model.to(self.gpu_ids[0])
        if len(self.gpu_ids) > 1:
            self.model = torch.nn.DataParallel(self.model,
                                               device_ids=self.gpu_ids)
        # Trainer
        self.epochs = config["trainer"]["epochs"]

        # The following attributes are not in the config file; they are updated later if resume is True.
        self.start_epoch = 0
        self.best_score = np.inf
        self.root_dir = (Path(config["root"]).expanduser().absolute() /
                         "experiments" / config["experiment_name"])
        self.checkpoints_dir = self.root_dir / "checkpoints"
        self.logs_dir = self.root_dir / "logs"
        prepare_empty_dir([self.checkpoints_dir, self.logs_dir], resume=resume)

        if not self.test:
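            # When not in test mode: log the configuration through the writer and dump a timestamped JSON copy of it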
            self.writer = writer(self.logs_dir.as_posix())
            self.writer.add_text(
                tag="Configuration",
                text_string=
                f"<pre>  \n{json5.dumps(config, indent=4, sort_keys=False)}  \n</pre>",
                global_step=1)
            # print("Configurations are as follows: ")
            # print(json5.dumps(config, indent=2, sort_keys=False))

            with open(
                (self.root_dir /
                 f"{time.strftime('%Y-%m-%d-%H-%M-%S')}.json").as_posix(),
                    "w") as handle:
                json5.dump(config, handle, indent=2, sort_keys=False)

        if resume:
            self._resume_checkpoint()

        self._print_networks([self.model])
Example #7
def mcm(pose, mc_steps, SASA, randomize):
    ################################# MCM Parameters ##########################
    T = 300. # Temperature 
    k = 0.0019872041 # Boltzmann constant
    angles_prob = [1/3, 1/3, 1/3] # probability to sample phi, psi or chi
    accepted = 0
    ############################################################################
    # 
    first, last = pose_from_pdb(pose)
    if first or last:
        print('Starting MCM')
        from energy import minimize, set_sasa, get_sasa
        cmd.set('suspend_updates', 'on')
        cmd.feedback('disable', 'executive', 'everything')  # comment this out when debugging
        cmd.set('pdb_conect_all', 1)
        
        glyco_bonds = get_glyco_bonds(first, last)
        con_matrix = writer(glyco_bonds)
    
        # Remove previous pdb files
        prev_files = glob.glob('mcm_*.pdb')
        for prev_file in prev_files:
            os.remove(prev_file)
    
        # set all parameters for sasa-energy computation
        if SASA:
            params, points, const = set_sasa(n=1000)
        # randomize initial conformation
        if randomize:
            for i in range(len(con_matrix)-1):
                bond = con_matrix[i]
                angle_values = np.random.uniform(-180, 180, size=2)
                set_psi(pose, bond, angle_values[0])
                set_phi(pose, bond, angle_values[1])
                for i in range(6):
                    set_chi(pose, bond)
    
        # minimize energy of starting conformation and save it
        NRG_old = minimize(pose, nsteps=5000, rigid_geometry=False)
        NRG_min = NRG_old
        cmd.save('mcm_%08d.pdb' % accepted)
    
        ## start MCM routine
        fd = open("mcm_log.txt", "w")
        print('# iterations remaining = %s' % (mc_steps))
        for i in range(1, mc_steps+1):
            if i % (mc_steps//10) == 0:
                print('#remaining iterations = %s' % (mc_steps-i))
            sample_uniform(pose, con_matrix, angles_prob)
            NRG_new = minimize('tmp', nsteps=100, rigid_geometry=False)
            if SASA:
                solvatation_nrg = get_sasa(params, points, const, selection='all',
                 probe=0)[0]
                NRG_new = NRG_new + solvatation_nrg
            if NRG_new < NRG_old:
                NRG_old = NRG_new 
                fd.write('%8d%10.2f\n' % (accepted, NRG_new))
                cmd.copy(pose, 'tmp')
                cmd.delete('tmp')
                cmd.save('mcm_%08d.pdb' % accepted)
                accepted += 1
            else:
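                # Metropolis criterion: accept an uphill move with probability exp(-dE / kT)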
                delta = np.exp(-(NRG_new-NRG_old)/(T*k))
                if delta > np.random.uniform(0, 1):
                    NRG_old = NRG_new
                    fd.write('%8d%10.2f\n' % (accepted, NRG_new))
    
                    cmd.copy(pose, 'tmp')
                    cmd.delete('tmp')
                    cmd.save('mcm_%08d.pdb' % accepted)
                    accepted += 1 
            cmd.delete('tmp')
            if NRG_new < NRG_min:
                NRG_min = NRG_new
                cmd.save('mcm_min.pdb')
        fd.close()
    
        cmd.delete('all')
        print('Saving all accepted conformations to a single file')
        cmd.set('defer_builds_mode', 5)
        for i in range(0, accepted):
            cmd.load('mcm_%08d.pdb' % i, 'mcm_trace')
        cmd.save('mcm_trace.pdb', 'all', state=0)
        cmd.delete('all')
        cmd.load('mcm_trace.pdb')
        cmd.intra_fit('mcm_trace')
        print(' MCM completed')
        cmd.set('suspend_updates', 'off')
Example #8
remover()

for vector_size in vector_size_ls:
    for learning_rate in learning_rate_ls:
        for momentum in momentum_ls:
            for learning_rate2 in [0.1]:
                for feature_size in feature_size_ls:
                    for l2_penalty in l2_penalty_ls:
                        message = f'''

########################################

The parameters we're using are the following:
vector_size = {vector_size}
learning_rate = {learning_rate}
momentum = {momentum}
learning_rate2 = {learning_rate2}
feature_size = {feature_size}
number of epochs for second neural network: {epoch}


                    '''
                        # Append the results to a results.txt file
                        writer(message)
                        main(vector_size, learning_rate, momentum, epoch_first, learning_rate2,
                             feature_size, epoch, pooling, l2_penalty)




Example #9
def mcm(pose, mc_steps, SASA, randomize):
    ################################# MCM Parameters ##########################
    T = 300.  # Temperature
    k = 0.0019872041  # Boltzmann constant
    kT = k * T
    # probability to sample phi, psi or chi
    angles_prob = [1 / 3, 1 / 3, 1 / 3]
    accepted = 0
    ##########################################################################
    #
    first, last = pose_from_pdb(pose)
    if first or last:
        print('Starting MCM')
        from energy import minimize, set_sasa, get_sasa
        sus_updates = cmd.get('suspend_updates')
        cmd.set('suspend_updates', 'on')
        # comment this out when debugging
        cmd.feedback('disable', 'executive', 'everything')
        pdb_conect = cmd.get('pdb_conect_all')
        cmd.set('pdb_conect_all', 1)

        glyco_bonds = get_glyco_bonds(first, last)
        con_matrix = writer(glyco_bonds)

        # Remove previous pdb files
        prev_files = glob.glob('mcm_*.pdb')
        for prev_file in prev_files:
            os.remove(prev_file)

        # set all parameters for sasa-energy computation
        if SASA:
            params, points, const = set_sasa(n=1000)
        # randomize initial conformation
        if randomize:
            for i in range(len(con_matrix) - 1):
                bond = con_matrix[i]
                angle_values = np.random.uniform(-180, 180, size=2)
                set_psi(pose, bond, angle_values[0])
                set_phi(pose, bond, angle_values[1])
                for i in range(6):
                    set_chi(pose, bond)

        # minimize energy of starting conformation and save it
        NRG_old = minimize(pose, nsteps=5000, rigid_geometry=False)
        NRG_min = NRG_old
        cmd.save('mcm_%08d.pdb' % accepted)

        # start MCM routine
        fd = open("mcm_log.txt", "w")
        print('# iterations remaining = %s' % (mc_steps))
        for i in range(1, mc_steps + 1):
            if i % (mc_steps // 10) == 0:
                print('#remaining iterations = %s' % (mc_steps - i))
            sample_uniform(pose, con_matrix, angles_prob)
            NRG_new = minimize('tmp', nsteps=100, rigid_geometry=False)
            if SASA:
                solvatation_nrg = get_sasa(params,
                                           points,
                                           const,
                                           selection='all',
                                           probe=0)[0]
                NRG_new = NRG_new + solvatation_nrg
            if NRG_new < NRG_old:
                NRG_old = NRG_new
                fd.write('%8d%10.2f\n' % (accepted, NRG_new))
                cmd.copy(pose, 'tmp')
                cmd.delete('tmp')
                cmd.save('mcm_%08d.pdb' % accepted)
                accepted += 1
            else:
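                # Metropolis criterion: accept an uphill move with probability exp(-dE / kT)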
                delta = np.exp(-(NRG_new - NRG_old) / (kT))
                if delta > np.random.uniform(0, 1):
                    NRG_old = NRG_new
                    fd.write('%8d%10.2f\n' % (accepted, NRG_new))

                    cmd.copy(pose, 'tmp')
                    cmd.delete('tmp')
                    cmd.save('mcm_%08d.pdb' % accepted)
                    accepted += 1
            cmd.delete('tmp')
            if NRG_new < NRG_min:
                NRG_min = NRG_new
                cmd.save('mcm_min.pdb')
        fd.close()

        cmd.delete('all')
        print('Saving all accepted conformations to a single file')
        de_builds = cmd.get('defer_builds_mode')
        cmd.set('defer_builds_mode', 5)
        for i in range(0, accepted):
            cmd.load('mcm_%08d.pdb' % i, 'mcm_trace')
        cmd.save('mcm_trace.pdb', 'all', state=0)
        cmd.delete('all')
        cmd.load('mcm_trace.pdb')
        cmd.intra_fit('mcm_trace')
        print('MCM completed')
        # restore settings
        cmd.set('suspend_updates', sus_updates)
        cmd.set('pdb_conect_all', pdb_conect)
        cmd.set('defer_builds_mode', de_builds)
Example #10
    def write_header(self, outfile):
        insr = self.instru_info.instru_name
        vers = self.instru_info.version
        outwriter = utils.writer(outfile, delimiter=self.delimiter)
        outwriter.writerow([insr, vers])
Example #11
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('cmd')
    parser.add_argument('--params',help='Parameters for models.')
    parser.add_argument('--dset',help='Corpus path for hypernymy extraction.')
    parser.add_argument('--output',help='Output path for hypernymy pairs.')
    parser.add_argument('--evaluate',help='Evaluation for hypernymy extraction models.')
    args = parser.parse_args()

    if args.cmd == 'hearst':
        hp = hearst.HearstPatterns(extended=args.params)
        if args.dset is None:
            pass
        else:
            data = utils.reader(args.dset)
            pair_list = hp.find_hyponyms(data)
            if args.output is not None:
                utils.writer(args.output,pair_list)
                print('Finished ...')
            else:
                print('No output file specified; showing 5 of the results here.')
                for i in range(5):
                    print(pair_list[i])
                print('Finished ...')

    elif args.cmd == 'PPMI':
        # Read prediction from precomputed file
        shutil.copyfile("./patternBased/result_ppmi.txt", args.output)
        with open(args.output) as f:
            print("======== PPMI : score ======== ")
            print("Print out first 5 examples.")
            for i in range(6):
                print(f.readline())
    elif args.cmd == 'PPMISVD':
        path = "result_SvdPpmi_"+str(args.params)+".txt"
        shutil.copyfile("./patternBased/"+path, args.output)
        with open(args.output) as f:
            print("======== PPMI-SVD : score ======== ")
            print("k = ", args.params)
            print("Print out first 5 examples.")
            for i in range(6):
                print(f.readline())

    elif args.cmd == 'dist':
        if args.evaluate is not None:
            res = pd.read_table("./distributional/data/results.txt", sep="\t")
            print("======== Distributional Model : score ======== ")
            print(res.head(6))

    elif args.cmd == 'embed':
        shutil.copyfile("./termEmbed/term_embed_result.txt", args.output)
        if args.evaluate is not None:
            res = pd.read_table("./termEmbed/results.txt", sep="\t")
            print("======== Term Embedding : score ======== ")
            print(res.head(10))

    elif args.cmd == 'Projection':
        if args.output is None:
            pass
        else:
            print("Loading model...\n")
            model = train_model('./projection/dumped.data','./projection/modLog.txt', 9510, use_gpu = False)
            # save_model('./projection/projectMod.pt', model)
            print("Loading test data...\n")
            data=joblib.load('./projection/dumped.data')
            candidates = data["candidates"]
            test_q_cand_ids = data["test_query_cand_ids"]
            test_q_embed = make_embedder(data["test_query_embeds"], grad=False,
                                         cuda=model.use_cuda, sparse=False)
            print("Writing predictions...")
            test_eval = Evaluator(model, test_q_embed, test_q_cand_ids)
            test_eval.write_predictions(args.output, list(candidates))
            print("Done.\n")
        if args.evaluate == 'True':
            Dev_score = {'DevMAP': [], 'DevAP': [], 'DevMRR': []}
            for line in open('./projection/modLog.txt',"r"):
                if line.split()[0] == 'Epoch':
                    # header, skip
                    continue
                else:
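                    # Whitespace-separated fields 5, 6 and 7 (0-indexed) are read as DevMAP, DevAP and DevMRR (assumed log layout)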
                    Dev_score['DevMAP'].append(float(line.split()[5]))
                    Dev_score['DevAP'].append(float(line.split()[6]))
                    Dev_score['DevMRR'].append(float(line.split()[7]))
            print("======== Projection Learning : score ======== ")
            print("SemEval2018 Task9.")
            print("DevMAP\tDevAP\tDevMRR")
            print(str(max(Dev_score['DevMAP']))+"\t"+str(max(Dev_score['DevAP']))+"\t"+str(max(Dev_score['DevMRR'])))

    elif args.cmd == 'BiLSTM':
        # use default test data
        if args.output is None:
            pass
        else:
            if args.evaluate is not None:
                print("======== BiLSTM Model : f1 score ======== ")
                with open("./BiLSTM/data/res.score.txt", "r") as score:
                    print(score.read())
Example #12
    def write_header(self, outfile):
        insr = self.instrument_name
        vers = self.version
        outwriter = utils.writer(outfile, delimiter=self.delimiter)
        outwriter.writerow([insr, vers])
Example #13
def main(**kwargs):

    # Set Variables
    date = time.strftime("%Y%m%d")
    if 'filename' in kwargs:
        wb = date + '-' + kwargs['filename']
    else:
        wb = date + '-discovery.xlsx'
    tnData = []
    tnCols = ['descr', 'dn', 'name', 'nameAlias', 'ownerKey', 'ownerTag']
    bdData = []
    bdCols = [
        'tenant', 'bdName', 'subnetIp', 'subnetScope', 'primaryIp', 'virtualIp'
    ]
    epgData = []
    epgCols = [
        'tenant', 'app', 'epg', 'nameAlias', 'bd', 'bdTenant',
        'prefGroupMember'
    ]
    dhcpData = []
    dhcpCols = [
        'podId', 'fabricId', 'nodeId', 'model', 'name', 'nameAlias',
        'configNodeRole', 'nodeRole', 'nodeType', 'ip', 'spineLevel', 'fwVer',
        'runningVer', 'supported', 'extPoolId', 'decommissioned',
        'configIssues'
    ]
    nodePData = []
    nodePCols = ['name', 'descr', 'dn', 'leafSelector']
    rsAccPortPData = []
    rsAccPortPCols = ['nodeP', 'interfaceProfile', 'dn']

    # Create login session to APIC
    apic = utils.apicSession()

    # Get Fabric Info
    dhcpClientData = get(apic,
                         apic.baseUrl + '/api/node/class/dhcpClient.json')
    dhcpClientList = utils.cleanListDict(dhcpClientData)
    for node in dhcpClientList:
        dhcpData.append(
            (node['podId'], node['fabricId'], node['nodeId'], node['model'],
             node['name'], node['nameAlias'], node['configNodeRole'],
             node['nodeRole'], node['nodeType'], node['ip'],
             node['spineLevel'], node['fwVer'], node['runningVer'],
             node['supported'], node['extPoolId'], node['decomissioned'],
             node['configIssues']))

    # Get the current tenants
    tenantsResp = get(apic, apic.baseUrl + '/api/class/fvTenant.json?' \
                      'rsp-prop-include=config-only')
    '''
	 Loop through tenants and collect additional information. Including:
	 	1) BDs
	 		a) Subnets within BDs
	 	2) EPGs
	 		a) EPG Details
	 		b) Related Domains
	 		c) Related BD
	 		d) Related Consumed Contracts
	 		e) Related Provided Contracts
	'''
    tenantList = utils.cleanListDict(tenantsResp)
    for tenant in tenantList:
        tnData.append(tenant)
        bdResp = get(apic, apic.baseUrl + '/api/class/fvBD.json?' \
                     'query-target-filter=wcard(fvBD.dn,"{}")&rsp-prop-' \
                     'include=config-only'.format(tenant['name']))
        bdList = utils.cleanListDict(bdResp)
        for bd in bdList:
            subnetResp = get(apic,
             apic.baseUrl + '/api/class/fvSubnet.json?query-target-filter=' \
             'wcard(fvSubnet.dn,"tn-{}/BD-{}/")'.format(tenant['name'],
             bd['name']))
            subnetList = utils.cleanListDict(subnetResp)
            if not subnetList:
                bdData.append((tenant['name'], bd['name'], 'Null', 'Null',
                               'Null', 'Null'))
            else:
                for sub in subnetList:
                    bdData.append(
                        (tenant['name'], bd['name'], sub['ip'], sub['scope'],
                         sub['preferred'], sub['virtual']))
        apResp = get(apic, apic.baseUrl + '/api/class/fvAp.json?query-target-filter=wcard('\
               'fvAp.dn,"tn-{}")'.format(tenant['name']))
        if not apResp:
            pass
        else:
            appList = utils.cleanListDict(apResp)
            for app in appList:
                epgResp = get(apic,
                               apic.baseUrl + '/api/class/fvAEPg.json?' \
                               'query-target-filter=wcard(fvAEPg.dn, '  \
                               '"tn-{}/ap-{}/")&rsp-prop-include=config-only'.format(
                                tenant['name'],
                                   app['name']))
                if not epgResp:
                    pass
                else:
                    epgList = utils.cleanListDict(epgResp)
                    for epg in epgList:
                        rsBd = get(apic,
                               apic.baseUrl + '/api/node/mo/uni/tn-{}/ap-{}/epg-{}.json?' \
                                   'query-target=children&target-subtree-class=fvRsBd'.format(
                                       tenant['name'],
                                       app['name'],
                                       epg['name']))
                        if not rsBd:
                            epgData.append(
                                (tenant['name'], app['name'], epg['name'],
                                 epg['nameAlias'], '', '', epg['prefGrMemb']))
                        else:
                            rsBd = utils.cleanListDict(rsBd)
                            epgData.append(
                                (tenant['name'], app['name'], epg['name'],
                                 epg['nameAlias'], rsBd[0]['tnFvBDName'],
                                 rsBd[0]['dn'].split('tn-')[1].split('/')[0],
                                 epg['prefGrMemb']))
    '''
	get access policy information
	
	   1) To include
	       -Switch Profiles
		   -Interface Profiles
		   -AAEPs
		   -Interface Policy Groups
		   -Interface Policies
	
	'''

    nodePResp = get(apic, apic.baseUrl + '/api/mo/uni/infra.json?query-target=children&target-subtree-class=infraNodeP&' \
                      'rsp-prop-include=config-only')
    nodePList = utils.cleanListDict(nodePResp)
    for nodeP in nodePList:
        infraLeafSResp = get(apic, apic.baseUrl + '/api/mo/' + nodeP['dn'] + '.json?query-target=children&target-subtree' \
                            '-class=infraLeafS')
        if infraLeafSResp:
            for infraLeafS in infraLeafSResp:
                nodeBlkResp = get(apic, apic.baseUrl + '/api/mo/uni/infra.json?query-target=children&target-subtree-class=' \
                                  'infraNodeBlk&query-target-filter=wcard(infraNodeBlk.dn,"{}")'.format(
                                  infraLeafS['infraLeafS']['attributes']['dn']))
                nodeBlkList = utils.cleanListDict(nodeBlkResp)
                for nodeBlk in nodeBlkList:
                    logging.info('%s %s', nodeBlk['from_'], nodeBlk['to_'])
                nodeTup = (nodeP['name'], nodeP['descr'], nodeP['dn'],
                           infraLeafS['infraLeafS']['attributes']['name'])
                nodePData.append(nodeTup)
        elif not infraLeafSResp:
            nodeTup = (nodeP['name'], nodeP['descr'], nodeP['dn'], '')
            nodePData.append(nodeTup)
        rsAccPortPResp = get(apic, apic.baseUrl + '/api/mo/' + nodeP['dn'] + '.json?query-target=children&target-subtree' \
                        '-class=infraRsAccPortP')
        if rsAccPortPResp:
            rsAccPortPList = utils.cleanListDict(rsAccPortPResp)
            for rsAccPortP in rsAccPortPList:
                rsAccPortPTup = (nodeP['name'],
                                 rsAccPortP['tDn'].split('accportprof-')[1],
                                 rsAccPortP['dn'])
                rsAccPortPData.append(rsAccPortPTup)
        elif not rsAccPortPResp:
            pass

    writer = utils.writer(wb)
    try:
        faults(apic, writer=writer)
        utils.dictDumpTwo(writer, dhcpData, dhcpCols, 'dhcpClient')
        utils.dictDumpTwo(writer, nodePData, nodePCols, 'nodeProfiles')
        utils.dictDumpTwo(writer, rsAccPortPData, rsAccPortPCols,
                          'interfaceProfiles')
        utils.dictDumpTwo(writer, tnData, tnCols, 'fvTenant')
        utils.dictDumpTwo(writer, bdData, bdCols, 'fvBD')
        utils.dictDumpTwo(writer, epgData, epgCols, 'fvAEPg')
        apic.session.close()
    except AssertionError:
        raise
    except Exception as ex:
        utils.exceptTempl(ex)