def __init__(self, pkts_window_size=10, single_csv=True):
        self.pkts_window_size = pkts_window_size
        assert self.pkts_window_size >= 1, "Invalid window size value"
        self.single_csv = single_csv
        assert isinstance(self.single_csv, bool), "Invalid value for the single_csv flag"
        self.featuresCalc = FeaturesCalc(flow_type="malware",
                                         min_window_size=pkts_window_size)
        ip_to_ignore = ["127.0.0.1"]
        self.filter_1 = PacketFilter(ip_whitelist_filter=[],
                                     ip_blacklist_filter=ip_to_ignore,
                                     TCP=True)
        self.filter_2 = PacketFilter(ip_whitelist_filter=[],
                                     ip_blacklist_filter=ip_to_ignore,
                                     UDP=True)
        self.filter_3 = PacketFilter(ip_whitelist_filter=[],
                                     ip_blacklist_filter=ip_to_ignore,
                                     ICMP=True)
        self.filters = [self.filter_1, self.filter_2, self.filter_3]

        if self.single_csv:
            self.csv = CSV(file_name="features")
            self.csv.create_empty_csv()
            self.csv.add_row(self.featuresCalc.get_features_name())
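The class this constructor belongs to is cut off in the snippet. A minimal usage sketch, assuming the enclosing class is named FeaturesExtractor (a hypothetical name) and that FeaturesCalc, PacketFilter, and CSV come from the surrounding project:

# Hypothetical: "FeaturesExtractor" stands in for the unnamed enclosing class.
extractor = FeaturesExtractor(pkts_window_size=10, single_csv=True)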
Example 2
def malware_train(line):
    global malware_train_nb
    if not mta.check_if_link_is_in_downloaded_file(line):
        pcap_file_name = mta.download_pcap([line])
        for pcap in pcap_file_name:
            if pcap is not None:
                if not check_if_already_trained(pcap):
                    attacker = AttackerCalc(pcap=mta.get_folder_name() + "/" +
                                            pcap)
                    ip_to_consider = attacker.compute_attacker()
                    flow_type = "malware"
                    filter_1.set_ip_whitelist_filter(ip_to_consider)
                    filter_2.set_ip_whitelist_filter(ip_to_consider)
                    filter_3.set_ip_whitelist_filter(ip_to_consider)
                    featuresCalc = FeaturesCalc(flow_type=flow_type,
                                                min_window_size=5)
                    csv = CSV(file_name="features_" + flow_type,
                              folder_name="Features")
                    csv.create_empty_csv()
                    csv.add_row(featuresCalc.get_features_name())
                    argument = {
                        "features_calc": featuresCalc,
                        "packets": [],
                        'filter': [filter_1, filter_2, filter_3],
                        'csv_obj': csv
                    }
                    sniffer = Sniffer(iface_sniffer,
                                      callback_prn=callback_sniffer,
                                      callback_prn_kwargs=argument)
                    sniffer.start()
                    while not sniffer.get_started_flag():
                        pass
                    try:
                        sender = Sender(iface_sender,
                                        fast=False,
                                        verbose=False,
                                        time_to_wait=10)
                        sender.send(mta.get_folder_name() + "/" + pcap)
                        sniffer.stop()
                    except Exception as e:
                        print(e)
                    csv.close_csv()
                    env.set_csv(csv.get_folder_name() + "/" +
                                csv.get_current_file_name())
                    agent.train_agent(
                        steps=csv.get_number_of_rows() - 1,
                        log_interval=csv.get_number_of_rows() - 1,
                        verbose=2,
                        nb_max_episode_steps=csv.get_number_of_rows() - 1)
                    malware_train_nb -= 1
                    trained_file.write(pcap + "\n")
                else:
                    print("\nPcap already used before. Skipping.\n")
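Both train helpers spin on sniffer.get_started_flag() in a busy-wait loop until capture begins. If the Sniffer class were extended with a threading.Event, the caller could block instead of spinning; a sketch under that assumption (started_event is hypothetical, not part of the code above):

import threading

# Hypothetical: Sniffer would create this event and call .set() once
# sniffing has actually started.
started_event = threading.Event()

# The caller then blocks without burning CPU, with an optional timeout:
if not started_event.wait(timeout=30):
    raise RuntimeError("sniffer did not start within 30 seconds")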
Example 3
    def _parserCSVFile(self):
        # I get first argument passed in command line, in this case,
        # the PATH of CSV file
        csvFile = File(self._file)

        # If the file does not exist, warn the user and stop the program
        if not csvFile.exists():
            print("File not found: %s" % csvFile._file)
            sys.exit(os.EX_SOFTWARE)

        # Parse the CSV file
        self._log.logger.info('Parser CSV File')
        return CSV(csvFile._file)
Example 4
    def __init__(self, depth, sampleSize, keyword):
        self.depth = depth
        self.contents = []
        self.crawlContents = []
        self.titles = []
        self.linksCrawled = []
        self.datecreated = {}
        self.keyword = keyword
        self.sampleSize = sampleSize
        self.voidedTitles = ["BBC Homepage", "Sign in"]
        self.results = {}

        self.knownlinks = [".mp4"]
        today = str(date.today())
        self.csvFile = CSV(keyword + "," + today + ".csv",
                           ["Word", "Tag", "Weight", "Link", "DateCreated"])
Example 5

    def legitimate_features(self):
        folder_name = "Pcaps_Legitimate"
        flow_type = "legitimate"
        if self.featuresCalc.get_flow_type() != flow_type:
            self.featuresCalc.set_flow_type(flow_type)
        for filter in self.filters:
            filter.set_ip_whitelist_filter([])
        for pcap in glob.glob(folder_name + "/" + "*.pcap"):
            if self.single_csv:
                csv = self.csv
            else:
                pcap_name = pcap.split("/")
                pcap_name = pcap_name[len(pcap_name) - 1].replace(".pcap", "")
                csv = CSV(file_name=pcap_name,
                          folder_name="Legitimate_Features")
                csv.create_empty_csv()
                csv.add_row(self.featuresCalc.get_features_name())
            array_of_pkts = []
            filter_res = []
            print("\nComputing features for " + pcap + "\n")
            try:
                pkts = rdpcap(pcap)
            except Exception as e:
                sys.exit("Failed to read %s: %s" % (pcap, e))

            for pkt in pkts:
                for filter in self.filters:
                    if filter.check_packet_filter(pkt):
                        filter_res.append(True)
                    else:
                        filter_res.append(False)
                if True in filter_res:
                    array_of_pkts.append(pkt)
                if len(array_of_pkts) >= self.featuresCalc.get_min_window_size():
                    features = self.featuresCalc.compute_features(array_of_pkts)
                    csv.add_row(features)
                    array_of_pkts.clear()
                filter_res.clear()
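Both feature loops follow the same pattern: keep packets that pass any filter, and once min_window_size packets have accumulated, compute one feature row and reset the buffer. The same idea as a standalone generator, a sketch only (names are illustrative, not from the project):

def packet_windows(packets, keep, window_size):
    # Yield successive lists of window_size packets satisfying keep().
    buf = []
    for pkt in packets:
        if keep(pkt):
            buf.append(pkt)
        if len(buf) >= window_size:
            yield buf
            buf = []  # reassign rather than clear(), so yielded lists survive

Each yielded window could then go through featuresCalc.compute_features() and csv.add_row().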
Example 6
def legitimate_train(line):
    global legitimate_train_nb
    if not check_if_already_trained(line):
        flow_type = "legitimate"
        filter_1.set_ip_whitelist_filter([])
        filter_2.set_ip_whitelist_filter([])
        filter_3.set_ip_whitelist_filter([])
        featuresCalc = FeaturesCalc(flow_type=flow_type, min_window_size=5)
        csv = CSV(file_name="features_" + flow_type, folder_name="Features")
        csv.create_empty_csv()
        csv.add_row(featuresCalc.get_features_name())
        argument = {
            "features_calc": featuresCalc,
            "packets": [],
            'filter': [filter_1, filter_2, filter_3],
            'csv_obj': csv
        }
        sniffer = Sniffer(iface_sniffer,
                          callback_prn=callback_sniffer,
                          callback_prn_kwargs=argument)
        sniffer.start()
        while not sniffer.get_started_flag():
            pass
        try:
            sender = Sender(iface_sender,
                            fast=False,
                            verbose=False,
                            time_to_wait=10)
            sender.send(lg.get_folder_name() + "/" + line)
            sniffer.stop()
        except Exception as e:
            print(e)
        csv.close_csv()
        env.set_csv(csv.get_folder_name() + "/" + csv.get_current_file_name())
        agent.train_agent(steps=csv.get_number_of_rows() - 1,
                          log_interval=csv.get_number_of_rows() - 1,
                          verbose=2,
                          nb_max_episode_steps=csv.get_number_of_rows() - 1)
        legitimate_train_nb -= 1
        trained_file.write(line + "\n")
    else:
        print("\nPcap gia' utilizzato in passato. Saltato.\n")
Example 7
    def __init__(self, split):
        # self.coco = MSCOCO(split)
        self.map_path = 'map.csv'
        self.csv_path = 'detections.csv'
        self.coco = CSV(self.csv_path, self.map_path)
        self.data_rng   = cfg.data_rng
        self.num_image  = len(self.coco.get_all_img())
        self.categories   = cfg.categories
        self.input_size   = cfg.input_size
        self.output_size  = cfg.output_sizes[0]

        self.border        = cfg.border
        #self.lighting      = cfg.lighting
        self.rand_crop     = cfg.rand_crop
        print(self.rand_crop)
        self.rand_color    = cfg.rand_color
        self.rand_scales   = cfg.rand_scales
        self.gaussian_bump = cfg.gaussian_bump
        self.gaussian_iou  = cfg.gaussian_iou
        self.gaussian_rad  = cfg.gaussian_radius
Example 8

    def malware_features(self):
        folder_name = "Pcaps_Malware"
        flow_type = "malware"
        if self.featuresCalc.get_flow_type() != flow_type:
            self.featuresCalc.set_flow_type(flow_type)
        for pcap in glob.glob(folder_name + "/" + "*.pcap"):
            if self.single_csv:
                csv = self.csv
            else:
                pcap_name = pcap.split("/")
                pcap_name = pcap_name[len(pcap_name) - 1].replace(".pcap", "")
                csv = CSV(file_name=pcap_name,
                          folder_name="Malware_Features")
                csv.create_empty_csv()
                csv.add_row(self.featuresCalc.get_features_name())
            array_of_pkts = []
            print("\nComputing features for " + pcap + "\n")
            attacker = AttackerCalc(pcap=pcap)
            ip_to_consider = attacker.compute_attacker()
            for filter in self.filters:
                filter.set_ip_whitelist_filter(ip_to_consider)
            pkts = rdpcap(pcap)
            filter_res = []
            for pkt in pkts:
                for filter in self.filters:
                    if filter.check_packet_filter(pkt):
                        filter_res.append(True)
                    else:
                        filter_res.append(False)
                if True in filter_res:
                    array_of_pkts.append(pkt)
                if len(array_of_pkts) >= self.featuresCalc.get_min_window_size():
                    features = self.featuresCalc.compute_features(array_of_pkts)
                    csv.add_row(features)
                    array_of_pkts.clear()
                filter_res.clear()
Example 9
def start():

    csv = CSV(args)
    database = Database(csv.get_schema(), csv.get_table_name())
    if not args.file:
        print('Please include a filename using the -f flag')
        return
    try:
        print('Inserting Entries')
        if args.copy:
            database.copy_csv()
        else:
            for entry in csv.get_entries():
                database.insert(entry)
            print('\033[92mDone Inserting\033[0m')

    except Exception as e:
        print(e)
        print('Failed to Insert Data')
    # When Finished, Disconnect
    finally:
        print('done')
        database.close_connection()
Example 10
    def _sbu_report(self):
        """Compute the surface areas and report them to a .csv file."""
        # WARNING - this assumes that SBUs with the same name but in
        # different topologies are the same, and will take the last instance

        met_sbus = {}
        org_sbus = {}
        for sbu in self.sbu_pool:
            if sbu.is_metal:
                met_sbus[sbu.name] = sbu
            else:
                org_sbus[sbu.name] = sbu
        filename = os.path.join(self.options.job_dir,
                                self.options.jobname + ".SBU_report.csv")
        report = CSV(name=filename)
        report.set_headings("sbu_id")
        if self.options.calc_sbu_surface_area:
            report.set_headings("surface_area")
        if self.options.calc_max_sbu_span:
            report.set_headings("sbu_span")
        # metal sbus first.
        for name, sbu in met_sbus.items():
            info("Computing data for %s" % name)
            report.add_data(**{"sbu_id.1": sbu.identifier})
            if self.options.calc_sbu_surface_area:
                report.add_data(**{"surface_area.1": sbu.surface_area})
            if self.options.calc_max_sbu_span:
                report.add_data(**{"sbu_span.1": sbu.max_span})

        # list organic SBUs second.
        for name, sbu in org_sbus.items():
            info("Computing data for %s" % name)
            report.add_data(**{"sbu_id.1": sbu.identifier})
            if self.options.calc_sbu_surface_area:
                report.add_data(**{"surface_area.1": sbu.surface_area})
            if self.options.calc_max_sbu_span:
                report.add_data(**{"sbu_span.1": sbu.max_span})
        report.write()
Example 11
def tokenizing(csv_import_path, csv_export_path):
    csv_obj = CSV(csv_import_path)
    csv_data = csv_obj.get_data()

    sentence_arr = []
    for row in csv_data:
        for cell in row:
            sentence_arr.append(cell)

    tokenizer = MeCabTokenizer(tagger='-Ochasen')
    output_arr = []
    stop_words = ['。', '、', '・']
    for sentence in sentence_arr:
        tokens = tokenizer.parse_to_node(sentence)
        surface = []
        while tokens:
            if tokens.surface and tokens.surface not in stop_words:
                surface.append(tokens.surface)
            tokens = tokens.next
        if len(surface) > 0:
            output_arr.append([sentence, " ".join(surface)])

    csv_obj.export(csv_export_path, output_arr)
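A hypothetical invocation of the helper above (the file paths are placeholders):

# Reads sentences from one CSV, writes sentence/token pairs to another.
tokenizing("reviews.csv", "reviews_tokenized.csv")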
Example 12
    subpath = os.path.join(args_opt.rootDir, 'output', re.sub(r'_epoch_size_[0-9.]*', '', key))
    if os.path.exists(subpath):
        import shutil
        # if it already exists, remove it
        shutil.rmtree(subpath)
    os.makedirs(subpath, exist_ok=True)
    strategy_path = os.path.join(subpath, 'strategy')

    csv_abspath = re.sub(r'_epoch_size_[0-9.]*_device_num_\d', '-log.csv', key)
    csv_path = os.path.join(subpath, csv_abspath)
    with open(strategy_path, 'w') as fp:
        for item in value['strategy']:
            fp.write(item+'\n')
    required = ('step', 'speed', 'throughput', 'loss')
    if all(k in value['performance'] for k in required):
        csvfile = CSV(path=csv_path,
                      columns=['step', 'step_cost_time(ms)', 'samples/second', 'loss'],
                      values=[value['performance'][k] for k in required])
        csvfile.dump()

# print(blocks[1])

Example 13
                running = False
                break


# Main()
if __name__ == '__main__':
    '''
    The main menu gives the user the options to add an entry, look up an
    old entry, or exit the program. Looking up an old entry yields further
    options on how to search for it.
    '''
    # Run set to True, create other class instances
    run = True
    error = Error()
    entry = Entry()
    csv = CSV()
    # Main menu loop; repeats until the user chooses to exit
    while run:
        # Print work log menu with options, check for errors and clear screen
        print("Welcome to the work log! Options are listed below!")
        print(
            "1: Add new entry\n2: Lookup previous entry\n3: Exit the program")
        main_choice = error.error(1, 4)
        clear_screen()
        # Choice is to add Entry, use entry.add() to add
        if main_choice == 1:
            entry.add()
            clear_screen()
        # Choice is to look up Entry
        elif main_choice == 2:
            # Print menu of choices to look up by, check for errors
Example 14
File: main.py Project: MaxAsif/BTP
"""
Created on Mon Feb 10 22:26:14 2020

@author: Asif
"""

from GCM import GCM
from formats import FORMAT
from features import FEATURE
from CSV import CSV
import numpy as np
import os

for filename in os.listdir(os.getcwd()+"\\raw_data"):
    #print(filename)
    csv = CSV(filename)
    csv.generate_csv()

for filename in os.listdir(os.getcwd()+"\\csv"):
    #print(filename)
    frmt = FORMAT(filename)
    frmt.format()

ctr = 0
data = np.array([[]])
for filename in os.listdir(os.getcwd()+"\\graph"):
    ctr = ctr + 1
    print(filename)
    feature = FEATURE(filename)
    dt = feature.generate_feature()
    print(dt.shape)
Example 15
from CSV import CSV

# load data in from csv file (from https://catalog.data.gov/dataset/local-weather-archive)
weather_data = CSV().DictRead("rdu-weather-history.csv",delimiter=";")

# print headers

print(weather_data[0].keys())

# write data to a new file
CSV().DictWrite(weather_data,"weather_data.csv")


# convert the csv file to an excel file
CSV().CsvToXlsx("weather_data.csv")


# test converting the excel file data back to csv
CSV().XlsxToCsv("Workbook.xlsx",sheet="Sheet")
Example 16
s.z,s.zErr_noqso,cModelMag_r,p.cModelMagErr_r,p.cModelMag_u,p.cModelMagErr_u,p.petroRad_r,p.petroRadErr_r,p.modelMag_r,p.modelMag_u, p.petroMag_r ,p.petroMagErr_r,  p.petroMagErr_u, p.petroMag_u, p.petroR90_r
FROM PhotoObj AS p
   JOIN SpecObj AS s ON s.bestobjid = p.objid
WHERE 
   (s.BOSS_TARGET1 & 1) != 0 and s.bossprimary = 1 and ZWARNING_NOQSO = 0 and s.z between 0.002 and 0.5 and s.plateID >= 10324

''' # redownload with new query
from CSV import CSV
from calc_kcor import calc_kcor

from astropy.cosmology import Planck15 as cosmo

import numpy as np
import matplotlib.pyplot as pl

SDSS = CSV('data_all_2')
SDSS_data = SDSS.read_all()
# -1 because of column titles
print(SDSS.row_count_data)
calculated = np.zeros((SDSS.row_count_data, 3))  #add area
print('read data')

for index, row in enumerate(SDSS_data):

    if index % 1000 == 0:
        pass
    z, zErr_noqso, cModelMag_r, cModelMagErr_r, cModelMag_u, cModelMagErr_u, petroRad_r, petroRadErr_r, modelMag_r, modelMag_u, petroMag_r, petroMagErr_r, petroMagErr_u, petroMag_u, Pr90 = row

    dist = cosmo.luminosity_distance(z).value  # in Mpc

    k_corr = calc_kcor('r', z, 'u - r', (petroMag_u - petroMag_r))
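The snippet ends after the luminosity distance and the k-correction are computed. With those two quantities, the conventional next step is an absolute magnitude via the distance modulus, M = m - 5*log10(d_Mpc * 1e5) - K. A sketch of that step, as an assumption about where the loop is headed rather than code from the original:

# Assumed continuation: absolute r-band magnitude from the distance
# modulus (dist is in Mpc, hence the 1e5 factor to reach 10 pc).
M_r = petroMag_r - 5 * np.log10(dist * 1e5) - k_corr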
Example 17
}

# Grabbing most used Pokemon online ranked 1 - 25 according to Pikalytics
most_popular = format_data(soup.find(id='min_list'), num_spaces=2)
for i in range(int(len(most_popular)/2)):
    temp_list = [most_popular[i][0], most_popular[i + 1][0]]
    most_popular[i] = temp_list
    del most_popular[i + 1]

# Path needs to be specified by the user
path = ''

# For each Pokemon in the top 25, creates a CSV file for common moves, teammates, items, abilities, and ev spreads
# Checks to make sure the directories exist, and create new ones if they don't
for i in range(len(most_popular)):
    directory = path + most_popular[i][0].title()
    for key, value in id_dict.items():
        URL_pikalytics = "https://pikalytics.com/pokedex/ss/" + most_popular[i][0].lower()
        pikalytics_page = requests.get(URL_pikalytics)
        soup = BeautifulSoup(pikalytics_page.text, 'html.parser')
        data = format_data(soup.find(id=key), value[0], value[1], value[2], value[3])
        filepath = directory + '/' + most_popular[i][0] + '_' + value[4][0] + '.csv'
        file = CSV(data, filepath)
        if not os.path.exists(directory):
            os.makedirs(directory)
        if not os.path.exists(filepath):
            file.create_file(value[4][1:])
        file.csv_write()

Example 18
    def _build_structures(self):
        """Pass the sbu combinations to a MOF building algorithm."""
        run = Generate(self.options, self.sbu_pool)
        # generate the combinations of SBUs to build
        if self.options.sbu_combinations:
            combinations = run.combinations_from_options()
        else:
            # remove SBUs if not listed in options.organic_sbus or options.metal_sbus
            combinations = run.generate_sbu_combinations()
        csvinfo = CSV(name='%s_info' % (self.options.jobname))
        csvinfo.set_headings('topology', 'sbus', 'edge_count', 'time',
                             'space_group', 'net_charge')
        csvinfo.set_headings('edge_length_err', 'edge_length_std',
                             'edge_angle_err', 'edge_angle_std')
        self.options.csv = csvinfo
        # generate the MOFs.
        if self.options.count_edges_along_lattice_dirs:
            lattfile = open("edge_counts.csv", "w")
            lattfile.writelines("topology,Na,Nb,Nc\n")
        inittime = time()
        for combo in combinations:
            node_degree = [i.degree for i in set(combo)]
            node_lin = [i.linear for i in set(combo)]
            degree = sorted(
                [j for i, j in zip(node_lin, node_degree) if not i])
            # find degrees of the sbus in the combo
            if not self._topologies:
                warning("No topologies found! Exiting.")
                Terminate()
            debug("Trying " + self.combo_str(combo))
            for top, graph in self._topologies.items():
                if self.options.use_builds:
                    try:
                        build = self._stored_builds[top]
                    except KeyError:
                        build = Build(self.options)
                        build.net = (top, graph,
                                     self._topologies.voltages[top])
                else:
                    build = Build(self.options)
                    build.net = (top, graph, self._topologies.voltages[top])
                build.sbus = list(set(combo))
                #build.get_automorphisms()
                if self.options.count_edges_along_lattice_dirs:
                    info(
                        "Computing Edge lengths along each lattice direction for %s"
                        % (top))
                    n = Net(graph)
                    n.voltage = self._topologies.voltages[top]
                    n.simple_cycle_basis()
                    n.get_lattice_basis()
                    n.get_cocycle_basis()
                    edge_str = n.print_edge_count()
                    lattfile.writelines("%s,%s\n" % (top, edge_str))
                elif self.options.show_barycentric_net_only:
                    info("Preparing barycentric embedding of %s" % (top))
                    #print("CHECK", top, build.net.graph.number_of_selfloops())
                    self._check_barycentric_embedding(
                        graph, self._topologies.voltages[top])
                else:
                    if build.check_net:
                        # check node incidence
                        if build.met_met_bonds and run.linear_sbus_exist and not run.linear_in_combo(
                                combo):
                            # add linear organics
                            debug(
                                "Metal-type nodes attached to metal-type nodes. "
                                +
                                "Attempting to insert 2-c organic SBUs between these nodes."
                            )
                            for comb in run.yield_linear_org_sbu(combo):
                                if self.options.use_builds:
                                    try:
                                        build = self._stored_builds[top]
                                    except KeyError:
                                        build = Build(self.options)
                                else:
                                    build = Build(self.options)
                                    build.sbus = list(set(comb))
                                    build.net = (
                                        top, graph,
                                        self._topologies.voltages[top])
                                self.embed_sbu_combo(top, comb, build)
                        elif build.met_met_bonds and run.linear_in_combo(
                                combo):
                            self.embed_sbu_combo(top, combo, build)

                        elif build.met_met_bonds and not run.linear_sbus_exist:
                            debug(
                                "Metal-type nodes are attached to metal-type nodes. "
                                +
                                "No linear SBUs exist in database, so the structure "
                                + "will have metal - metal SBUs joined")
                            self.embed_sbu_combo(top, combo, build)
                        elif not build.met_met_bonds:
                            self.embed_sbu_combo(top, combo, build)

                    else:
                        debug("Net %s does not support the same" % (top) +
                              " connectivity offered by the SBUs")

        if self.options.count_edges_along_lattice_dirs:
            lattfile.close()
        finaltime = time() - inittime
        info("Topcryst completed after %f seconds" % finaltime)
        if self.options.get_run_info:
            info("Writing run information to %s" % self.options.csv.filename)
            self.options.csv.write()
        if self.options.store_net and self._stored_nets:
            info("Writing all nets to nets_%s.pkl" % self.options.jobname)
            f = open("nets_%s.pkl" % self.options.jobname, 'wb')
            p = pickle.dump(self._stored_nets, f)
            f.close()
        Terminate()
Example 19
    def _build_structures_from_top(self):
        if not self._topologies:
            warning("No topologies found!")
            Terminate()

        csvinfo = CSV(name='%s_info' % (self.options.jobname))
        csvinfo.set_headings('topology', 'sbus', 'edge_count', 'time',
                             'space_group')
        csvinfo.set_headings('edge_length_err', 'edge_length_std',
                             'edge_angle_err', 'edge_angle_std')
        self.options.csv = csvinfo
        run = Generate(self.options, self.sbu_pool)
        inittime = time()
        if self.options.count_edges_along_lattice_dirs:
            lattfile = open("edge_counts.csv", "w")
            lattfile.writelines("topology,Na,Nb,Nc\n")
        for top, graph in self._topologies.items():
            if self.options.count_edges_along_lattice_dirs:
                info(
                    "Computing Edge lengths along each lattice direction for %s"
                    % (top))
                n = Net(graph)
                n.voltage = self._topologies.voltages[top]
                n.simple_cycle_basis()
                n.get_lattice_basis()
                n.get_cocycle_basis()
                edge_str = n.print_edge_count()
                lattfile.writelines("%s,%s" % (top, edge_str))
            elif self.options.show_barycentric_net_only:
                info("Preparing barycentric embedding of %s" % (top))
                self._check_barycentric_embedding(
                    graph, self._topologies.voltages[top])
            else:

                build = Build(self.options)
                build.net = (top, graph, self._topologies.voltages[top])
                if self.options.sbu_combinations:
                    combinations = run.combinations_from_options()
                else:
                    combinations = run.generate_sbu_combinations(
                        incidence=build.net_degrees())

                combinations = list(combinations)
                if not combinations:
                    debug("Net %s does not support the same" % (top) +
                          " connectivity offered by the SBUs")
                for combo in combinations:
                    build.sbus = list(set(combo))
                    # check node incidence
                    if build.met_met_bonds and run.linear_sbus_exist:
                        # add linear organics
                        debug(
                            "Metal-type nodes attached to metal-type nodes. " +
                            "Attempting to insert 2-c organic SBUs between these nodes."
                        )
                        for comb in run.yield_linear_org_sbu(combo):
                            build.sbus = list(set(comb))
                            self.embed_sbu_combo(top, comb, build)
                    elif build.met_met_bonds and not run.linear_sbus_exist:
                        debug(
                            "Metal-type nodes are attached to metal-type nodes. "
                            +
                            "No linear SBUs exist in database, so the structure "
                            + "will have metal - metal SBUs joined")
                        self.embed_sbu_combo(top, combo, build)
                    else:
                        self.embed_sbu_combo(top, combo, build)

        if self.options.count_edges_along_lattice_dirs:
            lattfile.close()
        finaltime = time() - inittime
        info("Topcryst completed after %f seconds" % finaltime)
        Terminate()
Example 20
    def __init__(self):
        self.my_csv = CSV()
        self.error = Error()
Example 21
    def __init__(self):

        self.csv_operator = CSV()