Example #1
def constraint_test():
    """ Verify constraint checking methods."""
    import itertools

    show_analysis = False
    # Generated via grammar
    gr = grammar.Grammar('grammars/test_constraints.bnf')
    inputs = ([1 for _ in range(100)], [i % 3 for i in range(100)])
    for _input in inputs: 
        output = gr.generate(_input)
        azr = analyser.Analyser('test', output['phenotype'], True)
        try:
            azr.create_graph()
        except ValueError as e:
            print(__name__, "ERROR", _input, e)
            continue
        azr.parse_graph()
        azr.apply_stresses()
        azr.create_slf_file()
        azr.test_slf_file()
        azr.parse_results()
        azr.print_stresses()
        if show_analysis:
            azr.show_analysis()
            
    # Fixed generated graphs
    lengths = (1000, 10000)
    levels = (5, 10)
    for length_idx, level_idx in itertools.permutations([0, 1]):
        try:
            GRAPH = constrained_offset_graph(lengths[length_idx],
                                             levels[level_idx])
        except ValueError as e:
            print(__name__, "ERROR", lengths[length_idx], levels[level_idx], e)
            continue
        GRAPH.save_graph("pylon")
        print("nodes:", GRAPH.number_of_nodes())
        print("edges:", GRAPH.number_of_edges())
        # will it blend?
        azr = analyser.Analyser('test',"moo",True)
        azr.my_graph = GRAPH
        azr.parse_graph()
        azr.apply_stresses()
        azr.create_slf_file()
        azr.test_slf_file()
        azr.parse_results()
        azr.print_stresses()
        if show_analysis:
            azr.show_analysis()
Example #2
def button_handler(self, name):
    """records selected individuals and assigns fitness values"""
    self.buttons[name].focus_force()
    self.last_button = name
    current_indiv = None
    for indiv in self.ge.individuals:
        if indiv.uid == int(name):
            print("found:", name)
            current_indiv = indiv
    if self.buttons[name]['background'] == "green":
        print("unassigning fitness to indiv", name)
        for indiv in self.ge.individuals:
            if indiv.uid == int(self.last_button):
                analyser = AZR.Analyser(indiv.uid, indiv.phenotype, False)
                current_indiv.fitness = analyser.test_mesh()
        print(current_indiv.fitness)
        self.buttons[name]['background'] = self.defCol
        self.buttons[name]['relief'] = "raised"
    else:
        print("assigning good fitness to indiv:", name)
        current_indiv.fitness = [0, 0, 0]
        self.buttons[name]['background'] = "green"
        self.buttons[name]['relief'] = "sunken"
        self.save_indiv("best" + str(self.chosen))
        self.chosen += 1
Example #3
def print_stats(generation, individuals): 
    global TIME
    def ave(values):
        return float(sum(values))/len(values)
    def std(values, ave):
        return math.sqrt(float(sum((value-ave)**2 for value in values))/len(values))

    newTime = time.time()
    genTime = newTime - TIME
    TIME = newTime
    ave_weight = ave([i.weightTotal for i in individuals if i.phenotype is not None])
    std_weight = std([i.weightTotal for i in individuals if i.phenotype is not None], ave_weight)
    ave_fit = ave([i.fitness[0] for i in individuals if i.phenotype is not None])
    std_fit = std([i.fitness[0] for i in individuals if i.phenotype is not None], ave_fit)
    ave_used_codons = ave([i.used_codons for i in individuals
                           if i.phenotype is not None])
    std_used_codons = std([i.used_codons for i in individuals
                           if i.phenotype is not None], ave_used_codons)
    print("Gen:%d best:%s weight:%d avg fit:%.1f+-%.1f Used:%.1f+-%.1f "
          "tt:%.2f avg weight:%.1f+-%.1f" %
          (generation, individuals[0].fitness, individuals[0].weightTotal,
           ave_fit, std_fit, ave_used_codons, std_used_codons, genTime,
           ave_weight, std_weight))

    if SAVE_BEST:
        print "saving best individual"
        bestMesh = AZR.Analyser(individuals[0].phenotype)
        filename = 'xxx.'+str(generation)
        bestMesh.create_mesh(filename)
Example #4
def print_stats(generation, individuals):
    def ave(values):
        return float(sum(values)) / len(values)

    def std(values, ave):
        return math.sqrt(
            float(sum((value - ave)**2 for value in values)) / len(values))

    ave_fitness_b = ave(
        [i.fitness_b for i in individuals if i.phenotype is not None])
    std_fitness_b = std(
        [i.fitness_b for i in individuals if i.phenotype is not None],
        ave_fitness_b)
    ave_fit = ave(
        [i.fitness[1] for i in individuals if i.phenotype is not None])
    std_fit = std(
        [i.fitness[1] for i in individuals if i.phenotype is not None],
        ave_fit)
    ave_used_codons = ave(
        [i.used_codons for i in individuals if i.phenotype is not None])
    std_used_codons = std(
        [i.used_codons for i in individuals if i.phenotype is not None],
        ave_used_codons)
    print(
        "Gen:%d best compliance, deflection (mm) and weight (kg):%s Avg. deflection (mm):%.1f+-%.1f"
        % (generation, individuals[0].fitness, ave_fit, std_fit))

    if SAVE_BEST:
        print "saving best individual"
        best_mesh = AZR.Analyser(0, individuals[0].phenotype)
        filename = './saved/best.' + str(generation)
        best_mesh.create_mesh(filename)
Example #5
def save_dxf(self):
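    """save the selected individual's bridge as a DXF file"""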
     print "saving individual as DXF:",self.last_button
     for indiv in self.ge.individuals:
         if indiv.uid == int(self.last_button):
             analyser = AZR.Analyser(indiv.uid,indiv.phenotype,False)
             analyser.create_graph()
             analyser.save_dxf(self.ge.generation, name='indiv')
             self.show_msg("saved bridge as DXF "+self.last_button+" in dxf folder")
Example #6
def write_mesh(fronts, name):
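    """write one mesh file per individual across all fronts"""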
    counter = 0
    for front in fronts:
        for indiv in front:
            mesh = AZR.Analyser(indiv.uid, str(indiv.phenotype))
            mesh.create_graph()
            filename = FRONT_FOLDER + "/" + name + "." + str(counter)
            mesh.create_mesh(filename)
            counter += 1
Example #7
def build_individual(filename, genome, grammar):
    """map genotype and save mesh file"""
    ind = Individual(None)
    generated_values = grammar.generate(genome)
    ind.set_values(generated_values)
    analyser = AZR.Analyser(filename, ind.phenotype, False)
    analyser.create_graph()
    analyser.create_mesh(filename)
    return ind
Example #8
def build_individual(filename, genome, grammar):
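    """map genotype and save mesh file"""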
    ind = Individual(None)
    generatedValues = grammar.generate(genome)
    ind.set_values(generatedValues)
    analyser = AZR.Analyser(filename, ind.phenotype, False)
    print("recreating mesh in folder:", filename)
    analyser.create_graph()
    analyser.create_mesh(filename)
    return ind
Example #9
def structural_fitness(graph):
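    """run the structural analysis pipeline on a graph and return its fitness"""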
    azr = analyser.Analyser('test', "moo", True)
    azr.myGraph = graph
    azr.parse_graph(graph)
    azr.apply_stresses()
    azr.create_slf_file()
    azr.test_slf_file()
    azr.parse_results()
    fitness, weight = azr.calculate_fitness()
    return fitness
Example #10
def save_picture(self, name=None):
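    """save the current graph as a numbered mesh file"""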
    save = True
    if save:
        Ganalyser = analyser.Analyser("test", None, False)
        if name is None:
            filename = self.popFolder + "img%03d" % (graph.nodeCounter)
        else:
            filename = name
        Ganalyser.myGraph = self.copy()
        Ganalyser.parse_graph(Ganalyser.myGraph)
        Ganalyser.create_mesh(filename)
        graph.nodeCounter += 1
Example #11
def __init__(self, uid, program):
    self.uid = uid
    self.program = program
    self.analyser = AZR.Analyser(uid, program)
    self.bridge_weight = 0
    self.fixed_list = []
    self.load_nodes = []
    self.nodeselfloads = []
    self.load_elems = []
    self.beams = []
    self.stress_log = []
    self.iterations = 3
Example #12
def mutate_individual(ind, grammar, mutOp):
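    """apply the chosen mutation operator, re-map the genome and rebuild the graph"""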
#    before = ind.derivation_tree.textual_tree_view().splitlines()
    if mutOp == "int":
        ind = int_flip_mutation(ind,False)
    elif mutOp == "nodal":
        ind = nodal_mutation(ind,False)
    elif mutOp == "struct":
        ind = struct_mutation(ind,False)
    generatedValues= grammar.generate(ind.genome)
    ind.set_values(generatedValues)
#    after = ind.derivation_tree.textual_tree_view().splitlines()
#    d = difflib.Differ()
#    differences  = list(d.compare(before,after))
#    for line in differences:
#        if line.startswith('-')or line.startswith('+'):
#            print line
    analyser = AZR.Analyser(ind.UID, ind.phenotype, True)
    analyser.create_graph()
    return ind
Example #13
    def create_annotations(self, chapters=0, caching=CachingType.NONE):
        """Analyse the text and create the annotations.

        Using the analyser, generate the annotations for all the chapters.

        Args:
            chapters (int, optional, default=0): How many chapters to analyse.
                                                0 means analyse all.
            caching (:obj:`CachingType`, optional, default=CachingType.NONE):
                What caching to use. Can be CachingType.NONE,
                CachingType.ANNOTATIONS, CachingType.HTML,
                CachingType.HTML_ANNOTATIONS.
        """
        if chapters == 0:
            text = ' '.join(self.chapters)
        else:
            text = ' '.join(self.chapters[:chapters])
        ana = analyser.Analyser(text)
        self.annotations = ana.generate_annotations(caching)
Example #14
def main():
    """ Method for testing pylon creating functions."""
    GRAPH = lambda_graph()
    GRAPH.save_graph("pylon")
    meshName = "pylon.mesh"
    cmd = "./population/linuxShow " + meshName
    process = subprocess.Popen(cmd,
                               shell=True,
                               stdout=subprocess.PIPE,
                               stdin=subprocess.PIPE)
    process.communicate()
    print "nodes:", GRAPH.number_of_nodes()
    print "edges", GRAPH.number_of_edges()

    #will it blend?
    azr = analyser.Analyser('test', "moo", True)
    azr.my_graph = GRAPH
    azr.assign_load_case()
    azr.parse_graph()
    azr.apply_stresses()
    azr.create_slf_file()
    azr.test_slf_file()
    azr.parse_results()
    azr.show_analysis()
Example #15
# -*- coding: utf-8 -*-
"""
Created on Fri Jul  7 00:32:12 2017

@author: Appu B
"""
import twitteragent
import keys
from tweetparser import Parser
import analyser

agent = twitteragent.Agent(keys.ckey, keys.csecret)
agent.set_keys()
tweets = agent.search("linux", 300)
processed_tweets = Parser(tweets).parse()
ar = analyser.Analyser(processed_tweets)
reach = ar.calc_reach()
x = str(max((processed_tweets['created_at'])))
y = str(min((processed_tweets['created_at'])))
print("Aanalyzed tweets from %s to %s :" % (y, x))
print("The number of accounts reached :", reach)
print("***************************************")
populartweets = ar.popular_tweets()
print("The most popular tweets")
print(populartweets)
mostRT = ar.most_RT()
print("most rt")
print(mostRT)
mostFV = ar.most_favorited()
print("most favorited")
print(mostFV)
Example #16
def __call__(self, unique_id, program):
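    """mesh the program and return its three fitness values"""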
    analyser = AZR.Analyser(unique_id, program)
    fitness_a, fitness_b, fitness_c = analyser.test_mesh()
    return fitness_a, fitness_b, fitness_c
Example #17
    new_datasets = analyser.AnalyserUtils.filter_coords(
        data, True, True, False)
    for entry in new_datasets:
        if entry['title_short'] in datasets:
            print("Dataset already in Plotlist")
            continue
        lst_plots.append(entry['title_short'])
        datasets[entry['title_short']] = entry

    return datasets


plot_types = ['Hexbin', 'Histogram - Blur', 'Histogram - Clear']
plot_type = 'Histogram - Clear'

replay_analyser = analyser.Analyser(replay)
if 'Wasteland' in replay_analyser.replay.header['MapName']:
    arena = plotter.WASTELAND
    overlays = [plotter.OUTLINE, plotter.FIELDLINE]
elif 'labs_utopia_p' in replay_analyser.replay.header['MapName']:
    arena = plotter.UTOPIA_RETRO
    overlays = [plotter.OUTLINE]
else:
    arena = plotter.STANDARD
    overlays = [plotter.OUTLINE, plotter.FIELDLINE, plotter.BOOST]

hexbin = False
interpolate = False

# scale range: 0.1 - 5
scale = 4.5
Example #18
    print("Sending API call - please wait approx 20 seconds")

    scan_results = scanner.run_pairs_scan()

    pp.pprint(scan_results)

elif user_choice == 3:

    print("Please enter the first ticker e.g LTC-PERP")

    ticker_1 = str(input())

    print("Please enter the second ticker e.g DOGE-PERP")

    ticker_2 = str(input())

    analyser = analyser.Analyser(ticker_1, ticker_2)
    results = analyser.show_scores()

    print(" ")
    print("Trace Statistics")
    print(f"VS: {results[0]}")
    print(f"Crit-90%: {results[1]}")
    print(f"Crit-95%: {results[2]}")
    print(f"Crit-99%: {results[3]}")
    print(f"EV: {results[4]}")
    print(f"Crit-90%: {results[5]}")
    print(f"Crit-95%: {results[6]}")
    print(f"Crit-99%: {results[7]}")
Example #19
def __call__(self, UID, program):
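    """mesh the program and return its stress-based fitness and weight"""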
    analyser = AZR.Analyser(UID, program)
    analyser.test_mesh()
    fitness = self.calculate_stress(analyser)
    weight = self.calculate_weight(analyser)
    return fitness, weight
Example #20
    pylon_graph.connect_nodes(all_brace_ids)
    pylon_graph.node[all_brace_ids[-1]]['label'] = 'ground'
    rotated_graph = pylon_graph.copy_and_rotate_around_xy_plane(
        pylon_graph, 180)
    mirror_graph = pylon_graph.copy_and_offset_with_mirror(
        rotated_graph, [0, 0, 0], True)
    full_graph = pylon_graph.copy_and_rotate_around_xy_plane(mirror_graph, 90)
    final_graph = pylon_graph.sanitise_pylon(full_graph, width[1])
    pylon_graph.replace_graph(final_graph)
    return pylon_graph


testGraph = mutant()
testGraph.save_graph("test")

analyser = analyser.Analyser('test', "moo", True)
analyser.my_graph = testGraph
analyser.parse_graph()

# using medit to show the graph
meshName = "test.mesh"
cmd = "./population/linuxShow " + meshName
process = subprocess.Popen(cmd,
                           shell=True,
                           stdout=subprocess.PIPE,
                           stdin=subprocess.PIPE)
process.communicate()

# using slffea to show the mesh
# analyser.apply_stresses()
# analyser.create_slf_file()
Example #21
def run_analysis(indiv):
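    """create mesh and show it using bmpost"""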
    analyser = AZR.Analyser(indiv.UID, indiv.phenotype)
    analyser.show_mesh()
Example #22
    def apply_annotations(self, text):
        """Apply the annotations on the words.

        Args:
            text (str): The text to annotate.

        Returns:
            The annotated text.
        """
        # split text into individual words
        words = text.split(' ')
        # Get just the annotation words
        words_to_annotate = [ann.word for ann in self.annotations]
        ana = analyser.Analyser(None)
        # Deal with multi-word proper nouns
        proposed_ann_word = words[0]
        in_word = False
        number_of_words = 1
        for index, current_word in enumerate(words):
            # Normalise the word the same way it was when searching for annotations
            current_word = ana.preprocess_input(current_word)
            # Test next word
            next_word = "~!~"
            if index + 1 < len(words):
                next_word = ana.preprocess_input(words[index + 1])

            if not in_word:
                if (current_word + " " + next_word) in words_to_annotate:
                    proposed_ann_word = current_word + " " + next_word
                    number_of_words = 2
                    in_word = True
                    continue
                else:
                    proposed_ann_word = current_word
                    number_of_words = 1
            else:
                if (proposed_ann_word + " " + next_word) in words_to_annotate:
                    proposed_ann_word += " " + next_word
                    number_of_words += 1
                    continue
                else:
                    in_word = False

            # Check if the word or its lower case version is to be annotated
            if proposed_ann_word in words_to_annotate:
                # Get the annotation tag
                ann = self.annotations[words_to_annotate.index(
                    proposed_ann_word)]
                # We didn't find the meaning
                if ann.data is None:
                    continue
                tag = enclose_in_html_tag(
                    'a', str(proposed_ann_word), {
                        'class':
                        'annotation',
                        'data-content':
                        '' + cgi.escape(ann.data, True),
                        'title':
                        "<a target='_blank' " + "href='" + ann.url +
                        "'>More</a>"
                    })
                # If we have image
                if ann.image_url is not None and ann.image_url != '':
                    img_tag = enclose_in_html_tag('img', '', {
                        'class': 'ann-img',
                        'src': ann.image_url
                    }, False)
                    img_tag += enclose_in_html_tag('figcaption',
                                                   str(proposed_ann_word))
                    img_tag = enclose_in_html_tag('figure', img_tag,
                                                  {'class': 'ann-figure'})
                    tag += img_tag
                # Replace the processed word found with a tag with the
                # annotation
                if number_of_words == 1:
                    words[index] = re.sub(proposed_ann_word, tag, current_word)
                else:
                    # Delete words
                    words[index - number_of_words + 1:index + 1] = []
                    # Replace with tag
                    words.insert(index - number_of_words + 1, tag)
                # Remove annotation from list
                if ann in self.annotations:
                    self.annotations.remove(ann)
                    words_to_annotate.remove(proposed_ann_word)
        # Rebuild the original text
        text = ' '.join(words)
        return text
Example #23
def main():
    global loader
    STX = 2

    # Load the banned words from the Excel file
    loader = dataloader.DataLoader()
    loader.load_data('/Users/andrew/Downloads/pWords.xlsx')

    # Load the morphological analyser
    tokenizer = analyser.Analyser()

    # Load the TCP/IP configuration
    global tcp_config
    global conn
    global interval

    try:
        with open('tcp_config.json', encoding='utf-8') as json_file:
            tcp_config = json.load(json_file)
    except FileNotFoundError:
        print("No File exists...")
        exit('socket configuration exception')

    host = tcp_config['hostname']
    port = tcp_config['port']
    interval = tcp_config['interval']

    error_cnt = 0
    while True:
        conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            conn.connect((host, port))
        except ConnectionRefusedError:
            conn.close()
            time.sleep(10)
            continue

        while True:
            try:
                content = conn.recv(200)
            except socket.error as e:
                print("error while receiving :: " + str(e), e.errno)
                if e.errno == errno.EPIPE:
                    conn.close()
                    break
                else:
                    raise
                    # exit("terminating")
            except:
                print("error 2 while receiving :: ")
                print(errno)
                break

            line_time = timeutil.TimeElapsed()

            if len(content) == 0:
                error_cnt += 1
                if error_cnt > 3:
                    error_cnt = 0
                    break

            if content[0] != STX:
                print('wrong data from the server..')
                continue

            actual_data = content[2:]
            print('[' + actual_data.decode(encoding='cp949') + ']')

            # Search the received sentence word by word for banned words
            pword_list = check_pword(actual_data.decode(encoding='cp949'))
            if len(pword_list) > 1:
                print(pword_list, len(pword_list))

            # For compound-noun handling, run morphological analysis on the transcript and keep only the nouns
            nouns_list = tokenizer.get_noun_tokens(
                actual_data.decode(encoding='cp949'))
            for noun in nouns_list:
                print('Noun', noun)

            print("Time spent to analyse line: ", line_time.getelapsed())
            print('------------------------------------')
Example #24
def create_meshes(population):
    """assign uid and creating meshes"""
    for idx, indiv in enumerate(population):
        indiv.uid = idx
        analyser = AZR.Analyser(indiv.uid, indiv.phenotype, True)
        analyser.create_graph()
Example #25
import json, analyser, geoLocation, requests, time
from scapy.layers.dns import DNS

from scapy.layers.inet import TCP, IP

from scapy.all import *

counter = 0

# rdpcap comes from scapy and loads in our pcap file
packets = rdpcap(
    '/home/mrodger4/workspaces/CS491/network-edge-packet-inspector/server/src/example/pcap1.pcap'
)
pcapHelp = analyser.Analyser()
# create a for loop for all packets
for pkt in packets:
    counter += 1
    data = pcapHelp.analyse(pkt)
    json.loads(data)
    print(data)
    break
# total = len(packets)
# current = 0
# start_time = time.time()
# for pkt in packets:
#     current += 1
#     data = analyser.analyse(pkt)
#     temp = json.loads(data)
#     requests.post('http://localhost:8090/api/pcap/save', json=temp)
#     print "Uploading: %d/%d" % (current,total)
#
Example #26
def run_analysis(indiv):
    """create mesh and show it using bmpost"""
    analyser = AZR.Analyser(indiv.uid, indiv.phenotype)
    analyser.show_mesh()
Example #27
for line in loadFile:
    if line.startswith("phenotype:"):
        # strip the "phenotype:" prefix
        line = line[len("phenotype:"):]
        phenotype = line
print("writing program to test.py")

footer = open("./footer.txt", 'r')
saveFile = open("./test.py", 'w')
imports = "import analyser, subprocess, graph\nfrom geometry import *\n"
saveFile.write(imports)
saveFile.write(analyser.python_filter(line) + "\n")
for line in footer:
    saveFile.write(line)
saveFile.close()

analyser = analyser.Analyser('test', phenotype, True)
analyser.create_graph()
# analyser.parse_graph(analyser.myGraph)

# using medit to show the graph
meshName = "indiv.test.mesh"
cmd = "ffmedit " + meshName
process = subprocess.Popen(cmd,
                           shell=True,
                           stdout=subprocess.PIPE,
                           stdin=subprocess.PIPE)
process.communicate()

# using slffea to show the mesh
analyser.apply_stresses()
analyser.create_slf_file()
Example #28
def create_meshes(population):
    # assign UID and create meshes
    for idx, indiv in enumerate(population):
        indiv.UID = idx
        analyser = AZR.Analyser(indiv.UID, indiv.phenotype, True)
        analyser.create_graph()
Example #29
def __init__(self, db):
    self.analyser = analyser.Analyser(db)
    self.db = db