Example #1
 def test_grakn_session_invalid_keyspace(self):
     inst = grakn.Grakn('localhost:48555')
     with self.assertRaises(TypeError):
         a_session = inst.session(123)
         tx = a_session.transaction(grakn.TxType.READ) # won't fail until opening a transaction
     inst2 = grakn.Grakn('localhost:48555')
     with self.assertRaises(GraknError):
         a_session = inst2.session('')
         tx = a_session.transaction(grakn.TxType.READ) # won't fail until opening a transaction
Example #2
 def __enter__(self, **kwargs):
     if self.credentials:
         self.client = grakn.Grakn(uri=self.uri,
                                   credentials=self.credentials)
     else:
         self.client = grakn.Grakn(uri=self.uri)
     self.session = self.client.session(keyspace=self.keyspace, **kwargs)
     self.tx = self.session.transaction(grakn.TxType.BATCH)
     return self
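
The snippet above only shows `__enter__`; a minimal companion `__exit__` sketch is given below so the context-manager round trip is visible. The commit-on-success / close-on-error policy is an assumption, not part of the original code.

 def __exit__(self, exc_type, exc_value, traceback):
     # Hypothetical companion to the __enter__ above (not from the original snippet):
     # commit the batch transaction on clean exit, otherwise discard it, then
     # always release the session.
     try:
         if exc_type is None:
             self.tx.commit()       # persist everything written in the batch transaction
         else:
             self.tx.close()        # discard the open transaction on error
     finally:
         self.session.close()       # always release the session
     return False                   # do not suppress exceptions raised inside the block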
Example #3
    def test_grakn_init_invalid_uri(self):
        """ Test invalid URI """
        with self.assertRaises(GraknError):
            a_inst = grakn.Grakn('localhost:1000')
            a_session = a_inst.session('testkeyspace')
            a_session.transaction(grakn.TxType.READ)

        with self.assertRaises(GraknError):
            a_inst = grakn.Grakn('localhost:1000')
            with a_inst.session("test") as s:
                with s.transaction(grakn.TxType.READ) as tx:
                    pass
Example #4
def main(keyspace):
    client = grakn.Grakn(uri='localhost:48555')
    with client.session(keyspace=keyspace).transaction(
            grakn.TxType.READ) as graph:
        # Check for gets() function
        # Get address of function to use for next query
        func_names = ['gets', 'cgc_gets']
        func_addrs = []
        for function_name in func_names:
            query1 = 'match $func isa function, has func-name "{}", has asm-address $a; get $a;'.format(
                function_name)
            func_addrs += [
                int(result.value(), 16)
                for result in graph.query(query1).collect_concepts()
            ]

        # If the function is found continue query
        for func_addr in func_addrs:
            # Get all instructions that have function name
            query2 = 'match $x has operation-type "MLIL_CALL_SSA", has asm-address $a; $y isa "MLIL_CONST_PTR"; ($x,$y); $z isa constant, has constant-value {}; ($y,$z); get $a;'.format(
                func_addr)
            result2 = graph.query(query2).collect_concepts()

            # If there are instructions that use the function check the instructions
            for instr in result2:
                ins_addr = instr.value()
                print(
                    "CWE-120: Buffer Copy Without Checking Size of Input at {}"
                    .format(ins_addr))
Example #5
def main(keyspace):
    client = grakn.Grakn(uri='localhost:48555')
    with client.session(keyspace=keyspace).transaction(
            grakn.TxType.READ) as graph:

        # Get address of printf to use for next query
        query1 = 'match $func isa function, has func-name contains "printf", has asm-address $a; offset 0; limit 100; get $a;'
        result1 = [result.map() for result in graph.query(query1)]
        if len(result1) > 0:
            print("Found potential calls at the following addresses:")
            for addr in result1:
                print(addr['a'].value())

        # If printf is found continue query
        for printf_func in result1:
            # Pull any instructions that use printf and don't use a modifier (have var type and not data type)
            func_addr = int(printf_func['a'].value(), 16)
            print("Scanning address {}".format(hex(func_addr)))
            query2 = 'match $x isa instruction, has operation-type "MLIL_CALL_SSA", has asm-address $a; $y isa "MLIL_CONST_PTR"; ($x,$y); $z isa constant, has constant-value {}; ($y,$z); $l isa list, has list-size 1; ($x,$l); $s isa "MLIL_VAR_SSA"; ($l,$s); offset 0; limit 500; get $x, $a;'.format(
                func_addr)
            result2 = [result.map() for result in graph.query(query2)]

            # If there is an instruction that uses printf without modifier, output instruction
            if result2:
                for instr in result2:
                    asm_addr = instr['a'].value()
                    print(
                        "CWE-134: Uncontrolled Format String possible at {} ".
                        format(asm_addr))
Example #6
def make_queries(timetables_dir_path,
                 keyspace,
                 uri=settings.uri,
                 log_file=settings.migration_logs_path +
                 "graql_output_{}.txt".format(dt.datetime.now())):

    pathlib.Path(settings.migration_logs_path).mkdir(exist_ok=True)

    client = grakn.Grakn(uri=uri)

    start_time = dt.datetime.now()
    with client.session(keyspace=keyspace) as session:
        # with session.transaction(grakn.TxType.WRITE) as transaction:
        with open(log_file, "w") as graql_output:

            def query_function(graql_string):
                print(graql_string)
                print("---")
                graql_output.write(graql_string)
                # Send the graql query to the server
                transaction = session.transaction(grakn.TxType.WRITE)
                response = list(transaction.query(graql_string))
                transaction.commit()
                graql_output.write("\n--response:\n" + str(response))
                graql_output.write("\n{} insertions made \n ----- \n".format(
                    len(response)))
                return response

            import_query_generator(query_function, timetables_dir_path)

            end_time = dt.datetime.now()
            time_taken = end_time - start_time
            time_taken_string = "----------\nTime taken: {}".format(time_taken)
            graql_output.write(time_taken_string)
            print(time_taken_string)
Example #7
    def test_end_to_end(self):
        # Unzip the Grakn distribution containing our data
        sub.run(['unzip', 'external/animaltrade_dist/file/downloaded', '-d',
                          'external/animaltrade_dist/file/downloaded-unzipped'])

        # Start Grakn
        sub.run(['external/animaltrade_dist/file/downloaded-unzipped/grakn-animaltrade/grakn', 'server', 'start'])

        modes = (TRAIN, EVAL)

        client = grakn.Grakn(uri=URI)
        sessions = server_mgmt.get_sessions(client, KEYSPACES)
        transactions = server_mgmt.get_transactions(sessions)

        batch_size = NUM_PER_CLASS * FLAGS.num_classes
        kgcn = model.KGCN(NEIGHBOUR_SAMPLE_SIZES,
                          FLAGS.features_size,
                          FLAGS.starting_concepts_features_size,
                          FLAGS.aggregated_size,
                          FLAGS.embedding_size,
                          transactions[TRAIN],
                          batch_size,
                          neighbour_sampling_method=random_sampling.random_sample,
                          neighbour_sampling_limit_factor=4)

        optimizer = tf.train.GradientDescentOptimizer(learning_rate=FLAGS.learning_rate)
        classifier = classify.SupervisedKGCNClassifier(kgcn, optimizer, FLAGS.num_classes, None,
                                                       max_training_steps=FLAGS.max_training_steps)

        feed_dicts = {}

        sampling_params = {
            TRAIN: {'sample_size': NUM_PER_CLASS, 'population_size': POPULATION_SIZE_PER_CLASS},
            EVAL: {'sample_size': NUM_PER_CLASS, 'population_size': POPULATION_SIZE_PER_CLASS},
            PREDICT: {'sample_size': NUM_PER_CLASS, 'population_size': POPULATION_SIZE_PER_CLASS},
        }
        concepts, labels = thing_mgmt.compile_labelled_concepts(EXAMPLES_QUERY, EXAMPLE_CONCEPT_TYPE,
                                                                LABEL_ATTRIBUTE_TYPE, ATTRIBUTE_VALUES,
                                                                transactions[TRAIN], transactions[PREDICT],
                                                                sampling_params)

        for mode in modes:
            feed_dicts[mode] = classifier.get_feed_dict(sessions[mode], concepts[mode], labels=labels[mode])

        # Note: The ground-truth attribute labels haven't been removed from Grakn, so the results found here are
        # invalid, and used as an end-to-end test only

        # Train
        if TRAIN in modes:
            print("\n\n********** TRAIN Keyspace **********")
            classifier.train(feed_dicts[TRAIN])

        # Eval
        if EVAL in modes:
            print("\n\n********** EVAL Keyspace **********")
            # Presently, eval keyspace is the same as the TRAIN keyspace
            classifier.eval(feed_dicts[EVAL])

        server_mgmt.close(sessions)
        server_mgmt.close(transactions)
Example #8
    def setUp(self):
        entity_query = "match $x isa company, has name 'Google'; get;"
        uri = "localhost:48555"
        keyspace = "test_schema"
        client = grakn.Grakn(uri=uri)
        session = client.session(keyspace=keyspace)
        self._tx = session.transaction(grakn.TxType.WRITE)

        neighbour_sample_sizes = (4, 3)

        sampling_method = ordered.ordered_sample

        samplers = []
        for sample_size in neighbour_sample_sizes:
            samplers.append(samp.Sampler(sample_size, sampling_method, limit=sample_size * 2))

        grakn_things = [answermap.get('x') for answermap in list(self._tx.query(entity_query))]

        things = [neighbour.build_thing(grakn_thing) for grakn_thing in grakn_things]

        context_builder = builder.ContextBuilder(samplers)

        self._neighbourhood_depths = [context_builder.build(self._tx, thing) for thing in things]

        self._neighbour_roles = builder.convert_thing_contexts_to_neighbours(self._neighbourhood_depths)

        self._flattened = flatten_tree(self._neighbour_roles)
Example #9
    def test_grakn_session_valid_keyspace(self):
        """ Test OK uri and keyspace """
        a_inst = grakn.Grakn('localhost:48555')
        a_session = a_inst.session('test')
        self.assertIsInstance(a_session, grakn.Session)

        # test the `with` statement
        with a_inst.session('test') as session:
            self.assertIsInstance(session, grakn.Session)
Example #10
def run():
    grakn_client = grakn.Grakn(uri=URI)

    entities = get_entities("person", grakn_client)
    people = list(map(lambda x: x["first-name"] + " " + x["last-name"], entities))
    people = people + list(map(lambda x: x["first-name"], entities))
    write_to_file("lookup_person.txt", set(people))

    entities = get_entities("bank", grakn_client)
    bank = list(map(lambda x: x["name"], entities))
    write_to_file("lookup_bank.txt", set(bank))
Example #11
def main(keyspace):
    client = grakn.Grakn(uri='localhost:48555')
    global graph
    with client.session(keyspace=keyspace).transaction(
            grakn.TxType.READ) as graph:

        #Find a variable being compared
        query1 = 'match {$comp isa MLIL_CMP_SGE;} or {$comp isa MLIL_CMP_SLE;} or {$comp isa MLIL_CMP_SLT;} or {$comp isa MLIL_CMP_SGT;};$node isa MLIL_VAR_SSA;$cons isa MLIL_CONST;($comp, $node);($comp, $cons);$varssa isa variable-ssa has var $var;($node, $varssa);get $comp, $var;'
        result1 = [result.map() for result in graph.query(query1)]

        # Parse the output of result1 into the compare statements and variable names
        comp, var = [], []
        if result1:
            for entry in result1:
                comp.append(entry['comp'].id)
                var.append(entry['var'].value())
        else:
            fail()
        for entry in comp:
            #Do upper bound check
            if 'SGE' in entry or 'SGT' in entry:
                lower = lowerCheck()
                if lower:
                    for item in lower:
                        if item['var'].value() not in var:
                            #failed to find upper bound check
                            addr = get_addr(entry)
                            print('CWE-129: Missing upper bound check at ' +
                                  str(addr[0]['addr'].value()))
                        else:
                            addr = get_addr(entry)
                else:
                    addr = get_addr(entry)
                    print('CWE-129: Missing upper bound check at ' +
                          str(addr[0]['addr'].value()))
            #Do lower bound check
            else:
                upper = upperCheck()
                if upper:
                    for item in upper:
                        if item['var'].value() not in var:
                            #failed to find lower bound check
                            addr = get_addr(entry)
                            print('CWE-129: Missing lower bound check at ' +
                                  str(addr[0]['addr'].value()))
                        else:
                            addr = get_addr(entry)
                else:
                    addr = get_addr(entry)
                    print('CWE-129: Missing lower bound check at ' +
                          str(addr[0]['addr'].value()))
Example #12
def build_phone_call_graph(inputs):
    '''
    gets the job done:
    1. creates a Grakn instance
    2. creates a session to the targeted keyspace
    3. loads the xml data to Grakn for each file
    4. closes the session
    :param inputs: list of dictionaries, each containing the details required to parse the data
    '''
    client = grakn.Grakn(uri="localhost:48555")  # 1
    with client.session(keyspace="phone_calls") as session:  # 2 and 4
        for input in inputs:
            print("Loading from [" + input["data_path"] + "] into Grakn ...")
            load_data_into_grakn(input, session)  # 3
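
Step 3 of the docstring delegates to load_data_into_grakn, which is not reproduced in this example. A minimal sketch is given below, assuming each input dictionary also carries a "template" function that turns a parsed record into a Graql insert query, and that a parse_data_to_dictionaries helper reads the file at "data_path"; both of these names are assumptions.

def load_data_into_grakn(input, session):
    # Hypothetical sketch: the "template" key and the parse_data_to_dictionaries
    # helper are assumptions; only "data_path" appears in the original snippet.
    items = parse_data_to_dictionaries(input)            # parse the file at input["data_path"]
    for item in items:
        with session.transaction(grakn.TxType.WRITE) as tx:
            graql_insert_query = input["template"](item)  # build one insert query per record
            tx.query(graql_insert_query)
            tx.commit()
    print("Inserted {} items from [{}] into Grakn.".format(len(items), input["data_path"]))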
Example #13
    def test_integration(self):
        client = grakn.Grakn(uri="localhost:48555")
        session = client.session(keyspace="test_schema")
        tx = session.transaction(grakn.TxType.WRITE)

        print("================= THINGS ======================")
        te = ex.TraversalExecutor(tx)
        schema_concept_types = te.get_schema_concept_types(
            encode.GET_THING_TYPES_QUERY,
            include_implicit=True,
            include_metatypes=False)
        labels = trv.labels_from_types(schema_concept_types)
        print(list(labels))

        schema_concept_types = te.get_schema_concept_types(
            encode.GET_THING_TYPES_QUERY,
            include_implicit=True,
            include_metatypes=False)
        super_types = trv.get_sups_labels_per_type(schema_concept_types,
                                                   include_self=True,
                                                   include_metatypes=False)
        print("==== super types ====")
        [print(type, super_types) for type, super_types in super_types.items()]

        print("================= ROLES ======================")
        schema_concept_types = te.get_schema_concept_types(
            encode.GET_ROLE_TYPES_QUERY,
            include_implicit=True,
            include_metatypes=False)
        labels = trv.labels_from_types(schema_concept_types)
        print(list(labels))

        schema_concept_types = te.get_schema_concept_types(
            encode.GET_ROLE_TYPES_QUERY,
            include_implicit=True,
            include_metatypes=False)
        super_types = trv.get_sups_labels_per_type(schema_concept_types,
                                                   include_self=True,
                                                   include_metatypes=False)
        print("==== super types ====")
        [print(type, super_types) for type, super_types in super_types.items()]
Example #14
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#

import unittest
import grakn
from grakn.exception.ClientError import ClientError

# run-once per testing
inst = grakn.Grakn("localhost:48555")
session = inst.session("testkeyspace")


class test_Base(unittest.TestCase):
    """ Sets up DB for use in tests """
    @classmethod
    def setUpClass(cls):
        """ Make sure we have some sort of schema and data in DB, only done once """
        super(test_Base, cls).setUpClass()

        # temp tx to set up DB, don't save it
        tx = session.transaction(grakn.TxType.WRITE)
        try:
            # define parentship roles to test against
            tx.query(
Example #15
import grakn
import re
from Bio.SeqIO.FastaIO import SimpleFastaParser
from util import insert_if_non_existent, insert_anyway
'''
    1. creates a Grakn session to talk to the 'proteins' keyspace
    2. inserts the database entity named 'UniProt'
    3. for each protein stored in target-protein-sequences.fasta, inserts the:
         - protein entity
         - species entity
         - species <> protein relationship
         - protein <> database relationship
'''
client = grakn.Grakn(uri="localhost:48555")
with client.session(keyspace="proteins") as session:
    # insert the database entity
    q_insert_database = 'insert $db isa database has name "uniprot";'
    db_id = insert_anyway(session, q_insert_database)

    with open("uniprot-asthma-proteins.fasta") as in_handle:
        for first_line, sequence in SimpleFastaParser(in_handle):
            # extract relevant data from the first_line of each fasta (protein)
            protein_details = re.split(',| OS=| OX=',
                                       first_line.replace(' ', ',', 1))
            identifier = protein_details[0].split("|")[1]
            name = protein_details[1]
            species = protein_details[2]

            # insert the protein entity
            q_insert_protein = ("insert $pr isa protein " +
                                'has identifier "' + identifier + '" ' +
Example #16
 def test_grakn_tx_invalid_enum(self):
     inst = grakn.Grakn('localhost:48555')
     a_session = inst.session('test')
     with self.assertRaises(Exception):
         a_session.transaction('foo')
Example #17
 def test_grakn_tx_valid_enum(self):
     inst = grakn.Grakn('localhost:48555')
     a_session = inst.session('test')
     tx = a_session.transaction(grakn.TxType.READ)
     self.assertIsInstance(tx, grakn.Transaction)
Example #18
 def test_grakn_session_close(self):
     inst = grakn.Grakn('localhost:48555')
     a_session = inst.session('test')
     a_session.close()
     with self.assertRaises(GraknError):
         a_session.transaction(grakn.TxType.READ)
Example #19
def main(modes=(TRAIN, EVAL, PREDICT)):

    client = grakn.Grakn(uri=URI)
    sessions = grakn_mgmt.get_sessions(client, KEYSPACES)
    transactions = grakn_mgmt.get_transactions(sessions)

    batch_size = NUM_PER_CLASS * FLAGS.num_classes
    kgcn = model.KGCN(NEIGHBOUR_SAMPLE_SIZES,
                      FLAGS.features_size,
                      FLAGS.starting_concepts_features_size,
                      FLAGS.aggregated_size,
                      FLAGS.embedding_size,
                      transactions[TRAIN],
                      batch_size,
                      neighbour_sampling_method=random_sampling.random_sample,
                      neighbour_sampling_limit_factor=4)

    optimizer = tf.train.GradientDescentOptimizer(
        learning_rate=FLAGS.learning_rate)
    classifier = classify.SupervisedKGCNClassifier(
        kgcn,
        optimizer,
        FLAGS.num_classes,
        FLAGS.log_dir,
        max_training_steps=FLAGS.max_training_steps)

    feed_dicts = {}
    feed_dict_storer = persistence.FeedDictStorer(BASE_PATH + 'input/')

    # Overwrites any saved data
    try:
        for mode in modes:
            feed_dicts[mode] = feed_dict_storer.retrieve_feed_dict(mode)

    except FileNotFoundError:

        # Check if saved concepts and labels exist, and act accordingly
        # if check_for_saved_labelled_concepts(SAVED_LABELS_PATH, modes):
        try:
            concepts, labels = prs.load_saved_labelled_concepts(
                KEYSPACES, transactions, SAVED_LABELS_PATH)
        except FileNotFoundError:
            sampling_params = {
                TRAIN: {
                    'sample_size': NUM_PER_CLASS,
                    'population_size': POPULATION_SIZE_PER_CLASS
                },
                EVAL: {
                    'sample_size': NUM_PER_CLASS,
                    'population_size': POPULATION_SIZE_PER_CLASS
                },
                PREDICT: {
                    'sample_size': NUM_PER_CLASS,
                    'population_size': POPULATION_SIZE_PER_CLASS
                },
            }
            concepts, labels = thing_mgmt.compile_labelled_concepts(
                EXAMPLES_QUERY, EXAMPLE_CONCEPT_TYPE, LABEL_ATTRIBUTE_TYPE,
                ATTRIBUTE_VALUES, transactions[TRAIN], transactions[PREDICT],
                sampling_params)
            prs.save_labelled_concepts(KEYSPACES, concepts, labels,
                                       SAVED_LABELS_PATH)

            thing_mgmt.delete_all_labels_from_keyspaces(
                transactions, LABEL_ATTRIBUTE_TYPE)

            # Get new transactions since deleting labels requires committing and therefore closes transactions
            transactions = grakn_mgmt.get_transactions(sessions)
            # We need to re-fetch the sample concepts, since we need live transactions where the labels are removed
            concepts, labels = prs.load_saved_labelled_concepts(
                KEYSPACES, transactions, SAVED_LABELS_PATH)

        for mode in modes:
            feed_dicts[mode] = classifier.get_feed_dict(sessions[mode],
                                                        concepts[mode],
                                                        labels=labels[mode])
            feed_dict_storer.store_feed_dict(mode, feed_dicts[mode])

    # Train
    if TRAIN in modes:
        print("\n\n********** TRAIN Keyspace **********")
        classifier.train(feed_dicts[TRAIN])

    # Eval
    if EVAL in modes:
        print("\n\n********** EVAL Keyspace **********")
        # Presently, eval keyspace is the same as the TRAIN keyspace
        classifier.eval(feed_dicts[EVAL])

    # Predict
    if PREDICT in modes:
        print("\n\n********** PREDICT Keyspace **********")
        # We're using unseen data, but since we have labels we can use classifier.eval rather than classifier.predict
        classifier.eval(feed_dicts[PREDICT])

    grakn_mgmt.close(sessions)
    grakn_mgmt.close(transactions)
Example #20
 def test_grakn_init_valid(self):
     """ Test valid URI """
     a_inst = grakn.Grakn('localhost:48555')
     self.assertIsInstance(a_inst, grakn.Grakn)
Example #21
def build_phone_call_graph(inputs):
    client = grakn.Grakn(uri='localhost:48555')
    with client.session(keyspace='phone_calls') as session:
        for input in inputs:
            load_data_into_graph(input, session)
port = "5559"
synthetic_instrument = "KR_EURUSD"

# Socket to talk to server
context = zmq.Context()
socket = context.socket(zmq.SUB)
topicfilter = "kr_eurusd_tick"
socket.setsockopt_string(zmq.SUBSCRIBE, topicfilter)
socket.setsockopt_string(zmq.SUBSCRIBE, "1")
print("Collecting KR_EURUSD updates into grakn server...")
socket.connect("tcp://192.168.0.13:%s" % port)

# Grakn session start

client = grakn.Grakn(uri="192.168.0.154:48555")
session = client.session(keyspace="mykeyspace")
tx = session.transaction(grakn.TxType.WRITE)


def grakn_recorder(query):
    session = client.session(keyspace="mykeyspace")
    tx = session.transaction(grakn.TxType.WRITE)

    # Perform insert query that returns an iterator of ConceptMap of inserted concepts
    insert_iterator = tx.query(query)
    concepts = insert_iterator.collect_concepts()
    # print("Inserted a person with ID: {0}".format(concepts[0].id))
    # Don't forget to commit() to persist changes
    tx.commit()
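
grakn_recorder is defined above, but this fragment stops before the subscriber loop that would call it. A hedged sketch of how the ZeroMQ messages might be consumed follows; the "topic price timestamp" message layout and the attribute names in the insert template are assumptions.

# Hypothetical subscriber loop (not part of the original fragment); the message
# layout and the attribute names below are assumptions.
while True:
    message = socket.recv_string()
    topic, price, timestamp = message.split()
    query = ('insert $t isa tick, has instrument "{}", '
             'has price {}, has tick-time {};').format(synthetic_instrument,
                                                       price, timestamp)
    grakn_recorder(query)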
Example #23
 def __init__(self):
     self.client = grakn.Grakn(uri='localhost:48555')
Example #24
 def setUp(self):
     self._client = grakn.Grakn(uri="localhost:48555")
     self._session = self._client.session(keyspace="genealogy")
Example #25
def main(keyspace):
    client = grakn.Grakn(uri='localhost:48555')
    with client.session(keyspace=keyspace).transaction(
            grakn.TxType.READ) as graph:

        # Functions with indexes for (dest, sizeof(dest)) stored in dict
        functions = {
            "receive_delim": (2, 3),
            "fgets": (0, 1),
            "strncpy": (0, 2),
            "receive_until": (0, 2),
            "memcpy": (0, 2),
            "freaduntil": (1, 2),
            "read": (1, 2)
        }

        # Check for potential vuln in each function
        for function_name in functions:
            # Get address of function to use for next query
            query1 = 'match $func isa function, has func-name contains "{}", has asm-address $a; get $a;'.format(
                function_name)
            result1 = [result.map() for result in graph.query(query1)]

            # If the function is found continue query
            if result1:
                # Get all instructions that have function name
                func_addr = int(result1[0]['a'].value(), 16)
                query2 = 'match $x has operation-type "MLIL_CALL_SSA"; $y isa "MLIL_CONST_PTR"; ($x,$y); $z isa constant, has constant-value {}; ($y,$z); get $x;'.format(
                    func_addr)
                result2 = [result.map() for result in graph.query(query2)]

                # If there are instructions that use the function check the instructions
                if result2:

                    buff_index = functions[function_name][0]
                    size_index = functions[function_name][1]
                    for instr in result2:
                        Id = instr['x'].id
                        query3 = 'match $x id "' + Id + '"; $l isa list; ($x,$l); (from-node: $l, $q); $q has edge-label $e; (from-node: $q, $v); {$v has var $s;} or {$v has constant-value $s;}; get $e, $s;'
                        result3 = [
                            result.map() for result in graph.query(query3)
                        ]

                        # This section grabs instruction params and inserts them into an array
                        param_array = [0, 0, 0, 0, 0, 0, 0, 0]

                        for ele in result3:
                            index = int(ele['e'].value())
                            val = ele['s'].value()
                            param_array[index] = val
                        # Get var name - This is done to determine how many bytes the variable is
                        var_name = param_array[buff_index]
                        var_name = var_name.split('#', 1)[0].lstrip()

                        # NOTE Enhancement: make finding buff_size work the same way as string_size
                        # This assumes that buffer_size is a number; it breaks when it's a var or register
                        # Get buffer size
                        try:
                            buff_size = int(param_array[size_index])
                        except ValueError as err:
                            continue
                        # Get size of the string by finding its initialization, e.g. var_88 = &var_58
                        # Find where the string is initialized
                        query4 = 'match $x id "{}"; $y isa basic-block; ($x,$y); $z isa instruction, has operation-type "MLIL_SET_VAR_SSA"; ($y,$z); {{$v1 isa variable, has var "{}";}} or {{$v1 isa variable-ssa, has var "{}";}}; ($z, $v1); $w isa MLIL_ADDRESS_OF; ($w, $z); $v isa variable, has var-size $s; ($w, $v); get $s, $x;'.format(
                            Id, var_name, var_name)
                        result4 = [
                            result.map() for result in graph.query(query4)
                        ]

                        if (result4):
                            string_size = result4[0]['s'].value()
                            # Finally Determine if buffer size == sizeof(str)
                            if string_size != buff_size:
                                instruction_ID = result4[0]['x'].id
                                query5 = 'match $i id {}, has asm-address $a; get $a;'.format(
                                    instruction_ID)
                                result5 = [
                                    result.map()
                                    for result in graph.query(query5)
                                ]
                                instr_addr = result5[0]['a'].value()

                                print(
                                    "CWE-121: Stack-based Overflow possible at {}"
                                    .format(instr_addr))
Example #26
                                upper_radius)

                            centrality_element_id = self._canvas.create_circle(
                                lon, lat, radius, fill=colour, outline="")

                            self._station_centrality_points[
                                concept_id] = centrality_element_id

                            # Send the drawn elements to behind the station point
                            self._canvas.tag_lower(centrality_element_id,
                                                   station_element_id)
                    self._displaying_centrality = True

    def undisplay_centrality(self):
        if self._displaying_centrality:
            for concept_id, point_id in self._station_centrality_points.items(
            ):
                self._canvas.delete(point_id)
            self._displaying_centrality = False


if __name__ == "__main__":
    # Set up a connection to Grakn. Grakn needs to be running first, with settings found in the settings file of this
    # project
    client = grakn.Grakn(uri=settings.uri)

    # Build the Tkinter application
    root = tk.Tk()
    tube_gui = TubeGui(root, client)
    root.mainloop()
Example #27
 def setUpClass(cls):
     client = grakn.Grakn(uri="localhost:48555")
     cls.session = client.session(keyspace=cls.keyspace)
Example #28
    def test_encode(self):
        keyspace = "test_schema"
        uri = "localhost:48555"
        client = grakn.Grakn(uri=uri)
        session = client.session(keyspace=keyspace)
        tx = session.transaction(grakn.TxType.WRITE)
        encoder = encode.Encoder(tx)

        placeholders = [{
            'role_type':
            tf.placeholder(dtype=tf.string, shape=(None, 1)),
            'role_direction':
            tf.placeholder(dtype=tf.int64, shape=(None, 1)),
            'neighbour_type':
            tf.placeholder(dtype=tf.string, shape=(None, 1)),
            'neighbour_data_type':
            tf.placeholder(dtype=tf.string, shape=(None, 1)),
            'neighbour_value_long':
            tf.placeholder(dtype=tf.int64, shape=(None, 1)),
            'neighbour_value_double':
            tf.placeholder(dtype=tf.float32, shape=(None, 1)),
            'neighbour_value_boolean':
            tf.placeholder(dtype=tf.int64, shape=(None, 1)),
            'neighbour_value_date':
            tf.placeholder(dtype=tf.int64, shape=(None, 1)),
            'neighbour_value_string':
            tf.placeholder(dtype=tf.string, shape=(None, 1))
        }]

        encoded_output = encoder(placeholders)

        example_arrays = {
            'role_type':
            np.full((4, 1), fill_value='employee', dtype=np.dtype('U50')),
            'role_direction':
            np.full((4, 1), fill_value=0, dtype=np.int),
            'neighbour_type':
            np.full((4, 1), fill_value='person', dtype=np.dtype('U50')),
            'neighbour_data_type':
            np.full((4, 1), fill_value='', dtype=np.dtype('U10')),
            'neighbour_value_long':
            np.full((4, 1), fill_value=0, dtype=np.int),
            'neighbour_value_double':
            np.full((4, 1), fill_value=0.0, dtype=np.float),
            'neighbour_value_boolean':
            np.full((4, 1), fill_value=0, dtype=np.int),
            'neighbour_value_date':
            np.full((4, 1), fill_value=0, dtype=np.int),
            'neighbour_value_string':
            np.full((4, 1), fill_value='', dtype=np.dtype('U50'))
        }

        feed_dict = {
            placeholder: example_arrays[placeholder_name]
            for placeholder_name, placeholder in placeholders[0].items()
        }

        init_global = tf.global_variables_initializer()
        init_tables = tf.tables_initializer()

        tf_session = tf.Session()
        tf_session.run(init_global)
        tf_session.run(init_tables)

        tf_session.run(encoded_output, feed_dict=feed_dict)
Example #29
    # --- Test grakn session transactions that are pre-DB setup ---
    def test_grakn_tx_valid_enum(self):
        inst = grakn.Grakn('localhost:48555')
        a_session = inst.session('test')
        tx = a_session.transaction(grakn.TxType.READ)
        self.assertIsInstance(tx, grakn.Transaction)

    def test_grakn_tx_invalid_enum(self):
        inst = grakn.Grakn('localhost:48555')
        a_session = inst.session('test')
        with self.assertRaises(Exception):
            a_session.transaction('foo')



inst = grakn.Grakn('localhost:48555')
session = inst.session('testkeyspace')

class test_grakn_Base(test_Base):
    """ Sets up DB for use in tests """

    @classmethod
    def setUpClass(cls):
        """ Make sure we have some sort of schema and data in DB, only done once """
        super(test_grakn_Base, cls).setUpClass()
        # shared grakn instances and session for API testing 

        # temp tx to set up DB, don't save it
        with session.transaction(grakn.TxType.WRITE) as tx:
            try:
Example #30
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#

import unittest
import grakn

client = grakn.Grakn("localhost:48555")


class test_Keyspace(unittest.TestCase):
    def test_retrieve_delete(self):
        """ Test retrieving and deleting a specific keyspace """

        session = client.session(keyspace="keyspacetest")
        tx = session.transaction(grakn.TxType.WRITE)
        tx.close()

        keyspaces = client.keyspaces().retrieve()
        self.assertGreater(len(keyspaces), 0)
        self.assertTrue('keyspacetest' in keyspaces)

        client.keyspaces().delete('keyspacetest')