Example #1
def get_rank_kcr(dimacs_, solset_, jsonfile_):
    # read tree structure from file
    _treefile = os.path.dirname(dimacs_) + '/smarch/tree.json'
    # node = AnyNode(count=-1, cube=[])

    if os.path.exists(jsonfile_):
        with open(jsonfile_, 'r') as file:
            data = file.read()
            importer = JsonImporter()
            _root = importer.import_(data)
            total = _root.count
    else:
        print("ERROR: tree file not found!")
        return

    for sol in solset_:
        # traverse tree based on solution
        _node = _root
        _precision = 0
        _number = 0
        while _precision == 0:
            _node, _precision, _number = traverse_cube(_node, sol, _number)

        if _precision > 0:
            print(str(_number / total) + "," + str(_precision / total))
        else:
            print("ERROR: tree traverse failure")
Example #2
def get_rank(dimacs_, dir_, jsonfile_):
    # read dimacs file for feature list
    _features, _clauses, _vars = read_dimacs(dimacs_)

    if os.path.exists(jsonfile_):
        with open(jsonfile_, 'r') as file:
            data = file.read()
            importer = JsonImporter()
            _root = importer.import_(data)
            total = _root.count
    else:
        print("ERROR: tree file not found!")
        return

    _cdir = dir_
    for file in os.listdir(_cdir):
        if file.endswith('.config'):
            # convert config file into variable list
            sol = read_config_kmax(_features, _cdir + "/" + file)

            # traverse tree based on solution
            _node = _root
            _precision = 0
            _number = 0
            while _precision == 0:
                _node, _precision, _number = traverse_cube(_node, sol, _number)

            if _precision > 0:
                print(str(_number / total) + "," + str(_precision / total))
            else:
                print("ERROR: tree traverse failure")
Example #3
    def get_tree(self, file_name):
        ''' This function is used to import a JSON file and return an anytree Node '''
        importer = JsonImporter()
        tree_file = open(file_name, "r")
        data = tree_file.read()
        tree_file.close()
        return importer.import_(data)
Example #4
def ast_json_to_anytree(json_data):
    """
    Deserializes an anytree in json format and returns its root node.
    """
    from anytree.importer import JsonImporter
    importer = JsonImporter()
    return importer.import_(json_data)
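For context, import_ here is the inverse of anytree's JsonExporter. A minimal round-trip sketch (the node attributes are illustrative, not taken from the function above):

from anytree import AnyNode
from anytree.exporter import JsonExporter
from anytree.importer import JsonImporter

# build a small tree, export it to a JSON string, then import it back
root = AnyNode(id="root")
AnyNode(id="child", parent=root)
json_text = JsonExporter(indent=2).export(root)

restored = JsonImporter().import_(json_text)
print(restored.children[0].id)  # prints: child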
Example #5
    def _restore_json(self, string):
        '''restore the tree from json'''
        imp = JsonImporter()
        root = imp.import_(string)
        if self.verbose:
            Logger.info('Catalog imported from json "{}"'.format(self.path))
        return root
Example #6
    def __init__(self, vectorifier=None):

        dict_importer = DictImporter(nodecls=Node)
        importer = JsonImporter(dictimporter=dict_importer)
        self.root = importer.import_(symptom_json)

        if vectorifier is not None:
            # normal mode, if it is None is eval mode
            self.vectorifier = vectorifier
            self.vector_dimension = vectorifier.d
            """
      if concept name vectors file not exists, create it
      """
            if vectorifier.internal_representation == "glove":
                if not os.path.isfile(COM.CSV_ST_CONCEPT_NAMES_GLOVE_PATH + \
                                      str(self.vector_dimension) + "d.csv"):
                    print(
                        "Concept names glove vectors file not found. \nComputing file..."
                    )
                    self.__save_csv_concept_name_glove_vectors()

                self.concept_name_vectors_df = pd.read_csv(COM.CSV_ST_CONCEPT_NAMES_GLOVE_PATH + \
                                                           str(self.vector_dimension) + "d.csv",
                                                           header=None)
            elif vectorifier.internal_representation == "bert":
                if not os.path.isfile(COM.CSV_ST_CONCEPT_NAMES_BERT_PATH + \
                                      COM.FILENAME_CSV_ST_CONCEPT_NAMES_BERT):
                    print(
                        "Concept names bert vectors file not found. \nComputing file..."
                    )
                    self.__save_csv_concept_name_bert_vectors()

                self.concept_name_vectors_df = pd.read_csv(COM.CSV_ST_CONCEPT_NAMES_BERT_PATH + \
                                                           COM.FILENAME_CSV_ST_CONCEPT_NAMES_BERT,
                                                           header=None)
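As in the constructor above, passing a DictImporter with a custom nodecls makes JsonImporter instantiate that class instead of the default AnyNode. A minimal sketch of the idea (the JSON string is made up for illustration):

from anytree import Node
from anytree.importer import DictImporter, JsonImporter

# with nodecls=Node, every JSON object must carry a "name" attribute
importer = JsonImporter(dictimporter=DictImporter(nodecls=Node))
root = importer.import_('{"name": "root", "children": [{"name": "leaf"}]}')
print(type(root).__name__, root.children[0].name)  # prints: Node leaf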
Example #7
    def _loadTree(self, filename, erease):
        str_tree = None
        index = 0

        if filename is None or not os.path.exists(filename):
            return None

        if erease:
            with open(filename, 'w') as file:
                file.write("")

        with open(filename, 'r') as file:
            try:
                index = int(file.readline())
            except ValueError:
                return None
            str_tree = file.read()

        importer = JsonImporter()
        try:
            tree = importer.import_(str_tree)
        except json.JSONDecodeError:
            return None

        return index, tree
Example #8
def read_tree(file_path: str):
    """Deserialize a tree from a saved JSON file"""
    importer = JsonImporter()
    with open(file_path, 'r') as j:
        json_tree = j.read().replace("_name", "name")
    root = importer.import_(json_tree)
    root.cost = None
    return root
Example #9
def _deserialize_ontologies(filename):
  """Deserializes an ontology from a JSON file and returns its root."""
  importer = JsonImporter()
  forest = []
  with open(filename, 'r') as f:
    tree_list = json.load(f)
    for tree_text in tree_list:
      forest.append(importer.import_(tree_text))
  return forest
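The loader above assumes the JSON file holds a list of individually exported tree strings. A matching writer might look roughly like the following sketch (the function name, node attributes, and file name are illustrative assumptions):

import json

from anytree import AnyNode
from anytree.exporter import JsonExporter

def _serialize_ontologies(forest, filename):
    """Hypothetical counterpart: writes each tree as its own JSON string inside one JSON list."""
    exporter = JsonExporter()
    with open(filename, 'w') as f:
        json.dump([exporter.export(root) for root in forest], f)

_serialize_ontologies([AnyNode(id='animal'), AnyNode(id='plant')], 'ontologies.json')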
Example #10
def draw_tree(string1, path_des, file):
    from anytree import RenderTree
    importer = JsonImporter()
    root = importer.import_(string1)
    # Render Tree
    print(RenderTree(root, style=ContRoundStyle()))
    # # Render graph tree
    DotExporter(root).to_picture(path_des + file)
    Image(filename=path_des + file)
Example #11
    def read(self) -> Entry:
        content = None
        if os.path.isfile(self._filepath):
            fp = open(Path(self._filepath))
            d_imp = DictImporter(nodecls=Entry)
            importer = JsonImporter(dictimporter=d_imp)
            content = importer.import_(fp.read())
            fp.close()
        return content
Example #12
    def load_tree(self):
        with open('test.json') as json_file:
            data = json.load(json_file)

        print(data)
        print("\n")
        data_json = json.dumps(data)
        importer = JsonImporter()
        d0 = importer.import_(data_json)
        print(RenderTree(d0))
Example #13
    def load(self):
        extension = ".rpt"
        dir_name = "../data/policies/"
        file_name = dir_name + self.agent_name + extension
        importer = JsonImporter()
        data = None
        with open(file_name, "r") as f:
            # import_ expects the raw JSON text, so read the file contents
            data = f.read()
        self.tree = importer.import_(data)
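Note that JsonImporter.import_ expects a JSON string, while JsonImporter.read expects an open file object. A minimal sketch of both entry points (the file name and JSON content are illustrative):

from anytree.importer import JsonImporter

importer = JsonImporter()
json_text = '{"a": 1, "children": [{"a": 2}]}'

# import_ parses a string that is already in memory
root = importer.import_(json_text)

# read parses straight from a file object
with open("policy.json", "w") as f:
    f.write(json_text)
with open("policy.json", "r") as f:
    root = importer.read(f)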
Example #14
    def __init__(self, dir='controllers/'):

        importer = JsonImporter()

        with open(dir + 'data.json', 'r') as f:
            # read() parses the tree directly from the open file object
            self.root = importer.read(f)

        self.current_node = self.root

        DotExporter(self.root).to_picture("tree.png")
Example #15
class EvidenceRpcClient(object):
    def __init__(self, coupled=False):
        self.tree_importer = JsonImporter()
        self.connection = pika.BlockingConnection(
            pika.ConnectionParameters(host='localhost'))
        self.channel = self.connection.channel()
        self.coupled = coupled

        result = self.channel.queue_declare(exclusive=True)
        self.callback_queue = result.method.queue

        self.channel.basic_consume(self.on_response,
                                   no_ack=True,
                                   queue=self.callback_queue)

    def deserialize(self, response):
        res = json.loads(response)
        if self.coupled:
            res['tree'] = self.tree_importer.import_(res['tree'])
        else:
            res[0] = json.loads(res[0])
            res[0]['tree'] = self.tree_importer.import_(res[0]['tree'])

        return res

    def on_response(self, ch, method, props, body):
        if self.corr_id == props.correlation_id:
            self.response = body

    def call(self, request):
        self.response = None
        self.corr_id = str(uuid.uuid4())
        try:
            self.channel.basic_publish(exchange='',
                                       routing_key='rpc_queue',
                                       properties=pika.BasicProperties(
                                           reply_to=self.callback_queue,
                                           correlation_id=self.corr_id,
                                       ),
                                       body=str(request))
        except Exception:
            time.sleep(5)
            self.channel.basic_publish(exchange='',
                                       routing_key='rpc_queue',
                                       properties=pika.BasicProperties(
                                           reply_to=self.callback_queue,
                                           correlation_id=self.corr_id,
                                       ),
                                       body=str(request))

        while self.response is None:
            self.connection.process_data_events()

        return self.deserialize(self.response)
Example #16
def import_conversation_trees_from_db(user_id):
    documents = collection_trees.find()
    trees = []
    for i, tree in enumerate(documents):
        importer = JsonImporter()
        r1 = json_util.dumps(tree)
        root = importer.import_(r1)
        if i % 1000 == 0:
            print("Finding: {}".format(i))
        trees.append(root)
    return trees
Example #17
def load_from_file(path):
    path = osp.expanduser(path)
    if osp.exists(osp.dirname(path)):
        dict_imp = DictImporter(nodecls=DPNode)
        imp = JsonImporter(dictimporter=dict_imp,
                           object_hook=tools.object_hook)
        with open(path, "r") as f:
            res = imp.read(f)
        return RemoteTree(res)
    else:
        print("Error saving to disk. Dir {} not existing.".format(
            osp.dirname(path)))
Example #18
    def __init__(self, coupled=False):
        self.tree_importer = JsonImporter()
        self.connection = pika.BlockingConnection(
            pika.ConnectionParameters(host='localhost'))
        self.channel = self.connection.channel()
        self.coupled = coupled

        result = self.channel.queue_declare(exclusive=True)
        self.callback_queue = result.method.queue

        self.channel.basic_consume(self.on_response,
                                   no_ack=True,
                                   queue=self.callback_queue)
Example #19
def printTree(tweetsFile):
    propTree = PropTree()  # an instance of a tree
    importer = JsonImporter()
    rootNr = 0
    with open(tweetsFile, 'r') as _file:
        content = _file.read()

    contentSplit = content.split("&")
    contentSplit.pop()
    for data in contentSplit:
        root = importer.import_(data)
        propTree.addRoot(root)
        rootNr += 1
    return propTree
Example #20
def drawtree(string, path_des, path, filename):
    try:
        error_flag = 0
        importer = JsonImporter()
        root = importer.import_(string)
        print(RenderTree(root, style=ContRoundStyle()))
        DotExporter(root).to_picture(path_des + filename)
        Image(filename=path_des + filename)
        return (error_flag)
    except Exception:
        error_flag = 1
        f = open(path + '/H_log.dat', 'a+')
        f.write(filename + '\tInvalid JSON format\n')
        f.close()
        return (error_flag)
Example #21
def test_json_importer():
    """Json Importer."""
    refdata = {
        'id':
        'root',
        'children': [{
            'id': 'sub0',
            'children': [{
                'id': 'sub0B'
            }, {
                'id': 'sub0A'
            }]
        }, {
            'id':
            'sub1',
            'children': [{
                'id': 'sub1A'
            }, {
                'id': 'sub1B'
            }, {
                'id': 'sub1C',
                'children': [{
                    'id': 'sub1Ca'
                }]
            }]
        }]
    }
    lines = [
        '{', '  "children": [', '    {', '      "children": [', '        {',
        '          "id": "sub0B"', '        },', '        {',
        '          "id": "sub0A"', '        }', '      ],',
        '      "id": "sub0"', '    },', '    {', '      "children": [',
        '        {', '          "id": "sub1A"', '        },', '        {',
        '          "id": "sub1B"', '        },', '        {',
        '          "children": [', '            {',
        '              "id": "sub1Ca"', '            }', '          ],',
        '          "id": "sub1C"', '        }', '      ],',
        '      "id": "sub1"', '    }', '  ],', '  "id": "root"', '}'
    ]

    imported = DictExporter().export(JsonImporter().import_("\n".join(lines)))
    eq_(refdata, imported)
    with NamedTemporaryFile(mode="w+") as ref:
        ref.write("\n".join(lines))
        ref.seek(0)
        imported = DictExporter().export(JsonImporter().read(ref))
    eq_(refdata, imported)
Example #22
    def load_fuzzy_tree(n):
        alg_file = os.path.join('Fuzzy', '{}.json'.format(n))

        if os.path.exists(alg_file):
            with open(alg_file, 'r') as f:
                root = JsonImporter().read(f)

                return root
Example #23
    def __init__(self, output_folder, csv_file_path='', use_compression=False, processes_num=1, tokenized=True):
         # CSV to get program data from
        self.csv_file_path = csv_file_path

        # Output folder to save data to
        self.output_folder = output_folder

        self.tokenized = tokenized

        # Create JSON importer
        self.importer = JsonImporter()

        # Boolean indicating whether ASTs from the input folder are stored using compression
        self.use_compression = use_compression

        # Number of parallel processes
        self.processes_num = processes_num
Example #24
def printTree(tweetsFile):
    propTree = PropTree()  # an instance of a tree
    importer = JsonImporter()
    rootNr = 0

    with open('./data/tree/trees/' + tweetsFile + '.txt', 'r') as _file:
        content = _file.read()

    contentSplit = content.split("&")

    for data in contentSplit:
        root = importer.import_(data)
        propTree.addRoot(root)
        rootNr += 1

    propTree.makeSimpleTree()
    return propTree
Example #25
    def load_working_tree(self, root_comp):
        self.create_dirs()
        alg_file = os.path.join(self.base_dir, '{}.json'.format(root_comp))

        if os.path.exists(alg_file):
            with open(alg_file, 'r') as f:
                root = JsonImporter().read(f)

                return root
Example #26
    def ImportAsJson(self, json: str):
        """
        This function converts an AnyNode JSON representation to an AnyNode tree.
            :param json:str: anytree json
        """
        try:
            return JsonImporter().import_(json.replace('#::smt\n', ''))
        except Exception as ex:
            template = "An exception of type {0} occurred in [TParser.ImportAsJson]. Arguments:\n{1!r}"
            message = template.format(type(ex).__name__, ex.args)
            print(message)
Example #27
def drawtree(string, path_des, path, filename, file):
	try:
		error_flag = 0
		importer = JsonImporter()
		root = importer.import_(string)
		file1 = file+'.dot'
		print(RenderTree(root, style=ContRoundStyle()))
		DotExporter(root).to_dotfile(path_des+file1)
		add_edge_labels(path_des, file1)
		check_call(['dot','-Tpng',path_des+file1,'-o',path_des+file+'.png'])
		return(error_flag)
	except:
		error_flag = 1
		f = open(path+'/E_log.dat', 'a+')
		f.write(filename +'\tInvalid Drawtree input format\n')
		f.close()
		f = open(path_des+'/E_log.dat', 'a+')
		f.write('\tInvalid Drawtree input format\n')
		f.close()
		return(error_flag)
Example #28
def get_relevant_features(args, feature_nodes):
    """Identify relevant features and feature pairs"""
    candidate_nodes = [node for node in feature_nodes if node.is_leaf and node.name != constants.BASELINE]
    if not args.analyze_all_pairwise_interactions:
        # Get list of nodes outputted by hierarchical FDR procedure on features
        fdr_tree_filename = "%s/%s/%s.json" % (args.output_dir, constants.HIERARCHICAL_FDR_DIR,
                                               constants.HIERARCHICAL_FDR_OUTPUTS)
        with open(fdr_tree_filename, "r") as fdr_tree_file:
            fdr_tree = JsonImporter().read(fdr_tree_file)
            fdr_tree_node_map = {node.name: node for node in anytree.PreOrderIter(fdr_tree)}
            candidate_nodes = [node for node in candidate_nodes if fdr_tree_node_map[node.name].rejected]
    return candidate_nodes
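JsonImporter combines naturally with anytree's traversal helpers such as PreOrderIter, as used above. A minimal sketch of building a name-to-node map from an imported tree (the JSON string and the rejected attribute are made up for illustration):

from anytree import PreOrderIter
from anytree.importer import JsonImporter

root = JsonImporter().import_(
    '{"name": "root", "children": [{"name": "a"}, {"name": "b", "rejected": true}]}'
)
node_map = {node.name: node for node in PreOrderIter(root)}
rejected = [name for name, node in node_map.items() if getattr(node, "rejected", False)]
print(rejected)  # prints: ['b']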
Example #29
def evaluate(args, relevant_feature_map, feature_id_map):
    """
    Evaluate mihifepe results - obtain power/FDR measures for all nodes/outer nodes/base features/interactions
    """

    # pylint: disable = too-many-locals
    def get_relevant_rejected(nodes, outer=False, leaves=False):
        """Get set of relevant and rejected nodes"""
        assert not (outer and leaves)
        if outer:
            nodes = [
                node for node in nodes if node.rejected
                and all([not child.rejected for child in node.children])
            ]
        elif leaves:
            nodes = [node for node in nodes if node.is_leaf]
        relevant = [
            0 if node.description == constants.IRRELEVANT else 1
            for node in nodes
        ]
        rejected = [1 if node.rejected else 0 for node in nodes]
        return relevant, rejected

    tree_filename = "%s/%s/%s.json" % (args.output_dir,
                                       constants.HIERARCHICAL_FDR_DIR,
                                       constants.HIERARCHICAL_FDR_OUTPUTS)
    with open(tree_filename, "r") as tree_file:
        tree = JsonImporter().read(tree_file)
        nodes = list(anytree.PreOrderIter(tree))
        # All nodes FDR/power
        relevant, rejected = get_relevant_rejected(nodes)
        precision, recall, _, _ = precision_recall_fscore_support(
            relevant, rejected, average="binary")
        # Outer nodes FDR/power
        outer_relevant, outer_rejected = get_relevant_rejected(nodes,
                                                               outer=True)
        outer_precision, outer_recall, _, _ = precision_recall_fscore_support(
            outer_relevant, outer_rejected, average="binary")
        # Base features FDR/power
        bf_relevant, bf_rejected = get_relevant_rejected(nodes, leaves=True)
        bf_precision, bf_recall, _, _ = precision_recall_fscore_support(
            bf_relevant, bf_rejected, average="binary")
        # Interactions FDR/power
        interaction_precision, interaction_recall = get_precision_recall_interactions(
            args, relevant_feature_map, feature_id_map)

        return Results(1 - precision, recall, 1 - outer_precision,
                       outer_recall, 1 - bf_precision, bf_recall,
                       1 - interaction_precision, interaction_recall)
Example #30
def get_precision_recall_interactions(args, relevant_feature_map,
                                      feature_id_map):
    """Computes precision (1 - FDR) and recall (power) for detecting interactions"""
    # pylint: disable = invalid-name, too-many-locals
    # The set of all possible interactions might be very big, so don't construct label vector for all
    # possible interactions - compute precision/recall from basics
    # TODO: alter to handle higher-order interactions
    if not args.analyze_interactions:
        return (0.0, 0.0)
    true_interactions = {
        key
        for key in relevant_feature_map.keys() if len(key) > 1
    }
    tree_filename = "%s/%s/%s.json" % (args.output_dir,
                                       constants.INTERACTIONS_FDR_DIR,
                                       constants.HIERARCHICAL_FDR_OUTPUTS)
    tp = 0
    fp = 0
    tn = 0
    fn = 0
    tested = set()
    with open(tree_filename, "r") as tree_file:
        tree = JsonImporter().read(tree_file)
        # Two-level tree with tested interactions on level 2
        for node in tree.children:
            pair = frozenset({int(idx) for idx in node.name.split(" + ")})
            if feature_id_map:
                pair = frozenset(
                    {feature_id_map[visual_id]
                     for visual_id in pair})
            tested.add(pair)
            if node.rejected:
                if relevant_feature_map.get(pair):
                    tp += 1
                else:
                    fp += 1
            else:
                if relevant_feature_map.get(pair):
                    fn += 1
                else:
                    tn += 1
    if not tp > 0:
        return (0.0, 0.0)
    missed = true_interactions.difference(tested)
    fn += len(missed)
    precision = tp / (tp + fp)
    recall = tp / (tp + fn)
    return precision, recall