def handle(self, *args, **options):
    """Build a Neo4j graph of name and verse nodes and relate them.

    Loads names and verses via the project's collate_* helpers, writes them
    into legacy Neo4j indexes ('names', 'verses') in batches, then creates a
    REFERENCES_NAME relationship from each verse node to every name node
    whose lowercased name appears in that verse.

    Side effects: network writes to the Neo4j server at
    http://localhost:7474/db/data/ and progress output on self.stdout.
    Any failure is reported via traceback and swallowed (best-effort command).
    """
    try:
        verses_by_unique = collate_verses_by_unique()
        names_by_unique = collate_names_by_unique()
        verse_uniques_by_name_unique = collate_verse_uniques_by_name_unique(
            names_by_unique, verses_by_unique)

        graph_db = GraphDatabaseService("http://localhost:7474/db/data/")

        # NOTE(review): this message is misleading while clear() stays
        # disabled below — the old graph is NOT actually cleared.
        self.stdout.write("Clearing the old graph...")
        #graph_db.clear()

        self.stdout.write("Writing name nodes...")
        names = graph_db.get_or_create_index(Node, 'names')
        print("Created index ", names)

        # Write name nodes in batches of 500, collecting the created nodes
        # so they can be related to verses afterwards.
        batch = WriteBatch(graph_db)
        name_nodes = list()
        for count, name in enumerate(names_by_unique.values()):
            abstract = Node.abstract(**name.to_dict())
            batch.create_in_index_or_fail(Node, names, 'by_unique',
                                          name.unique, abstract)
            # BUGFIX: was `count % 500 is 0` — identity comparison on ints
            # is an implementation accident; use equality.
            # (Note: this also fires at count == 0, submitting a batch of 1 —
            # preserved from the original behavior.)
            if count % 500 == 0:
                name_nodes.extend(batch.submit())
                batch = WriteBatch(graph_db)
        name_nodes.extend(batch.submit())  # flush the final partial batch

        verses = graph_db.get_or_create_index(Node, 'verses')
        batch = WriteBatch(graph_db)
        self.stdout.write("Writing verse nodes...")
        for count, verse in enumerate(verses_by_unique.values()):
            abstract = Node.abstract(**verse.to_dict())
            #batch.add_labels(abstract, 'Test')
            batch.create_in_index_or_fail(Node, verses, 'by_unique',
                                          verse.unique, abstract)
            # BUGFIX: was `count % 250 is 0` — same identity-comparison bug.
            if count % 250 == 0:
                batch.submit()
                batch = WriteBatch(graph_db)
        batch.submit()  # flush the final partial batch

        self.stdout.write("Relating nodes...")
        for name_node in name_nodes:
            name = name_node['name'].lower()
            verse_uniques = verse_uniques_by_name_unique.get(name)
            if not verse_uniques:
                continue  # no verses mention this name

            # Look up all verse nodes for this name in one read batch.
            read_batch = ReadBatch(graph_db)
            for unique in verse_uniques:
                read_batch.get_indexed_nodes(verses, 'by_unique', unique)
            verse_nodes = read_batch.submit()

            # Each lookup returns a list; [0] takes the single indexed hit.
            batch = WriteBatch(graph_db)
            for verse_node in verse_nodes:
                abstract = Relationship.abstract(verse_node[0],
                                                 'REFERENCES_NAME',
                                                 name_node)
                batch.add_indexed_relationship(abstract)
            batch.submit()
    # BUGFIX: was a bare `except:`, which also traps SystemExit and
    # KeyboardInterrupt; narrowed to Exception while keeping the original
    # best-effort report-and-continue behavior.
    except Exception:
        traceback.print_exc()