Example #1
0
 def delete(self, *args, **kwargs):
     """Delete this campaign's graph node along with all of its
     relationships, then delegate to the parent class's delete().
     """
     batch = WriteBatch(graph_db)
     node = self.get_node()
     # Relationships must be deleted before the node itself can go.
     for rel in node.get_relationships():
         batch.delete_relationship(rel)
     batch.delete_node(node)
     # BUG FIX: the batch was built but never executed -- py2neo's
     # WriteBatch only queues operations until run()/submit() is called,
     # so the graph node was never actually removed.
     batch.run()
     super(Campaign, self).delete(*args, **kwargs)
Example #2
0
 def set_contacts(self, profiles):
     """Link this object's node to each profile's node, one relationship
     per profile, in a single write batch.

     *profiles* -- registration.models.Userprofile objects.
     """
     me = self.get_node()
     batch = WriteBatch(graph_db)
     for profile in profiles:
         # Relationship type comes from self.contact_relationship.
         batch.get_or_create_relationship(me, self.contact_relationship, profile.__node__)
     r = batch.submit()
     # FIX: logger.warn() is a deprecated alias; logging docs specify
     # logger.warning() as the supported spelling.
     logger.warning(r)
Example #3
0
 def set_contacts(self, profiles):
     """Batch-create a contact relationship from this node to every
     profile's node.

     *profiles* -- registration.models.Userprofile objects.
     """
     me = self.get_node()
     batch = WriteBatch(graph_db)
     for profile in profiles:
         batch.get_or_create_relationship(me, self.contact_relationship,
                                          profile.__node__)
     r = batch.submit()
     # FIX: replaced deprecated logger.warn() with logger.warning()
     # (the documented method name in the logging module).
     logger.warning(r)
def create_directory_children(dirs, parent):
    """Create (p:Directory)<-[:PARENT]-(d:Directory) for every name in
    *dirs* (a list of strings); *parent* is a py2neo node.
    """
    write_batch = WriteBatch(graph_db)
    for name in dirs:
        # Each child gets a fresh random hex id in its '_id' property.
        child = write_batch.create({'name': name, '_id': uuid.uuid4().hex})
        write_batch.add_labels(child, "Directory")
        write_batch.create(rel(child, "PARENT", parent))
    write_batch.run()
def create_file_children(files, parent, root_path):
    """Create (p:Directory)<-[:PARENT]-(f:File) for every file name in
    *files*, attached to the given *parent* node.  Each file node's
    'content' property stores the file's contents read from *root_path*.
    """
    write_batch = WriteBatch(graph_db)
    for name in files:
        body = get_file_content(name, root_path)
        node = write_batch.create({'name': name, '_id': uuid.uuid4().hex,
                                   'content': body})
        write_batch.add_labels(node, 'File')
        write_batch.create(rel(node, "PARENT", parent))
    write_batch.run()
Example #6
0
 def delete(self, *args, **kwargs):
     """Remove this campaign's node and its relationships from the
     graph, then run the superclass deletion.
     """
     batch = WriteBatch(graph_db)
     node = self.get_node()
     for rel in node.get_relationships():
         batch.delete_relationship(rel)
     batch.delete_node(node)
     # BUG FIX: WriteBatch queues operations and performs nothing until
     # run()/submit(); without this call the deletions never reached the
     # database.
     batch.run()
     super(Campaign, self).delete(*args, **kwargs)
Example #7
0
 def set_is_client(self, is_client):
     """Mark this profile as a client, or clear the flag.

     A falsy *is_client* just persists ``is_client = False``.  A truthy
     value deletes every relationship in ``self.known`` and sets the
     node's ``is_client`` property in one batch, submitted while holding
     the client lock.
     """
     if not is_client:
         self.is_client = False
         self.save()
         return
     batch = WriteBatch(neomodel.core.connection())
     for rel in self.known.all():
         batch.delete(rel)
     batch.set_node_property(self.__node__, 'is_client', True)
     # IDIOM: `with` replaces the manual acquire()/try/finally release()
     # pair.  NOTE(review): assumes get_set_client_lock() returns a lock
     # supporting the context-manager protocol (all threading locks do)
     # -- confirm if it is a custom lock type.
     with self.get_set_client_lock():
         batch.submit()
Example #8
0
 def set_is_client(self, is_client):
     """Toggle the client flag for this profile.

     When *is_client* is falsy, only ``is_client = False`` is saved.
     Otherwise all KNOWN relationships are deleted and the node's
     ``is_client`` property is set, in one batch submitted under the
     client lock.
     """
     if is_client:
         write_batch = WriteBatch(neomodel.core.connection())
         for known_rel in self.known.all():
             write_batch.delete(known_rel)
         write_batch.set_node_property(self.__node__, 'is_client', True)
         self.get_set_client_lock().acquire()
         try:
             write_batch.submit()
         finally:
             self.get_set_client_lock().release()
     else:
         self.is_client = False
         self.save()
Example #9
0
# Cypher template: create one Business node per input record; {param}
# placeholders are filled from each parsed JSON line.
create_business_query = '''
// MERGE ON categories
CREATE (b:Business {id: {business_id}, name: {name}, lat:{latitude}, lon:{longitude},
	stars: {stars}, review_count: {review_count}})
'''

# Cypher template: get-or-create the Category node and uniquely relate
# the Business to it with an IS_IN relationship.
merge_category_query = '''
MATCH (b:Business {id: {business_id}})
MERGE (c:Category {name: {category}})
CREATE UNIQUE (c)<-[:IS_IN]-(b)
'''

# Python 2 print statement -- this snippet is Python 2 code.
print "Beginning business batch"
with open('data/yelp_academic_dataset_business.json', 'r') as f:
	business_batch = WriteBatch(db)
	count = 0
	# One JSON object per line; run and clear the batch every 10000
	# statements to keep request payloads bounded.
	for b in (json.loads(l) for l in f):
		business_batch.append_cypher(create_business_query, b)
		count += 1
		if count >= 10000:
			business_batch.run()
			business_batch.clear()
			count = 0
	# Flush the final partial batch.
	if count > 0:
		business_batch.run()

print "Beginning category batch"
with open('data/yelp_academic_dataset_business.json', 'r') as f:
	category_batch = WriteBatch(db)
	count = 0
	# NOTE(review): snippet is truncated here in the source -- the loop
	# that appends merge_category_query per category is missing.
Example #10
0
# Cypher template: create one Business node per JSON record (parameters
# supplied per-row from the parsed line).
create_business_query = '''
// MERGE ON categories
CREATE (b:Business {id: {business_id}, name: {name}, lat:{latitude}, lon:{longitude},
	stars: {stars}, review_count: {review_count}})
'''

# Cypher template: ensure the Category node exists, then uniquely link
# the Business to it.
merge_category_query = '''
MATCH (b:Business {id: {business_id}})
MERGE (c:Category {name: {category}})
CREATE UNIQUE (c)<-[:IS_IN]-(b)
'''

# Python 2 print statement -- this snippet is Python 2 syntax.
print "Beginning business batch"
with open('data/yelp_academic_dataset_business.json', 'r') as f:
    business_batch = WriteBatch(db)
    count = 0
    # Stream the file one JSON object per line; execute and clear the
    # batch every 10000 statements to bound memory and payload size.
    for b in (json.loads(l) for l in f):
        business_batch.append_cypher(create_business_query, b)
        count += 1
        if count >= 10000:
            business_batch.run()
            business_batch.clear()
            count = 0
    # Flush whatever remains after the loop.
    if count > 0:
        business_batch.run()

print "Beginning category batch"
with open('data/yelp_academic_dataset_business.json', 'r') as f:
    category_batch = WriteBatch(db)
    count = 0
    # NOTE(review): the snippet is truncated here in the source -- the
    # per-category append loop is missing.
    def handle(self, *args, **options):
        """Rebuild the name/verse graph in Neo4j.

        Creates indexed name and verse nodes in chunked write batches,
        then links each verse node back to the name it references with a
        REFERENCES_NAME relationship.
        """
        try:
            verses_by_unique = collate_verses_by_unique()
            names_by_unique = collate_names_by_unique()
            verse_uniques_by_name_unique = collate_verse_uniques_by_name_unique(names_by_unique, verses_by_unique)
            graph_db = GraphDatabaseService("http://localhost:7474/db/data/")

            self.stdout.write("Clearing the old graph...")
            #graph_db.clear()

            self.stdout.write("Writing name nodes...")

            names = graph_db.get_or_create_index(Node, 'names')
            print("Created index ", names)
            batch = WriteBatch(graph_db)
            name_nodes = list()
            for count, name in enumerate(names_by_unique.values()):
                abstract = Node.abstract(**name.to_dict())
                batch.create_in_index_or_fail(Node, names, 'by_unique', name.unique, abstract)
                # Flush every 500 entries.  BUG FIX: the original used
                # ``count % 500 is 0`` -- identity comparison, which only
                # works by accident of CPython's small-int caching; ``==``
                # is the correct comparison.
                if count % 500 == 0:
                    name_nodes.extend(batch.submit())
                    batch = WriteBatch(graph_db)
            name_nodes.extend(batch.submit())

            verses = graph_db.get_or_create_index(Node, 'verses')
            batch = WriteBatch(graph_db)
            self.stdout.write("Writing verse nodes...")
            for count, verse in enumerate(verses_by_unique.values()):
                abstract = Node.abstract(**verse.to_dict())
                #batch.add_labels(abstract, 'Test')
                batch.create_in_index_or_fail(Node, verses, 'by_unique', verse.unique, abstract)
                # Same fix as above: ``==`` instead of ``is``.
                if count % 250 == 0:
                    batch.submit()
                    batch = WriteBatch(graph_db)
            batch.submit()

            self.stdout.write("Relating nodes...")

            for name_node in name_nodes:
                name = name_node['name'].lower()
                verse_uniques = verse_uniques_by_name_unique.get(name)
                # Skip names that appear in no verse.
                if not verse_uniques:
                    continue
                read_batch = ReadBatch(graph_db)
                for unique in verse_uniques:
                    read_batch.get_indexed_nodes(verses, 'by_unique', unique)
                verse_nodes = read_batch.submit()
                batch = WriteBatch(graph_db)
                for verse_node in verse_nodes:
                    abstract = Relationship.abstract(verse_node[0], 'REFERENCES_NAME', name_node)
                    batch.add_indexed_relationship(abstract)
                batch.submit()
        except Exception:
            # FIX: narrowed from a bare ``except:`` so SystemExit and
            # KeyboardInterrupt still propagate; failures are reported
            # rather than silently swallowed.
            traceback.print_exc()