Example No. 1
    def merge(self, graph, merge_properties=None, batch_size=None):
        """
        Merge nodes from NodeSet on merge properties.

        :param merge_properties: The merge properties.
        """
        log.debug('Merge NodeSet on {}'.format(merge_properties))

        if not batch_size:
            batch_size = self.batch_size

        if not merge_properties:
            merge_properties = self.merge_keys

        log.debug('Batch Size: {}'.format(batch_size))

        query = nodes_merge_unwind(self.labels, merge_properties)
        log.debug(query)

        i = 1
        for batch in chunks(self.node_properties(), size=batch_size):
            batch = list(batch)
            log.debug('Batch {}'.format(i))
            log.debug(batch[0])

            graph.run(query, props=batch)
            i += 1
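
The merge() call above is typically driven by a few lines of setup code. The following sketch is an assumption about how that might look (the NodeSet constructor, the add_node() helper, and the connection details are not part of the snippet above); it connects through a py2neo-style Graph and merges all nodes on a single key in batches:

    from py2neo import Graph

    graph = Graph("bolt://localhost:7687", auth=("neo4j", "password"))

    # Assumed API: a NodeSet built from labels and merge keys, filled via add_node().
    # NodeSet is the class the method above belongs to; its import path depends on where it lives.
    people = NodeSet(['Person'], merge_keys=['name'])
    people.add_node({'name': 'Alice', 'age': 42})
    people.add_node({'name': 'Bob', 'age': 23})

    # MERGE all nodes on 'name'; the UNWIND query is sent in batches of 5000 property dicts.
    people.merge(graph, merge_properties=['name'], batch_size=5000)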
Example No. 2
    def create(self, graph, batch_size=None):
        """
        Create relationships in this RelationshipSet
        """
        log.debug('Create RelationshipSet')
        if not batch_size:
            batch_size = self.batch_size
        log.debug('Batch Size: {}'.format(batch_size))

        # get query
        query = query_create_rels_unwind(self.start_node_labels, self.end_node_labels, self.start_node_properties,
                                         self.end_node_properties, self.rel_type)
        log.debug(query)

        i = 1
        # iterate over chunks of rels
        for batch in chunks(self.relationships, size=batch_size):
            batch = list(batch)
            log.debug('Batch {}'.format(i))
            log.debug(batch[0])
            # get parameters
            query_parameters = params_create_rels_unwind_from_objects(batch)
            log.debug(json.dumps(query_parameters))
            result = graph.run(query, **query_parameters)
            for r in result:
                print(r)
            i += 1
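
A matching usage sketch for a RelationshipSet (again an assumption, not taken from the snippet: the constructor arguments simply mirror the attributes that create() reads, and an add_relationship() helper is assumed to fill self.relationships):

    # Assumed API: constructor arguments mirror the attributes used by create().
    friends = RelationshipSet(rel_type='KNOWS',
                              start_node_labels=['Person'],
                              end_node_labels=['Person'],
                              start_node_properties=['name'],
                              end_node_properties=['name'])
    friends.add_relationship({'name': 'Alice'}, {'name': 'Bob'}, {'since': 2015})

    # CREATE all relationships through the UNWIND query, 10000 per batch.
    friends.create(graph, batch_size=10000)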
Example No. 3
    def create(self, graph, batch_size=None):
        """
        Create all nodes from NodeSet.
        """
        log.debug('Create NodeSet')
        if not batch_size:
            batch_size = self.batch_size
        log.debug('Batch Size: {}'.format(batch_size))

        i = 1
        for batch in chunks(self.nodes, size=batch_size):
            batch = Subgraph(list(batch))
            log.debug('Batch {}'.format(i))

            graph.create(batch)
            i += 1
Example No. 4
    def merge(self,
              graph,
              merge_properties=None,
              batch_size=None,
              raise_on_result_count_deviation=False):
        """
        Merge nodes from NodeSet on merge properties.

        :param merge_properties: The merge properties.

        :raise_on_result_count_deviation: boolean. Raise if less nodes were processed on DB side as sended with the query. This can happen in parallel processing environments. set Nodeset.failed_batch_handler(error,query,batch) to catch single failed batches
        """
        log.debug('Merge NodeSet on {}'.format(merge_properties))

        if not batch_size:
            batch_size = self.batch_size

        if not merge_properties:
            merge_properties = self.merge_keys

        log.debug('Batch Size: {}'.format(batch_size))

        query = nodes_merge_unwind(self.labels, merge_properties)
        log.debug(query)
        i = 1
        for batch in chunks(self.node_properties(), size=batch_size):
            batch = list(batch)
            log.debug('Batch {}'.format(i))
            log.debug(batch[0])
            try:
                tx = graph.begin()
                result = tx.run(query, props=batch)
                tx.commit()
                count = result.data()[0]["cnt"]
                if raise_on_result_count_deviation and count < len(batch):
                    raise MissingNodesEx(
                        "Expected {} nodes to be inserted, got {}".format(
                            len(batch), count))
            except Exception as e:
                if self.failed_batch_handler is not None:
                    self.failed_batch_handler(self, e, query, batch)
                else:
                    raise
            i += 1
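
The except block above hands a failed batch to self.failed_batch_handler, called as (nodeset, error, query, batch). A minimal sketch of wiring one up, reusing the hypothetical people NodeSet from the first sketch:

    # The handler signature mirrors the call self.failed_batch_handler(self, e, query, batch).
    def log_failed_batch(nodeset, error, query, batch):
        log.error('Merge batch of %d nodes failed: %s', len(batch), error)

    people.failed_batch_handler = log_failed_batch

    # Without a handler the exception is re-raised; with raise_on_result_count_deviation=True
    # a batch whose server-side count falls short of len(batch) is treated as a failure too.
    people.merge(graph, raise_on_result_count_deviation=True)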
Example No. 5
    def create(self,
               graph,
               batch_size=None,
               raise_on_result_count_deviation=False):
        """
        Create all nodes from NodeSet.


        :raise_on_result_count_deviation: boolean. Raise if less nodes were processed on DB side as sended with the query. This can happen in parallel processing environments. set Nodeset.failed_batch_handler(error,query,batch) to catch single failed batches
        """
        log.debug('Create NodeSet')
        if not batch_size:
            batch_size = self.batch_size
        log.debug('Batch Size: {}'.format(batch_size))

        i = 1
        for batch in chunks(self.nodes, size=batch_size):
            batch = list(batch)
            log.debug('Batch {}'.format(i))

            query = nodes_create_unwind(self.labels)
            log.debug(query)
            try:
                tx = graph.begin()
                result = tx.run(query, props=batch)
                tx.commit()
                count = result.data()[0]["cnt"]
                if raise_on_result_count_deviation and count < len(batch):
                    raise MissingNodesEx(
                        "Expected {} nodes to be inserted, got {}".format(
                            len(batch), count))
            except Exception as e:
                if self.failed_batch_handler is not None:
                    self.failed_batch_handler(self, e, query, batch)
                else:
                    raise

            #with graph.session() as s:
            #    result = s.run(query, props=batch)

            i += 1
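
The count check in these variants relies on the generated Cypher returning a cnt value. The exact query produced by nodes_create_unwind() is not shown here, so the following is only an assumption about its shape, included to show where result.data()[0]["cnt"] comes from:

    # Hypothetical shape of the query returned by nodes_create_unwind(['Person']);
    # the final RETURN count(n) AS cnt row is what result.data()[0]["cnt"] reads back.
    query = (
        "UNWIND $props AS properties "
        "CREATE (n:Person) "
        "SET n = properties "
        "RETURN count(n) AS cnt"
    )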
Example No. 6
    def create(self, graph, batch_size=None):
        """
        Create all nodes from NodeSet.
        """
        log.debug('Create NodeSet')
        if not batch_size:
            batch_size = self.batch_size
        log.debug('Batch Size: {}'.format(batch_size))

        i = 1
        for batch in chunks(self.nodes, size=batch_size):
            batch = list(batch)
            log.debug('Batch {}'.format(i))

            query = nodes_create_unwind(self.labels)
            log.debug(query)

            result = graph.run(query, props=batch)

            i += 1
Example No. 7
    def merge(self, graph, batch_size=None, raise_on_result_count_deviation=False):
        """
        Create relationships in this RelationshipSet

        :raise_on_result_count_deviation: boolean. Raise if less relationships were processed on DB side as sended with the query. This can happen in parallel processing environments. set RelationshipSet.failed_batch_handler(error,query,batch) to catch single failed batches
        """
        log.debug('Create RelationshipSet')
        if not batch_size:
            batch_size = self.batch_size
        log.debug('Batch Size: {}'.format(batch_size))

        # get query
        query = query_merge_rels_unwind(self.start_node_labels, self.end_node_labels, self.start_node_properties,
                                        self.end_node_properties, self.rel_type)
        log.debug(query)

        i = 1
        # iterate over chunks of rels
        for batch in chunks(self.relationships, size=batch_size):
            batch = list(batch)
            log.debug('Batch {}'.format(i))
            log.debug(batch[0])
            # get parameters
            query_parameters = params_create_rels_unwind_from_objects(batch)
            log.debug(json.dumps(query_parameters))
            
            try:
                tx = graph.begin()
                result = tx.run(query, **query_parameters)
                tx.commit()
                count = result.data()[0]["cnt"]
                if raise_on_result_count_deviation and count < len(batch):
                    raise MissingRelationshipsEx(
                        "Expected {} relationships to be inserted, got {}".format(len(batch), count))
            except Exception as e:
                if self.failed_batch_handler is not None:
                    self.failed_batch_handler(self, e, query, batch)
                else:
                    raise
            i += 1