Example #1
def get_response_content(fs):
    """
    @param fs: a FieldStorage-like object holding the form values
    @return: the response as a string
    """
    # read the query tree
    query_tree = NewickIO.parse(fs.query, FelTree.NewickTree)
    # read the reference tree
    reference_tree = NewickIO.parse(fs.reference, FelTree.NewickTree)
    # Calculate the loss using the requested loss function;
    # exactly one of fs.uniform and fs.weighted is expected to be set.
    if fs.uniform:
        loss_numerator = TreeComparison.get_split_distance(
                query_tree, reference_tree)
    elif fs.weighted:
        loss_numerator = TreeComparison.get_weighted_split_distance(
                query_tree, reference_tree)
    # do the normalization if requested
    if fs.normalize:
        if fs.uniform:
            loss_denominator = float(
                    TreeComparison.get_nontrivial_split_count(reference_tree))
        elif fs.weighted:
            loss_denominator = float(
                    TreeComparison.get_weighted_split_count(reference_tree))
    else:
        loss_denominator = 1
    # return the response
    if loss_denominator:
        return str(loss_numerator / loss_denominator) + '\n'
    else:
        return 'normalization failed\n'
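The TreeComparison module itself is not shown. Judging from the normalization by get_nontrivial_split_count(reference_tree) here, and from the assertion error_count <= max_error_count in the next example, get_split_distance appears to count the nontrivial reference splits that the query tree fails to recover, so the normalized loss lies between 0 and 1. Below is a minimal self-contained sketch of that assumed behavior, using frozensets of leaf names instead of the FelTree classes; split_distance and normalized_split_distance are illustrative names, not the real TreeComparison API.

def split_distance(query_splits, reference_splits):
    # Count the nontrivial reference splits that the query tree fails
    # to recover (an assumption about the semantics, see above).
    return len(set(reference_splits) - set(query_splits))

def normalized_split_distance(query_splits, reference_splits):
    # Normalize by the number of nontrivial reference splits,
    # mirroring the fs.normalize branch above.
    denominator = float(len(set(reference_splits)))
    if not denominator:
        return None
    return split_distance(query_splits, reference_splits) / denominator

# Toy example on leaves {a, b, c, d}, with each split written in a
# canonical form: the side of the edge that does not contain leaf 'a'.
# reference ((a,b),(c,d)) has one nontrivial split: {c, d}
# query     ((a,c),(b,d)) has one nontrivial split: {b, d}
reference_splits = {frozenset({'c', 'd'})}
query_splits = {frozenset({'b', 'd'})}
print(split_distance(query_splits, reference_splits))             # 1
print(normalized_split_distance(query_splits, reference_splits))  # 1.0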
Example #2
def run(self, distance_matrices, ordered_names):
    """
    This function stores the losses for each reconstruction.
    @param distance_matrices: a sequence of distance matrices
    @param ordered_names: order of taxa in the distance matrix
    """
    if self.start_time is not None:
        msg = 'each simulation object should be run only once'
        raise HandlingError(msg)
    if not distance_matrices:
        raise HandlingError('no distance matrices were provided')
    tip_name_set = set(node.name for node in self.original_tree.gen_tips())
    if tip_name_set != set(ordered_names):
        raise HandlingError('leaf name mismatch')
    self.start_time = time.time()
    # Define the reference tree and its maximum cost
    # under different loss functions.
    reference_tree = self.original_tree
    max_error_count = TreeComparison.get_nontrivial_split_count(
            reference_tree)
    max_loss_value = TreeComparison.get_weighted_split_count(
            reference_tree)
    for distance_matrix in distance_matrices:
        # create the tree builder
        tree_builder = NeighborhoodJoining.TreeBuilder(
                distance_matrix, ordered_names, self.splitter)
        # set parameters of the validating tree builder
        tree_builder.set_fallback_name(self.fallback_name)
        # build the tree
        try:
            query_tree = tree_builder.build()
        except NeighborhoodJoining.NeighborhoodJoiningError as e:
            raise HandlingError(e)
        # Note the number and weight of partition errors
        # during the reconstruction.
        error_count = TreeComparison.get_split_distance(
                query_tree, reference_tree)
        loss_value = TreeComparison.get_weighted_split_distance(
                query_tree, reference_tree)
        # make sure that the summary is internally consistent
        assert error_count <= max_error_count, (error_count, max_error_count)
        assert loss_value <= max_loss_value, (loss_value, max_loss_value)
        # save the reconstruction characteristics to use later
        self.error_counts.append(error_count)
        self.loss_values.append(loss_value)
        self.max_error_counts.append(max_error_count)
        self.max_loss_values.append(max_loss_value)
    self.stop_time = time.time()
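Once run() has finished, the per-reconstruction results sit in the parallel lists error_counts, loss_values, max_error_counts, and max_loss_values. A hypothetical post-processing helper (summarize is my own sketch, not part of the source class) could turn them into normalized averages:

def summarize(sim):
    # A hypothetical helper: it relies only on the attributes that run()
    # fills in (error_counts, loss_values, max_error_counts,
    # max_loss_values, start_time, stop_time) and assumes at least one
    # reconstruction with nonzero maxima.
    normalized_errors = [
            e / float(m) for e, m
            in zip(sim.error_counts, sim.max_error_counts) if m]
    normalized_losses = [
            v / float(m) for v, m
            in zip(sim.loss_values, sim.max_loss_values) if m]
    lines = [
            'reconstructions: %d' % len(sim.error_counts),
            'mean normalized split error: %f' % (
                sum(normalized_errors) / len(normalized_errors)),
            'mean normalized weighted loss: %f' % (
                sum(normalized_losses) / len(normalized_losses)),
            'elapsed seconds: %f' % (sim.stop_time - sim.start_time)]
    return '\n'.join(lines)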