def test_only_rows_for_parent_id(self):
    """Filtering by parent id keeps only tagged rows and drops the id column.

    Tags the first half of the fixture dframe with a parent dataset id,
    then checks that `rows_for_parent_id` returns exactly those rows and
    strips the PARENT_DATASET_ID bookkeeping column from the result.
    """
    parent_id = 1
    # Use floor division: len(...) / 2 is a float under Python 3, which
    # would break the list-repetition below with a TypeError.
    len_parent_rows = len(self.dframe) // 2

    # Build a PARENT_DATASET_ID column covering only the first
    # len_parent_rows rows; the join leaves the remaining rows NaN.
    column = Series([parent_id] * len_parent_rows)
    column.name = PARENT_DATASET_ID
    self.dframe = self.dframe.join(column)

    dframe_only = rows_for_parent_id(self.dframe, parent_id)

    # The bookkeeping column must not leak into the filtered result.
    self.assertFalse(PARENT_DATASET_ID in dframe_only.columns)
    self.assertEqual(len(dframe_only), len_parent_rows)
def update(self, dataset, child_dataset, formula, reducible):
    """Attempt to reduce an update and store."""
    parent_id = dataset.dataset_id

    # Isolate the rows in the child that originated from this parent.
    child_rows = child_dataset.dframe(keep_parent_ids=True, reload_=True)
    parent_rows = rows_for_parent_id(child_rows, parent_id)

    # Drop those rows from the child before recomputing them.
    child_dataset.remove_parent_observations(parent_id)

    # Either fold the update in via the aggregation's reduce step, or
    # recompute the formula over the parent's rows from scratch.
    if reducible and self.__is_reducible():
        parent_rows = self.aggregation.reduce(parent_rows, self.columns)
    else:
        parent_rows = self.updated_dframe(dataset, formula, parent_rows)

    # Stitch the recomputed rows back onto the child, re-tagged with the
    # parent id, and persist the combined frame.
    combined = add_parent_column(
        concat([child_dataset.dframe(), parent_rows]), parent_id)
    child_dataset.replace_observations(combined)

    return child_dataset.dframe()
def update(self, dataset, child_dataset, formula, reducible):
    """Attempt to reduce an update and store."""
    parent_dataset_id = dataset.dataset_id

    # Pull the child's current frame (with parent-id bookkeeping) and
    # keep only the rows that belong to this parent dataset.
    dframe = rows_for_parent_id(
        child_dataset.dframe(keep_parent_ids=True, reload_=True),
        parent_dataset_id)

    # Remove this parent's rows from the child; they will be rebuilt.
    child_dataset.remove_parent_observations(parent_dataset_id)

    # Prefer the cheap reduce path when the aggregation supports it;
    # otherwise fully recompute the formula over the parent's rows.
    use_reduce = reducible and self.__is_reducible()
    dframe = (self.aggregation.reduce(dframe, self.columns)
              if use_reduce
              else self.updated_dframe(dataset, formula, dframe))

    # Re-attach the rebuilt rows, tagged with the parent id, and store.
    merged = concat([child_dataset.dframe(), dframe])
    merged = add_parent_column(merged, parent_dataset_id)
    child_dataset.replace_observations(merged)

    return child_dataset.dframe()