def prepare(self):
    """Rebuild the target database per the design, then index and shard it.

    Side effects only: denormalizes ``self.ori_db`` into ``self.new_db``
    using ``self.design``, creates the design's indexes on the new
    database, and finally configures its shard keys.
    """
    LOG.info("Denormalizing database")
    # STEP 1: Reconstruct database and workload based on the given design.
    denormalizer = DBDenormalizer(self.metadata_db, self.new_meta,
                                  self.ori_db, self.new_db, self.design)
    denormalizer.process()
    # STEP 2: Put indexes on the dataset_db based on the given design.
    self.setIndexes(self.new_db, self.design)
    # STEP 3: Declare shard keys so the new database distributes correctly.
    self.setupShardKeys()
from design_deserializer import Deserializer
from design import Design

# NOTE(review): `pymongo`, `DBDenormalizer`, and `DBCombiner` are used below
# but not imported in this chunk -- presumably imported elsewhere in the
# file; verify before running standalone.


def test_combine_deletes(combiner, operations):
    # Thin wrapper: delegate delete-combining to the DBCombiner under test
    # and return whatever it produces.
    return combiner.combineDeletes(operations)


if __name__=="__main__":
    # Ad-hoc driver: load a serialized TPC-C design from disk and echo it.
    design_path = r"/home/ruiz1/mongodb-d4/exps/tpcc_design"
    print design_path
    deserializer = Deserializer()
    deserializer.loadDesignFile(design_path)
    design = Design()
    design.data = deserializer.json_doc
    print design.data
    # Build the denormalization graph from the design; the DB handles are
    # left as None because only the graph/schema machinery is exercised here.
    dm = DBDenormalizer(None, None, None, None, design)
    graph = dm.constructGraph()
    # Attach a live metadata DB so readSchema can pull the 'schema'
    # collection (assumes a local mongod on the default port -- TODO confirm).
    dm.metadata_db = pymongo.Connection('localhost:27017')['tpcc_meta']
    parent_keys = dm.readSchema('schema')
    combiner = DBCombiner(None, design, graph, parent_keys)
    # Synthesize five equality-predicate delete ops against order_line,
    # each matching one (ol_o_id, ol_id) pair.
    operations = []
    for i in range(5):
        op = dict()
        op['query_content'] = []
        op['query_fields'] = None
        op['collection'] = 'order_line'
        op['query_content'].append({'ol_o_id':i,'ol_id':i+1})
        op['predicates'] = {'ol_o_id':'eq','ol_id':'eq'}
        operations.append(op)