def test_bulk_dump(self):
    """Test Xco2.bulk_dump()"""
    # NOTE(review): an identical second definition of test_bulk_dump appears
    # later in this file; that later definition shadows this one when the
    # class body is evaluated, so this copy never runs — confirm and dedupe.
    from src.formatdata import bulk_dump
    print('#### TEST5 ####')
    # Fresh session against the 'test' database, bound to the fixture engine.
    session2 = dbProxy.create_session(db='test', engine=self.engine)
    # Empty both tables so the row count below reflects only this dump.
    util_truncate_table(session2, [Xco2, Areas])
    # Dump the first 8 records of the fixture dataset.
    bulk_dump(create_generator_from_dataset(self.dataset, 8))
    rows = self.session.query(Xco2).count()
    # NOTE(review): catching AssertionError and printing FAILED means the
    # test runner never sees the failure — it always reports success.
    try:
        self.assertEqual(rows, 8)
        print('PASSED')
    except AssertionError:
        print('FAILED')
def test_bulk_dump(self):
    """Test Xco2.bulk_dump()"""
    from src.formatdata import bulk_dump
    print('#### TEST5 ####')
    # Work against a clean 'test' database: truncate both tables first.
    scratch_session = dbProxy.create_session(db='test', engine=self.engine)
    util_truncate_table(scratch_session, [Xco2, Areas])
    # Feed bulk_dump a generator over the first 8 fixture records.
    sample = create_generator_from_dataset(self.dataset, 8)
    bulk_dump(sample)
    inserted = self.session.query(Xco2).count()
    # Report outcome on stdout; an AssertionError is downgraded to 'FAILED'.
    try:
        self.assertEqual(inserted, 8)
        print('PASSED')
    except AssertionError:
        print('FAILED')
def main(full=False):
    """Dump Xco2 data files into the database.

    :param bool full: when False (default), pick one random file and dump
        only its first thousand rows; when True, dump every file entirely.

    Prints a running total on normal completion and always terminates the
    process via ``sys.exit(0)``.
    """
    paths = return_files_paths()
    print(paths, len(paths))
    if not full:
        # try the first thousand rows of one random file
        idx = randint(0, len(paths) - 1)
        datasets = [return_dataset(paths[idx])]
        generators = (create_generator_from_dataset(d, 1000) for d in datasets)
    else:
        # dump all the files, with no row limit
        datasets = [return_dataset(p) for p in paths]
        generators = (create_generator_from_dataset(d) for d in datasets)
    # `generators` is a generator of generators: one row-generator per dataset.
    print('DUMPING...')
    from src.formatdata import bulk_dump
    total = 0
    try:
        for gen in generators:
            # bulk_dump returns a pair; only the dumped-row count is used
            _, n = bulk_dump(gen)
            total += n
        print('>>> {} Xco2 data dumped <<<'.format(total))
    except KeyboardInterrupt:
        # allow the operator to abort a long dump without a traceback
        pass
    sys.exit(0)