def main(make_engine=True):
    desc = "Clean data and import into db."
    # Pass desc as description=; positionally it would set prog, not the help text.
    inputparser = argparse.ArgumentParser(description=desc)
    inputparser.add_argument(
        "--debug", action="store_true", default=False,
        help="Enable debugging")
    inputparser.add_argument(
        "--link_areas", action="store_true", default=False,
        help="Link with neighbourhood areas")
    args = inputparser.parse_args()

    if args.debug:
        log.setLevel(logging.DEBUG)
        logging.getLogger("urllib3").setLevel(logging.DEBUG)

    start = time.time()

    if make_engine:
        engine = db_helper.make_engine()
        db_helper.set_session(engine)

    if args.link_areas:
        session = db_helper.session
        link_areas(session, "importer_trafficorder")
    else:
        importer = TrafficOrderImporter()
        importer.start_import()

    log.info("Total time: %s", time.time() - start)
def start_import(make_engine, endpoint):
    if make_engine:
        engine = db_helper.make_engine(section="docker")
        db_helper.set_session(engine)

    data = fetch_json(endpoint)
    store(data, endpoint)
async def main(workers=WORKERS, make_engine=True):
    # When testing we do not want an engine.
    if make_engine:
        engine = db_helper.make_engine(
            section="docker", environment=KILO_ENVIRONMENT_OVERRIDES)
        db_helper.set_session(engine)

    await run_workers(workers=workers)
def main(make_engine=True):
    desc = "Clean data and import into db."
    inputparser = argparse.ArgumentParser(description=desc)
    inputparser.add_argument(
        "--link_areas", action="store_true", default=False,
        help="Link stations with neighbourhood areas")
    args = inputparser.parse_args()

    start = time.time()

    if make_engine:
        engine = db_helper.make_engine()
        db_helper.set_session(engine)

    session = db_helper.session

    if args.link_areas:
        link_areas(session, OvFiets.__tablename__)
    else:
        start_import()

    log.info("Took: %s", time.time() - start)
    session.close()
async def main(args): """Main.""" engine = db_helper.make_engine(section="docker") db_helper.set_session(engine) drop_views(args) drop_tables(args) LOG.warning("CREATING DEFINED TABLES") # recreate tables Base.metadata.create_all(engine) create_views(args)
def setup_module():
    global transaction, connection, engine, session

    settings.TESTING["running"] = True
    db_helper.create_db()
    engine = db_helper.make_engine(section="test")
    connection = engine.connect()
    transaction = connection.begin()
    session = db_helper.set_session(engine)
    session.execute("CREATE EXTENSION IF NOT EXISTS postgis;")
    session.commit()
    models.Base.metadata.drop_all(bind=engine)
    models.Base.metadata.create_all(bind=engine)
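A matching teardown is the natural counterpart to setup_module above. This is a minimal sketch, assuming the module-level globals set there; the rollback/close order is an assumption, not part of the original source.

def teardown_module():
    # Undo the test flag and release everything setup_module opened (assumed pairing).
    settings.TESTING["running"] = False
    session.close()
    transaction.rollback()
    connection.close()
    engine.dispose()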
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    connectable = db_helper.make_engine(environment=KILO_ENVIRONMENT_OVERRIDES)

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()
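For reference, alembic's env.py template pairs run_migrations_online with an offline variant. This sketch follows the standard template; the `config` object and the sqlalchemy.url option are assumptions about this project's alembic setup, not shown in the original snippet.

def run_migrations_offline():
    """Run migrations in 'offline' mode, emitting SQL without an Engine."""
    # Standard alembic template: read the URL from alembic.ini (assumed config).
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url, target_metadata=target_metadata, literal_binds=True)

    with context.begin_transaction():
        context.run_migrations()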
async def main(args): """Main.""" engine = db_helper.make_engine(section="docker") session = db_helper.set_session(engine) if args.drop: # resets everything LOG.warning("DROPPING ALL DEFINED TABLES") for table in OVFIETS_TABLES: session.execute(f"DROP table if exists {table};") session.commit() LOG.warning("CREATING DEFINED TABLES") # recreate tables Base.metadata.create_all(engine)
def main(make_engine=True): """Start ETL. When testing we can turn of endgine creation. """ if make_engine: engine = db_helper.make_engine( section="docker", environment=KILO_ENVIRONMENT_OVERRIDES) db_helper.set_session(engine) extract_measurements() # using wfs services which are loaded in the maintenance # jenkins job. we merge enevo fill levels with datapunt afval sites. # based on distance. update_site_ids()
async def create_tables(args):
    """Main."""
    engine = db_helper.make_engine(environment=KILO_ENVIRONMENT_OVERRIDES)
    session = db_helper.set_session(engine)

    if args.drop:
        # Resets everything.
        LOG.warning("DROPPING ALL DEFINED TABLES")
        for table in SIDCON_TABLES:
            session.execute(f"DROP TABLE IF EXISTS {table};")
        session.commit()

    LOG.warning("CREATING DEFINED TABLES")
    # Recreate tables.
    Base.metadata.create_all(engine)
    db_helper.alembic_migrate(engine)
def main(make_engine=True):
    desc = "Clean data and import into db."
    inputparser = argparse.ArgumentParser(description=desc)
    inputparser.add_argument(
        "endpoint",
        type=str,
        default="traveltime",
        choices=ENDPOINTS,
        help="Provide Endpoint to scrape",
        nargs=1,
    )
    inputparser.add_argument(
        "--debug", action="store_true", default=False,
        help="Enable debugging")
    inputparser.add_argument(
        "--exclude_areas", action="store_true", default=False,
        help="Do not link areas to model")
    args = inputparser.parse_args()

    if args.debug:
        log.setLevel(logging.DEBUG)

    start = time.time()

    if make_engine:
        engine = db_helper.make_engine()
        db_helper.set_session(engine)

    importer = ENDPOINT_IMPORTER[args.endpoint[0]]()

    if args.exclude_areas:
        importer.link_areas = False

    importer.start_import()
    log.info("Total time: %s", time.time() - start)
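Example invocations for the parser above; the script name is an assumption, while the endpoint argument and flags come from the snippet itself.

# Hypothetical script name:
#   python importer.py traveltime --debug
#   python importer.py traveltime --exclude_areas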
async def create_tables(args):
    """Main."""
    engine = db_helper.make_engine(environment=KILO_ENVIRONMENT_OVERRIDES)
    session = db_helper.set_session(engine)

    if args.drop:
        # Resets the target table.
        LOG.warning("DROPPING TARGET TABLE")
        session.execute("DROP TABLE IF EXISTS kilogram_weigh_measurement;")
        session.commit()

    LOG.warning("CREATING DEFINED TABLES")
    # Recreate tables.
    Base.metadata.create_all(engine)
    db_helper.alembic_migrate(engine)
def main(make_engine=True):
    desc = "Clean data and import into db."
    inputparser = argparse.ArgumentParser(description=desc)
    inputparser.add_argument(
        "endpoint",
        type=str,
        default="parking_location",
        choices=ENDPOINTS,
        help="Provide Endpoint to scrape",
        nargs=1,
    )
    inputparser.add_argument(
        "--link_areas", action="store_true", default=False,
        help="Link stations with neighbourhood areas")
    args = inputparser.parse_args()

    start = time.time()

    if make_engine:
        engine = db_helper.make_engine()
        db_helper.set_session(engine)

    session = db_helper.session

    endpoint = args.endpoint[0]
    raw_model = ENDPOINT_RAW_MODEL[endpoint]
    clean_model = ENDPOINT_MODEL[endpoint]
    store_func = ENDPOINTS_STORE_FUNC[endpoint]

    if args.link_areas:
        link_areas(session, clean_model.__tablename__)
    else:
        start_import(store_func, raw_model, clean_model)

    log.info("Took: %s", time.time() - start)
    session.close()
def main(make_engine=True):
    desc = "Clean data and import into db."
    inputparser = argparse.ArgumentParser(description=desc)
    inputparser.add_argument(
        "--debug", action="store_true", default=False,
        help="Enable debugging")
    args = inputparser.parse_args()

    if args.debug:
        log.setLevel(logging.DEBUG)

    start = time.time()

    if make_engine:
        engine = db_helper.make_engine()
        db_helper.set_session(engine)

    WifiInfoImporter().start_import()
    log.info("Total time: %s", time.time() - start)
async def main(args): """Table management.""" engine = db_helper.make_engine(environment=KILO_ENVIRONMENT_OVERRIDES, ) session = db_helper.set_session(engine) if args.dropall: LOG.warning("DROPPING ALL DEFINED TABLES") # resets everything Base.metadata.drop_all(engine) if args.drop: # resets non raw tables LOG.warning("DROPPING ALL DROP TABLES") for table in DROP_TABLES: table = table.__table__.name session.execute(f"DROP table if exists {table};") for seq in DROP_SEQUENCE: session.execute(f"DROP SEQUENCE IF EXISTS {seq};") session.commit() if args.live: LOG.warning("CREATING LIVE RELATED TABLES") meta = MetaData() meta.bind = engine meta.reflect() for table in LIVE_TABLES: table_name = table.__table__.name if table_name not in meta.tables: table.__table__.create(engine) # stop creating more tables return LOG.warning("CREATING ALL DEFINED TABLES") # recreate tables Base.metadata.create_all(engine)
def get_session():
    engine = db_helper.make_engine(environment=KILO_ENVIRONMENT_OVERRIDES)
    session = db_helper.set_session(engine)
    return engine, session
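A minimal usage sketch for get_session; the try/finally cleanup is an assumption about how callers should release resources, not something the original source shows.

def example_query():
    engine, session = get_session()
    try:
        # Any ORM or raw SQL work goes here.
        return session.execute("SELECT 1;").scalar()
    finally:
        session.close()
        engine.dispose()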
inputparser = argparse.ArgumentParser(description=desc)
inputparser.add_argument(
    "endpoint",
    type=str,
    default="",
    choices=list(OPTIONS.keys()),
    help="Provide Endpoint to copy",
    nargs=1,
)
inputparser.add_argument(
    "--link_container_slots", action="store_true", default=False,
    help="Link containerslots with sites and sitecontenttypes")
inputparser.add_argument(
    "--validate_containers", action="store_true", default=False,
    help="Validate that the customer_keys are in bammens")
args = inputparser.parse_args()

engine = db_helper.make_engine()
session = db_helper.set_session(engine)
main()
session.close()
    # Tail of a fetch helper (its definition is truncated in this excerpt).
    r = requests.get(url, params=params)
    return r.json()


def store_bbga_buurten():
    session.execute("TRUNCATE TABLE buurt_counts")
    session.commit()

    # Collect the six most recent years, current year included.
    start = datetime.datetime.now().year
    years = [start - x for x in range(6)]

    for year in years:
        raw_data = _get_bbga_bevolking(year)
        _store_raw_buurt_data(raw_data)


def main(args, engine, session):
    reset_table(engine)
    store_bbga_buurten()


if __name__ == "__main__":
    desc = "Collect Buurt Inhabitants data from BBGA"
    inputparser = argparse.ArgumentParser(description=desc)
    args = inputparser.parse_args()
    engine = db_helper.make_engine(environment=KILO_ENVIRONMENT_OVERRIDES)
    session = db_helper.set_session(engine)
    main(args, engine, session)
    session.close()
def setup_db(self, make_engine):
    if make_engine:
        self.engine = db_helper.make_engine(section="docker")
        db_helper.set_session(self.engine)