Пример #1
0
def init():
    """Seed the database with default rows: status, settings and the
    default brightness/color-temperature curves.

    Idempotent — each default is created only when missing, so it is safe
    to call on every startup.
    """
    db = SessionLocal()
    try:
        status = crud.status.get(db)
        if not status:
            crud.status.create(db, obj_in={'id': 0, 'status': False})

        settings = crud.settings.get(db)
        if not settings:
            crud.settings.create(db, obj_in={'id': 0, 'smart_off': True})

        bri_curve = crud.curve.get_default_by_kind(db, kind='bri')
        if not bri_curve:
            bri_curve = models.Curve(name='Default', kind='bri', default=True)
            # Points attach themselves to the curve through the relationship
            # kwarg, so adding the curve cascades to its points.
            models.Point(x=0, y=245, first=True, curve=bri_curve)
            models.Point(x=360, y=216, curve=bri_curve)
            models.Point(x=660, y=182, curve=bri_curve)
            models.Point(x=900, y=221, curve=bri_curve)
            models.Point(x=1080, y=27, curve=bri_curve)
            models.Point(x=1440, y=12, last=True, curve=bri_curve)
            db.add(bri_curve)

        ct_curve = crud.curve.get_default_by_kind(db, kind='ct')
        if not ct_curve:
            ct_curve = models.Curve(name='Default', kind='ct', default=True)
            models.Point(x=0, y=153, first=True, curve=ct_curve)
            models.Point(x=420, y=324, curve=ct_curve)
            models.Point(x=900, y=347, curve=ct_curve)
            models.Point(x=1080, y=475, curve=ct_curve)
            models.Point(x=1440, y=500, last=True, curve=ct_curve)
            db.add(ct_curve)

        db.commit()
    finally:
        # Close the session even if a query or the commit raises.
        db.close()
Пример #2
0
def create_default_data():
    """Seed the users and tasks tables with a fixed set of demo records.

    Every seeded user shares the same bcrypt-hashed default password
    ("123"); the hash is computed once and reused.
    """
    hpassword = bcrypt.hashpw(bytes("123", 'utf-8'), bcrypt.gensalt())
    db = SessionLocal()
    try:
        objects = [
            models.User(username="******", fullname="Archibald Weng", password=hpassword),
            models.User(username="******", fullname="Boris Tien", password=hpassword),
            models.User(username="******", fullname="Larry Chuang", password=hpassword),
            models.User(username="******", fullname="Thomas Lin", password=hpassword),
            models.User(username="******", fullname="Dora Yen", password=hpassword),
            models.User(username="******", fullname="Wimy Kuo", password=hpassword),
            models.User(username="******", fullname="Kathy Wu", password=hpassword),

            models.Task(taskname="TIIP", fullname="TIIP-全方位減碳系統服務平台開發計畫", organization="永智"),
            models.Task(taskname="108高雄溫管", fullname="108年高雄市溫室氣體管理計畫", organization="臺中市政府環境保護局"),
            models.Task(taskname="108台中溫減", fullname="108年台中市溫室氣體減量計畫", organization="臺中市政府環境保護局"),
            models.Task(taskname="109台中溫減", fullname="109年台中市溫室氣體減量計畫", organization="臺中市政府環境保護局"),
            models.Task(taskname="台電-風力", fullname="台灣電力公司風力發電計畫(一)抵換專案", organization="台灣電力股份有限公司環境保護處"),
            models.Task(taskname="台電-太陽光電", fullname="台灣電力公司台中及興達太陽光電計畫", organization="台灣電力股份有限公司環境保護處"),
            models.Task(taskname="108台電-碳限制計畫", fullname="台電公司碳限制管理及對策研析計畫", organization="台灣電力股份有限公司環境保護處"),
            models.Task(taskname="尚承-盤查", fullname="尚承鋼鐵股份有限公司108年度溫室氣體盤查", organization="尚承鋼鐵股份有限公司"),
            models.Task(taskname="尚承-抵換", fullname="尚承鋼鐵加熱爐能源效率提升及天然氣替代重油溫室氣體減量抵換專案", organization="尚承鋼鐵股份有限公司"),
            models.Task(taskname="109低碳辦", fullname="109年臺中市低碳城市推動計畫", organization="臺中市政府環境保護局"),
            models.Task(taskname="106CORSIA", fullname="推動我國國際航空業碳抵換及減量計畫第一期計畫", organization="交通部民用航空局"),
            models.Task(taskname="109CORSIA", fullname="推動我國國際航空業碳抵換及減量計畫第二期計畫", organization="交通部民用航空局"),

            models.Task(taskname="瑞振工業-盤查及SBTi專案", fullname="瑞振工業溫室氣體管理專案", organization="瑞振工業公司"),
            # NOTE(review): exact duplicate of the "108台電-碳限制計畫" task
            # above — confirm whether the second row is intended.
            models.Task(taskname="108台電-碳限制計畫", fullname="台電公司碳限制管理及對策研析計畫", organization="台灣電力股份有限公司環境保護處"),

        ]
        db.bulk_save_objects(objects)
        db.commit()
    finally:
        # Close the session even if bulk_save_objects or commit raises.
        db.close()
Пример #3
0
def save_blog(blog: schemas.Blog):
    """Persist a new blog post and return the input schema object.

    :param blog: validated blog payload (title/description/category/body).
    :return: the same ``blog`` object that was passed in.
    """
    db = SessionLocal()
    try:
        db_record = models.Blog(
            # NOTE(review): "%x" is locale-dependent date formatting —
            # confirm an ISO format was not intended instead.
            date=datetime.datetime.now().strftime("%x"),
            title=blog.title,
            description=blog.description,
            category=blog.category,
            body=blog.body,
        )
        db.add(db_record)
        db.commit()
        return blog
    finally:
        # Original never closed the session; close it even on error.
        db.close()
Пример #4
0
def reset_smart_off():
    """Reset the smart-off state of every known light.

    Per-light HTTP failures are logged and skipped so one bad light does
    not abort the whole reset.
    """
    db = SessionLocal()
    try:
        api = get_api(db)
        lights = crud.light.get_multi(db)
        for light in lights:
            try:
                crud.light.reset_smart_off(db, api, light=light)
            except HTTPException as e:
                # Best-effort: log and continue with the remaining lights.
                log.error(e.detail)
        db.commit()
        log.info('Reset smart off for %s lights', len(lights))
    finally:
        # Close the session even if get_api/commit raises.
        db.close()
Пример #5
0
def fetch_stock_data(id: int):
    """Refresh one Stock row with the latest Yahoo Finance figures.

    :param id: primary key of the ``Stock`` row to update.
    :raises ValueError: if no stock with that id exists.
    """
    db = SessionLocal()
    try:
        stock = db.query(Stock).filter(Stock.id == id).first()
        if stock is None:
            # Original crashed with AttributeError here; fail explicitly.
            raise ValueError(f"No stock with id {id}")

        # Fetch the info mapping once instead of five separate lookups.
        info = yfinance.Ticker(stock.symbol).info

        stock.ma200 = info['twoHundredDayAverage']
        stock.ma50 = info['fiftyDayAverage']
        stock.price = info['previousClose']
        stock.forward_pe = info['forwardPE']
        stock.forward_eps = info['forwardEps']
        if info['dividendYield'] is not None:
            stock.dividend_yield = info['dividendYield'] * 100

        db.add(stock)
        db.commit()
    finally:
        # Original never closed the session; always release it.
        db.close()
def test_get_prescriptions(client):
    """GET /prescriptions/ returns the one prescription created here."""
    db = SessionLocal()
    try:
        prescription = schemas.PrescriptionCreate(clinic=1,
                                                  physician=1,
                                                  patient=1,
                                                  text='Dipirona 1x ao dia')
        db_prescription = crud.create_prescription(db=db,
                                                   prescription=prescription)
        db.commit()

        response = client.get('/prescriptions/')
        assert response.status_code == 200
        assert db_prescription.id == response.json()[0]['id']

        count_prescriptions = db.query(models.Prescription.id).count()
        assert count_prescriptions == 1
    finally:
        # Clean up even when an assertion fails, so later tests start
        # from an empty table (the original leaked the row on failure).
        db.query(models.Prescription).delete()
        db.commit()
        db.close()
Пример #7
0
async def user_signup(user: UserCreate, session: SessionLocal = Depends(get_db)):
    '''
        Create a new user account.

        Raises HTTPException(400) when the email/username already exists
        or the database write fails.
    '''
    try:
        new_user = create_user(user=user, session=session)
    except ValueError as e:
        # Email or username already exists.  str(e) instead of e.args[0]:
        # the latter raises IndexError for argument-less exceptions.
        raise HTTPException(status_code=400, detail=str(e)) from e

    if new_user:
        try:
            session.add(new_user)
            session.commit()
            session.refresh(new_user)
        except Exception as e:
            # Roll back so the session stays usable for the caller.
            session.rollback()
            raise HTTPException(status_code=400, detail=str(e)) from e

    return new_user
Пример #8
0
    async def handle(self, topic: str, payload: Dict):
        """Dispatch one incoming MQTT message to the matching upsert logic.

        Topics have the shape ``moca/via/<service>/<connector_id>/<command>``
        where command is one of "contacts", "chats" or "messages".  Each
        record in ``payload`` is upserted with ``db.merge`` + ``db.commit``.
        Messages for unknown topics or connectors are silently ignored.
        The database session is always closed on exit.
        """
        db = SessionLocal()

        try:
            parts = topic.split("/")

            # Ignore anything that is not under the "moca/" prefix.
            if len(parts) < 2 or parts[0] != "moca":
                return

            # moca/...

            if parts[1] == "via" and len(parts) >= 5:
                # data from a service (e.g. moca-service-telegram)
                # example: moca/via/telegram/4/contacts --> add or update contact(s)

                service: str = parts[2]
                connector_id: int = int(parts[3])
                command: str = parts[4]

                connector = crud.get_connector_by_connector_id(
                    db, service, connector_id)

                # Unknown connector: drop the message.
                if not connector:
                    return

                if command == "contacts":
                    # Upsert every contact record in the payload.
                    for contact_data in payload:
                        internal_contact_id = contact_data.get("contact_id")

                        contact = models.Contact(
                            contact_id=crud.get_id(connector.connector_id,
                                                   internal_contact_id),
                            internal_id=internal_contact_id,
                            service_id=service,
                            name=contact_data.get("name"),
                            username=contact_data.get("username"),
                            phone=contact_data.get("phone"),
                            avatar=None,
                            connector_id=connector.connector_id,
                        )
                        db.merge(contact)
                        db.commit()

                elif command == "chats":
                    # Upsert each chat, its last message's sender and its
                    # participant list.
                    for chat_data in payload:
                        internal_chat_id = chat_data.get("chat_id")

                        chat = models.Chat(
                            chat_id=crud.get_id(connector.connector_id,
                                                internal_chat_id),
                            connector_id=connector.connector_id,
                            internal_id=internal_chat_id,
                            name=chat_data.get("name"),
                            is_muted=False,
                            is_archived=False,
                        )

                        db.merge(chat)
                        db.commit()

                        chat_id = chat.chat_id

                        last_message = chat_data.get("last_message")

                        if last_message:

                            # Get contact of the sender, else ask for it
                            internal_contact_id = last_message.get(
                                "contact_id")

                            # check if contact exists, else request contact
                            maybe_contact = crud.get_contact_from_connector(
                                db,
                                connector.user_id,
                                connector.connector_id,
                                internal_contact_id,
                            )

                            if not maybe_contact:
                                # Sender unknown: fetch it from the service
                                # and persist it before referencing it.
                                contact = await self.get_contact(
                                    connector.connector_type,
                                    connector.connector_id,
                                    internal_contact_id,
                                )
                                new_contact = models.Contact(
                                    contact_id=crud.get_id(
                                        connector.connector_id,
                                        internal_contact_id),
                                    internal_id=internal_contact_id,
                                    service_id=connector.connector_type,
                                    connector_id=connector.connector_id,
                                    name=contact.get("name"),
                                    username=contact.get("username"),
                                    phone=contact.get("phone"),
                                    avatar=contact.get("avatar"),
                                    is_self=False,
                                )
                                db.merge(new_contact)
                                db.commit()

                                contact_id = new_contact.contact_id
                            else:
                                contact_id = maybe_contact.contact_id

                            # TODO: Reimplement last message
                            # new_last_message = models.Message(
                            #     message_id=last_message.get("message_id"),
                            #     contact_id=contact_id,
                            #     internal_id=chat_id,
                            #     message=json.dumps(last_message.get("message")),
                            #     sent_datetime=datetime.fromisoformat(last_message.get("sent_datetime"))
                            # )

                            # db.merge(new_last_message)
                            # db.commit()

                        participants = chat_data.get("participants")
                        if participants:
                            for participant in participants:
                                c = crud.get_contact_from_connector(
                                    db,
                                    connector.user_id,
                                    connector.connector_id,
                                    participant,
                                )
                                if not c:
                                    # Participant unknown: fetch and persist it.
                                    contact = await self.get_contact(
                                        connector.connector_type,
                                        connector.connector_id,
                                        participant,
                                    )
                                    c = models.Contact(
                                        contact_id=crud.get_id(
                                            connector.connector_id,
                                            participant),
                                        internal_id=participant,
                                        service_id=connector.connector_type,
                                        connector_id=connector.connector_id,
                                        name=contact.get("name"),
                                        username=contact.get("username"),
                                        phone=contact.get("phone"),
                                        avatar=contact.get("avatar"),
                                        is_self=False,
                                    )
                                    db.merge(c)
                                    db.commit()

                                # Link participant to the chat; duplicates
                                # violate the PK, so roll back on conflict.
                                db.merge(
                                    models.ContactsChatsRelationship(
                                        contact_id=c.contact_id,
                                        chat_id=chat_id))
                                try:
                                    db.commit()
                                except sqlalchemy.exc.IntegrityError:
                                    db.rollback()

                elif command == "messages":
                    # Upsert each message, creating placeholder contact/chat
                    # rows when they are not known yet.
                    for message_data in payload:

                        # Get contact of the sender, else ask for it
                        internal_contact_id = message_data.get("contact_id")

                        # check if contact exists, else request contact
                        c = crud.get_contact_from_connector(
                            db,
                            connector.user_id,
                            connector.connector_id,
                            internal_contact_id,
                        )
                        if not c:
                            contact = await self.get_contact(
                                connector.connector_type,
                                connector.connector_id,
                                internal_contact_id,
                            )
                            c = models.Contact(
                                contact_id=crud.get_id(connector.connector_id,
                                                       internal_contact_id),
                                internal_id=internal_contact_id,
                                service_id=connector.connector_type,
                                connector_id=connector.connector_id,
                                name=contact.get("name"),
                                username=contact.get("username"),
                                phone=contact.get("phone"),
                                avatar=contact.get("avatar"),
                                is_self=False,
                            )
                            db.merge(c)
                            db.commit()
                        else:
                            pass

                        chat = (db.query(models.Chat).filter(
                            models.Chat.connector_id == connector_id,
                            models.Chat.internal_id == message_data.get(
                                "chat_id"),
                        ).first())

                        if not chat:
                            # Placeholder chat; the name is filled in when a
                            # "chats" update arrives.
                            chat = models.Chat(
                                chat_id=crud.get_id(
                                    connector.connector_id,
                                    message_data.get("chat_id")),
                                connector_id=connector_id,
                                internal_id=message_data.get("chat_id"),
                                name="Loading...",
                                is_muted=False,
                                is_archived=False,
                            )

                            db.merge(chat)
                            db.commit()

                        new_last_message = models.Message(
                            message_id=crud.get_message_id(
                                connector.connector_id,
                                message_data.get("message_id"), chat.chat_id),
                            internal_id=message_data.get("message_id"),
                            contact_id=crud.get_id(connector.connector_id,
                                                   internal_contact_id),
                            chat_id=chat.chat_id,
                            message=json.dumps(message_data.get("message")),
                            # Strip a trailing "Z" suffix: fromisoformat (pre-3.11)
                            # rejects it.
                            sent_datetime=datetime.fromisoformat(
                                message_data.get("sent_datetime").split("Z")
                                [0]),
                        )

                        db.merge(new_last_message)
                        db.commit()

        finally:
            db.close()
Пример #9
0
import csv
import datetime

from app import models
from app.database import SessionLocal, engine

db = SessionLocal()

models.Base.metadata.create_all(bind=engine)

try:
    # Import fresh data only if the table is empty (idempotent seed).
    if db.query(models.Record).count() == 0:

        with open("sars_2003_complete_dataset_clean.csv", "r") as f:
            csv_reader = csv.DictReader(f)

            for row in csv_reader:
                db_record = models.Record(
                    date=datetime.datetime.strptime(row["date"], "%Y-%m-%d"),
                    country=row["country"],
                    cases=row["cases"],
                    deaths=row["deaths"],
                    recoveries=row["recoveries"],
                )
                db.add(db_record)

        # Commit once for the whole import instead of once per row —
        # one transaction, far fewer fsyncs.
        db.commit()
finally:
    db.close()
Пример #10
0
def clear_prescription_table():
    """Delete every row from the prescriptions table."""
    db = SessionLocal()
    try:
        db.query(models.Prescription).delete()
        db.commit()
    finally:
        # Close the session even if the delete/commit raises.
        db.close()
Пример #11
0
class DB:

    session = None

    def __init__(self):
        """Create tables, open a DB session, load the RKI source config
        and set up console + file logging for the importer."""

        models.Base.metadata.create_all(bind=engine)
        self.session = SessionLocal()
        self.date_today = date.today().isoformat()

        # Use a context manager so the file handle is closed (the
        # original json.load(open(...)) leaked it).
        with open(os.path.dirname(__file__) + "/../rki/sources.json",
                  "r") as sources_file:
            self.sources = json.load(sources_file)

        self.data_dir = "downloads"

        # NOTE(review): instantiating DB twice attaches duplicate handlers
        # to the shared "create_db" logger — confirm single-instance use.
        self.logger = logging.getLogger("create_db")
        self.logger.setLevel(logging.DEBUG)
        formatter = logging.Formatter(
            "%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        # Console handler at DEBUG level.
        ch = logging.StreamHandler()
        ch.setLevel(logging.DEBUG)
        ch.setFormatter(formatter)
        self.logger.addHandler(ch)
        # File handler at DEBUG level.
        fh = logging.FileHandler("create_db.log")
        fh.setLevel(logging.DEBUG)
        fh.setFormatter(formatter)
        self.logger.addHandler(fh)

    """

    def bundeslaender_data_update_from_csv(self):

        # bundeslaender = self.session.query(models.Bundesland).all()

        bundesland_data = (
            self.session.query(models.Bundesland_Daten_Taeglich)
            .order_by(models.Bundesland_Daten_Taeglich.Aktualisierung.desc())
            .first()
        )

        last_aktualisierung_timestamp = int(bundesland_data.Aktualisierung)
        last_aktualisierung_date = datetime.fromtimestamp(last_aktualisierung_timestamp)
        next_day = last_aktualisierung_date + timedelta(days=1)
        while True:
            date = next_day.date().isoformat()
            file = self.data_dir + "/" + date + "_RKI_Corona_Bundeslaender.csv.gz"
            if os.path.isfile(file):
                self.insert_bundeslaender_data_from_csv(date)
            else:
                self.logger.info("file does not exist " + file + ". stopping update")
                break
            next_day += timedelta(days=1)
    """

    def insert_bundeslaender_data_from_csv(self, date: str, filepath: str):
        """Import one day's RKI Bundesland CSV into the database.

        Creates missing ``Bundesland`` rows, inserts the daily figures
        unless the same ``Aktualisierung`` timestamp is already stored,
        and records the processed file in ``Inserted_csv_File``.

        :param date: ISO date string the file belongs to (logging and the
            ``Inserted_csv_File`` record only).
        :param filepath: path to the (gzipped) CSV file.
        """

        if not os.path.isfile(filepath):
            self.logger.warning("Could not find file " + filepath)
            return
        self.logger.info("reading " + filepath)

        rows = read_data_from_csv(
            filepath=filepath,
            expected_header_line=
            "OBJECTID_1,LAN_ew_AGS,LAN_ew_GEN,LAN_ew_BEZ,LAN_ew_EWZ,OBJECTID,Fallzahl,Aktualisierung,AGS_TXT,GlobalID,faelle_100000_EW,Death,cases7_bl_per_100k,cases7_bl,death7_bl,cases7_bl_per_100k_txt,AdmUnitId,SHAPE_Length,SHAPE_Area",
        )

        header = rows[0]
        for i in range(1, len(rows)):
            row = rows[i]

            bundesland_ID = row[header.index("OBJECTID_1")]

            # get bundesland from db or create it
            bundesland = (self.session.query(
                models.Bundesland).filter_by(ID=bundesland_ID).one_or_none())

            if bundesland is None:
                bundesland = models.Bundesland(
                    ID=bundesland_ID,
                    LAN_ew_GEN=row[header.index("LAN_ew_GEN")],
                    LAN_ew_BEZ=row[header.index("LAN_ew_BEZ")],
                    LAN_ew_EWZ=row[header.index("LAN_ew_EWZ")],
                )
                self.session.add(bundesland)

            aktualisierung_datetime = datetime.strptime(
                row[header.index("Aktualisierung")], "%Y/%m/%d %H:%M:%S+00")
            aktualisierung_timestamp = int(
                datetime.timestamp(aktualisierung_datetime))

            # check if current data is already in db by checking the timestamp
            bundesland_daten_taeglich = (self.session.query(
                models.Bundesland_Daten_Taeglich).filter(
                    (models.Bundesland_Daten_Taeglich.bundesland_id
                     == bundesland_ID),
                    (models.Bundesland_Daten_Taeglich.Aktualisierung
                     == aktualisierung_timestamp),
                ).one_or_none())

            if bundesland_daten_taeglich is None:
                self.logger.info("Adding data for " + bundesland.LAN_ew_GEN)
                bundesland_daten_taeglich = models.Bundesland_Daten_Taeglich(
                    Fallzahl=row[header.index("Fallzahl")],
                    Aktualisierung=aktualisierung_timestamp,
                    faelle_100000_EW=row[header.index("faelle_100000_EW")],
                    Death=row[header.index("Death")],
                    cases7_bl_per_100k=row[header.index("cases7_bl_per_100k")],
                    cases7_bl=row[header.index("cases7_bl")],
                    death7_bl=row[header.index("death7_bl")],
                )

                try:
                    bundesland.taegliche_daten.append(
                        bundesland_daten_taeglich)
                except Exception as e:
                    # Lazy %-formatting with %r: the original concatenated a
                    # list into a str and json.dumps()ed the exception, both
                    # of which raise TypeError inside the handler itself.
                    self.logger.error(
                        "Error: %s while trying to process line %r", e, row)
            else:
                self.logger.warning("Data for " + bundesland.LAN_ew_GEN +
                                    " for date " + date + " already in db")
        Inserted_csv_File = models.Inserted_csv_File(
            data_type="Bundesland",
            date=date,
            md5sum=md5(filepath),
            file_path=filepath,
            date_file_processed=datetime.now().isoformat(),
        )
        self.session.add(Inserted_csv_File)
        self.session.commit()

    """
    def landkreise_data_update_from_csv(filepath:str):

        landkreise_data = (
            self.session.query(models.Landkreis_Daten_Taeglich)
            .order_by(models.Landkreis_Daten_Taeglich.last_update.desc())
            .first()
        )

        last_update_timestamp = int(landkreise_data.last_update)
        last_update_date = datetime.fromtimestamp(last_update_timestamp)
        next_day = last_update_date + timedelta(days=1)
        while True:
            date = next_day.date().isoformat()
            file = self.data_dir + "/" + date + "_RKI_Corona_Landkreise.csv.gz"
            if os.path.isfile(file):
                self.insert_landkreise_data_from_csv(date)
            else:
                self.logger.info("file does not exist " + file + ". stopping update")
                break
            next_day += timedelta(days=1)
    """

    def insert_landkreise_data_from_csv(self, date: str, filepath: str):
        """Import one day's RKI Landkreis CSV into the database.

        Creates missing ``Landkreis`` rows, inserts the daily figures
        unless the same ``last_update`` timestamp is already stored, and
        records the processed file in ``Inserted_csv_File``.

        :param date: ISO date string the file belongs to (logging and the
            ``Inserted_csv_File`` record only).
        :param filepath: path to the (gzipped) CSV file.
        """

        if not os.path.isfile(filepath):
            self.logger.warning("Could not find file " + filepath)
            return
        self.logger.info("reading " + filepath)

        rows = read_data_from_csv(
            filepath=filepath,
            expected_header_line=
            "OBJECTID,ADE,GF,BSG,RS,AGS,SDV_RS,GEN,BEZ,IBZ,BEM,NBD,SN_L,SN_R,SN_K,SN_V1,SN_V2,SN_G,FK_S3,NUTS,RS_0,AGS_0,WSK,EWZ,KFL,DEBKG_ID,death_rate,cases,deaths,cases_per_100k,cases_per_population,BL,BL_ID,county,last_update,cases7_per_100k,recovered,EWZ_BL,cases7_bl_per_100k,cases7_bl,death7_bl,cases7_lk,death7_lk,cases7_per_100k_txt,AdmUnitId,SHAPE_Length,SHAPE_Area",
        )

        header = rows[0]
        for i in range(1, len(rows)):
            row = rows[i]
            landkreis_ID = row[header.index("OBJECTID")]

            # get landkreis from db or create it
            landkreis = (self.session.query(
                models.Landkreis).filter_by(ID=landkreis_ID).one_or_none())

            if landkreis is None:
                landkreis = models.Landkreis(
                    ID=landkreis_ID,
                    RS=row[header.index("RS")],
                    AGS=row[header.index("AGS")],
                    GEN=row[header.index("GEN")],
                    BEZ=row[header.index("BEZ")],
                    EWZ=row[header.index("EWZ")],
                    BL_ID=row[header.index("BL_ID")],
                )
                self.session.add(landkreis)
            # else:
            #    print("TODO: update it if necessary !")

            last_update_datetime = datetime.strptime(
                row[header.index("last_update")], "%d.%m.%Y, %H:%M Uhr")
            last_update_timestamp = int(
                datetime.timestamp(last_update_datetime))

            # check if current data is already in db by checking the timestamp
            landkreis_daten_taeglich = (self.session.query(
                models.Landkreis_Daten_Taeglich).filter(
                    (models.Landkreis_Daten_Taeglich.landkreis_id
                     == landkreis_ID),
                    (models.Landkreis_Daten_Taeglich.last_update
                     == last_update_timestamp),
                ).one_or_none())

            if landkreis_daten_taeglich is None:
                self.logger.info("Adding data for " + landkreis.GEN)
                landkreis_daten_taeglich = models.Landkreis_Daten_Taeglich(
                    death_rate=row[header.index("death_rate")],
                    cases=row[header.index("cases")],
                    deaths=row[header.index("deaths")],
                    cases_per_100k=row[header.index("cases_per_100k")],
                    cases_per_population=row[header.index(
                        "cases_per_population")],
                    county=row[header.index("county")],
                    last_update=last_update_timestamp,
                    cases7_per_100k=row[header.index("cases7_per_100k")],
                    cases7_lk=row[header.index("cases7_lk")],
                    death7_lk=row[header.index("death7_lk")],
                    landkreis_id=landkreis_ID,
                )

                try:
                    self.session.add(landkreis_daten_taeglich)
                except Exception as e:
                    # Lazy %-formatting with %r: the original concatenated a
                    # list into a str and json.dumps()ed the exception, both
                    # of which raise TypeError inside the handler itself.
                    self.logger.error(
                        "Error: %s while trying to process line %r", e, row)
            else:
                self.logger.warning("Data for " + landkreis.GEN +
                                    " for date " + date + " already in db")

        Inserted_csv_File = models.Inserted_csv_File(
            data_type="Landkreis",
            date=date,
            md5sum=md5(filepath),
            file_path=filepath,
            date_file_processed=datetime.now().isoformat(),
        )
        self.session.add(Inserted_csv_File)
        self.session.commit()

    def insert_or_update_faelle_data_from_csv(self, date: str, filepath: str):
        """
        Achtung: diese funktion nur auf leere Tabellen für Altersgruppen, Faelle anwenden !
        """

        # filepath = self.data_dir + "/" + date + "_RKI_COVID19.csv.gz"
        if not os.path.isfile(filepath):
            self.logger.warning("Cound not find file " + filepath)
            return
        rows = []
        self.logger.info("reading " + filepath)
        rows = read_data_from_csv(
            filepath=filepath,
            expected_header_line=
            "ObjectId,IdBundesland,Bundesland,Landkreis,Altersgruppe,Geschlecht,AnzahlFall,AnzahlTodesfall,Meldedatum,IdLandkreis,Datenstand,NeuerFall,NeuerTodesfall,Refdatum,NeuGenesen,AnzahlGenesen,IstErkrankungsbeginn,Altersgruppe2",
        )

        header = rows[0]

        # Alle einzigartigen Altersgruppen finden
        altersgruppen_set = set()
        ag_index = header.index("Altersgruppe")
        for i in range(1, len(rows)):
            row = rows[i]
            altersgruppe_string = row[ag_index]
            altersgruppen_set.add(altersgruppe_string)

        # alle Altersgruppen einfügen
        altersgruppe_per_name = {}
        for ag_name in altersgruppen_set:
            altersgruppe = models.Altersgruppe(name=ag_name)
            self.session.add(altersgruppe)
            altersgruppe_per_name[ag_name] = altersgruppe
        self.session.commit()

        # alle Landkreise laden

        landkreise = self.session.query(models.Landkreis).all()
        landkreise_per_RS_ID = {}
        for lk in landkreise:
            landkreise_per_RS_ID[int(lk.RS)] = lk

        # alle Bundesländer  laden
        bundeslaender = self.session.query(models.Bundesland).all()
        bundeslaender_per_ID = {}
        for bl in bundeslaender:
            bundeslaender_per_ID[bl.ID] = bl

        objectId_index = header.index("ObjectId")
        IdBundesland = header.index("IdBundesland")

        altersgruppe_index = header.index("Altersgruppe")
        geschlecht_index = header.index("Geschlecht")
        anzahlFall_index = header.index("AnzahlFall")
        anzahlTodesFall_index = header.index("AnzahlTodesfall")
        meldeDatum_index = header.index("Meldedatum")
        idLandkreis_index = header.index("IdLandkreis")
        datenStand_index = header.index("Datenstand")
        neuerFall_index = header.index("NeuerFall")
        neuerTodesFall_index = header.index("NeuerTodesfall")
        refDatum_index = header.index("Refdatum")
        neuGenesen_index = header.index("NeuGenesen")
        anzahlGenesen_index = header.index("AnzahlGenesen")
        istErkrankungsbeginn_index = header.index("IstErkrankungsbeginn")
        altersgruppe2_index = header.index("Altersgruppe2")

        counter = 0
        self.logger.debug("getting all IDs")
        db_rows = (
            self.session.query(models.Fall_Daten_Taeglich)
            # .options(load_only("ID"))
            .all())
        # all_IDs = []
        db_rows_per_ID = {}
        for row in db_rows:
            # all_IDs.append(int(row.ID))
            db_rows_per_ID[int(row.ID)] = row
        # print(type(all_IDs[0]))

        self.logger.debug("looping all rows ")
        for i in range(1, len(rows)):
            counter += 1
            row = rows[i]

            ID = row[objectId_index]

            altersgruppe_string = row[altersgruppe_index]

            meldeDatum_datetime = datetime.strptime(
                row[header.index("Meldedatum")], "%Y/%m/%d %H:%M:%S+00")
            meldeDatum_timestamp = int(datetime.timestamp(meldeDatum_datetime))

            datenStand_datetime = datetime.strptime(
                row[header.index("Datenstand")], "%d.%m.%Y, %H:%M Uhr")
            datenStand_timestamp = int(datetime.timestamp(datenStand_datetime))

            refDatum_datetime = datetime.strptime(
                row[header.index("Refdatum")], "%Y/%m/%d %H:%M:%S+00")
            refDatum_timestamp = int(datetime.timestamp(refDatum_datetime))

            ID_Landkreis = row[idLandkreis_index]
            ID_Bundesland = row[IdBundesland]
            """
            fall_daten_taeglich = (
                self.session.query(models.Fall_Daten_Taeglich)
                .filter_by(ID=ID)
                .one_or_none()
            ) """

            if int(ID) not in db_rows_per_ID:
                # self.logger.debug("new ID " + str(ID))

                # if fall_daten_taeglich is None:
                fall_daten_taeglich = models.Fall_Daten_Taeglich(
                    geschlecht=row[geschlecht_index],
                    anzahlFall=row[anzahlFall_index],
                    anzahlTodesFall=row[anzahlTodesFall_index],
                    meldeDatum=meldeDatum_datetime,
                    datenStand=datenStand_timestamp,
                    neuerFall=row[neuerFall_index],
                    neuerTodesFall=row[neuerTodesFall_index],
                    refDatum=refDatum_timestamp,
                    neuGenesen=row[neuGenesen_index],
                    anzahlGenesen=row[anzahlGenesen_index],
                    istErkrankungsbeginn=bool(
                        int(row[istErkrankungsbeginn_index])),
                    altersgruppe2=row[altersgruppe2_index],
                )
                fall_daten_taeglich.altersgruppe = altersgruppe_per_name[
                    altersgruppe_string]

                fall_daten_taeglich.landkreis = landkreise_per_RS_ID[int(
                    ID_Landkreis)]
                fall_daten_taeglich.bundesland = bundeslaender_per_ID[int(
                    ID_Bundesland)]
                self.session.add(fall_daten_taeglich)
            else:
                self.logger.debug("updating ID " + str(ID))
                data_to_update = {}
                data_to_update["geschlecht"] = row[geschlecht_index]
                data_to_update["anzahlFall"] = row[anzahlFall_index]
                data_to_update["anzahlTodesFall"] = row[anzahlTodesFall_index]
                data_to_update["meldeDatum"] = meldeDatum_datetime
                data_to_update["neuerFall"] = row[neuerFall_index]
                data_to_update["neuerTodesFall"] = row[neuerTodesFall_index]
                data_to_update["refDatum"] = refDatum_timestamp
                data_to_update["neuGenesen"] = row[neuGenesen_index]
                data_to_update["anzahlGenesen"] = row[anzahlGenesen_index]
                data_to_update["istErkrankungsbeginn"] = bool(
                    int(row[istErkrankungsbeginn_index]))
                data_to_update["altersgruppe2"] = row[altersgruppe2_index]

                fall_daten_taeglich = db_rows_per_ID[int(ID)]
                for key in data_to_update:
                    value = str(data_to_update[key])
                    old_value = str(getattr(fall_daten_taeglich, key))
                    if value != old_value:
                        self.logger.info("updated value for key " + key + "=" +
                                         str(value) + " in row " + str(i + 1) +
                                         " it was " + str(old_value))

                        setattr(fall_daten_taeglich, key, value)

            if counter > 50000:

                percent = round((i + 1) / len(rows) * 100, 1)
                self.logger.info("adding/updating Faelle, " + str(percent) +
                                 "% done")
                self.session.commit()
                counter = 0
            # free memory
            rows[i] = None
        self.session.commit()
        Inserted_csv_File = models.Inserted_csv_File(
            data_type="Fall",
            date=date,
            md5sum=md5(filepath),
            file_path=filepath,
            date_file_processed=datetime.now().isoformat(),
        )
        self.session.add(Inserted_csv_File)
        self.session.commit()

    def _clear_db(self):
        """Delete the on-disk SQLite database and start with a fresh schema.

        Closes the current session, removes the database file (if it
        exists), recreates all tables from the model metadata and opens a
        new session.
        """
        self.logger.debug("clearing database")
        self.session.close()
        # build the path once; the db file lives one level above this module
        db_path = os.path.join(os.path.dirname(__file__), "..", "database.db")
        self.logger.debug(db_path)
        # guard: on a first run there is no database file yet
        if os.path.exists(db_path):
            os.remove(db_path)
        models.Base.metadata.create_all(bind=engine)
        self.session = SessionLocal()

    def create(self, date):
        """Rebuild the database from scratch for the given download *date*.

        Wipes the existing database, tunes SQLite for bulk inserts via
        connection pragmas, then imports the Bundesland, Landkreis and
        Fall CSV files for *date* from ``self.data_dir``.
        """
        from sqlalchemy import event
        from sqlalchemy.engine import Engine

        # NOTE(review): this registers a new engine-wide listener on every
        # call; repeated create() calls stack listeners — confirm intended.
        @event.listens_for(Engine, "connect")
        def set_sqlite_pragma(dbapi_connection, connection_record):
            # speed over durability: fine here, the db is rebuilt from CSVs
            cursor = dbapi_connection.cursor()
            for pragma in (
                    "PRAGMA journal_mode=OFF",
                    "PRAGMA cache_size = -20000",
                    "PRAGMA SYNCHRONOUS = OFF",
                    "PRAGMA LOCKING_MODE = EXCLUSIVE",
            ):
                cursor.execute(pragma)
            cursor.close()

        self._clear_db()

        prefix = self.data_dir + "/" + date
        self.insert_bundeslaender_data_from_csv(
            date=date, filepath=prefix + "_RKI_Corona_Bundeslaender.csv.gz")
        self.insert_landkreise_data_from_csv(
            date=date, filepath=prefix + "_RKI_Corona_Landkreise.csv.gz")
        self.insert_or_update_faelle_data_from_csv(
            date=date, filepath=prefix + "_RKI_COVID19.csv.gz")

    # get last date of data from table

    def _pending_csv_files(self, data_type, glob_pattern):
        """Return basenames of downloaded csv files not yet imported.

        Compares the files recorded in ``Inserted_csv_File`` for
        *data_type* against the files matching *glob_pattern* on disk.
        Files recorded in the db but missing on disk are logged as a
        warning and ignored.
        """
        recorded = (self.session.query(models.Inserted_csv_File)
                    .filter_by(data_type=data_type).all())
        in_db = {os.path.basename(row.file_path) for row in recorded}

        on_disk = [os.path.basename(f) for f in glob.glob(glob_pattern)]

        for name in in_db:
            if name not in on_disk:
                # fixed: was self.logger.waring (AttributeError at runtime)
                self.logger.warning("csv file is in db but not on disk: " +
                                    name)

        return [name for name in on_disk if name not in in_db]

    def update(self):
        """Import every downloaded csv file that is not yet in the database.

        Handles the three data types (Bundesland, Landkreis, Fall) in
        turn; the import date is parsed from each file name. Files whose
        name contains no date are logged as an error and skipped.
        """
        re_date = re.compile(r"(202\d-\d+-\d+)")

        # (data_type in Inserted_csv_File, glob pattern, import function)
        jobs = (
            ("Bundesland", "downloads/*_Bundeslaender.csv.gz",
             self.insert_bundeslaender_data_from_csv),
            ("Landkreis", "downloads/*_Landkreise.csv.gz",
             self.insert_landkreise_data_from_csv),
            ("Fall", "downloads/*_COVID19.csv.gz",
             self.insert_or_update_faelle_data_from_csv),
        )

        for data_type, pattern, insert_fn in jobs:
            pending = self._pending_csv_files(data_type, pattern)
            if data_type == "Fall":
                # preserved from the original: the Fall file list was printed
                print(pending)
            for name in pending:
                m = re_date.search(name)
                if m is None:
                    self.logger.error(
                        "could not determine date in filepath " + name)
                    # fixed: the original fell through and crashed on
                    # m.group(1) with m being None
                    continue
                insert_fn(date=m.group(1), filepath="downloads/" + name)
        """