def create_all_workers_util_reports(session: sessionmaker):
    all_workers = session.query(Worker)
    for w in all_workers:
        # How many different days do we have on record for this worker?
        # Also grab the year number so we can calculate the week number
        # accurately.
        unique_days = (session.query(Entry.day_number, Entry.year_number)
                       .filter(Entry.worker_id == w.id)
                       .distinct())
        # We start at the day level to ensure that anything over 8 hours
        # in one day counts as double hours.
        week_hours = {}
        for ud in unique_days:
            day_hours = (session.query(func.sum(Entry.hours))
                         .filter(Entry.worker_id == w.id)
                         .filter(Entry.day_number == ud.day_number)
                         .one())
            day_hours = day_hours[0]
            week_number = utils.get_week_number_from_day_number(
                ud.year_number, ud.day_number)
            # Excess and normal hours are rolled into the running total for
            # the week here.
            weighted_day_hours = utils.get_weighted_day_hours(day_hours)
            week_hours[week_number] = (
                week_hours.get(week_number, 0) + weighted_day_hours)
        create_worker_util_reports(session, w.id, week_hours)
    session.commit()

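# The weighting helper is defined elsewhere. Based on the comments in
# create_all_workers_util_reports ("anything over 8 hours in one day counts as
# double hours"), a minimal sketch of utils.get_weighted_day_hours could look
# like this; the 8-hour threshold and the doubling rule are assumptions taken
# from those comments, not a confirmed implementation.
def get_weighted_day_hours(day_hours: float, daily_limit: float = 8.0) -> float:
    # Hours up to the daily limit count once; anything beyond it counts twice.
    excess = max(day_hours - daily_limit, 0)
    return min(day_hours, daily_limit) + 2 * excess
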
def create_worker_util_reports(session: sessionmaker, worker_id: int,
                               week_hours: dict):
    for week, hours in week_hours.items():
        result = utils.calc_util_percent(hours)
        util_report = UtilReport(worker_id=worker_id,
                                 week_number=week,
                                 percent=result)
        session.add(util_report)

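# calc_util_percent also lives in utils. A minimal sketch, assuming the
# utilization baseline is a standard 40-hour week (the baseline value is an
# assumption; the real constant is defined elsewhere):
def calc_util_percent(hours: float, full_week_hours: float = 40.0) -> float:
    # Utilization is simply weighted hours worked over the weekly baseline.
    return round(hours / full_week_hours * 100, 2)
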
def create(_self_name, db: sessionLocal, values: dict):
    # Never trust a client-supplied primary key; let the database assign it.
    if 'id' in values:
        values.pop('id')
    rec = _self_name(**values)
    db.add(rec)
    db.commit()
    if rec.id:
        return rec.id

def _unlink_id(_class_data, db: sessionLocal, id: int):
    existing_rec = db.query(_class_data).get(id)
    if existing_rec:
        db.delete(existing_rec)
        db.commit()
        return True
    else:
        return False

def _update_rec(existing_rec, db: sessionLocal, values: dict):
    if 'id' in values:
        values.pop('id')
    for key in values:
        # Only update columns that actually exist on the record, and skip
        # falsy values so partial updates do not blank out fields.
        if key in existing_rec.__dict__ and values.get(key):
            setattr(existing_rec, key, values[key])
    db.commit()
    return existing_rec

def get_or_create(session: sessionmaker, model: object, **kwargs):
    """
    Takes a database `Session`, some kind of model (e.g. `Worker`), and any
    information we would store about the given model instance. Returns the
    matching instance if one already exists; otherwise creates, commits, and
    returns a new one.
    """
    instance = session.query(model).filter_by(**kwargs).first()
    if not instance:
        instance = model(**kwargs)
        session.add(instance)
        session.commit()
    return instance

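# Example usage of get_or_create: fetching a Worker by name creates the row on
# first sight and reuses it afterwards, so repeated imports stay idempotent.
# (The "Alice" value is illustrative only.)
# worker = get_or_create(session, Worker, name="Alice")
# same_worker = get_or_create(session, Worker, name="Alice")
# assert worker.id == same_worker.id
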
def _get_all(_self_name, db: sessionLocal):
    records = db.query(_self_name).all()
    for record in records:
        # The participants column comes back as a "{a,b,c}" style string;
        # expose it as a plain Python list instead.
        if 'participants' in record.__dict__:
            record.participants = (record.__dict__
                                   .get('participants', '')
                                   .replace('{', '')
                                   .replace('}', '')
                                   .split(','))
    return records

def _get_by_id(_self_name, db: sessionLocal, id: int):
    record = db.query(_self_name).get(id)
    if record and 'participants' in record.__dict__:
        # Same "{a,b,c}" string-to-list conversion as in _get_all.
        record.participants = (record.__dict__
                               .get('participants')
                               .replace('{', '')
                               .replace('}', '')
                               .split(','))
    return record

def db_session(database: sessionmaker):
    """Creates a session for use against the test database."""
    with database.begin():
        with database() as session:
            yield session
            # Clean up the state of the test database during teardown.
            session.rollback()

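# db_session is written as a generator fixture: everything a test does inside
# the yielded session is rolled back afterwards. A hedged sketch of a test
# using it (this assumes db_session is registered as a pytest fixture under
# that name, which is not shown above):
# def test_worker_is_not_persisted(db_session):
#     db_session.add(Worker(name="temp"))
#     db_session.flush()
#     assert db_session.query(Worker).count() == 1
#     # the rollback in the fixture teardown discards this row
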
def update_op(session: sessionmaker, model, nsize: int) -> None:
    # Pick a random column that we know how to fake data for.
    column: str = choice(list(FIELDS.keys()))
    if column not in [c.name for c in model.__table__.columns]:
        raise RuntimeError(f"column {column} not on {model.__table__}")
    faker: Faker = Faker()
    with Timer(f"Updated {nsize} {model.__table__}"):
        for _ in range(nsize):
            field = FIELDS.get(column)
            value = getattr(faker, field)()
            row = (session.query(model)
                   .filter(getattr(model, column) != value)
                   .order_by(sa.func.random())
                   .limit(1)
                   .all())
            if row:
                print(f'Updating {model.__table__} SET {column} = "{value}"')
                try:
                    setattr(row[0], column, value)
                    session.commit()
                except Exception:
                    session.rollback()

def get_percent_util_on_week(session: sessionmaker, worker_name: str,
                             week_number: int):
    worker_id = get_worker_id_from_name(session, worker_name)
    ut_report = (session.query(UtilReport)
                 .filter(UtilReport.week_number == week_number)
                 .filter(UtilReport.worker_id == worker_id)
                 .first())
    return ut_report.percent if ut_report else 0

def insert_fixtures(
    db_session: sessionmaker,
    config: Config,
    fixture_file: str,
    models_module: str = "captchamonitor.utils.models",
) -> None:
    """
    Inserts the given JSON formatted fixture file into the database

    :param db_session: Database session used to connect to the database
    :type db_session: sessionmaker
    :param config: The config class instance that contains global configuration values
    :type config: Config
    :param fixture_file: Absolute path to the fixture file
    :type fixture_file: str
    :param models_module: The location of the file that stores the database models,
        defaults to "captchamonitor.utils.models"
    :type models_module: str
    :raises Exception: If there was an issue with inserting into the database
    """
    fixture_file_path = os.path.join(config["fixture_location"], fixture_file)

    with open(fixture_file_path, "r") as file:
        fixture = json.loads(file.read())

    instances = []
    for data in fixture:
        if "model" in data:
            module = importlib.import_module(models_module)
            model = getattr(module, data["model"])
            instance = model(**data["fields"])
            instances.append(instance)

    try:
        for instance in instances:
            db_session.merge(instance)
            db_session.flush()
        db_session.commit()
    except Exception:
        db_session.rollback()
        raise

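# Based on how insert_fixtures reads the file, a fixture is a JSON list of
# objects, each with a "model" name and a "fields" mapping passed to that
# model's constructor. The model and field names below are illustrative only:
# [
#     {
#         "model": "ExampleModel",
#         "fields": {"name": "example", "enabled": true}
#     }
# ]
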
def delete_op(session: sessionmaker, model, nsize: int) -> None:
    with Timer(f"Deleted {nsize} {model.__table__}"):
        for _ in range(nsize):
            # Pick a random row and look up the table's primary key column.
            row = (session.query(model)
                   .order_by(sa.func.random())
                   .limit(1)
                   .all())
            pk = [
                column.name
                for column in model.__table__.columns
                if column.primary_key
            ][0]
            if row:
                try:
                    value = getattr(row[0], pk)
                    print(f"Deleting {model.__table__} WHERE {pk} = {value}")
                    (session.query(model)
                     .filter(getattr(model, pk) == value)
                     .delete())
                    session.commit()
                except Exception:
                    session.rollback()

def process_new_deposits(
    *,
    web3: Web3,
    bridge_contracts: Dict[str, Contract],
    DBSession: sessionmaker,
    config: Config,
    start_block: int,
) -> Optional[int]:
    current_block = web3.eth.get_block_number()
    to_block = current_block - config.required_block_confirmations
    logger.info('Processing new deposits from %s to %s', start_block, to_block)
    if to_block < start_block:
        logger.info('to_block %s is smaller than start_block %s, not doing anything',
                    to_block, start_block)
        return None

    deposits = []
    for bridge_key, bridge_contract in bridge_contracts.items():
        logger.info("Getting deposits for %s", bridge_key)
        bridge_deposits = get_deposits(
            bridge_contract=bridge_contract,
            web3=web3,
            from_block=start_block,
            to_block=to_block,
            fee_percentage=config.deposit_fee_percentage,
        )
        logger.info("Found %s deposits for %s", len(bridge_deposits), bridge_key)
        deposits.extend(bridge_deposits)

    with DBSession.begin() as dbsession:
        for deposit in deposits:
            queue_reward(
                deposit=deposit,
                dbsession=dbsession,
                web3=web3,
                reward_amount_rbtc=config.reward_rbtc,
                deposit_thresholds=config.reward_thresholds,
            )
        last_processed_block = to_block
        update_last_processed_block(dbsession, last_processed_block)
        start_block = last_processed_block + 1
    return start_block

def search_variable(var: namedtuple, vars: List[namedtuple], i: int,
                    session: sessionmaker):
    while True:
        show_variable(None)
        print("Enter variable name:")
        inp = input()
        if inp == "exit":
            break
        db_var = session.query(Variable).filter(Variable.name == inp).first()
        if db_var:
            print()
            print(f"Set {var.name} to {db_var.content}")
            vars[i] = var._replace(content=db_var.content)
            break
        else:
            print()
            print("Variable name not found")
            print("Enter a new name, or type 'exit' to leave")
            print()
    return vars

def insert_op(session: sessionmaker, model, nsize: int) -> None:
    faker: Faker = Faker()
    rows: Set = set()
    for _ in range(nsize):
        kwargs = {}
        for column in model.__table__.columns:
            if column.foreign_keys:
                # Point the foreign key at a random existing row in the
                # referenced table.
                foreign_key = list(column.foreign_keys)[0]
                pk = [
                    c.name for c in foreign_key.column.table.columns
                    if c.primary_key
                ][0]
                fkey = (session.query(foreign_key.column.table)
                        .order_by(sa.func.random())
                        .limit(1)
                        .all())
                value = getattr(fkey[0], pk)
                kwargs[column.name] = value
            elif column.primary_key:
                continue
            else:
                field = FIELDS.get(column.name)
                if not field:
                    raise RuntimeError(f"field {column.name} not in mapping")
                value = getattr(faker, field)()
                kwargs[column.name] = value
        print(f"Inserting {model.__table__} VALUES {kwargs}")
        row = model(**kwargs)
        rows.add(row)
    with Timer(f"Created {nsize} {model.__table__} in"):
        try:
            session.add_all(rows)
            session.commit()
        except Exception as e:
            print(f"Exception {e}")
            session.rollback()

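# insert_op and update_op rely on a FIELDS mapping from column names to Faker
# provider names, which is defined elsewhere. A minimal sketch of what that
# mapping could look like (the column names here are assumptions):
# FIELDS: Dict[str, str] = {
#     "name": "name",
#     "email": "email",
#     "address": "address",
# }
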
def populate_from_entries(session: sessionmaker, entries: list):
    """
    This function doesn't return the entries it puts in the database;
    reading them back is handled by a separate function.
    """
    for e in entries:
        name, year_number, week_number, day_number, hours = parse_entry(e)
        worker = get_or_create(session, Worker, name=name)
        session.add(worker)
        entry = Entry(worker_id=worker.id,
                      year_number=year_number,
                      week_number=week_number,
                      day_number=day_number,
                      hours=hours)
        session.add(entry)
    session.commit()

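# parse_entry is not shown here. populate_from_entries only needs it to return
# a (name, year_number, week_number, day_number, hours) tuple; a minimal
# sketch, assuming each raw entry is a comma-separated string in that order
# (the delimiter and field order are assumptions):
def parse_entry(e: str):
    name, year_number, week_number, day_number, hours = e.split(",")
    return (name, int(year_number), int(week_number), int(day_number),
            float(hours))
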
def getUserData(session: sessionmaker, name: str):
    sql = """
        select af_user.TNAME, cds.YHMC, ds.SATELLITEID, ds.SENSORID,
               ds.PRODUCTLEVEL, ds.CLOUDPERCENT, ds.PRODUCTTYPE,
               ds.TLLATITUDE, ds.TLLONGITUDE, ds.TRLATITUDE, ds.TRLONGITUDE,
               ds.BRLATITUDE, ds.BRLONGITUDE, ds.BLLATITUDE, ds.BLLONGITUDE,
               ds.CENTERLONGITUDE, ds.CENTERLATITUDE
        from (ds_product_imagery ds
              inner join cdsjdd_yxsj_sj cd on ds.ID = cd.SJBH
              inner join cdsjdd_yxsj cds on cd.DDBH = cds.DDBH
              inner join af_user on cds.YHMC = af_user.USERNAME)
        where cds.YHMC like :YHMC;
    """
    resultproxy = session.execute(text(sql).bindparams(YHMC=name))
    results = resultproxy.fetchall()
    return np.mat(results)

def getUsers(session: sessionmaker) -> list:
    sql = """
        select af_user.TNAME, cds.YHMC, count(ds.ID) record_count
        from (ds_product_imagery ds
              join cdsjdd_yxsj_sj cd on ds.ID = cd.SJBH
              join cdsjdd_yxsj cds on cd.DDBH = cds.DDBH
              join af_user on cds.YHMC = af_user.USERNAME)
        group by cds.YHMC
        order by record_count DESC;
    """
    resultproxy = session.execute(text(sql))
    return resultproxy.fetchall()

def _get_all(_self_name, db: sessionLocal):
    return db.query(_self_name).all()

def get_unique_weeks(session: sessionmaker):
    weeks = session.query(Entry.week_number).distinct()
    return [i[0] for i in weeks]

def get_worker_id_from_name(session: sessionmaker, name):
    worker = session.query(Worker).filter(Worker.name == name).first()
    return worker.id

def get_all_workers_names(session: sessionmaker):
    return [i[0] for i in session.query(Worker.name)]

def _get_by_id(_self_name, db: sessionLocal, id: int):
    return db.query(_self_name).get(id)
