def save_log(message, created_at, user_id):
    """Save a log entry to the database."""
    log_exists = session.query(Log).filter(
        Log.created_at == created_at).count()
    if not log_exists:
        new_log = Log(message=message,
                      created_at=created_at,
                      user_id=int(user_id))
        session.add(new_log)
        session.commit()
        logger.debug(
            f'Log created at {created_at} by user {user_id} has been saved!')
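# A minimal sketch of the SQLAlchemy model and session that save_log above
# assumes; the names here (Base, engine, the Log columns) are guesses for
# illustration, not the original project's schema.
from sqlalchemy import Column, DateTime, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Log(Base):
    __tablename__ = 'logs'
    id = Column(Integer, primary_key=True)
    message = Column(String)
    created_at = Column(DateTime)
    user_id = Column(Integer)

engine = create_engine('sqlite:///logs.db')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()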
def create_log(timestamp, challenger_hp, opponent_hp, action, bid):
    if get_battle(bid) is None:
        return None
    new_log = Log(timestamp=timestamp,
                  challenger_hp=challenger_hp,
                  opponent_hp=opponent_hp,
                  action=action,
                  bid=bid)
    db.session.add(new_log)
    db.session.commit()
    return new_log
def update(tile_id):
    if request.method == 'POST':
        tile = db.session.query(Tile).filter_by(id=tile_id).first()
        if tile is not None:
            # Toggle between the two color ids (0 and 1).
            tile.color_id = (tile.color_id + 1) % 2
            db.session.add(tile)
            log = Log(
                ip_address=request.remote_addr,
                date_time=datetime.utcnow().isoformat(),
                tile_id=tile_id,
                new_color_id=tile.color_id,
            )
            db.session.add(log)
            db.session.commit()
    return jsonMain()
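# update() above reads like a Flask view; a hypothetical registration,
# assuming an existing `app` and an invented URL pattern:
app.add_url_rule('/update/<int:tile_id>', view_func=update,
                 methods=['GET', 'POST'])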
def log(self, type_log, item='0'):
    logging.debug(f'type: {type_log} files: {item}')
    # Change INTERVAL_LOG to control how often a repeated log is re-sent.
    interval_min = datetime.now() - timedelta(minutes=INTERVAL_LOG)
    previous_log = Log.objects(client=self.id,
                               type=type_log,
                               item=','.join(item),
                               date__gte=interval_min)
    if not previous_log:
        Log.objects.create(client=self.id,
                           type=type_log,
                           item=','.join(item))
        attrs = [self.mail, self.hostname, str(type_log), ','.join(item)]
        logging.debug(attrs)
        launcher(attrs, remote=self.remote)
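# A rough sketch of the MongoEngine document that log() above queries via
# Log.objects(...); field names are inferred from the calls, and the `date`
# default is an assumption.
from datetime import datetime
from mongoengine import DateTimeField, Document, StringField

class Log(Document):
    client = StringField(required=True)
    type = StringField(required=True)
    item = StringField(default='0')
    date = DateTimeField(default=datetime.now)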
def import_to_db(log_data_list):
    for dict_row in log_data_list:
        log = Log(dict_row.get('ip'),
                  dict_row.get('method'),
                  dict_row.get('code'))
        db_session.add(log)
    # Commit once after all rows have been added.
    db_session.commit()
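# Hypothetical usage of import_to_db: each row is a dict with 'ip',
# 'method' and 'code' keys, e.g. parsed from an access log. The sample
# values are invented.
sample_rows = [
    {'ip': '203.0.113.7', 'method': 'GET', 'code': '200'},
    {'ip': '198.51.100.2', 'method': 'POST', 'code': '404'},
]
import_to_db(sample_rows)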
def observer(self, text):
    obs = Log(log=text)
    db.session.add(obs)
    db.session.commit()
async def parse_clan_raid_logs(self, clan_details, message_stream=sys.stdout):
    clan_name, clan_id, aliases = clan_details

    kol = self.bot.kol
    clan = Clan(kol, id=clan_id)
    await clan.join()

    try:
        current = await clan.get_raids()
    except ClanPermissionsError:
        message_stream.write(
            "Skipping {} due to lack of basement permissions".format(clan_name))
        return

    try:
        previous = await clan.get_previous_raids()
    except Exception:
        previous = []

    tasks = []
    created_raids = []
    updated_raids = []

    for data in tqdm(current + previous,
                     desc="Discovering previous raid logs in {}".format(clan_name),
                     file=message_stream,
                     unit="raid logs",
                     leave=False):
        raid = Raid.get_or_none(id=data.id)
        raids_list = updated_raids

        if raid is None:
            raid = Raid(id=data.id,
                        name=data.name,
                        clan_id=clan_id,
                        clan_name=clan_name)
            raids_list = created_raids

        if data.events is None and raid.end is None:
            raid.start = data.start
            raid.end = data.end
            tasks += [asyncio.ensure_future(clan.get_raid_log(data.id))]

        if raid.is_dirty():
            raids_list.append(raid)

    Raid.bulk_create(created_raids, batch_size=50)
    Raid.bulk_update(updated_raids, fields=[Raid.start, Raid.end], batch_size=50)

    raids_data = current + [
        await t
        for t in tqdm(asyncio.as_completed(tasks),
                      desc="Loading previous raid logs in {}".format(clan_name),
                      unit="raid logs",
                      total=len(tasks),
                      leave=False,
                      file=message_stream,
                      ascii=False)
    ]

    with tqdm(raids_data,
              desc="Parsing raid logs in {}".format(clan_name),
              unit="raids",
              file=message_stream,
              ascii=False) as p:
        for data in p:
            raid = Raid.get_or_none(id=data.id)

            if raid is None:
                p.write("Something went wrong with raid {}".format(data.id))
                continue

            logs = []

            for category, events in data.events:
                category = category.rstrip(":")
                for event in events:
                    turns = int(event.data.pop("turns", 0))
                    event_data = json.dumps(event.data, sort_keys=True)

                    log = Log.get_or_none(Log.raid == raid,
                                          Log.category == category,
                                          Log.action == event.action,
                                          Log.username == event.username,
                                          Log.user_id == event.user_id,
                                          Log.data == event_data)

                    if log is None:
                        log = Log(raid=raid,
                                  category=category,
                                  action=event.action,
                                  username=event.username,
                                  user_id=event.user_id,
                                  turns=turns,
                                  data=event_data)
                    elif log.turns != turns:
                        log.turns = turns
                        log.last_updated = time()

                    logs.append(log)

            # Replace all logs for this raid atomically.
            with db.atomic():
                Log.delete().where(Log.raid == raid).execute()
                Log.bulk_create(logs, batch_size=50)

            raid.summary = json.dumps(data.summary)
            raid.save()
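# Hypothetical peewee models matching the queries in parse_clan_raid_logs
# above (get_or_none, bulk_create/bulk_update, db.atomic). Field names and
# types are inferred from usage, not the project's actual schema; start/end
# are assumed to be epoch timestamps.
from peewee import (CharField, FloatField, ForeignKeyField, IntegerField,
                    Model, SqliteDatabase, TextField)

db = SqliteDatabase("raids.db")

class Raid(Model):
    id = IntegerField(primary_key=True)
    name = CharField()
    clan_id = IntegerField()
    clan_name = CharField()
    start = IntegerField(null=True)
    end = IntegerField(null=True)
    summary = TextField(null=True)

    class Meta:
        database = db

class Log(Model):
    raid = ForeignKeyField(Raid, backref="logs")
    category = CharField()
    action = CharField()
    username = CharField()
    user_id = IntegerField()
    turns = IntegerField(default=0)
    data = TextField()
    last_updated = FloatField(null=True)

    class Meta:
        database = db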