def run(self): self.running = True self.session = db.Session() self.session.add(self.search) self.session.add(self.customer) lr = LogReader(self.log_path, self.customer.get_safe_short_name(), \ self.search.start_dt, self.search.finish_dt) self.search.status = Search.Status.RUNNING self.search.count = 0 self.update_search() while self.running == True: event = lr.next() if event == None: break row = self.sql_expression.evaluate(event) if row != None: #TODO: do we restrict the rows here? self.fh.write(json.dumps(event)) self.fh.write("\n") self.search.count += 1 self.update_search() self.fh.close() self.search.status = Search.Status.DONE self.update_search() self.session.close() print "Done"
def __init__(self, log_path, search_id): Thread.__init__(self) self.running = None self.log_path = log_path # self.socket = socket self.session = db.Session() self.search = self.session.query(Search).get(search_id) self.customer = self.search.customer # we open the result file first to prevent race conditions result_path = os.path.join(self.log_path, \ self.search.customer.get_safe_short_name(), "search", \ "%d.txt" % self.search.id) print "Opened result file %s" % result_path result_dir = os.path.dirname(result_path) if os.path.exists(result_dir) == False: os.makedirs(result_dir) self.fh = open(result_path, "w") # parse the expression before we begin so we can return an # exception if need be sql_parser = SQLParser() self.sql_expression = sql_parser.parse(self.search.expression) field_list = self.sql_expression.select.field_list.to_list() self.search.field_list = json.dumps(field_list) self.update_search() self.session.close()
def seed():
    """Insert the baseline fixture rows (one Person, one Project)."""
    session = db.Session()
    person = models.Person(name="Bram")
    session.add(person)
    project = models.Project(name="dtc")
    session.add(project)
    session.commit()
async def values(self) -> List[Any]:
    """Fetch all rows for the configured columns and reduce each one."""
    async with db.Session() as session:
        async with session.begin():
            result = await session.execute(
                self.model.select(*self.columns))
            rows = result.all()
    return [util.reduce(row) for row in rows]
def refresh_authorized_web_servers(self):
    """Reload the cache of authorized web servers, keyed by uuid.

    Logs each cached uuid at verbosity 5.
    """
    log("Populating authorized web_server uuids, uuids follow", 5)
    # create the session OUTSIDE the try: if db.Session() itself raised,
    # the finally clause would hit a NameError on `session`, masking
    # the original exception
    session = db.Session()
    try:
        web_servers = session.query(WebServer)\
            .filter(WebServer.authorized==True).all()
        for web_server in web_servers:
            self.authorized_web_servers[web_server.uuid] = web_server
            log(web_server.uuid, 5)
        log("EOL", 5)
    finally:
        session.close()
def reload_cache(self):
    """Rebuild the in-memory log-source and parser caches from the DB."""
    session = db.Session()
    self.log_sources = {}
    try:
        for source in session.query(LogSource).all():
            log(2, "Initialising log source:")
            log(2, "Customer: %s, Source Name: %s, Format: %s" % (
                source.customer.name,
                source.name,
                source.parameters["format"].value))
            self.log_sources[source.id] = source
            self.parsers[source.id] = self.new_parser(source)
    finally:
        session.close()
def update_log_source(self, log_source_id, collector_uuid):
    """Refresh one log source (and its parser) in the cache, then tell
    the owning collector to re-pull its source list.

    log_source_id  -- primary key of the LogSource to refresh
    collector_uuid -- uuid of the collector to notify
    """
    session = db.Session()
    try:
        log_source = session.query(LogSource)\
            .filter(LogSource.id==log_source_id).one()
        self.log_sources[log_source.id] = log_source
        # NOTE: the old `format_name = log_source.parameters["format"].value`
        # local was dead code left over from a removed Parser.new() call;
        # new_parser() derives the format itself
        self.parsers[log_source.id] = self.new_parser(log_source)
    finally:
        session.close()
    # notify the collector that it's changed
    self.command_and_control.queue_command(collector_uuid, "update_sources")
def json_get_events(self, request): pager_uuid = request.json["pager_uuid"] pager = self.get_pager(request, pager_uuid) db_session = db.Session() search = db_session.query(Search).get(pager.search_id) db_session.close() for event in pager.yield_events(): data = {"count": search.count, "row": event} print "Status: %d" % search.status request.wfile.write("%s\n" % (json.dumps(data))) request.wfile.flush() data = {"count": search.count, "status": search.status} request.wfile.write("%s\n" % (json.dumps(data))) request.wfile.flush() return ""
def log(self, data, collector_uuid):
    """Validate and parse one incoming log line.

    data           -- dict with at least "log_source_id"; mutated in place
                      to carry "log_source_name" before parsing
    collector_uuid -- uuid of the collector that submitted the line; must
                      match the source's owning collector

    Raises Exception on an ownership mismatch (security check).
    """
    log_source_id = int(data["log_source_id"])
    log_source = self.log_sources[log_source_id]
    data["log_source_name"] = log_source.name
    # load collector into session: the cached log_source is detached, so it
    # must be re-attached here for the lazy `collector` relationship to load
    session = db.Session()
    try:
        session.add(log_source)
        collector = log_source.collector
    finally:
        session.close()
    # check that the log source is actually owned by the customer
    # (collector.uuid was loaded above, so it is safe to read post-close)
    if collector.uuid != collector_uuid:
        raise Exception("Security exception")
    parser = self.get_parser(log_source_id)
    parser.parse_line(data)
def json_set_page_info(self, request): """Set the pager page info and return a list of fields in the query to the client""" pager_uuid = request.json["pager_uuid"] page_number = int(request.json["page_number"]) page_size = int(request.json["page_size"]) print "Set page info" pager = self.get_pager(request, pager_uuid) pager.set_page_info(page_number, page_size=page_size) session = db.Session() try: search = session.query(Search).get(pager.search_id) finally: session.close() data = {"field_list": search.field_list} return json.dumps(data)
def __init__(self, search_id): self.search_id = search_id self.uuid = str(uuid.uuid4()) self.line_no = 0 session = db.Session() try: search = session.query(Search).get(search_id) # open up the results file print config.log_path print search.customer.get_safe_short_name() result_path = os.path.join(config.log_path, \ search.customer.get_safe_short_name(), "search", \ "%d.txt" % search.id) print "SearchPager opening %s" % result_path self.fh = open(result_path, "r") finally: session.close()
async def agg(
    self,
    funcs: Union[Function, List[Function]],
    filter: Union[str, TextClause, BinaryExpression] = None,
) -> Dict[str, Union[int, float]]:
    """Run one or more aggregate functions over the model's table.

    funcs  -- a single SQL function or a list of them
    filter -- optional WHERE clause; plain strings are wrapped in text()

    Returns a dict mapping each function's name to its aggregate value.
    """
    func_map: Dict[str, Function] = {}
    for fn in util.ensure_list(funcs):
        func_map[fn.name] = fn
    stmt = self.model.select(*func_map.values())
    if filter is not None:
        clause = (filter
                  if isinstance(filter, (TextClause, BinaryExpression))
                  else text(filter))
        stmt = stmt.where(clause)
    async with db.Session() as session:
        async with session.begin():
            row = (await session.execute(stmt)).one()
    return dict(zip(func_map.keys(), row))
def json_register(self, request): # create a database object for the web frontend session = db.Session() try: uuid = self.cn_to_uuid(self.peer_cert.get_subject().commonName) # see if the webserver exists already try: web_server = session.query(WebServer)\ .filter(WebServer.uuid==uuid).one() print "Webserver reporting, UUID: %s" % uuid except sqlalchemy.orm.exc.NoResultFound: # create a new webserver web_server = WebServer() web_server.uuid = uuid print "Webserver register. UUID: %s" % (web_server.uuid) session.add(web_server) session.commit() return json.dumps({"uuid": web_server.uuid}) finally: session.close()
async def db_session_middleware(request, call_next):
    """ASGI middleware: attach a DB session to request.state for the
    duration of the request.

    The close() is in a finally block so the session is released even
    when the downstream handler raises — otherwise every failed request
    would leak a session/connection.
    """
    request.state.db = db.Session()
    try:
        response = await call_next(request)
    finally:
        request.state.db.close()
    return response
async def wrapper():
    """Exploratory/scratch driver exercising the async ORM layer.

    NOTE(review): this is demo code, not production — it rebinds names,
    evaluates bare expressions (REPL style), and imports inside the
    function body. Indentation below is reconstructed from a collapsed
    source line — confirm block nesting against the original file.
    """
    # reset the schema from scratch
    async with engine.begin() as conn:
        await conn.run_sync(Model.metadata.drop_all)
        await conn.run_sync(Model.metadata.create_all)
    async with db.Session() as session:
        async with session.begin():
            # seed five test users
            for x in range(1, 6):
                user = User(username=f"test-{x}")
                session.add(user)
        # selecting columns returns tuples
        stmt = select(*User.pk)
        result = await session.execute(stmt)
        rows: List[Tuple[int, str]] = result.all()
        row: Tuple[int, str] = rows[0]
        rows
        row
        # selecting the model returns model objects
        stmt = select(User)
        result = await session.execute(stmt)
        rows: List[User] = result.scalars().all()
        row: User = rows[0]
        rows
        dir(row)
        row
        import pandas as pd
        pd.DataFrame(rows, columns=User.pk.names)
        # await User.pk.values
        # returns tuple
        stmt = User.select().where(User.id == 1)
        user: Tuple = (await session.execute(stmt)).one()
        # returns model instance
        stmt = select(User).where(User.id == 8)
        user: User = (await session.execute(stmt)).scalar()
        # returns model instance
        stmt = User.select()
        user: User = (await session.execute(stmt)).scalar()
        # build a primary-key predicate from the loaded instance's values
        predicate = and_(*[col == getattr(user, col.name) for col in user.pk])
        # stmt = User.select().where(predicate)
        stmt = User.__table__.update().where(predicate).values(
            username="******")
        await session.execute(stmt)
        await session.commit()
        user.username = "******"
        await session.commit()
        user: User = await User.get(id=6, username="******")
        user
        self = user
        import sqlalchemy as sa
        kwargs = dict(username="******")
        result: Row
        async with db.Session() as session:
            async with session.begin():
                stmt = sa.insert(
                    self.__class__).returning(*self.c).values(**kwargs)
                result = (await session.execute(stmt)).one()
        result
        user = await User.create(**kwargs)
        User(**user._mapping)
        self = user
        stmt = User.select(db.func.count())
        async with db.Session() as session:
            async with session.begin():
                # NOTE(review): `.one()` is chained onto print()'s return
                # value (None) — this line raises AttributeError if executed
                print(await session.execute(User.select(db.func.min(User.id))
                                            )).one()
        self = User.agg
        await User.agg.count(filter="id < 10")
        # TODO: add binary expression to tests
        await User.agg.count(filter=User.id < 10)
        # TODO: add multi-expression agg to list
        await User.agg.agg([
            db.func.min(User.id),
            db.func.max(User.id),
            db.func.avg(User.id),
            #
        ])