def get_auth_attempts_at_port(connection: Connection,
                              nas_ip_address: netaddr.IPAddress,
                              nas_port_id: str,
                              when: Optional[DatetimeRange] = None,
                              limit: Optional[int] = None
                              ) -> Iterable[Tuple[str, str, Groups,
                                                  Attributes, datetime]]:
    """
    Return auth attempts at a particular port of an NAS ordered by
    Auth-Date descending.

    :param connection: A SQLAlchemy connection
    :param nas_ip_address: NAS IP address
    :param nas_port_id: NAS Port ID
    :param when: Range in which Auth-Date must be within
    :param limit: Maximum number of records
    :return: An iterable that yields (User-Name, Packet-Type, Groups,
        Reply, Auth-Date)-tuples ordered by Auth-Date descending
    """
    # Python %-formatting does not support printf-style positional
    # specifiers such as %2$s — they raise at log-record formatting time.
    # Pass the arguments in message order instead.
    logger.debug('Getting all auth attempts at port %s of %s',
                 nas_port_id, nas_ip_address)
    query = (
        select([radpostauth.c.UserName, radpostauth.c.PacketType,
                radpostauth.c.Groups, radpostauth.c.Reply,
                radpostauth.c.AuthDate])
        .where(and_(radpostauth.c.NASIPAddress == nas_ip_address,
                    radpostauth.c.NASPortId == nas_port_id))
        .order_by(radpostauth.c.AuthDate.desc())
    )
    if when is not None:
        # where() is generative and returns a new select object, so the
        # result must be reassigned.  `<@` is PostgreSQL's range
        # containment operator: the custom operator returned by op()
        # must be *called* with the range operand, not compared with <=.
        query = query.where(
            radpostauth.c.AuthDate.op('<@')(func.tstzrange(*when)))
    if limit is not None:
        query = query.limit(limit)
    return iter(connection.execute(query))
def get_sessions_of_mac(connection: Connection, mac: netaddr.EUI,
                        when: Optional[DatetimeRange] = None,
                        limit: Optional[int] = None
                        ) -> Iterable[Tuple[netaddr.IPAddress, str,
                                            datetime, datetime]]:
    """
    Return accounting sessions of a particular MAC address ordered by
    Session-Start-Time descending.

    :param connection: A SQLAlchemy connection
    :param mac: MAC address
    :param when: Range in which Session-Start-Time must be within
    :param limit: Maximum number of records
    :return: An iterable that yields (NAS-IP-Address, NAS-Port-Id,
        Session-Start-Time, Session-Stop-Time)-tuples ordered by
        Session-Start-Time descending
    """
    logger.debug('Getting all sessions for MAC "%s"', mac)
    query = (
        select([radacct.c.NASIPAddress, radacct.c.NASPortId,
                radacct.c.AcctStartTime, radacct.c.AcctStopTime])
        .where(and_(radacct.c.UserName == mac))
        .order_by(radacct.c.AcctStartTime.desc())
    )
    if when is not None:
        # where() is generative: reassign the returned select.  `<@`
        # (range containment) must be applied as a call on the custom
        # operator, not compared with <=.
        query = query.where(
            radacct.c.AcctStartTime.op('<@')(func.tstzrange(*when)))
    if limit is not None:
        query = query.limit(limit)
    return iter(connection.execute(query))
def get_auth_attempts_of_mac(connection: Connection, mac: netaddr.EUI,
                             when: Optional[DatetimeRange] = None,
                             limit: Optional[int] = None
                             ) -> Iterable[Tuple[netaddr.IPAddress, str, str,
                                                 Groups, Attributes,
                                                 datetime]]:
    """
    Return auth attempts of a particular MAC address ordered by Auth-Date
    descending.

    :param connection: A SQLAlchemy connection
    :param mac: MAC address
    :param when: Range in which Auth-Date must be within
    :param limit: Maximum number of records
    :return: An iterable that yields (NAS-IP-Address, NAS-Port-Id,
        Packet-Type, Groups, Reply, Auth-Date)-tuples ordered by Auth-Date
        descending
    """
    logger.debug('Getting all auth attempts of MAC %s', mac)
    query = (
        select([radpostauth.c.NASIPAddress, radpostauth.c.NASPortId,
                radpostauth.c.PacketType, radpostauth.c.Groups,
                radpostauth.c.Reply, radpostauth.c.AuthDate])
        .where(and_(radpostauth.c.UserName == mac))
        .order_by(radpostauth.c.AuthDate.desc())
    )
    if when is not None:
        # where() is generative: reassign the returned select.  `<@`
        # (range containment) must be applied as a call on the custom
        # operator, not compared with <=.
        query = query.where(
            radpostauth.c.AuthDate.op('<@')(func.tstzrange(*when)))
    if limit is not None:
        query = query.limit(limit)
    return iter(connection.execute(query))
def get_auth_attempts_at_port(
        connection: Connection,
        nas_ip_address: netaddr.IPAddress,
        nas_port_id: str,
        when: Optional[DatetimeRange] = None,
        limit: Optional[int] = None,
) -> Iterator[Tuple[str, str, Groups, Attributes, datetime]]:
    """
    Return auth attempts at a particular port of an NAS ordered by
    Auth-Date descending.

    :param connection: A SQLAlchemy connection
    :param nas_ip_address: NAS IP address
    :param nas_port_id: NAS Port ID
    :param when: Range in which Auth-Date must be within
    :param limit: Maximum number of records
    :return: An iterator that yields (User-Name, Packet-Type, Groups,
        Reply, Auth-Date)-tuples ordered by Auth-Date descending
    """
    # %2$s-style positional specifiers are not valid in Python
    # %-formatting and break lazy log formatting; use plain %s in
    # argument order.
    logger.debug('Getting all auth attempts at port %s of %s',
                 nas_port_id, nas_ip_address)
    query = (select([
        radpostauth.c.UserName, radpostauth.c.PacketType,
        radpostauth.c.Groups, radpostauth.c.Reply, radpostauth.c.AuthDate
    ]).where(
        and_(radpostauth.c.NASIPAddress == nas_ip_address,
             radpostauth.c.NASPortId == nas_port_id)).order_by(
                 radpostauth.c.AuthDate.desc()))
    if when is not None:
        # Reassign: where() returns a new select.  Apply the `<@` range
        # containment operator as a call rather than comparing with <=.
        query = query.where(
            radpostauth.c.AuthDate.op('<@')(func.tstzrange(*when)))
    if limit is not None:
        query = query.limit(limit)
    return iter(connection.execute(query))
def count_datasets_through_time_query(self, start, end, period, time_field,
                                      expressions):
    """
    Build a query counting datasets whose time overlaps each period-sized
    slice of [start, end); yields (time_period, dataset_count) rows.
    """
    alchemy_exprs = self._alchemify_expressions(expressions)

    # One row per slice start between `start` and `end`.
    slice_starts = select((
        func.generate_series(start, end, cast(period, INTERVAL))
            .label('start_time'),
    )).alias('start_times')

    # Pair each start with the next one into a half-open tstzrange.
    raw_ranges = select((
        func.tstzrange(
            slice_starts.c.start_time,
            func.lead(slice_starts.c.start_time).over(),
        ).label('time_period'),
    )).alias('all_time_ranges')

    # Exclude the trailing (end time to infinite) row. Is there a simpler way?
    bounded_ranges = select(
        (raw_ranges,)
    ).where(
        ~func.upper_inf(raw_ranges.c.time_period)
    ).alias('time_ranges')

    # Correlated count of non-archived datasets overlapping each range.
    per_range_count = select(
        (func.count('*'),)
    ).select_from(
        self._from_expression(DATASET, expressions)
    ).where(
        and_(
            time_field.alchemy_expression.overlaps(
                bounded_ranges.c.time_period),
            DATASET.c.archived == None,
            *alchemy_exprs
        )
    )

    return select((bounded_ranges.c.time_period,
                   per_range_count.label('dataset_count')))
def get_auth_attempts_of_mac(
        connection: Connection,
        mac: netaddr.EUI,
        when: Optional[DatetimeRange] = None,
        limit: Optional[int] = None,
) -> Iterator[Tuple[netaddr.IPAddress, str, str, Groups, Attributes,
                    datetime]]:
    """
    Return auth attempts of a particular MAC address ordered by Auth-Date
    descending.

    :param connection: A SQLAlchemy connection
    :param mac: MAC address
    :param when: Range in which Auth-Date must be within
    :param limit: Maximum number of records
    :return: An iterator that yields (NAS-IP-Address, NAS-Port-Id,
        Packet-Type, Groups, Reply, Auth-Date)-tuples ordered by Auth-Date
        descending
    """
    logger.debug('Getting all auth attempts of MAC %s', mac)
    query = (select([
        radpostauth.c.NASIPAddress, radpostauth.c.NASPortId,
        radpostauth.c.PacketType, radpostauth.c.Groups, radpostauth.c.Reply,
        radpostauth.c.AuthDate
    ]).where(and_(radpostauth.c.UserName == mac)).order_by(
        radpostauth.c.AuthDate.desc()))
    if when is not None:
        # Reassign: where() returns a new select.  Apply the `<@` range
        # containment operator as a call rather than comparing with <=.
        query = query.where(
            radpostauth.c.AuthDate.op('<@')(func.tstzrange(*when)))
    if limit is not None:
        query = query.limit(limit)
    return iter(connection.execute(query))
def get_sessions_of_mac(
        connection: Connection,
        mac: netaddr.EUI,
        when: Optional[DatetimeRange] = None,
        limit: Optional[int] = None,
) -> Iterator[Tuple[netaddr.IPAddress, str, datetime, datetime]]:
    """
    Return accounting sessions of a particular MAC address ordered by
    Session-Start-Time descending.

    :param connection: A SQLAlchemy connection
    :param mac: MAC address
    :param when: Range in which Session-Start-Time must be within
    :param limit: Maximum number of records
    :return: An iterator that yields (NAS-IP-Address, NAS-Port-Id,
        Session-Start-Time, Session-Stop-Time)-tuples ordered by
        Session-Start-Time descending
    """
    logger.debug('Getting all sessions for MAC "%s"', mac)
    query = (select([
        radacct.c.NASIPAddress, radacct.c.NASPortId, radacct.c.AcctStartTime,
        radacct.c.AcctStopTime
    ]).where(and_(radacct.c.UserName == mac)).order_by(
        radacct.c.AcctStartTime.desc()))
    if when is not None:
        # Reassign: where() returns a new select.  Apply the `<@` range
        # containment operator as a call rather than comparing with <=.
        query = query.where(
            radacct.c.AcctStartTime.op('<@')(func.tstzrange(*when)))
    if limit is not None:
        query = query.limit(limit)
    return iter(connection.execute(query))
def value_to_alchemy(self, value):
    """Turn a (low, high) pair into a tstzrange closed at both ends."""
    low, high = value
    # '[]' bounds make the range inclusive on both sides.
    return func.tstzrange(low, high, '[]', type_=TSTZRANGE)
def _where(self, product_name, time):
    """
    Build the (begin, end, where-clause) triple restricting spatial
    datasets to the given product and time range.
    """
    begin_time = self._with_default_tz(time.begin)
    end_time = self._with_default_tz(time.end)

    # Center time must fall in the closed [begin, end] range.
    in_time_range = func.tstzrange(
        begin_time, end_time, '[]', type_=TSTZRANGE,
    ).contains(DATASET_SPATIAL.c.center_time)

    # Product is matched by name via a scalar subquery on its id.
    matches_product = DATASET_SPATIAL.c.dataset_type_ref == select(
        [DATASET_TYPE.c.id]
    ).where(DATASET_TYPE.c.name == product_name)

    where_clause = and_(in_time_range, matches_product)
    return begin_time, end_time, where_clause
def count_datasets_through_time(self, start, end, period, time_field, expressions):
    """
    Yield ((slice_start, slice_end), dataset_count) for each period-sized
    slice between start and end.

    :type period: str
    :type start: datetime.datetime
    :type end: datetime.datetime
    :type expressions: tuple[datacube.index.postgres._fields.PgExpression]
    :rtype: list[((datetime.datetime, datetime.datetime), int)]
    """
    alchemy_exprs = self._alchemify_expressions(expressions)

    # One row per slice start between `start` and `end`.
    slice_starts = select((
        func.generate_series(start, end, cast(period, INTERVAL))
            .label('start_time'),
    )).alias('start_times')

    # Pair each start with the next one into a half-open tstzrange.
    raw_ranges = select((
        func.tstzrange(
            slice_starts.c.start_time,
            func.lead(slice_starts.c.start_time).over(),
        ).label('time_period'),
    )).alias('all_time_ranges')

    # Exclude the trailing (end time to infinite) row. Is there a simpler way?
    bounded_ranges = select(
        (raw_ranges,)
    ).where(
        ~func.upper_inf(raw_ranges.c.time_period)
    ).alias('time_ranges')

    # Correlated count of non-archived datasets overlapping each range.
    per_range_count = select(
        (func.count('*'),)
    ).select_from(
        self._from_expression(DATASET, expressions)
    ).where(
        and_(
            time_field.alchemy_expression.overlaps(
                bounded_ranges.c.time_period),
            DATASET.c.archived == None,
            *alchemy_exprs
        )
    )

    results = self._connection.execute(select((
        bounded_ranges.c.time_period,
        per_range_count.label('dataset_count')
    )))
    for time_period, dataset_count in results:
        yield Range(time_period.lower, time_period.upper), dataset_count
def _where(self, product_name: str,
           time: Range) -> Tuple[datetime, datetime, ColumnElement]:
    """
    Build the (begin, end, where-clause) triple restricting spatial
    datasets to the given product, time range and valid footprints.
    """
    begin_time = self._with_default_tz(time.begin)
    end_time = self._with_default_tz(time.end)

    # Center time must fall in the closed [begin, end] range.
    in_time_range = func.tstzrange(
        begin_time, end_time, "[]", type_=TSTZRANGE
    ).contains(DATASET_SPATIAL.c.center_time)

    # Product is matched by name via a scalar subquery on its id.
    matches_product = DATASET_SPATIAL.c.dataset_type_ref == select(
        [ODC_DATASET_TYPE.c.id]
    ).where(ODC_DATASET_TYPE.c.name == product_name)

    # A NULL validity result (e.g. no footprint) is accepted too.
    footprint_ok = or_(
        func.st_isvalid(DATASET_SPATIAL.c.footprint).is_(True),
        func.st_isvalid(DATASET_SPATIAL.c.footprint).is_(None),
    )

    where_clause = and_(in_time_range, matches_product, footprint_ok)
    return begin_time, end_time, where_clause
def post_transactions_for_membership_fee(membership_fee, processor,
                                         simulate=False):
    """
    Posts transactions (and splits) for users where the specified membership fee
    was not posted yet.

    User select: User -> Split (user account) -> Transaction -> Split (fee account)
    Conditions: User has `membership_fee` property on
    begins_on + booking_begin - 1 day or begins_on + booking_end - 1 day
    and no transaction exists on the user account in the fee timespan

    :param membership_fee: The membership fee which should be posted
    :param processor: Author of the posted transactions
    :param simulate: Do not post any transactions, just return the affected users.
    :return: A list of name of all affected users
    """
    description = membership_fee_description.format(
        fee_name=membership_fee.name).to_json()

    # Two Split aliases: the user-account side and the fee-account side
    # of the same transaction.
    split_user_account = Split.__table__.alias()
    split_fee_account = Split.__table__.alias()

    # Room history aliases evaluated at the end / beginning of the fee period.
    rhe_end = RoomHistoryEntry.__table__.alias()
    rhe_begin = RoomHistoryEntry.__table__.alias()

    # All building fee accounts plus the globally configured default one.
    fee_accounts = Account.q.join(Building).distinct(Account.id).all()
    fee_accounts_ids = set([acc.id for acc in fee_accounts]
                           + [config.membership_fee_account_id])

    # Timestamps at which the `membership_fee` property is checked:
    # one day before booking_begin / booking_end relative to begins_on.
    properties_beginning_timestamp = datetime.combine(
        (membership_fee.begins_on + membership_fee.booking_begin
         - timedelta(1)),
        time_min())
    properties_end_timestamp = datetime.combine(
        (membership_fee.begins_on + membership_fee.booking_end
         - timedelta(1)),
        time_max())

    # The fee period as [begin-of-first-day, end-of-last-day] timestamps.
    begin_tstz = datetime.combine(membership_fee.begins_on, time_min())
    end_tstz = datetime.combine(membership_fee.ends_on, time_max())

    # Select all users who fulfill the requirements for the fee in the fee timespan
    users = (select([User.id.label('id'),
                     User.name.label('name'),
                     User.account_id.label('account_id'),
                     # Select fee_account_id of the building or the default
                     # fee_account_id if user was not living in a room at booking time
                     func.coalesce(Building.fee_account_id,
                                   literal(config.membership_fee_account_id))
                     .label('fee_account_id')])
             .select_from(
                 User.__table__
                 # Join the users properties at `booking_begin`
                 .outerjoin(
                     func.evaluate_properties(properties_beginning_timestamp)
                         .alias('properties_beginning'),
                     literal_column('properties_beginning.user_id') == User.id)
                 # Join the users properties at `booking_end`
                 .outerjoin(
                     func.evaluate_properties(properties_end_timestamp)
                         .alias('properties_end'),
                     literal_column('properties_end.user_id') == User.id)
                 # Join RoomHistoryEntry, Room and Building of the user at
                 # membership_fee.ends_on
                 .outerjoin(rhe_end, and_(
                     rhe_end.c.user_id == User.id,
                     # Only join RoomHistoryEntry that is relevant
                     # on the fee interval end date
                     literal(end_tstz).op("<@")(
                         func.tstzrange(
                             rhe_end.c.begins_at,
                             func.coalesce(rhe_end.c.ends_at,
                                           literal('infinity').cast(DateTime)),
                             '[)'))))
                 # Join RoomHistoryEntry, Room and Building of the user at
                 # membership_fee.begins_on
                 # As second option if user moved out within the month
                 .outerjoin(rhe_begin, and_(
                     rhe_begin.c.user_id == User.id,
                     # Only join RoomHistoryEntry that is relevant
                     # on the fee interval end date
                     literal(begin_tstz).op("<@")(
                         func.tstzrange(
                             rhe_begin.c.begins_at,
                             func.coalesce(rhe_begin.c.ends_at,
                                           literal('infinity').cast(DateTime)),
                             '[)'))))
                 # Join with Room from membership_fee.ends_on if available,
                 # if not, join with the Room from membership_fee.begins_on
                 .outerjoin(Room, Room.id == func.coalesce(rhe_end.c.room_id,
                                                           rhe_begin.c.room_id))
                 .outerjoin(Building, Building.id == Room.building_id)
             )
             # Check if a booking already exists on the user account in the
             # fee timespan
             .where(not_(exists(
                 select([None])
                 .select_from(
                     split_user_account
                     .join(Transaction,
                           Transaction.id == split_user_account.c.transaction_id)
                     .join(split_fee_account,
                           split_fee_account.c.transaction_id == Transaction.id)
                 )
                 .where(and_(
                     split_user_account.c.account_id == User.account_id,
                     Transaction.valid_on.between(
                         literal(membership_fee.begins_on),
                         literal(membership_fee.ends_on)),
                     split_fee_account.c.account_id.in_(fee_accounts_ids),
                     split_fee_account.c.amount < 0,
                     split_fee_account.c.id != split_user_account.c.id))
             )))
             # Only those users who had the `membership_fee` property on
             # `booking_begin` or `booking_end`
             .where(or_(
                 and_(literal_column('properties_beginning.property_name')
                      == 'membership_fee',
                      not_(literal_column('properties_beginning.denied'))),
                 and_(literal_column('properties_end.property_name')
                      == 'membership_fee',
                      not_(literal_column('properties_end.denied')))))
             .distinct()
             .cte('membership_fee_users'))

    # Materialize the selection before any inserts so simulate and real
    # runs report the same set of users.
    affected_users_raw = session.session.execute(
        select([users.c.id, users.c.name, users.c.fee_account_id])).fetchall()

    if not simulate:
        # Number the users so each can be matched 1:1 (by row_number)
        # with a freshly inserted transaction below.
        numbered_users = (select([users.c.id,
                                  users.c.fee_account_id.label('fee_account_id'),
                                  users.c.account_id,
                                  func.row_number().over().label('index')])
                          .select_from(users)
                          .cte("membership_fee_numbered_users"))

        # One confirmed transaction per affected user, valid on the fee
        # period's end date.
        transactions = (Transaction.__table__.insert()
                        .from_select([Transaction.description,
                                      Transaction.author_id,
                                      Transaction.posted_at,
                                      Transaction.valid_on,
                                      Transaction.confirmed],
                                     select([literal(description),
                                             literal(processor.id),
                                             func.current_timestamp(),
                                             literal(membership_fee.ends_on),
                                             True])
                                     .select_from(users))
                        .returning(Transaction.id)
                        .cte('membership_fee_transactions'))

        numbered_transactions = (select([transactions.c.id,
                                         func.row_number().over()
                                             .label('index')])
                                 .select_from(transactions)
                                 .cte('membership_fee_numbered_transactions'))

        # Credit side: negative fee amount booked on the fee account.
        split_insert_fee_account = (Split.__table__.insert()
                                    .from_select(
                                        [Split.amount, Split.account_id,
                                         Split.transaction_id],
                                        select([literal(
                                                    -membership_fee.regular_fee,
                                                    type_=Money),
                                                numbered_users.c.fee_account_id,
                                                numbered_transactions.c.id])
                                        .select_from(numbered_users.join(
                                            numbered_transactions,
                                            numbered_transactions.c.index
                                            == numbered_users.c.index)))
                                    .returning(Split.id)
                                    .cte('membership_fee_split_fee_account'))

        # Debit side: positive fee amount booked on the user's account.
        split_insert_user = (Split.__table__.insert().from_select(
            [Split.amount, Split.account_id, Split.transaction_id],
            select([literal(membership_fee.regular_fee, type_=Money),
                    numbered_users.c.account_id,
                    numbered_transactions.c.id])
            .select_from(numbered_users.join(
                numbered_transactions,
                numbered_transactions.c.index == numbered_users.c.index)))
            .returning(Split.id)
            .cte('membership_fee_split_user'))

        # Execute the whole CTE chain in one statement; joining the two
        # insert-CTEs forces both of them to run.
        session.session.execute(
            select([]).select_from(split_insert_fee_account.join(
                split_insert_user,
                split_insert_user.c.id == split_insert_fee_account.c.id)))

    affected_users = [dict(user) for user in affected_users_raw]

    return affected_users
def search_items(
    self,
    *,
    product_name: Optional[str] = None,
    time: Optional[Tuple[datetime, datetime]] = None,
    bbox: Tuple[float, float, float, float] = None,
    limit: int = 500,
    offset: int = 0,
    full_dataset: bool = False,
    dataset_ids: Sequence[UUID] = None,
    require_geometry=True,
    ordered=True,
) -> Generator[DatasetItem, None, None]:
    """
    Search datasets using Cubedash's spatial table

    Returned as DatasetItem records, with optional embedded full Datasets
    (if full_dataset==True)

    Returned results are always sorted by (center_time, id)

    :param product_name: restrict to a single product, matched by name
    :param time: (start, end) pair; center_time must fall within it
        (inclusive on both ends)
    :param bbox: (xmin, ymin, xmax, ymax) envelope in EPSG:4326 that the
        footprint must intersect
    :param limit: page size
    :param offset: page start (offset pagination)
    :param full_dataset: also fetch and embed the full ODC dataset record
    :param dataset_ids: restrict to these dataset ids
    :param require_geometry: drop rows with no footprint
    :param ordered: sort by (center_time, id); unordered is cheaper
    """
    # Footprint reprojected to EPSG:4326 for output geometry and bbox.
    geom = func.ST_Transform(DATASET_SPATIAL.c.footprint, 4326)

    columns = [
        geom.label("geometry"),
        func.Box2D(geom).label("bbox"),
        # TODO: dataset label?
        DATASET_SPATIAL.c.region_code.label("region_code"),
        DATASET_SPATIAL.c.creation_time,
        DATASET_SPATIAL.c.center_time,
    ]

    # If fetching the whole dataset, we need to join the ODC dataset table.
    if full_dataset:
        query: Select = select(
            (*columns, *_utils.DATASET_SELECT_FIELDS)).select_from(
                DATASET_SPATIAL.join(
                    ODC_DATASET,
                    onclause=ODC_DATASET.c.id == DATASET_SPATIAL.c.id))
    # Otherwise query purely from the spatial table.
    else:
        query: Select = select((*columns, DATASET_SPATIAL.c.id,
                                DATASET_SPATIAL.c.dataset_type_ref
                                )).select_from(DATASET_SPATIAL)

    if time:
        # Closed tstzrange containment on the dataset's center time.
        query = query.where(
            func.tstzrange(
                _utils.default_utc(time[0]),
                _utils.default_utc(time[1]),
                "[]",
                type_=TSTZRANGE,
            ).contains(DATASET_SPATIAL.c.center_time))

    if bbox:
        query = query.where(
            func.ST_Transform(DATASET_SPATIAL.c.footprint, 4326).intersects(
                func.ST_MakeEnvelope(*bbox)))

    if product_name:
        # Product matched by name via a scalar subquery on its id.
        query = query.where(DATASET_SPATIAL.c.dataset_type_ref == select(
            [ODC_DATASET_TYPE.c.id]).where(
                ODC_DATASET_TYPE.c.name == product_name))

    if dataset_ids:
        query = query.where(DATASET_SPATIAL.c.id.in_(dataset_ids))

    if require_geometry:
        query = query.where(DATASET_SPATIAL.c.footprint != None)

    if ordered:
        query = query.order_by(DATASET_SPATIAL.c.center_time,
                               DATASET_SPATIAL.c.id)

    query = query.limit(limit).offset(
        # TODO: Offset/limit isn't particularly efficient for paging...
        offset)

    for r in self._engine.execute(query):
        yield DatasetItem(
            dataset_id=r.id,
            bbox=_box2d_to_bbox(r.bbox) if r.bbox else None,
            product_name=self.index.products.get(r.dataset_type_ref).name,
            geometry=_get_shape(r.geometry),
            region_code=r.region_code,
            creation_time=r.creation_time,
            center_time=r.center_time,
            odc_dataset=(_utils.make_dataset_from_select_fields(
                self.index, r) if full_dataset else None),
        )