Example no. 1
    def __init__(
        self,
        queue_name: str,
        routing_keys: Sequence[str],
        uses_db: bool = True,
    ) -> None:
        """Initialize a new queue, bindings, and consumer for it."""
        self.connection = Connection()
        self.channel = self.connection.channel()

        # durable so the queue survives broker restarts, and never auto-deleted
        self.channel.queue_declare(queue_name, durable=True, auto_delete=False)

        # bind the queue to every routing key on the postgres exchange
        for key in routing_keys:
            self.channel.queue_bind(
                queue_name,
                exchange=self.PGSQL_EXCHANGE_NAME,
                routing_key=key,
            )

        # consumers that need the database get a session built from the INI file
        # named in the environment; others get None
        self.db_session = (
            get_session_from_config(os.environ['INI_FILE']) if uses_db else None
        )

        super().__init__(self.channel, queue_name)
Example no. 2
def update_common_topic_tags(config_path: str) -> None:
    """Update the list of common topic tags for all groups."""
    db_session = get_session_from_config(config_path)

    for group in db_session.query(Group).all():
        # UNNEST() expands each topic's tag array into individual rows, keeping
        # created_time alongside so ties can later be broken by recency of use
        tags_subquery = (
            db_session.query(
                func.unnest(Topic.tags).label("tag"),
                Topic.created_time,
            )
            .filter(Topic.group == group)
            .subquery()
        )

        # rank tags by frequency, breaking ties with the most recent usage
        top_tags = (
            db_session.query(
                tags_subquery.columns["tag"],
                func.count().label("frequency"),
                func.max(tags_subquery.columns["created_time"]).label("last_used"),
            )
            .group_by("tag")
            .order_by(desc("frequency"), desc("last_used"))
            .limit(MAX_NUM_COMMON_TAGS)
            .all()
        )

        group.common_topic_tags = [row[0] for row in top_tags]

        db_session.add(group)
        db_session.commit()
Example no. 3
def clean_all_data(config_path: str) -> None:
    """Clean all private/deleted data.

    This should generally be the only function called in most cases, and will initiate
    the full cleanup process.
    """
    # build a cleaner over a fresh session and run every cleanup step it knows
    DataCleaner(get_session_from_config(config_path), RETENTION_PERIOD).clean_all()
Example no. 4
def insert_dev_data(config_path: str) -> None:
    """Load the app config and insert some "starter" data for a dev version."""
    session = get_session_from_config(config_path)

    # a test user, a test group, and a subscription linking the two
    user = User("TestUser", "password")
    group = Group("testing", "An automatically created group to use for testing")
    session.add_all([user, group, GroupSubscription(user, group)])

    session.commit()
Example no. 5
def post_scheduled_topics(config_path: str) -> None:
    """Post all scheduled topics that are due to be posted."""
    db_session = get_session_from_config(config_path)

    # keep posting until no more scheduled topics are due; each iteration is
    # committed individually so one posted topic isn't lost if a later one fails
    while True:
        due_topic = _get_next_due_topic(db_session)
        if not due_topic:
            break

        db_session.add(due_topic.create_topic())
        due_topic.advance_schedule_to_future()
        db_session.add(due_topic)
        db_session.commit()
Example no. 6
def insert_dev_data(config_path: str) -> None:
    """Load the app config and insert some "starter" data for a dev version."""
    session = get_session_from_config(config_path)

    # one test user and one test group are enough to exercise a dev install
    user = User('TestUser', 'password')
    group = Group(
        'testing',
        'An automatically created group to use for testing purposes',
    )
    session.add_all([user, group])

    session.commit()
def lift_expired_temporary_bans(config_path: str) -> None:
    """Lift temporary bans that have expired."""
    db_session = get_session_from_config(config_path)

    # banned users whose ban expiry time is already in the past
    expired_bans = db_session.query(User).filter(
        User.ban_expiry_time < utc_now(),  # type: ignore
        User.is_banned == True,  # noqa
    )

    # clear the ban flag and the now-irrelevant expiry time in one bulk update
    expired_bans.update(
        {"is_banned": False, "ban_expiry_time": None},
        synchronize_session=False,
    )

    db_session.commit()
Example no. 8
def close_voting_on_old_posts(config_path: str) -> None:
    """Update is_voting_closed column on all posts older than the voting period.

    Runs one bulk UPDATE for comments and one for topics, then commits both in a
    single transaction.
    """
    db_session = get_session_from_config(config_path)

    # snapshot the current time once so both queries use exactly the same
    # reference point (calling utc_now() per query would give the two cutoffs
    # slightly different bases)
    now = utc_now()

    db_session.query(Comment).filter(
        Comment.created_time < now - COMMENT_VOTING_PERIOD,
        Comment._is_voting_closed == False,  # noqa
    ).update({"_is_voting_closed": True}, synchronize_session=False)

    db_session.query(Topic).filter(
        Topic.created_time < now - TOPIC_VOTING_PERIOD,
        Topic._is_voting_closed == False,  # noqa
    ).update({"_is_voting_closed": True}, synchronize_session=False)

    db_session.commit()
Example no. 9
def initialize_db(config_path: str, alembic_config_path: Optional[str] = None) -> None:
    """Load the app config and create the database tables."""
    db_session = get_session_from_config(config_path)
    engine = db_session.bind

    create_tables(engine)
    run_sql_scripts_in_dir("sql/init/", engine)

    # default to an alembic.ini sitting next to the app config file
    if not alembic_config_path:
        config_dir = os.path.split(config_path)[0]
        alembic_config_path = os.path.join(config_dir, "alembic.ini")

    # stamp the db with the latest Alembic revision so future migrations know
    # to start from this point
    command.stamp(Config(alembic_config_path), "head")
Example no. 10
    def __init__(
        self,
        consumer_group: str,
        source_streams: Sequence[str],
        uses_db: bool = True,
        skip_pending: bool = False,
    ):
        """Initialize a new consumer, creating consumer groups and streams if needed."""
        ini_file_path = os.environ["INI_FILE"]
        parser = ConfigParser()
        parser.read(ini_file_path)

        # connect to Redis over the unix socket configured in the app's INI file
        socket_path = parser.get("app:main", "redis.unix_socket_path")
        self.redis = Redis(unix_socket_path=socket_path)

        self.consumer_group = consumer_group
        self.source_streams = [
            REDIS_KEY_PREFIX + stream for stream in source_streams
        ]

        # hardcoded for now, will need to change for multiple consumers in same group
        self.name = f"{consumer_group}-1"

        # make sure every consumer group / stream exists before we try to read
        for stream in self.source_streams:
            try:
                self.redis.xgroup_create(stream, consumer_group, mkstream=True)
            except ResponseError as error:
                # a BUSYGROUP error just means the group already exists, which
                # is fine - re-raise anything else
                if not str(error).startswith("BUSYGROUP"):
                    raise

        # consumers that need the database get a session; others get None
        self.db_session = (
            get_session_from_config(ini_file_path) if uses_db else None
        )

        # start by reading any already-pending messages by default
        self.is_reading_pending = not skip_pending

        self._init_metrics()
def generate_stats(config_path: str) -> None:
    """Generate all stats for all groups for yesterday (UTC).

    Each group's stats are committed separately; a group whose stats already
    exist for the period is skipped rather than aborting the whole run.
    """
    db_session = get_session_from_config(config_path)

    # the end time is the start of the current day, start time 1 day before that
    end_time = utc_now().replace(hour=0, minute=0, second=0, microsecond=0)
    start_time = end_time - timedelta(days=1)

    groups = db_session.query(Group).all()

    for group in groups:
        with db_session.no_autoflush:
            db_session.add(
                topics_posted(db_session, group, start_time, end_time))
            db_session.add(
                comments_posted(db_session, group, start_time, end_time))

        try:
            db_session.commit()
        except IntegrityError:
            # stats have already run for this group/period combination - roll
            # back so the session is usable again (a failed flush leaves it in
            # an invalid state until rollback() is called), then skip the group
            db_session.rollback()
            continue
Example no. 12
def initialize_db(config_path: str,
                  alembic_config_path: Optional[str] = None) -> None:
    """Load the app config and create the database tables.

    This function will probably only complete successfully when run as user postgres,
    since the run_sql_scripts_in_dir method runs psql through that user to be able to
    create functions using untrusted languages (PL/Python specifically).
    """
    db_session = get_session_from_config(config_path)
    engine = db_session.bind

    create_tables(engine)
    run_sql_scripts_in_dir("sql/init/", engine)

    # when no Alembic config was given, fall back to an alembic.ini located in
    # the same directory as the app config
    if not alembic_config_path:
        alembic_config_path = os.path.join(
            os.path.split(config_path)[0], "alembic.ini")

    # mark the current Alembic revision in the db so migrations start from here
    alembic_cfg = Config(alembic_config_path)
    command.stamp(alembic_cfg, "head")