Example #1
0
    def __init__(self, hub):
        """Fully qualify the subscribed topics, initialize the app, and register with the hub."""
        # Prefix every bare topic with the configured prefix and environment so
        # that dev replays via fedmsg-dg-replay are matched too.
        prefix = hub.config['topic_prefix']
        env = hub.config['environment']
        qualified = []
        for bare_topic in self.topic:
            qualified.append('.'.join([prefix, env, bare_topic]))
        self.topic = qualified
        _log.info('Subscribing to the following fedmsg topics: %r', self.topic)

        initialize(config.config)

        super(LibrariesioConsumer, self).__init__(hub)
Example #2
0
    def __init__(self):
        """Load the SSE feed URL and the platform whitelist from configuration."""
        conf = config.config
        self.feed = conf["SSE_FEED"]
        self.whitelist = conf["LIBRARIESIO_PLATFORM_WHITELIST"]
        _log.info("Subscribing to the following SSE feed: {}".format(self.feed))

        initialize(conf)
Example #3
0
    def __init__(self, hub):
        """Build fully-qualified topic names, initialize the app, and register with the hub."""
        # If we're in development mode, add the dev versions of the topics so
        # local playback with fedmsg-dg-replay works as expected.
        prefix, env = hub.config["topic_prefix"], hub.config["environment"]
        self.topic = [".".join([prefix, env, topic]) for topic in self.topic]
        _log.info("Subscribing to the following fedmsg topics: %r", self.topic)

        initialize(config.config)

        super(LibrariesioConsumer, self).__init__(hub)
        # NOTE(review): this overwrites the prefix only AFTER self.topic was
        # already built from the original hub.config["topic_prefix"] above, so
        # it does not affect the topics subscribed here — confirm this ordering
        # is intentional (it only influences later readers of hub.config).
        hub.config["topic_prefix"] = "org.release-monitoring"
Example #4
0
def main(debug, feed):
    """Retrieve all the packages and for each of them update the release
    version.

    Args:
        debug (bool): If True, also emit INFO-level logs to the console.
        feed: If truthy, only check projects taken from the feed; otherwise
            check every project whose ``next_check`` time has passed.
    """
    time = arrow.utcnow().datetime
    db.initialize(config)
    session = db.Session()
    run = db.Run(status='started')
    session.add(run)
    session.commit()
    LOG.setLevel(logging.DEBUG)

    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    if debug:
        # Console handler
        chand = logging.StreamHandler()
        chand.setLevel(logging.INFO)
        chand.setFormatter(formatter)
        LOG.addHandler(chand)

    # Save the logs in a file
    fhand = logging.FileHandler('/var/tmp/anitya_cron.log')
    fhand.setLevel(logging.INFO)
    fhand.setFormatter(formatter)
    LOG.addHandler(fhand)

    if feed:
        projects = list(projects_by_feed(session))
        session.commit()
    else:
        # Get all projects, that are ready for check
        projects = db.Project.query.order_by(
            sa.func.lower(db.Project.name)
        ).filter(db.Project.next_check < time).all()

    project_ids = [project.id for project in projects]

    pool_size = config.get('CRON_POOL', 10)
    LOG.info("Launching pool (%i) to update %i projects",
             pool_size, len(project_ids))
    # Close and join the pool so the worker processes are reaped before we
    # record the end of the run (previously the pool was leaked).
    pool = multiprocessing.Pool(pool_size)
    try:
        pool.map(update_project, project_ids)
    finally:
        pool.close()
        pool.join()

    run = db.Run(status='ended')
    session.add(run)
    session.commit()
Example #5
0
def main():
    """
    Retrieve database entry for user.
    """
    db.initialize(config)
    sar_username = os.getenv("SAR_USERNAME")
    sar_email = os.getenv("SAR_EMAIL")

    users = []

    # Collect matches by e-mail and/or username, depending on which SAR
    # environment variables are set.
    if sar_email:
        _log.debug("Find users by e-mail {}".format(sar_email))
        users.extend(db.User.query.filter_by(email=sar_email).all())

    if sar_username:
        _log.debug("Find users by username {}".format(sar_username))
        users.extend(db.User.query.filter_by(username=sar_username).all())

    users_list = []
    for user in users:
        entry = user.to_dict()
        # Fetch the user's social-auth rows with a parameterized raw query.
        social_rows = db.Session.execute(
            "SELECT provider,extra_data,uid FROM social_auth_usersocialauth WHERE user_id = :val",
            {"val": str(user.id)},
        )
        entry["user_social_auths"] = []
        # This part is working in postgresql, but in tests we are using sqlite
        # which doesn't know the UUID type
        for row in social_rows:  # pragma: no cover
            entry["user_social_auths"].append({
                "provider": row["provider"],
                "extra_data": row["extra_data"],
                "uid": row["uid"],
            })
        users_list.append(entry)

    json.dump(users_list, sys.stdout)
Example #6
0
def main():
    """Dump the matching users' database entries as JSON to stdout.

    Looks users up by the ``SAR_EMAIL`` and/or ``SAR_USERNAME`` environment
    variables; either, both, or neither may be set.
    """
    db.initialize(config)
    _log.setLevel(logging.DEBUG)
    sar_username = os.getenv("SAR_USERNAME")
    sar_email = os.getenv("SAR_EMAIL")

    users = []

    if sar_email:
        _log.debug("Find users by e-mail {}".format(sar_email))
        users = users + db.User.query.filter_by(email=sar_email).all()

    if sar_username:
        _log.debug("Find users by username {}".format(sar_username))
        users = users + db.User.query.filter_by(username=sar_username).all()

    # Serialize each user; a comprehension replaces the manual append loop.
    users_list = [user.to_dict() for user in users]

    json.dump(users_list, sys.stdout)
Example #7
0
                    del self.ratelimit_queue[backend]

                backends.append(backend)

        # Erase backends that were processed from blacklist dictionary
        for backend in backends:
            del self.blacklist_dict[backend]

        # Get all projects, that are ready for check
        projects = (
            db.Project.query.order_by(sa.func.lower(db.Project.name))
            .filter(db.Project.next_check < time)
            .all()
        )

        queue += [project.id for project in projects]

        # Use ordered set to have the order of the elements, but still have uniqueness
        ord_set = OrderedSet(queue)

        return list(ord_set)


if __name__ == "__main__":
    # Script entry point: set up the database, then poll indefinitely,
    # pausing WAIT_TIME seconds between check cycles.
    db.initialize(config)
    service = Checker()
    while True:
        service.run()
        sleep(WAIT_TIME)
Example #8
0
                    del self.ratelimit_queue[backend]

                backends.append(backend)

        # Erase backends that were processed from blacklist dictionary
        for backend in backends:
            del self.blacklist_dict[backend]

        # Get all projects, that are ready for check
        projects = (
            db.Project.query.order_by(sa.func.lower(db.Project.name))
            .filter(db.Project.next_check < time)
            .all()
        )

        queue += [project.id for project in projects]

        # Use ordered set to have the order of the elements, but still have uniqueness
        ord_set = OrderedSet(queue)

        return list(ord_set)


if __name__ == "__main__":
    # Entry point: initialize the database once, then run the checker in an
    # endless loop, sleeping WAIT_TIME seconds after each pass.
    db.initialize(config)
    periodic_checker = Checker()
    while True:
        periodic_checker.run()
        sleep(WAIT_TIME)