Example #1
    def test_repo_commits(self):
        repo = Repo.create(**self.TEST_REPO)
        repo.save()

        response = self.fetch(self.get_app().reverse_url('view', repo.id))

        self.assertIn(self.MESSAGES['no_records'], response.body.decode())

        self.assertIn(self.MESSAGES['get_more'], response.body.decode())

        for _ in range(self.TEST_COUNT):
            # copy the fixture so the shared TEST_COMMIT dict is not mutated
            commit_data = dict(self.TEST_COMMIT, repo=repo)
            Commit(**commit_data).save()

        response = self.fetch(self.get_app().reverse_url('view', repo.id))

        self.assertEqual(response.body.decode().count(self.TEST_COMMIT['message']),
                         self.TEST_COUNT)

        self.assertIn(self.MESSAGES['get_more'], response.body.decode())

        # once there is no next page, the "get more" link should disappear
        repo.next_page = None
        repo.save()

        response = self.fetch(self.get_app().reverse_url('view', repo.id))

        self.assertNotIn(self.MESSAGES['get_more'], response.body.decode())
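The test above assumes peewee-style Repo and Commit models behind a Tornado AsyncHTTPTestCase. A minimal sketch of such models follows; the field names (next_page, message) are inferred from the assertions and are assumptions, not the original project's schema:

import peewee

db = peewee.SqliteDatabase(":memory:")

class Repo(peewee.Model):
    # next_page is inferred from the test: while it is set, the view shows a
    # "get more" link; clearing it hides the link
    name = peewee.CharField()
    next_page = peewee.CharField(null=True)

    class Meta:
        database = db

class Commit(peewee.Model):
    # each commit belongs to a repo and carries the message the test counts
    repo = peewee.ForeignKeyField(Repo, backref="commits")
    message = peewee.TextField()

    class Meta:
        database = db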
Example #2
def post(self):
    # read form fields; both may be missing
    reponame = self.get_argument("reponame", None)
    desc = self.get_argument("description", None)
    user = self.current_user
    if not reponame:
        # no name given: send the user back to the creation form
        self.redirect(self.reverse_url("web:create-repo"))
        return
    repo = Repo.create(user=user, name=reponame, desc=desc)
    self.redirect(self.reverse_url("web:repo", user.name, repo.name))
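Both redirects rely on named Tornado routes: reverse_url("web:repo", user.name, repo.name) only resolves if the application defines a URLSpec named "web:repo" with two capture groups. A minimal sketch of such a routing table, where CreateRepoHandler and RepoHandler are hypothetical handler names standing in for the class that owns the post() above:

from tornado.web import Application, RequestHandler, url

class CreateRepoHandler(RequestHandler):
    # hypothetical stand-in for the handler containing the post() above
    pass

class RepoHandler(RequestHandler):
    pass

# the route names must match the strings passed to reverse_url(); the
# two-group pattern matches reverse_url("web:repo", user.name, repo.name)
application = Application([
    url(r"/repo/create", CreateRepoHandler, name="web:create-repo"),
    url(r"/([^/]+)/([^/]+)", RepoHandler, name="web:repo"),
])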
Example #3
async def monitor_apps_lists(monitor_git=False,
                             monitor_only_good_quality_apps=False):
    "parse apps lists every hour or so to detect new apps"

    # only support github for now :(
    async def get_master_commit_sha(url):
        command = await asyncio.create_subprocess_shell(
            f"git ls-remote {url} master",
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE)
        data = await command.stdout.read()
        commit_sha = data.decode().strip().replace("\t", " ").split(" ")[0]
        return commit_sha

    async with aiohttp.ClientSession() as session:
        task_logger.info(f"Downloading applist...")
        async with session.get(APPS_LIST) as resp:
            data = await resp.json()
            data = data["apps"]

    repos = {x.name: x for x in Repo.select()}

    for app_id, app_data in data.items():
        commit_sha = await get_master_commit_sha(app_data["git"]["url"])

        if app_data["state"] != "working":
            task_logger.debug(
                f"skip {app_id} because state is {app_data['state']}")
            continue

        if monitor_only_good_quality_apps:
            if app_data.get("level") in [None, "?"] or app_data["level"] <= 4:
                task_logger.debug(
                    f"skip {app_id} because app is not good quality")
                continue

        # already known; check whether there are new commits
        if app_id in repos:
            repo = repos[app_id]

            # but first check if the URL has changed
            if repo.url != app_data["git"]["url"]:
                old_url = repo.url
                task_logger.info(
                    f"Application {app_id} has changed its URL from {old_url} to {app_data['git']['url']}"
                )

                repo.url = app_data["git"]["url"]
                repo.save()

                await broadcast(
                    {
                        "action": "update_app",
                        "data": model_to_dict(repo),
                    }, "apps")

                # update the URL of all scheduled jobs that still point at the
                # old URL; this isn't perfect because it could overwrite jobs
                # added by hand, but well...
                for job in Job.select().where(Job.url_or_path == old_url,
                                              Job.state == "scheduled"):
                    job.url_or_path = repo.url
                    job.save()

                    task_logger.info(
                        f"Updating job {job.name} #{job.id} for {app_id} to {repo.url} since the app's URL has changed"
                    )

                    await broadcast(
                        {
                            "action": "update_job",
                            "data": model_to_dict(job),
                        }, [
                            "jobs", f"job-{job.id}",
                            f"app-jobs-{job.url_or_path}"
                        ])

            # we don't want to do anything else
            if not monitor_git:
                continue

            repo_is_updated = False
            if repo.revision != commit_sha:
                task_logger.info(
                    f"Application {app_id} has new commits on github "
                    f"({repo.revision} → {commit_sha}), schedule new job")
                repo.revision = commit_sha
                repo.save()
                repo_is_updated = True

                await create_job(app_id, repo.url)

            repo_state = "working" if app_data[
                "state"] == "working" else "other_than_working"

            if repo.state != repo_state:
                repo.state = repo_state
                repo.save()
                repo_is_updated = True

            if repo.random_job_day is None:
                repo.random_job_day = random.randint(1, 28)
                repo.save()
                repo_is_updated = True

            if repo_is_updated:
                await broadcast(
                    {
                        "action": "update_app",
                        "data": model_to_dict(repo),
                    }, "apps")

        # new app
        else:
            task_logger.info(f"New application detected: {app_id} " +
                             (", scheduling a new job" if monitor_git else ""))
            repo = Repo.create(
                name=app_id,
                url=app_data["git"]["url"],
                revision=commit_sha,
                state="working"
                if app_data["state"] == "working" else "other_than_working",
                random_job_day=random.randint(1, 28),
            )

            await broadcast(
                {
                    "action": "new_app",
                    "data": model_to_dict(repo),
                }, "apps")

            if monitor_git:
                await create_job(app_id, repo.url)

        await asyncio.sleep(1)

    # delete apps removed from the list
    unseen_repos = set(repos.keys()) - set(data.keys())

    for repo_name in unseen_repos:
        repo = repos[repo_name]

        # delete scheduled jobs first
        task_logger.info(
            f"Application {repo_name} has been removed from the app list, start by removing its scheduled job if there are any..."
        )
        for job in Job.select().where(Job.url_or_path == repo.url,
                                      Job.state == "scheduled"):
            await api_stop_job(None, job.id)  # not sure this is going to work
            job_id = job.id

            task_logger.info(
                f"Delete scheduled job {job.name} #{job.id} for application {repo_name} because the application is being deleted."
            )

            data = model_to_dict(job)
            job.delete_instance()

            await broadcast({
                "action": "delete_job",
                "data": data,
            }, ["jobs", f"job-{job_id}", f"app-jobs-{job.url_or_path}"])

        task_logger.info(
            f"Delete application {repo_name} because it has been removed from the apps list."
        )
        data = model_to_dict(repo)
        repo.delete_instance()

        await broadcast({
            "action": "delete_app",
            "data": data,
        }, "apps")