Example #1
0
 def query(self, command):
     """
     Hand COMMAND to the db worker thread and block this thread until done.

     :param command: COMMAND FOR SQLITE
     :return: list OF RESULTS
     """
     done = Signal()
     output = Dict()
     # worker fills `output` in place, then triggers `done`
     self.queue.add((command, output, done, None))
     done.wait_for_go()
     if output.exception:
         Log.error("Problem with Sqlite call", cause=output.exception)
     return output
Example #2
0
 def query(self, command):
     """
     Send COMMAND to the sqlite worker and wait for it to complete.

     :param command: COMMAND FOR SQLITE
     :return: list OF RESULTS
     """
     ready = Signal()
     response = Dict()
     # the worker thread writes into `response`, then fires `ready`
     self.queue.add((command, response, ready, None))
     ready.wait_for_go()
     if response.exception:
         Log.error("Problem with Sqlite call", cause=response.exception)
     return response
Example #3
0
    def _get_job_results_from_th(self, branch, revision):
        output = []

        with self.locker:
            waiting_threads = self.pending.get((branch, revision))
            if waiting_threads is None:
                sig = None
                waiting_threads = self.pending[(branch, revision)] = [output]
            else:
                sig = Signal()
                waiting_threads.append(Signal())

        if sig is not None:
            Log.note("Holding thread for {{branch}}/{{revision}}",
                     branch=branch,
                     revision=revision)
            sig.wait_for_go()
            return waiting_threads[0]

        try:
            results = DictList()
            while True:
                response = self._rate_limited_get_json(
                    expand_template(RESULT_SET_URL, {
                        "branch": branch,
                        "revision": revision[0:12:]
                    }))
                results.extend(response.results)
                if len(response.results) != 1000:
                    break

            for g, repo_ids in jx.groupby(results.id, size=10):
                jobs = DictList()
                with Timer("Get {{num}} jobs", {"num": len(repo_ids)},
                           debug=DEBUG):
                    while True:
                        response = self._rate_limited_get_json(
                            expand_template(
                                JOBS_URL, {
                                    "branch":
                                    branch,
                                    "offset":
                                    len(jobs),
                                    "result_set_id":
                                    ",".join(map(unicode, repo_ids))
                                }))
                        jobs.extend(response.results)
                        if len(response.results) != 2000:
                            break

                with Timer("Get (up to {{num}}) details from TH",
                           {"num": len(jobs)},
                           debug=DEBUG):
                    details = []
                    for _, ids in jx.groupby(jobs.id, size=40):
                        details.extend(
                            self._rate_limited_get_json(url=expand_template(
                                DETAILS_URL, {
                                    "branch": branch,
                                    "job_id": ",".join(map(unicode, ids))
                                }),
                                                        retry={
                                                            "times": 3
                                                        }).results)
                    details = {
                        k.job_guid: list(v)
                        for k, v in jx.groupby(details, "job_guid")
                    }

                with Timer("Get (up to {{num}}) stars from TH",
                           {"num": len(jobs)},
                           debug=DEBUG):
                    stars = []
                    for _, ids in jx.groupby(jobs.id, size=40):
                        response = self._rate_limited_get_json(
                            expand_template(
                                JOB_BUG_MAP, {
                                    "branch": branch,
                                    "job_id": "&job_id=".join(map(
                                        unicode, ids))
                                }))
                        stars.extend(response),
                    stars = {
                        k.job_id: list(v)
                        for k, v in jx.groupby(stars, "job_id")
                    }

                with Timer("Get notes from TH", debug=DEBUG):
                    notes = []
                    for jid in set([
                            j.id
                            for j in jobs if j.failure_classification_id != 1
                    ] + stars.keys()):
                        response = self._rate_limited_get_json(
                            expand_template(NOTES_URL, {
                                "branch": branch,
                                "job_id": unicode(jid)
                            }))
                        notes.extend(response),
                    notes = {
                        k.job_id: list(v)
                        for k, v in jx.groupby(notes, "job_id")
                    }

                for j in jobs:
                    output.append(
                        self._normalize_job_result(branch, revision, j,
                                                   details, notes, stars))

            if output:
                with Timer("Write to ES cache", debug=DEBUG):
                    self.cache.extend(
                        {
                            "id": "-".join([c.repo.branch,
                                            unicode(c.job.id)]),
                            "value": c
                        } for c in output)
                    try:
                        self.cache.flush()
                    except Exception, e:
                        Log.warning("problem flushing. nevermind.", cause=e)
        finally:
            with self.locker:
                for p in waiting_threads[1:]:
                    if DEBUG:
                        Log.note(
                            "releasing thread for {{branch}}/{{revision}}",
                            branch=branch,
                            revision=revision)
                    p.go()
                self.pending[(branch, revision)] = None

        return output
Example #4
0
    def _get_job_results_from_th(self, branch, revision):
        output = []

        with self.locker:
            waiting_threads = self.pending.get((branch, revision))
            if waiting_threads is None:
                sig = None
                waiting_threads = self.pending[(branch, revision)] = [output]
            else:
                sig = Signal()
                waiting_threads.append(Signal())

        if sig is not None:
            Log.note("Holding thread for {{branch}}/{{revision}}", branch=branch, revision=revision)
            sig.wait_for_go()
            return waiting_threads[0]

        try:
            results = DictList()
            while True:
                response = self._rate_limited_get_json(expand_template(RESULT_SET_URL, {"branch": branch, "revision": revision[0:12:]}))
                results.extend(response.results)
                if len(response.results) != 1000:
                    break

            for g, repo_ids in jx.groupby(results.id, size=10):
                jobs = DictList()
                with Timer("Get {{num}} jobs", {"num": len(repo_ids)}, debug=DEBUG):
                    while True:
                        response = self._rate_limited_get_json(expand_template(JOBS_URL, {"branch": branch, "offset": len(jobs), "result_set_id": ",".join(map(unicode, repo_ids))}))
                        jobs.extend(response.results)
                        if len(response.results) != 2000:
                            break

                with Timer("Get (up to {{num}}) details from TH", {"num": len(jobs)}, debug=DEBUG):
                    details = []
                    for _, ids in jx.groupby(jobs.id, size=40):
                        details.extend(self._rate_limited_get_json(
                            url=expand_template(DETAILS_URL, {"branch": branch, "job_id": ",".join(map(unicode, ids))}),
                            retry={"times": 3}
                        ).results)
                    details = {k.job_guid: list(v) for k, v in jx.groupby(details, "job_guid")}

                with Timer("Get (up to {{num}}) stars from TH", {"num": len(jobs)}, debug=DEBUG):
                    stars = []
                    for _, ids in jx.groupby(jobs.id, size=40):
                        response = self._rate_limited_get_json(expand_template(JOB_BUG_MAP, {"branch": branch, "job_id": "&job_id=".join(map(unicode, ids))}))
                        stars.extend(response),
                    stars = {k.job_id: list(v) for k, v in jx.groupby(stars, "job_id")}

                with Timer("Get notes from TH", debug=DEBUG):
                    notes = []
                    for jid in set([j.id for j in jobs if j.failure_classification_id != 1] + stars.keys()):
                        response = self._rate_limited_get_json(expand_template(NOTES_URL, {"branch": branch, "job_id": unicode(jid)}))
                        notes.extend(response),
                    notes = {k.job_id: list(v) for k, v in jx.groupby(notes, "job_id")}

                for j in jobs:
                    output.append(self._normalize_job_result(branch, revision, j, details, notes, stars))

            if output:
                with Timer("Write to ES cache", debug=DEBUG):
                    self.cache.extend({"id": "-".join([c.repo.branch, unicode(c.job.id)]), "value": c} for c in output)
                    try:
                        self.cache.flush()
                    except Exception, e:
                        Log.warning("problem flushing. nevermind.", cause=e)
        finally:
            with self.locker:
                for p in waiting_threads[1:]:
                    if DEBUG:
                        Log.note("releasing thread for {{branch}}/{{revision}}", branch=branch, revision=revision)
                    p.go()
                self.pending[(branch, revision)] = None

        return output