Example #1
    def setUpClass(self):
        while True:
            try:
                es = test_jx.global_settings.backend_es
                http.get_json(URL(es.host, port=es.port))
                break
            except Exception as e:
                e = Except.wrap(e)
                if "No connection could be made because the target machine actively refused it" in e or "Connection refused" in e:
                    Log.alert("Problem connecting")
                else:
                    Log.error("Server raised exception", e)

        # REMOVE OLD INDEXES
        cluster = elasticsearch.Cluster(test_jx.global_settings.backend_es)
        aliases = cluster.get_aliases()
        for a in aliases:
            try:
                if a.index.startswith("testing_"):
                    create_time = Date(
                        a.index[-15:], "%Y%m%d_%H%M%S"
                    )  # EXAMPLE testing_0ef53e45b320160118_180420
                    if create_time < Date.now() - 10 * MINUTE:
                        cluster.delete_index(a.index)
            except Exception as e:
                Log.warning("Problem removing {{index|quote}}",
                            index=a.index,
                            cause=e)
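
The loop above retries immediately, with no pause between refused connections. The same wait-for-service pattern as a stand-alone sketch, using plain requests (the helper name and timings are assumptions, not project code):

import time
import requests  # the library these http helpers wrap

def wait_for_service(url, pause=1.0, give_up_after=60.0):
    # Poll until the server answers; treat "connection refused" as
    # "not up yet" and any other failure as fatal.
    deadline = time.monotonic() + give_up_after
    while True:
        try:
            requests.get(url, timeout=5).raise_for_status()
            return
        except requests.ConnectionError:
            if time.monotonic() > deadline:
                raise
            time.sleep(pause)  # unlike the example, back off between attempts
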
Example #2
def download_perfherder(desc, repo, id, dummy, framework):
    sig_result = http.get_json(
        "https://treeherder.mozilla.org/api/project/"
        + repo
        + "/performance/signatures/?format=json&framework="
        + str(framework)
        + "&id="
        + str(id)
    )

    signature = first(sig_result.keys())
    data_result = http.get_json(
        "https://treeherder.mozilla.org/api/project/"
        + repo
        + "/performance/data/?signatures="
        + signature
    )

    Log.note(
        "{{result|json}}",
        result={
            "name": desc,
            "data": jx.run({
                "from": ListContainer("data", data_result[signature]),
                "sort": "push_timestamp",
                "select": "value"
            }).data
        },
    )
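
The URLs here are built by string concatenation. A hedged sketch of the same signatures request with urlencode doing the query-string escaping (the helper is hypothetical):

from urllib.parse import urlencode

TREEHERDER = "https://treeherder.mozilla.org/api/project"

def signatures_url(repo, framework, id):
    # Same endpoint as above; urlencode handles escaping and ordering.
    query = urlencode({"format": "json", "framework": framework, "id": id})
    return "%s/%s/performance/signatures/?%s" % (TREEHERDER, repo, query)
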
Example #3
    def __init__(self, hg, rate_limit, use_cache=True, cache=None, settings=None):
        self.settings = settings
        self.failure_classification = {c.id: c.name for c in http.get_json(FAILURE_CLASSIFICATION_URL)}
        self.repo = {c.id: c.name for c in http.get_json(REPO_URL)}
        self.hg = hg
        self.cache = elasticsearch.Cluster(cache).get_or_create_index(cache)
        self.locker = Lock()
        self.pending = {}

        self.rate_locker = Lock()
        self.request_times = [0] * rate_limit
        self.request_pointer = 0
Example #4
    def verify_jwt_token(self, token):
        jwks = http.get_json("https://" + self.auth0.domain +
                             "/.well-known/jwks.json")
        unverified_header = jwt.get_unverified_header(token)
        algorithm = unverified_header["alg"]
        if algorithm != "RS256":
            Log.error("Expecting a RS256 signed JWT Access Token")

        key_id = unverified_header["kid"]
        key = unwrap(first(key for key in jwks["keys"]
                           if key["kid"] == key_id))
        if not key:
            Log.error("could not find {{key}}", key=key_id)

        try:
            return jwt.decode(
                token,
                key,
                algorithms=algorithm,
                audience=self.auth0.api.identifier,
                issuer="https://" + self.auth0.domain + "/",
            )
        except jwt.ExpiredSignatureError as e:
            Log.error("Token has expired", code=403, cause=e)
        except jwt.JWTClaimsError as e:
            Log.error(
                "Incorrect claims, please check the audience and issuer",
                code=403,
                cause=e,
            )
        except Exception as e:
            Log.error("Problem parsing", cause=e)
Example #5
def new_instance(
        host,
        index,
        type=None,
        name=None,
        port=9200,
        read_only=True,
        timeout=None,  # NUMBER OF SECONDS TO WAIT FOR RESPONSE, OR SECONDS TO WAIT FOR DOWNLOAD (PASSED TO requests)
        wait_for_active_shards=1,  # ES WRITE CONSISTENCY (https://www.elastic.co/guide/en/elasticsearch/reference/1.7/docs-index_.html#index-consistency)
        typed=None,
        kwargs=None):
    try:
        known = known_hosts.get((host, port))
        if known:
            return known(kwargs=kwargs)

        url = URL(host)
        url.port = port
        status = http.get_json(url, stream=False)
        version = status.version.number
        if version.startswith(("5.", "6.")):
            from jx_elasticsearch.es52 import ES52
            type2container.setdefault("elasticsearch", ES52)
            known_hosts[(host, port)] = ES52
            output = ES52(kwargs=kwargs)
            return output
        else:
            Log.error("No jx interpreter for Elasticsearch {{version}}",
                      version=version)
    except Exception as e:
        Log.error("Can not make an interpreter for Elasticsearch", cause=e)
Example #6
    def verify_opaque_token(self, token):
        # Opaque Access Token
        url = "https://" + self.auth0.domain + "/userinfo"
        response = http.get_json(url,
                                 headers={"Authorization": "Bearer " + token})
        DEBUG and Log.note("content: {{body|json}}", body=response)
        return response
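
For comparison, the same userinfo call written directly against requests, which http.get_json wraps (a hedged equivalent; error handling and retries differ):

import requests

def userinfo(domain, token):
    response = requests.get(
        "https://" + domain + "/userinfo",
        headers={"Authorization": "Bearer " + token},
        timeout=10,
    )
    response.raise_for_status()  # surface 401s instead of returning a body
    return response.json()
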
Example #7
    def _setup_grcov(self):
        sudo("apt-get install -y gcc")

        response = http.get_json("https://api.github.com/repos/marco-c/grcov/releases/latest")
        with cd("~/ActiveData-ETL"):
            for asset in response.assets:
                if self.settings.grcov.platform in asset.browser_download_url:
                    run("wget "+asset.browser_download_url)
                    run(expand_template("tar xf grcov-{{platform}}.tar.bz2", self.settings.grcov))
                    run(expand_template("rm grcov-{{platform}}.tar.bz2", self.settings.grcov))
Example #8
    def _get_clog(self, clog_url):
        try:
            Log.note("Searching through changelog {{url}}", url=clog_url)
            clog_obj = http.get_json(clog_url, retry=RETRY)
            return clog_obj
        except Exception as e:
            Log.error(
                "Unexpected error getting changeset-log for {{url}}: {{error}}",
                url=clog_url,
                error=e)
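
RETRY is defined elsewhere in the project and passed straight through to http.get_json. What a retry argument buys is roughly this loop (a sketch; the wrapper name and defaults are assumptions):

import time

def get_json_with_retry(get_json, url, times=3, sleep=5):
    # Try `times` times, pausing between failures; re-raise the last
    # error once attempts are exhausted.
    for attempt in range(times):
        try:
            return get_json(url)
        except Exception:
            if attempt == times - 1:
                raise
            time.sleep(sleep)
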
Example #9
    def _rate_limited_get_json(self, *args, **kwargs):
        now = Date.now().unix
        with self.rate_locker:
            if self.request_times[self.request_pointer] >= now - 1:
                Log.note("Rate limiting")
                Thread.sleep(seconds=self.request_times[self.request_pointer] - now + 1)
            self.request_times[self.request_pointer] = now
            self.request_pointer += 1
            self.request_pointer %= len(self.request_times)

        return http.get_json(*args, **kwargs)
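
request_times is a ring buffer holding the timestamps of the last rate_limit requests; when the slot about to be overwritten is less than a second old, the caller sleeps until the window reopens. A stand-alone version using only the standard library (the class name is an assumption):

import threading
import time

class RateLimiter:
    # Allow at most `limit` calls in any rolling one-second window.
    def __init__(self, limit):
        self._lock = threading.Lock()
        self._times = [0.0] * limit  # timestamps of the last `limit` calls
        self._i = 0

    def wait(self):
        with self._lock:
            oldest = self._times[self._i]
            now = time.time()
            if oldest >= now - 1:  # limit hit: oldest call is under 1s old
                time.sleep(oldest - now + 1)
            self._times[self._i] = time.time()
            self._i = (self._i + 1) % len(self._times)

Sleeping while the lock is held serializes callers, just as in the example; that is what spreads a burst out to the configured rate.
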
Example #13
    def get_job_classification(self, branch, revision):
        results = http.get_json(expand_template(RESULT_SET_URL, {"branch": branch, "revision": revision[:12]}))
        for r in results.results:
            jobs = http.get_json(expand_template(JOBS_URL, {"branch": branch, "result_set_id": r.id}))

            for j in jobs:
                notes = http.get_json(expand_template(NOTES_URL, {"branch": branch, "job_id": j.id}))
                for n in notes:
                    if not n.note:
                        continue

                    Log.note(
                        "{{note|json}}",
                        note={
                            "job_id": j.id,
                            "result_set_id": r.id,
                            "branch": branch,
                            "revision": r.revision,
                            "failure_classification_id": j.failure_classification_id,
                            "result": j.result,
                            "note_timestamp": n.timestamp,
                            "note": n.note
                        }
                    )
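
expand_template fills {{name}} placeholders from a dict. A rough stand-in showing only the substitution behavior relied on here (the real pyLibrary helper also supports filters such as |json and |quote):

import re

def expand_template(template, values):
    # Minimal substitute: replace each {{name}} with str(values["name"]).
    return re.sub(r"\{\{(\w+)\}\}", lambda m: str(values[m.group(1)]), template)
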
Example #15
File: __init__.py  Project: rv404674/TUID
def new_instance(
    host,
    index,
    type=None,
    name=None,
    port=9200,
    read_only=True,
    timeout=None,  # NUMBER OF SECONDS TO WAIT FOR RESPONSE, OR SECONDS TO WAIT FOR DOWNLOAD (PASSED TO requests)
    wait_for_active_shards=1,  # ES WRITE CONSISTENCY (https://www.elastic.co/guide/en/elasticsearch/reference/1.7/docs-index_.html#index-consistency)
    typed=None,
    kwargs=None
):
    try:
        known = known_hosts.get((host, port))
        if known:
            return known(kwargs=kwargs)

        url = URL(host)
        url.port = port
        status = http.get_json(url, stream=False)
        version = status.version.number
        if version.startswith("1."):
            from jx_elasticsearch.es14 import ES14
            type2container.setdefault("elasticsearch", ES14)
            known_hosts[(host, port)] = ES14
            output = ES14(kwargs=kwargs)
            return output
        elif version.startswith(("5.", "6.")):
            from jx_elasticsearch.es52 import ES52
            type2container.setdefault("elasticsearch", ES52)
            known_hosts[(host, port)] = ES52
            output = ES52(kwargs=kwargs)
            return output
        else:
            Log.error("No jx interpreter for Elasticsearch {{version}}", version=version)
    except Exception as e:
        Log.error("Can not make an interpreter for Elasticsearch", cause=e)
Example #16
def queue_consumer(client, pull_queue, please_stop=None, kwargs=None):
    queue = aws.Queue(pull_queue)
    client = TuidClient(client)
    try_revs = {}
    test_try_revs = True

    #while len(queue) > 0:
    #    request = queue.pop(till=please_stop)
    #    if request:
    #        Log.note("Popping request from {{time}}", time=request.meta.request_time)
    #        queue.commit()

    while not please_stop:
        request = queue.pop(till=please_stop)
        if please_stop:
            break
        if not request:
            Log.note("Nothing in queue, pausing for 5 seconds...")
            (please_stop | Till(seconds=5)).wait()
            continue
        Log.note("Found something in queue")
        repo = 'mozilla-central'

        and_op = request.where['and']

        revision = None
        files = None
        for a in and_op:
            if a.eq.revision:
                revision = a.eq.revision
            elif a['in'].path:
                files = a['in'].path
            elif a.eq.path:
                files = [a.eq.path]

        if not files:  # covers both an empty list and no matching path clause
            Log.warning("No files in the given request: {{request}}",
                        request=request)
            continue

        if revision[:12] in try_revs and not test_try_revs:
            Log.warning(
                "Revision {{cset}} does not exist in the {{branch}} branch",
                cset=revision[:12],
                branch='mozilla-central')
            queue.commit()
            continue

        clog_url = HG_URL / 'mozilla-central' / 'json-log' / revision[:12]
        clog_obj = http.get_json(clog_url, retry=RETRY)
        if isinstance(clog_obj, (text_type, str)):
            Log.warning(
                "Revision {{cset}} does not exist in the {{branch}} branch",
                cset=revision[:12],
                branch='mozilla-central')
            try_revs[revision[:12]] = True
            if not test_try_revs:
                queue.commit()
                continue
            else:
                json_rev_url = 'https://hg.mozilla.org/try/json-rev/' + revision[:12]
                clog_obj = http.get_json(json_rev_url, retry=RETRY)
                if 'phase' not in clog_obj:
                    Log.warning(
                        "Revision {{cset}} does not exist in the try branch",
                        cset=revision[:12],
                        branch='mozilla-central')
                    queue.commit()
                    continue

                if clog_obj['phase'] == 'draft':
                    repo = 'try'

        else:
            Log.note("Revision {{cset}} exists on mozilla-central.",
                     cset=revision[:12])

        request.branch = repo
        with Timer("Make TUID request from {{timestamp|date}}",
                   {"timestamp": request.meta.request_time}):
            client.enabled = True  # ENSURE THE REQUEST IS MADE
            result = http.post_json("http://localhost:5000/tuid",
                                    json=request,
                                    timeout=10000)
            if not client.enabled:
                Log.note("pausing consumer for {{num}}sec",
                         num=PAUSE_ON_FAILURE)
                Till(seconds=PAUSE_ON_FAILURE).wait()
            if result is None or len(result.data) != len(files):
                Log.warning("expecting response for every file requested")

        queue.commit()
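
For reference, a request body with the shape this where-clause parser expects (all values are made up for illustration):

request = {
    "where": {"and": [
        {"eq": {"revision": "abcdef012345"}},           # hypothetical changeset
        {"in": {"path": ["dom/base/nsDocument.cpp"]}},  # or {"eq": {"path": ...}}
    ]},
    "meta": {"request_time": 1500000000},
}
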
Example #17
File: app.py  Project: mars-f/ActiveData
def backend_check():
    http.get_json(config.elasticsearch.host + ":" + text_type(config.elasticsearch.port))
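
A hypothetical way to wire that check into a Flask route, so a load balancer sees a 200 only while Elasticsearch answers (the route and app object are assumptions, not taken from app.py):

from flask import Flask

app = Flask(__name__)

@app.route("/healthz")
def healthz():
    backend_check()  # raises, and thus returns a 500, if ES is unreachable
    return "ok"
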
Example #18
    # Get the service to delete entries
    service = TUIDService(conn=sql.Sql("resources/tuid_app.db"), kwargs=config.tuid)

    # Get a list of 1000 files from stressfiles
    with open('resources/stressfiles.json', 'r') as f:
        files = json.load(f)

    # Get rev_count revisions from changelogs
    csets = []
    final_rev = ''
    while len(csets) < rev_count:
        # Get a changelog
        clog_url = HG_URL / 'mozilla-central' / 'json-log' / final_rev
        try:
            Log.note("Searching through changelog {{url}}", url=clog_url)
            clog_obj = http.get_json(clog_url, retry=RETRY)
        except Exception as e:
            Log.error("Unexpected error getting changset-log for {{url}}", url=clog_url, error=e)

        cset = ''
        for clog_cset in clog_obj['changesets']:
            cset = clog_cset['node'][:12]
            if len(csets) < rev_count:
                csets.append(cset)

        final_rev = cset

    # Oldest is now first
    csets.reverse()

    # Make the initial insertion (always slow)
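
The changelog walk above condenses to this paging loop (a hypothetical helper; get_json stands in for http.get_json with retry):

def collect_changesets(hg_url, repo, want, get_json):
    # Follow json-log pages backwards until `want` 12-char ids are
    # collected; each page's last node seeds the next page request.
    csets, tail = [], ""
    while len(csets) < want:
        page = get_json("%s/%s/json-log/%s" % (hg_url, repo, tail))
        for entry in page["changesets"]:
            tail = entry["node"][:12]
            if len(csets) < want:
                csets.append(tail)
    csets.reverse()  # oldest first, matching the example
    return csets
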