# Beispiel #1 (example separator left over from the page this code was scraped from)
# 0  (vote/score count from the source page; not part of the code)
 def __enter__(self):
     """
     Acquire the single-instance lock by exclusively creating the lockfile.

     Only the win32 branch is visible in this snippet -- NOTE(review): the
     non-win32 branch (if any) is missing here; confirm against the full
     source before relying on other platforms.

     Exits the process (via sys.exit) when the lockfile cannot be created,
     on the assumption that another instance already holds it.  Does not
     return self, so a ``with ... as x`` target is None (original behavior).
     """
     Log.note("SingleInstance.lockfile = " + self.lockfile)
     if sys.platform == 'win32':
         try:
             # file already exists, we try to remove (in case previous execution was interrupted)
             if os.path.exists(self.lockfile):
                 os.unlink(self.lockfile)
             self.fd = os.open(self.lockfile, os.O_CREAT | os.O_EXCL | os.O_RDWR)
         except Exception:
             # Original used Py2-only `except Exception, e` and never used `e`;
             # any failure here is treated as "lock held by a running instance".
             Log.alarm("Another instance is already running, quitting.")
             sys.exit(-1)
def process(source_key, source, dest_bucket, resources, please_stop=None):
    """
    SIMPLE CONVERT pulse_block INTO TALOS, IF ANY

    NOTE(review): this copy of the function is truncated -- the ``finally:``
    at the end has no body in this snippet, so the original tail is missing
    and the block as pasted is not valid Python.

    :param source_key: key naming the source block; prefixed onto derived keys
    :param source: provides read_lines() over the raw pulse records
    :param dest_bucket: receives placeholder records when a Talos log is 404
    :param resources: not used in the visible portion -- TODO confirm
    :param please_stop: not consulted in the visible portion -- TODO confirm
    """
    etl_head_gen = EtlHeadGenerator(source_key)
    stats = Dict()
    counter = 0

    # output accumulates destination keys produced (or found pre-existing)
    output = set()
    for i, pulse_line in enumerate(source.read_lines()):
        # presumably returns a cleaned record, or falsy on bad input -- verify
        pulse_record = scrub_pulse_record(source_key, i, pulse_line, stats)
        if not pulse_record:
            continue

        # only records carrying a talos payload are processed
        if not pulse_record.payload.talos:
            continue

        all_talos = []
        # ETL header describing the log file read by this iteration
        etl_file = wrap({
            "id": counter,
            "file": pulse_record.payload.logurl,
            "timestamp": Date.now().unix,
            "source": pulse_record.etl,
            "type": "join"
        })
        with Timer("Read {{url}}", {"url": pulse_record.payload.logurl}, debug=DEBUG) as timer:
            try:
                response = http.get(pulse_record.payload.logurl)
                if response.status_code == 404:
                    Log.alarm("Talos log missing {{url}}", url=pulse_record.payload.logurl)
                    k = source_key + "." + unicode(counter)
                    try:
                        # IF IT EXISTS WE WILL ASSUME SOME PAST PROCESS TRANSFORMED THE MISSING DATA ALREADY
                        dest_bucket.get_key(k)
                        output |= {k}  # FOR DENSITY CALCULATIONS
                    except Exception:
                        # key absent: write a placeholder marking the missing log
                        _, dest_etl = etl_head_gen.next(etl_file, "talos")
                        dest_etl.error = "Talos log missing"
                        output |= dest_bucket.extend([{
                            "id": etl2key(dest_etl),
                            "value": {
                                "etl": dest_etl,
                                "pulse": pulse_record.payload
                            }
                        }])

                    continue
                all_log_lines = response.all_lines

                for log_line in all_log_lines:
                    # only lines containing TALOS_PREFIX carry talos JSON
                    s = log_line.find(TALOS_PREFIX)
                    if s < 0:
                        continue

                    log_line = strings.strip(log_line[s + len(TALOS_PREFIX):])
                    talos = convert.json2value(convert.utf82unicode(log_line))

                    # attach ETL lineage and originating pulse payload to each record
                    for t in talos:
                        _, dest_etl = etl_head_gen.next(etl_file, "talos")
                        t.etl = dest_etl
                        t.pulse = pulse_record.payload
                    all_talos.extend(talos)
            except Exception, e:  # Python 2 syntax; Log.error presumably records/raises -- confirm
                Log.error("Problem processing {{url}}", {
                    "url": pulse_record.payload.logurl
                }, e)
            finally:
                # NOTE(review): snippet truncated here -- finally body missing
# Beispiel #3 (example separator left over from the page this code was scraped from)
# 0  (vote/score count from the source page; not part of the code)
def process(source_key, source, dest_bucket, resources, please_stop=None):
    """
    SIMPLE CONVERT pulse_block INTO TALOS, IF ANY

    NOTE(review): this copy is a bad splice of two different functions.  The
    ``finally:`` body below is over-indented and references names never bound
    here (dest_key, url), and it is followed by an illegal second ``except``
    clause (after ``finally``) that belongs to a *different* function (names
    name, url, file_num are undefined in this scope).  Not valid Python as
    pasted; reconcile against the real source before use.

    :param source_key: key naming the source block; prefixed onto derived keys
    :param source: provides read_lines() over the raw pulse records
    :param dest_bucket: receives placeholder records when a Talos log is 404
    :param resources: not used in the visible portion -- TODO confirm
    :param please_stop: not consulted in the visible portion -- TODO confirm
    """
    etl_head_gen = EtlHeadGenerator(source_key)
    stats = Dict()
    counter = 0

    # output accumulates destination keys produced (or found pre-existing)
    output = set()
    for i, pulse_line in enumerate(source.read_lines()):
        # presumably returns a cleaned record, or falsy on bad input -- verify
        pulse_record = scrub_pulse_record(source_key, i, pulse_line, stats)
        if not pulse_record:
            continue

        # only records carrying a talos payload are processed
        if not pulse_record.payload.talos:
            continue

        all_talos = []
        # ETL header describing the log file read by this iteration
        etl_file = wrap({
            "id": counter,
            "file": pulse_record.payload.logurl,
            "timestamp": Date.now().unix,
            "source": pulse_record.etl,
            "type": "join"
        })
        with Timer("Read {{url}}", {"url": pulse_record.payload.logurl},
                   debug=DEBUG) as timer:
            try:
                response = http.get(pulse_record.payload.logurl)
                if response.status_code == 404:
                    Log.alarm("Talos log missing {{url}}",
                              url=pulse_record.payload.logurl)
                    k = source_key + "." + unicode(counter)
                    try:
                        # IF IT EXISTS WE WILL ASSUME SOME PAST PROCESS TRANSFORMED THE MISSING DATA ALREADY
                        dest_bucket.get_key(k)
                        output |= {k}  # FOR DENSITY CALCULATIONS
                    except Exception:
                        # key absent: write a placeholder marking the missing log
                        _, dest_etl = etl_head_gen.next(etl_file, "talos")
                        dest_etl.error = "Talos log missing"
                        output |= dest_bucket.extend([{
                            "id": etl2key(dest_etl),
                            "value": {
                                "etl": dest_etl,
                                "pulse": pulse_record.payload
                            }
                        }])

                    continue
                all_log_lines = response.all_lines

                for log_line in all_log_lines:
                    # only lines containing TALOS_PREFIX carry talos JSON
                    s = log_line.find(TALOS_PREFIX)
                    if s < 0:
                        continue

                    log_line = strings.strip(log_line[s + len(TALOS_PREFIX):])
                    talos = convert.json2value(convert.utf82unicode(log_line))

                    # attach ETL lineage and originating pulse payload to each record
                    for t in talos:
                        _, dest_etl = etl_head_gen.next(etl_file, "talos")
                        t.etl = dest_etl
                        t.pulse = pulse_record.payload
                    all_talos.extend(talos)
            except Exception, e:  # Python 2 syntax
                Log.error("Problem processing {{url}}",
                          {"url": pulse_record.payload.logurl}, e)
            finally:
                    # NOTE(review): splice point -- wrong indent; dest_key/url undefined here
                    if DEBUG_SHOW_LINE:
                        Log.note("Copied {{key}}: {{url}}",
                                 key=dest_key,
                                 url=url)
            # NOTE(review): `except` after `finally` is a SyntaxError; from another function
            except Exception, e:
                Log.error("Problem processing {{name}} = {{url}}",
                          name=name,
                          url=url,
                          cause=e)

        # NOTE(review): file_num is never assigned in this copy -- from the other function
        if not file_num and DEBUG_SHOW_NO_LOG:
            Log.note("No structured log {{json}}", json=pulse_record.payload)

    if stats.num_missing_envelope:
        Log.alarm("{{num}} lines have pulse message stripped of envelope",
                  num=stats.num_missing_envelope)

    return output


def verify_blobber_file(line_number, name, url):
    """
    Decide whether a blobber file is worth reading.

    :param line_number:  for debugging
    :param name:  for debugging
    :param url:  TO BE READ
    :return:  RETURNS BYTES **NOT** UNICODE
    """
    # Known-uninteresting logs, and binary/markup artifacts, are skipped outright.
    skip_names = ("emulator-5554.log", "qemu.log")
    skip_suffixes = (".png", ".html")
    if name in skip_names or name.endswith(skip_suffixes):
        return None, 0
                    # NOTE(review): orphan fragment -- the enclosing `def` (an
                    # unittest-processing variant of process()) is not in this
                    # file; names like pulse_record, etl_header_gen, output,
                    # file_num, destination are bound out of view.
                    buildbot_summary = transform_buildbot(pulse_record.payload, resources, filename=name)
                    # skip try-branch pushes unless PARSE_TRY is enabled
                    if not PARSE_TRY and buildbot_summary.build.branch == "try":
                        continue
                    dest_key, dest_etl = etl_header_gen.next(pulse_record.etl, name)
                    set_default(dest_etl, machine_metadata)
                    new_keys = process_unittest(dest_key, dest_etl, buildbot_summary, log_content, destination, please_stop=please_stop)

                    file_num += 1
                    output.append(dest_key)

                    if source.bucket.settings.fast_forward:
                        fast_forward=True

                    if DEBUG_SHOW_LINE:
                        Log.note(
                            "ETLed line {{key}}: {{url}}",
                            key=dest_key,
                            url=url
                        )
            except Exception, e:  # Python 2 syntax
                Log.error("Problem processing {{name}} = {{url}}", name=name, url=url, cause=e)

        # no structured log file was found for this pulse record
        if not file_num and DEBUG_SHOW_NO_LOG:
            Log.note("No structured log {{json}}", json=pulse_record.payload)

        if stats.num_missing_envelope:
            Log.alarm("{{num}} lines have pulse message stripped of envelope", num=stats.num_missing_envelope)

        return output