Exemplo n.º 1
0
    def get_data_info(self, data_filter=None):
        """Return metadata about the time-series dataset file and a filtered row window.

        @param data_filter  optional dict; recognized keys: max_rows (int),
                            start_time, end_time, start_time_include (bool),
                            decimate (bool)
        @retval dict with dataset stats (row counts, file size, first/last
                timestamps) and the filter's row interval; empty dict if the
                dataset file does not exist.
        """
        data_filter = data_filter or {}
        ds_filename = self._get_ds_filename()
        if not os.path.exists(ds_filename):
            return {}
        # Open with retries so a concurrent writer holding the lock doesn't fail us.
        data_file = HDFLockingFile(ds_filename, "r", retry_count=10, retry_wait=0.2)
        try:
            res_info = {}
            max_rows = data_filter.get("max_rows", DEFAULT_MAX_ROWS)
            start_time = data_filter.get("start_time", None)
            end_time = data_filter.get("end_time", None)
            start_time_include = data_filter.get("start_time_include", True) is True
            should_decimate = data_filter.get("decimate", False) is True

            ds_time = data_file["vars/%s" % self.time_var]
            cur_idx = ds_time.attrs["cur_row"]   # rows actually written so far

            res_info["ds_rows"] = cur_idx
            res_info["ds_size"] = len(ds_time)   # allocated rows; may exceed cur_row
            res_info["file_size"] = os.path.getsize(ds_filename)
            res_info["file_name"] = ds_filename
            res_info["vars"] = list(data_file["vars"])

            start_row, end_row = self._get_row_interval(data_file, start_time, end_time, start_time_include)
            need_expand = self.expand_info.get("need_expand", False)
            res_info["need_expand"] = need_expand
            if need_expand:
                # Compensate expansion. Floor division keeps max_rows an int:
                # the original `/` was integer division under Python 2 but would
                # produce a float row count on Python 3.
                max_rows = max_rows // self.expand_info["num_steps"]
            res_info["should_decimate"] = should_decimate
            res_info["need_decimate"] = bool(should_decimate and end_row - start_row > max_rows)

            # Timestamps are stored as NTP64 values in the time variable.
            res_info["ts_first"] = NTP4Time.from_ntp64(ds_time.value[0].tostring()).to_unix()
            res_info["ts_last"] = NTP4Time.from_ntp64(ds_time.value[cur_idx - 1].tostring()).to_unix()
            res_info["ts_first_str"] = get_datetime_str(res_info["ts_first"] * 1000, local_time=False)
            res_info["ts_last_str"] = get_datetime_str(res_info["ts_last"] * 1000, local_time=False)

            # With expansion, each stored row represents num_steps samples.
            res_info["ds_samples"] = cur_idx * self.expand_info["num_steps"] if need_expand else cur_idx

            res_info["filter_start_row"] = start_row
            res_info["filter_end_row"] = end_row
            res_info["filter_max_rows"] = max_rows
            res_info["filter_ts_first"] = NTP4Time.from_ntp64(ds_time.value[start_row].tostring()).to_unix()
            res_info["filter_ts_last"] = NTP4Time.from_ntp64(ds_time.value[end_row - 1].tostring()).to_unix()
            res_info["filter_ts_first_str"] = get_datetime_str(res_info["filter_ts_first"] * 1000, local_time=False)
            res_info["filter_ts_last_str"] = get_datetime_str(res_info["filter_ts_last"] * 1000, local_time=False)

            return res_info

        finally:
            # Always release the HDF file handle/lock, even on error.
            data_file.close()
Exemplo n.º 2
0
    def _default_formatter(log_entry, **kwargs):
        """Render one log entry as a human-readable (optionally ANSI-colored) string.

        @param log_entry  dict with at least 'scope', 'seq', 'ts'; optionally
                          'statement', 'statement_time', 'status', 'stack'
        @param kwargs     truncate (int, 0 = no truncation), color (bool),
                          stack (bool, include stack trace if present)
        @retval formatted multi-line string
        """
        trunc_len = kwargs.get("truncate", 0)
        use_color = kwargs.get("color", False)
        scope = log_entry["scope"]
        top_category = scope.split(".", 1)[0]
        # Exact scope wins over its top-level category; fall back to the default.
        entry_color = SCOPE_COLOR.get(scope, None) or SCOPE_COLOR.get(top_category, DEFAULT_COLOR)

        parts = []
        if use_color:
            parts.append("\033[1m\033[%sm" % entry_color)
        if 'statement_time' in log_entry:
            stmt_time = " [%.5f s]" % log_entry.get('statement_time')
        else:
            stmt_time = ""
        header_args = (log_entry['scope'], log_entry['seq'], log_entry['ts'],
                       get_datetime_str(log_entry['ts'], show_millis=True),
                       stmt_time, log_entry.get("status", "OK"))
        parts.append("\n%s: #%s @%s (%s)%s -> %s" % header_args)
        if use_color:
            parts.append("\033[22m")    # Bold off
        statement = log_entry.get('statement', "")
        if not trunc_len:
            parts.append("\n" + statement)
        else:
            parts.append("\n" + statement[:trunc_len])
            if len(statement) > trunc_len:
                parts.append("...")
                parts.append("[%s]" % (len(statement) - trunc_len))
        if use_color:
            parts.append("\033[0m")     # Full reset
        if "stack" in log_entry and kwargs.get("stack", False):
            parts.append("\n ")
            parts.append("\n ".join(log_entry["stack"]))
        return "".join(parts)
Exemplo n.º 3
0
    def _default_formatter(log_entry, **kwargs):
        """Format a log entry into a display string, optionally ANSI-colored.

        @param log_entry  dict with at least 'scope', 'seq', 'ts'; optionally
                          'statement', 'statement_time', 'status', 'stack'
        @param kwargs     truncate (int, 0 = no truncation), color (bool),
                          stack (bool, append stack trace if present)
        @retval formatted multi-line string
        """
        max_stmt_len = kwargs.get("truncate", 0)
        colorize = kwargs.get("color", False)
        logscope = log_entry["scope"]
        category = logscope.split(".", 1)[0]
        # Prefer a color registered for the full scope, then its category.
        picked_color = (SCOPE_COLOR.get(logscope, None)
                        or SCOPE_COLOR.get(category, DEFAULT_COLOR))

        out = []
        emit = out.append
        if colorize:
            emit("\033[1m\033[%sm" % picked_color)
        stmt_time = ""
        if 'statement_time' in log_entry:
            stmt_time = " [%.5f s]" % log_entry.get('statement_time')
        emit("\n%s: #%s @%s (%s)%s -> %s" % (
            log_entry['scope'], log_entry['seq'], log_entry['ts'],
            get_datetime_str(log_entry['ts'], show_millis=True),
            stmt_time, log_entry.get("status", "OK")))
        if colorize:
            emit("\033[22m")  # Bold off
        statement = log_entry.get('statement', "")
        if max_stmt_len:
            emit("\n" + statement[:max_stmt_len])
            overflow = len(statement) - max_stmt_len
            if overflow > 0:
                emit("...")
                emit("[%s]" % overflow)
        else:
            emit("\n" + statement)
        if colorize:
            emit("\033[0m")  # Reset all attributes
        if "stack" in log_entry and kwargs.get("stack", False):
            emit("\n ")
            emit("\n ".join(log_entry["stack"]))
        return "".join(out)
Exemplo n.º 4
0
 def _alarm_handler(self, signum, frame):
     """SIGALRM handler: record a stack trace when a gevent greenlet appears blocked.

     Fires after MAX_BLOCKING_TIME with no greenlet switch; if no switch has
     happened since the alarm was armed, capture the interrupted frame's stack
     and re-arm the alarm to keep watching.
     """
     # No switch since the alarm was set => the same greenlet is still running.
     if self._momento_time == self._last_switch_time:
         st = traceback.extract_stack(frame)
         # Local import to avoid a module-level import cycle with pyon.
         from pyon.util.containers import get_ion_ts, get_datetime_str
         alarm_trace ='CRITICAL> gevent blocking detected at %s!\n' % get_datetime_str(get_ion_ts()) +\
             'At %s():%d of file %s.\n' % ( frame.f_code.co_name, frame.f_lineno, frame.f_code.co_filename)
         alarm_trace += "Stack trace: %s\n" % "".join(traceback.format_list(st))
         self._last_traces.append(alarm_trace)
         # Optional.  Let's check if it's still blocking in next iteration.
         signal.alarm(MAX_BLOCKING_TIME)
Exemplo n.º 5
0
    def get_data_info(self, data_filter=None):
        """Return metadata about the time-series dataset file and a filtered row window.

        @param data_filter  optional dict; recognized keys: max_rows (int),
                            start_time, end_time, start_time_include (bool),
                            decimate (bool)
        @retval dict with dataset stats (row counts, file size, first/last
                timestamps) and the filter's row interval; empty dict if the
                dataset file does not exist.
        """
        data_filter = data_filter or {}
        ds_filename = self._get_ds_filename()
        if not os.path.exists(ds_filename):
            return {}
        # Open with retries so a concurrent writer holding the lock doesn't fail us.
        data_file = HDFLockingFile(ds_filename, "r", retry_count=10, retry_wait=0.2)
        try:
            res_info = {}
            max_rows = data_filter.get("max_rows", DEFAULT_MAX_ROWS)
            start_time = data_filter.get("start_time", None)
            end_time = data_filter.get("end_time", None)
            start_time_include = data_filter.get("start_time_include", True) is True
            should_decimate = data_filter.get("decimate", False) is True

            ds_time = data_file["vars/%s" % self.time_var]
            cur_idx = ds_time.attrs["cur_row"]   # rows actually written so far

            res_info["ds_rows"] = cur_idx
            res_info["ds_size"] = len(ds_time)   # allocated rows; may exceed cur_row
            res_info["file_size"] = os.path.getsize(ds_filename)
            res_info["file_name"] = ds_filename
            res_info["vars"] = list(data_file["vars"])

            start_row, end_row = self._get_row_interval(data_file, start_time, end_time, start_time_include)
            need_expand = self.expand_info.get("need_expand", False)
            res_info["need_expand"] = need_expand
            if need_expand:
                # Compensate expansion. Floor division keeps max_rows an int:
                # the original `/` was integer division under Python 2 but would
                # produce a float row count on Python 3.
                max_rows = max_rows // self.expand_info["num_steps"]
            res_info["should_decimate"] = should_decimate
            res_info["need_decimate"] = bool(should_decimate and end_row - start_row > max_rows)

            # Timestamps are stored as NTP64 values in the time variable.
            res_info["ts_first"] = NTP4Time.from_ntp64(ds_time.value[0].tostring()).to_unix()
            res_info["ts_last"] = NTP4Time.from_ntp64(ds_time.value[cur_idx - 1].tostring()).to_unix()
            res_info["ts_first_str"] = get_datetime_str(res_info["ts_first"] * 1000, local_time=False)
            res_info["ts_last_str"] = get_datetime_str(res_info["ts_last"] * 1000, local_time=False)

            # With expansion, each stored row represents num_steps samples.
            res_info["ds_samples"] = cur_idx * self.expand_info["num_steps"] if need_expand else cur_idx

            res_info["filter_start_row"] = start_row
            res_info["filter_end_row"] = end_row
            res_info["filter_max_rows"] = max_rows
            res_info["filter_ts_first"] = NTP4Time.from_ntp64(ds_time.value[start_row].tostring()).to_unix()
            res_info["filter_ts_last"] = NTP4Time.from_ntp64(ds_time.value[end_row - 1].tostring()).to_unix()
            res_info["filter_ts_first_str"] = get_datetime_str(res_info["filter_ts_first"] * 1000, local_time=False)
            res_info["filter_ts_last_str"] = get_datetime_str(res_info["filter_ts_last"] * 1000, local_time=False)

            return res_info

        finally:
            # Always release the HDF file handle/lock, even on error.
            data_file.close()
Exemplo n.º 6
0
 def _greenlet_blocking_monitor(self):
     """Watchdog loop (run in a native thread): detect greenlets blocking the hub.

     Samples the greenlet switch counter every MAX_BLOCKING_TIME seconds; if no
     switch occurred during a whole interval while a non-hub greenlet is active,
     records a formatted stack trace of the main thread into self._last_traces.
     Runs until self._stop_monitor becomes truthy.
     """
     while not self._stop_monitor:
         old_switch_counter = self._greenlet_switch_counter
         # Real (non-gevent) sleep so this thread isn't itself subject to the hub.
         _real_sleep(MAX_BLOCKING_TIME)
         active_greenlet = self._active_greenlet
         new_switch_counter = self._greenlet_switch_counter
         # If we have detected a successful switch, reset the counter
         # to zero.  This might race with it being incrememted in the
         # other thread, but should succeed often enough to prevent
         # the counter from growing without bound.
         if new_switch_counter != old_switch_counter:
             self._greenlet_switch_counter = 0
         # If we detected a blocking greenlet, grab the stack trace
         # and log an error.  The active greenlet's frame is not
         # available from the greenlet object itself, we have to look
         # up the current frame of the main thread for the traceback.
         else:
             # Ignore the idle states: no greenlet running, or the hub itself.
             if active_greenlet not in (None, self._active_hub):
                 frame = sys._current_frames()[self._main_thread_id]
                 stack = traceback.format_stack(frame)
                 # Local import to avoid a module-level import cycle with pyon.
                 from pyon.util.containers import get_ion_ts, get_datetime_str
                 self._last_traces.append("Greenlet appears to be blocked at %s \n" % get_datetime_str(get_ion_ts()) + "".join(stack))