class TestExistServos(unittest.TestCase):
    """Verify that both command servos answer an ACK.

    WebCam4 (as of 2016-09-11) carries two command servos, with ids 1 and 2.
    """

    def setUp(self):
        """Open the serial port and route this module's logger to a file."""
        self.ser = serial.Serial(com_port, com_boud, timeout=com_timeout)
        self.logger = getLogger(__name__)
        log_format = Formatter('%(asctime)s - '
                               '%(levelname)s - '
                               '%(filename)s:%(lineno)d - '
                               '%(funcName)s - '
                               '%(message)s')
        self.sh = FileHandler(log_file, delay=True)
        self.sh.setFormatter(log_format)
        self.sh.setLevel(DEBUG)
        self.logger.setLevel(DEBUG)
        self.logger.addHandler(self.sh)

    def tearDown(self):
        """Release the serial port and detach the file handler."""
        self.ser.close()
        self.sh.close()
        self.logger.removeHandler(self.sh)

    def test_exist_servos(self):
        """A ready servo acknowledges with a single \x07 byte."""
        self.cmd = CmdServo.CmdAck(self.logger)
        for servo_id in (1, 2):
            with self.subTest(id=servo_id):
                self.cmd.prepare(servo_id)
                self.assertTrue(self.cmd.execute(self.ser))
                self.assertGreater(len(self.cmd.recv), 0)
                self.assertEqual(self.cmd.recv[0], 7)
 def close(self):
     """Close the log file, e-mail its contents, then delete it from disk.

     NOTE(review): fragment of a FileHandler subclass that mails a summary
     on close -- self.mailer, self.send_level, self.maxlevelno, self.fd and
     self.charset are set elsewhere; confirm against the full class.
     """
     if self.closed:
         return
     FileHandler.close(self)
     # Reopen the captured log through its raw fd and read everything back.
     f = os.fdopen(self.fd)
     summary = f.read().decode(self.charset)
     f.close()
     # try and encode in ascii, to keep emails simpler:
     try:
         summary = summary.encode('ascii')
     except UnicodeEncodeError:
         # unicode it is then
         pass
     # The on-disk log is no longer needed once its contents are in memory.
     if os.path.exists(self.filename):
         os.remove(self.filename)
     # Mail only if some record reached the configured send level.
     if self.send_level is None or self.maxlevelno >= self.send_level:
         self.mailer.handle(
             LogRecord(
                 name = 'Summary',
                 level = self.maxlevelno,
                 pathname = '',
                 lineno = 0,
                 msg = summary,
                 args = (),
                 exc_info = None
                 )
             )
     self.closed = True
Beispiel #3
0
class CaptureLog(object):
    """Context to capture log from a specific logger and write it to a file

    Parameters
    ----------
    filename : str
        Where to write the log file.
    mode : str
        Mode for opening the log file (default 'w').
    logger : str
        Name of the logger from which to capture (default 'mne').
    level : str
        Level at which to capture (default 'debug').
    """
    def __init__(self, filename, mode='w', logger='mne', level='debug'):
        self.logger = logger
        # log_level is a project helper mapping e.g. 'debug' to a numeric level.
        self.level = log_level(level)
        self.handler = FileHandler(filename, mode)
        self.handler.setLevel(self.level)
        self.handler.setFormatter(Formatter("%(levelname)-8s :%(message)s"))
        self._old_level = None

    def __enter__(self):
        logger = getLogger(self.logger)
        logger.addHandler(self.handler)
        # Temporarily lower the logger's level if it would filter out our
        # records (level 0 means NOTSET, i.e. inherited from the parent).
        if logger.level == 0 or logger.level > self.level:
            self._old_level = logger.level
            logger.setLevel(self.level)
        # Bug fix: return the logger so `with CaptureLog(...) as log:` binds
        # it; previously None was returned, unlike the identical class
        # elsewhere in this file.
        return logger

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.handler.close()
        logger = getLogger(self.logger)
        logger.removeHandler(self.handler)
        # Restore the level we overrode on entry, if any.
        if self._old_level is not None:
            logger.setLevel(self._old_level)
class Logger(unittest.TestCase):
    """File-backed logger used by the NBI test suite.

    NOTE(review): subclassing unittest.TestCase is questionable here -- the
    class is used as a plain logger and __init__ never calls the TestCase
    initializer; kept as-is to avoid breaking callers.
    """

    def __init__(self, filename):
        self.prepare_logger(filename)
        # Bug fix: was a Python-2 `print` statement (SyntaxError on py3);
        # prints a visual separator between runs on both versions.
        print('---------------------------------------------------------------')

    def prepare_logger(self, filename):
        """Create the 'NBI:' logger at INFO level with a file handler."""
        self.logger = logging.getLogger('NBI:')
        self.logger.setLevel(logging.INFO)
        self.getHdlr(filename)
        self.logger.addHandler(self.hdlr)

    def getHdlr(self, filename):
        """Build the FileHandler writing to /tmp/<filename>log.

        NOTE(review): the path is '/tmp/' + filename + 'log' with no dot or
        separator before 'log' -- looks like a missing '.'; confirm whether
        existing tooling depends on this exact name before changing it.
        """
        self.hdlr = FileHandler("/tmp/" + filename + "log")
        _format = '%(asctime)s::%(levelname)s::%(message)s'
        _datefmt = '%Y/%m/%d %H:%M:%S'
        self.hdlr.setFormatter(logging.Formatter(_format, datefmt=_datefmt))

    def release_logger(self):
        """Detach and close the file handler."""
        self.logger.removeHandler(self.hdlr)
        self.hdlr.close()

    def logResult(self, test, expected, result):
        """Log a comparison of expected vs. actual HTTP status code.

        Logged at INFO when expected is "N.A." or matches the response's
        status_code, at ERROR otherwise; the response body (if any) is
        logged at the same level.
        """
        level = logging.INFO if str(expected) in ["N.A."] or \
                                expected == result.status_code else logging.ERROR
        self.logger.log(level, test + ".\t" +
                        "Expected:" + str(expected) + ".\t" +
                        "Returned: " + str(result.status_code) + ".")
        if result._content is not None:
            self.logger.log(level, "\t" + str(result._content))

    def release(self):
        """Alias for release_logger(), kept for existing callers."""
        self.release_logger()
Beispiel #5
0
 def close(self):
     """Close the log file, e-mail its contents, then delete it from disk.

     NOTE(review): fragment of a FileHandler subclass that mails a summary
     on close -- self.mailer, self.send_level, self.maxlevelno, self.fd and
     self.charset are defined by the enclosing class (not visible here).
     """
     if self.closed:
         return
     FileHandler.close(self)
     # Reopen the captured log through its raw fd and read everything back.
     f = os.fdopen(self.fd)
     summary = f.read().decode(self.charset)
     f.close()
     # try and encode in ascii, to keep emails simpler:
     try:
         summary = summary.encode('ascii')
     except UnicodeEncodeError:
         # unicode it is then
         pass
     # The on-disk log is disposable once its content is in memory.
     if os.path.exists(self.filename):
         os.remove(self.filename)
     # Mail only if some record reached the configured send level.
     if self.send_level is None or self.maxlevelno >= self.send_level:
         self.mailer.handle(
             LogRecord(name='Summary',
                       level=self.maxlevelno,
                       pathname='',
                       lineno=0,
                       msg=summary,
                       args=(),
                       exc_info=None))
     self.closed = True
Beispiel #6
0
class CaptureLog:
    """Capture the output of a named logger into a file for the duration
    of a ``with`` block.

    Parameters
    ----------
    filename : str
        Path of the file receiving the captured log.
    mode : str
        File-open mode (default 'w').
    logger : str
        Name of the logger to capture (default 'mne').
    level : str
        Capture level (default 'debug').
    """

    def __init__(self, filename, mode='w', logger='mne', level='debug'):
        self.logger = logger
        self.level = log_level(level)
        handler = FileHandler(filename, mode)
        handler.setFormatter(Formatter("%(levelname)-8s :%(message)s"))
        handler.setLevel(self.level)
        self.handler = handler
        self._old_level = None

    def __enter__(self):
        log = getLogger(self.logger)
        log.addHandler(self.handler)
        # Lower the logger's effective level if it would drop our records
        # (0 == NOTSET, i.e. the level is inherited).
        needs_lowering = log.level == 0 or log.level > self.level
        if needs_lowering:
            self._old_level = log.level
            log.setLevel(self.level)
        return log

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.handler.close()
        log = getLogger(self.logger)
        log.removeHandler(self.handler)
        # Put back whatever level we overrode on entry.
        if self._old_level is not None:
            log.setLevel(self._old_level)
class CustomLogHandler(logging.Handler):
    """multiprocessing log handler

    This handler makes it possible for several processes
    to log to the same file by using a queue.

    Records are pushed into a multiprocessing queue; a daemon thread in the
    owning process drains the queue into a timestamped FileHandler under
    logs/.
    """

    def __init__(self, fname):
        logging.Handler.__init__(self)
        timestamp = str(datetime.now().strftime('%Y%m%dT_%H%M%S'))

        self._handler = FH("logs/{0}-{1}.log".format(fname, timestamp))
        self.queue = multiprocessing.Queue(-1)

        # Daemon thread so the drain loop never blocks interpreter shutdown.
        thrd = threading.Thread(target=self.receive)
        thrd.daemon = True
        thrd.start()

    def setFormatter(self, fmt):
        """Keep this handler's formatter and the file handler's in sync."""
        logging.Handler.setFormatter(self, fmt)
        self._handler.setFormatter(fmt)

    def receive(self):
        """Drain the queue forever, writing each record to the file handler."""
        while True:
            try:
                record = self.queue.get()
                self._handler.emit(record)
            except (KeyboardInterrupt, SystemExit):
                raise
            except EOFError:
                # The queue's pipe closed: producers are gone, stop draining.
                break
            except Exception:
                # Bug fix: was a bare `except:` that also swallowed
                # BaseException subclasses; keep the loop alive on ordinary
                # errors but report them.
                traceback.print_exc(file=sys.stderr)

    def send(self, s):
        """Enqueue a record without blocking."""
        self.queue.put_nowait(s)

    def _format_record(self, record):
        # Stringify args/exc_info up front so the record pickles cleanly and
        # stays small when crossing the process boundary.
        if record.args:
            record.msg = record.msg % record.args
            record.args = None
        if record.exc_info:
            # format() caches the rendered traceback on record.exc_text.
            dummy = self.format(record)
            record.exc_info = None

        return record

    def emit(self, record):
        try:
            s = self._format_record(record)
            self.send(s)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            # Bug fix: narrowed from a bare `except:`.
            self.handleError(record)

    def close(self):
        self._handler.close()
        logging.Handler.close(self)
    def close(self):
        """Emit flood notice and buffered tail, close the file, then mail a
        summary of the captured log.

        NOTE(review): method of a FileHandler subclass with flood control
        (self.flood_level, self.tail, self.message_count) and a mailing
        handler (self.mailer); PY2/flood_template come from module scope --
        confirm semantics against the enclosing class.
        """
        if self.closed:
            return
        self.closed = True

        if self.message_count > self.flood_level:
            hidden = self.message_count - self.flood_level - len(self.tail)
            if hidden:
                # send critical error
                FileHandler.emit(self, LogRecord(
                    name='flood',
                    level=CRITICAL,
                    pathname='',
                    lineno=0,
                    msg=flood_template % (
                        self.message_count - self.flood_level - len(self.tail),
                        self.flood_level
                    ),
                    args=(),
                    exc_info=None
                ))
            # Flush the retained most-recent records after the flood notice.
            for record in self.tail:
                FileHandler.emit(self, record)

        FileHandler.close(self)

        # Read the whole log back; reopening via the raw fd avoids a second
        # open-by-name.
        if PY2:
            f = os.fdopen(self.fd)
            summary = f.read().decode(self.charset)
        else:
            f = open(self.fd, encoding=self.charset)
            summary = f.read()
        f.close()
        # Prefer a plain-ascii mail when the content allows it.
        try:
            encoded_summary = summary.encode('ascii')
            self.mailer.charset = 'ascii'
        except UnicodeEncodeError:
            pass
        else:
            if PY2:
                summary = encoded_summary

        # The on-disk log is disposable once its content is in memory.
        if os.path.exists(self.filename):
            os.remove(self.filename)
        # Mail only if something at/above the send threshold was logged.
        if self.send_level is None or self.maxlevelno >= self.send_level:
            self.mailer.handle(
                LogRecord(
                    name='Summary',
                    level=self.maxlevelno,
                    pathname='',
                    lineno=0,
                    msg=summary,
                    args=(),
                    exc_info=None
                )
            )
class MultiProcessingLog(logging.Handler):
    """Queue-backed handler letting multiple processes share one log file."""

    def __init__(self, name):
        logging.Handler.__init__(self)

        self._handler = FileHandler(name)
        self.queue = multiprocessing.Queue(-1)

        # Daemon thread drains the queue into the shared file handler.
        worker = threading.Thread(target=self.receive)
        worker.daemon = True
        worker.start()

    def setFormatter(self, fmt):
        """Propagate the formatter to the underlying file handler as well."""
        logging.Handler.setFormatter(self, fmt)
        self._handler.setFormatter(fmt)

    def receive(self):
        """Consume queued records and write them out, until EOF."""
        while True:
            try:
                self._handler.emit(self.queue.get())
            except (KeyboardInterrupt, SystemExit):
                raise
            except EOFError:
                break
            except:
                traceback.print_exc(file=sys.stderr)

    def send(self, s):
        """Enqueue a record without blocking."""
        self.queue.put_nowait(s)

    def _format_record(self, record):
        # Stringify args and exc_info up front: avoids unpicklable payloads
        # and shrinks what travels over the pipe.
        if record.args:
            record.msg = record.msg % record.args
            record.args = None
        if record.exc_info:
            dummy = self.format(record)
            record.exc_info = None

        return record

    def emit(self, record):
        try:
            self.send(self._format_record(record))
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)

    def close(self):
        self._handler.close()
        logging.Handler.close(self)
Beispiel #10
0
class MPLogHandler(logging.Handler):
    """a multiprocessing-compatible file log handler -
    all processes log to the same file"""

    def __init__(self, fname):
        logging.Handler.__init__(self)

        self._handler = FileHandler(fname)
        self.queue = multiprocessing.Queue(-1)

        # Daemon drain thread: writes queued records into the shared file.
        thrd = threading.Thread(target=self.receive)
        thrd.daemon = True
        thrd.start()

    def setFormatter(self, fmt):
        """Keep the wrapped FileHandler's formatter in sync with ours."""
        logging.Handler.setFormatter(self, fmt)
        self._handler.setFormatter(fmt)

    def receive(self):
        """Consume records from the queue until the feeding pipe hits EOF."""
        while True:
            try:
                record = self.queue.get()
                self._handler.emit(record)
            except (KeyboardInterrupt, SystemExit):
                raise
            except EOFError:
                break
            except Exception:
                # Bug fix: bare `except:` also trapped BaseException
                # subclasses; narrow to Exception and keep draining.
                traceback.print_exc(file=sys.stderr)

    def send(self, s):
        """Enqueue a record without blocking."""
        self.queue.put_nowait(s)

    def _format_record(self, record):
        """Stringify args/exc_info so the record is picklable and compact."""
        if record.args:
            record.msg = record.msg % record.args
            record.args = None
        if record.exc_info:
            # NOTE(review): unlike the sibling handlers, the traceback is
            # dropped without being rendered first (the self.format call is
            # commented out upstream) -- confirm this loss is intended.
            # dummy = self.format(record)
            record.exc_info = None
        return record

    def emit(self, record):
        try:
            s = self._format_record(record)
            self.send(s)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            # Bug fix: narrowed from a bare `except:`.
            self.handleError(record)

    def close(self):
        self._handler.close()
        logging.Handler.close(self)
    def close(self):
        """Emit flood notice and buffered tail, close the file, then mail a
        summary of the captured log.

        NOTE(review): method of a FileHandler subclass with flood control
        (self.flood_level, self.tail, self.message_count) and a mailing
        handler (self.mailer); PY2/flood_template come from module scope --
        confirm semantics against the enclosing class.
        """
        if self.closed:
            return
        self.closed = True

        if self.message_count > self.flood_level:
            hidden = self.message_count - self.flood_level - len(self.tail)
            if hidden:
                # send critical error
                FileHandler.emit(
                    self,
                    LogRecord(name='flood',
                              level=CRITICAL,
                              pathname='',
                              lineno=0,
                              msg=flood_template %
                              (self.message_count - self.flood_level -
                               len(self.tail), self.flood_level),
                              args=(),
                              exc_info=None))
            # Flush the retained most-recent records after the flood notice.
            for record in self.tail:
                FileHandler.emit(self, record)

        FileHandler.close(self)

        # Read the whole log back; fd-based reopen avoids open-by-name.
        if PY2:
            f = os.fdopen(self.fd)
            summary = f.read().decode(self.charset)
        else:
            f = open(self.fd, encoding=self.charset)
            summary = f.read()
        f.close()
        # Prefer a plain-ascii mail when the content allows it.
        try:
            encoded_summary = summary.encode('ascii')
            self.mailer.charset = 'ascii'
        except UnicodeEncodeError:
            pass
        else:
            if PY2:
                summary = encoded_summary

        # The on-disk log is disposable once its content is in memory.
        if os.path.exists(self.filename):
            os.remove(self.filename)
        # Mail only if something at/above the send threshold was logged.
        if self.send_level is None or self.maxlevelno >= self.send_level:
            self.mailer.handle(
                LogRecord(name='Summary',
                          level=self.maxlevelno,
                          pathname='',
                          lineno=0,
                          msg=summary,
                          args=(),
                          exc_info=None))
def create_logging_handler_for_collection(tempdir, prefix):
    """Yield a DEBUG-level MemoryHandler buffering into a per-collection,
    timestamped debug log file under <tempdir>/collection-logs/."""
    from sys import maxsize
    from os import path
    from logging import FileHandler, DEBUG, Formatter
    from logging.handlers import MemoryHandler

    log_name = "{}.{}.debug.log".format(prefix, get_timestamp())
    target = FileHandler(path.join(tempdir, "collection-logs", log_name))
    target.setFormatter(Formatter(**LOGGING_FORMATTER_KWARGS))
    handler = MemoryHandler(maxsize, target=target)
    handler.setLevel(DEBUG)
    try:
        yield handler
    finally:
        # Close the memory handler first so it flushes into the target.
        handler.close()
        target.close()
def cleanup_global_logging(stdout_handler: logging.FileHandler) -> None:
    """
    This function closes any open file handles and logs set up by `prepare_global_logging`.
    Parameters
    ----------
    stdout_handler : ``logging.FileHandler``, required.
        The file handler returned from `prepare_global_logging`, attached to the global logger.
    """
    stdout_handler.close()
    logging.getLogger().removeHandler(stdout_handler)

    # Restore the real stdout/stderr if they were replaced by tee wrappers.
    for stream_name in ("stdout", "stderr"):
        stream = getattr(sys, stream_name)
        if isinstance(stream, TeeLogger):
            setattr(sys, stream_name, stream.cleanup())
    def close(self):
        """Emit flood notice and buffered tail, close the file, then mail a
        summary of the captured log.

        NOTE(review): method of a FileHandler subclass with flood control
        (self.flood_level, self.tail, self.message_count) and a mailing
        handler (self.mailer); flood_template comes from module scope --
        confirm semantics against the enclosing class.
        """
        if self.closed:
            return

        if self.message_count > self.flood_level:
            hidden = self.message_count - self.flood_level - len(self.tail)
            if hidden:
                # send critical error
                FileHandler.emit(self, LogRecord(
                    name='flood',
                    level=CRITICAL,
                    pathname='',
                    lineno=0,
                    msg=flood_template % (
                        self.message_count - self.flood_level - len(self.tail),
                        self.flood_level
                    ),
                    args=(),
                    exc_info=None
                ))
            # Flush the retained most-recent records after the flood notice.
            for record in self.tail:
                FileHandler.emit(self, record)

        FileHandler.close(self)
        # Reopen the captured log via its raw fd and read everything back.
        f = os.fdopen(self.fd)
        summary = f.read().decode(self.charset)
        f.close()
        # try and encode in ascii, to keep emails simpler:
        try:
            summary = summary.encode('ascii')
        except UnicodeEncodeError:
            # unicode it is then
            pass
        # The on-disk log is disposable once its content is in memory.
        if os.path.exists(self.filename):
            os.remove(self.filename)
        # Mail only if something at/above the send threshold was logged.
        if self.send_level is None or self.maxlevelno >= self.send_level:
            self.mailer.handle(
                LogRecord(
                    name='Summary',
                    level=self.maxlevelno,
                    pathname='',
                    lineno=0,
                    msg=summary,
                    args=(),
                    exc_info=None
                )
            )
        self.closed = True
Beispiel #15
0
def create_logging_handler_for_collection(tempdir, prefix):
    """Generator yielding a memory-buffered DEBUG handler whose target is a
    timestamped debug log under <tempdir>/collection-logs/."""
    from sys import maxsize
    from os import path
    from logging import FileHandler, DEBUG, Formatter
    from logging.handlers import MemoryHandler

    logfile = path.join(tempdir, "collection-logs",
                        "{}.{}.debug.log".format(prefix, get_timestamp()))
    target = FileHandler(logfile)
    target.setFormatter(Formatter(**LOGGING_FORMATTER_KWARGS))
    handler = MemoryHandler(maxsize, target=target)
    handler.setLevel(DEBUG)
    try:
        yield handler
    finally:
        # Closing the memory handler flushes it into the file target.
        handler.close()
        target.close()
Beispiel #16
0
def cleanup_global_logging(stdout_handler: logging.FileHandler) -> None:
    """
    Code taken from https://github.com/allenai/allennlp/blob/master/allennlp/common/util.py#L253

    Close the file handler installed by `prepare_global_logging` and restore
    the original stdout/stderr streams.

    Parameters
    ----------
    stdout_handler : ``logging.FileHandler``, required.
        The file handler returned from `prepare_global_logging`, attached to the global logger.
    """
    root = logging.getLogger()
    stdout_handler.close()
    root.removeHandler(stdout_handler)

    # Undo the stdout/stderr tee wrappers, if installed.
    stdout, stderr = sys.stdout, sys.stderr
    if isinstance(stdout, TeeLogger):
        sys.stdout = stdout.cleanup()
    if isinstance(stderr, TeeLogger):
        sys.stderr = stderr.cleanup()
Beispiel #17
0
 def close(self):
     """Close the log file, e-mail its contents, then delete it from disk.

     NOTE(review): fragment of a FileHandler subclass that mails a summary
     on close -- self.mailer, self.maxlevelno, self.fd and self.filename
     are defined by the enclosing class (not visible here).
     """
     if self.closed:
         return
     FileHandler.close(self)
     # Read the whole captured log back by name, then release the raw fd.
     f = open(self.filename)
     summary = f.read()
     f.close()
     os.close(self.fd)
     os.remove(self.filename)
     # Hand the full log text to the mailing handler as a single record.
     self.mailer.handle(
         LogRecord(
             name = 'Summary',
             level = self.maxlevelno,
             pathname = '',
             lineno = 0,
             msg = summary,
             args = (),
             exc_info = None
             )
         )
     self.closed = True
Beispiel #18
0
class Logger:
    """Context-managed logger writing to stdout (no logfile) or to a file.

    Attaches a DEBUG-level handler to this module's logger on construction;
    the handler is detached and closed on context exit.
    """

    def __init__(self, logfile=None):
        self.logger = getLogger(__name__)
        # File target when a path is given, otherwise plain stream output.
        if logfile:
            self.handler = FileHandler(str(logfile))
        else:
            self.handler = StreamHandler()
        formatter = Formatter('[%(asctime)s][%(filename)s:%(lineno)d][%(levelname)s] %(message)s')
        self.handler.setLevel(DEBUG)
        self.handler.setFormatter(formatter)
        self.logger.setLevel(DEBUG)
        self.logger.addHandler(self.handler)
        self.logger.propagate = False

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if self.handler:
            self.handler.close()
            self.logger.removeHandler(self.handler)
        # Bug fix: previously returned True, which silently suppressed ANY
        # exception raised inside the `with` block. Returning None lets
        # exceptions propagate normally.
        return None
class TestExistServos(unittest.TestCase):
    """ACK smoke test for the two WebCam4 command servos (ids 1 and 2),
    hardware as of 2016-09-11."""

    def setUp(self):
        """Open the serial link and send DEBUG logs to a file handler."""
        self.ser = serial.Serial(com_port,
                                 com_boud,
                                 timeout=com_timeout)
        self.logger = getLogger(__name__)
        self.sh = FileHandler(log_file, delay=True)
        self.sh.setLevel(DEBUG)
        self.sh.setFormatter(Formatter('%(asctime)s - '
                                       '%(levelname)s - '
                                       '%(filename)s:%(lineno)d - '
                                       '%(funcName)s - '
                                       '%(message)s'))
        self.logger.setLevel(DEBUG)
        self.logger.addHandler(self.sh)

    def tearDown(self):
        """Close the serial link and remove the log handler."""
        self.ser.close()
        self.sh.close()
        self.logger.removeHandler(self.sh)

    def test_exist_servos(self):
        """Each servo that exists and is ready returns \x07."""
        self.cmd = CmdServo.CmdAck(self.logger)
        for servo in [1, 2]:
            with self.subTest(id=servo):
                self.cmd.prepare(servo)
                self.assertTrue(self.cmd.execute(self.ser))
                self.assertTrue(len(self.cmd.recv) > 0)
                self.assertEqual(self.cmd.recv[0], 7)
Beispiel #20
0
class MultiProcessingLogHandler(logging.Handler):
    """Handler that funnels records from several processes through a shared
    queue into a single FileHandler; a ``StopIteration`` sentinel on the
    queue terminates the drain thread."""

    def __init__(self, name, queue):
        logging.Handler.__init__(self)

        self._handler = FileHandler(name)
        self.queue = queue

        # Daemon drain thread: never blocks interpreter shutdown.
        drainer = threading.Thread(target=self.receive)
        drainer.daemon = True
        drainer.start()

    def setFormatter(self, fmt):
        """Apply *fmt* to both this handler and the wrapped file handler."""
        logging.Handler.setFormatter(self, fmt)
        self._handler.setFormatter(fmt)

    def receive(self):
        """Drain loop: emit queued records until sentinel, EOF or error."""
        while True:
            try:
                record = self.queue.get()
                if record == StopIteration:
                    break
                self._handler.emit(record)
            except (KeyboardInterrupt, SystemExit):
                raise
            except EOFError:
                break
            except Exception:
                traceback.print_exc(file=sys.stderr)
                break

        return

    def send(self, s):
        """Enqueue a record without blocking."""
        self.queue.put_nowait(s)

    def _format_record(self, record):
        # Pre-render args and exc_info: keeps the record picklable and
        # small for the trip through the queue.
        if record.args:
            record.msg = record.msg % record.args
            record.args = None
        if record.exc_info:
            dummy = self.format(record)
            record.exc_info = None

        return record

    def emit(self, record):
        try:
            self.send(self._format_record(record))
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            self.handleError(record)

    def close(self):
        # Wake the drain thread with the sentinel before closing.
        self.queue.put_nowait(StopIteration)
        self._handler.close()
        logging.Handler.close(self)
Beispiel #21
0
class OperationLogger(object):
    """
    Instances of this class represents unit operation done on the ynh instance.

    Each time an action of the yunohost cli/api change the system, one or
    several unit operations should be registered.

    This class record logs and metadata like context or start time/end time.
    """

    # Process-wide registry of all operation loggers; scanned by
    # parent_logger() to detect an enclosing, still-running operation.
    _instances = []

    def __init__(self, operation, related_to=None, **kwargs):
        # TODO add a way to not save password on app installation
        self.operation = operation
        self.related_to = related_to
        self.extra = kwargs
        self.started_at = None
        self.ended_at = None
        self.logger = None
        self._name = None
        self.data_to_redact = []
        self.parent = self.parent_logger()
        self._instances.append(self)

        # Database credentials must never appear in the logs.
        for filename in ["/etc/yunohost/mysql", "/etc/yunohost/psql"]:
            if os.path.exists(filename):
                self.data_to_redact.append(read_file(filename).strip())

        self.path = OPERATIONS_PATH

        if not os.path.exists(self.path):
            os.makedirs(self.path)

    def parent_logger(self):
        """Return the name of the enclosing (started, not-yet-ended)
        operation, or None when this is the root operation."""

        # If there are other operation logger instances
        for instance in reversed(self._instances):
            # Is one of these operation logger started but not yet done ?
            if instance.started_at is not None and instance.ended_at is None:
                # We are a child of the first one we found
                return instance.name

        # If no lock exists, we are probably in tests or yunohost is used as a
        # lib ... let's not really care about that case and assume we're the
        # root logger then.
        if not os.path.exists("/var/run/moulinette_yunohost.lock"):
            return None

        locks = read_file("/var/run/moulinette_yunohost.lock").strip().split(
            "\n")
        # If we're the process with the lock, we're the root logger
        if locks == [] or str(os.getpid()) in locks:
            return None

        # If we get here, we are in a yunohost command called by a yunohost
        # (maybe indirectly from an app script for example...)
        #
        # The strategy is :
        # 1. list 20 most recent log files
        # 2. iterate over the PID of parent processes
        # 3. see if parent process has some log file open (being actively
        # written in)
        # 4. if among those file, there's an operation log file, we use the id
        # of the most recent file

        recent_operation_logs = sorted(glob.iglob(OPERATIONS_PATH + "*.log"),
                                       key=os.path.getctime,
                                       reverse=True)[:20]

        proc = psutil.Process().parent()
        while proc is not None:
            # We use proc.open_files() to list files opened / actively used by this proc
            # We only keep files matching a recent yunohost operation log
            active_logs = sorted(
                [
                    f.path for f in proc.open_files()
                    if f.path in recent_operation_logs
                ],
                key=os.path.getctime,
                reverse=True,
            )
            if active_logs != []:
                # extra the log if from the full path
                # NOTE(review): comment above presumably means "extract the
                # log id" -- [:-4] strips the '.log' suffix.
                return os.path.basename(active_logs[0])[:-4]
            else:
                proc = proc.parent()
                continue

        # If nothing found, assume we're the root operation logger
        return None

    def start(self):
        """
        Start to record logs that change the system
        Until this start method is run, no unit operation will be registered.
        """

        if self.started_at is None:
            self.started_at = datetime.utcnow()
            self.flush()
            self._register_log()

    @property
    def md_path(self):
        """
        Metadata path file
        """
        return os.path.join(self.path, self.name + METADATA_FILE_EXT)

    @property
    def log_path(self):
        """
        Log path file
        """
        return os.path.join(self.path, self.name + LOG_FILE_EXT)

    def _register_log(self):
        """
        Register log with a handler connected on log system
        """

        self.file_handler = FileHandler(self.log_path)
        # We use a custom formatter that's able to redact all stuff in self.data_to_redact
        # N.B. : the subtle thing here is that the class will remember a pointer to the list,
        # so we can directly append stuff to self.data_to_redact and that'll be automatically
        # propagated to the RedactingFormatter
        self.file_handler.formatter = RedactingFormatter(
            "%(asctime)s: %(levelname)s - %(message)s", self.data_to_redact)

        # Listen to the root logger
        self.logger = getLogger("yunohost")
        self.logger.addHandler(self.file_handler)

    def flush(self):
        """
        Write or rewrite the metadata file with all metadata known
        """

        dump = yaml.safe_dump(self.metadata, default_flow_style=False)
        for data in self.data_to_redact:
            # N.B. : we need quotes here, otherwise yaml isn't happy about loading the yml later
            dump = dump.replace(data, "'**********'")
        with open(self.md_path, "w") as outfile:
            outfile.write(dump)

    @property
    def name(self):
        """
        Name of the operation
        This name is used as filename, so don't use space
        """
        if self._name is not None:
            return self._name

        name = [self.started_at.strftime("%Y%m%d-%H%M%S")]
        name += [self.operation]

        if hasattr(self, "name_parameter_override"):
            # This is for special cases where the operation is not really
            # unitary. For instance, the regen conf cannot be logged "per
            # service" because of the way it's built
            name.append(self.name_parameter_override)
        elif self.related_to:
            # We use the name of the first related thing
            name.append(self.related_to[0][1])

        self._name = "-".join(name)
        return self._name

    @property
    def metadata(self):
        """
        Dictionary of all metadata collected
        """

        data = {
            "started_at": self.started_at,
            "operation": self.operation,
            "parent": self.parent,
            "yunohost_version": get_ynh_package_version("yunohost")["version"],
            "interface": msettings.get("interface"),
        }
        if self.related_to is not None:
            data["related_to"] = self.related_to
        if self.ended_at is not None:
            data["ended_at"] = self.ended_at
            data["success"] = self._success
            # NOTE(review): `self.error` is a bound method, so this test is
            # always True -- probably meant `self._error is not None`;
            # confirm before changing, as consumers may rely on the key
            # being present.
            if self.error is not None:
                data["error"] = self._error
        # TODO: detect if 'extra' erase some key of 'data'
        data.update(self.extra)
        return data

    def success(self):
        """
        Declare the success end of the unit operation
        """
        self.close()

    def error(self, error):
        """
        Declare the failure of the unit operation
        """
        return self.close(error)

    def close(self, error=None):
        """
        Close properly the unit operation
        """

        # When the error happen's in the is_unit_operation try/except,
        # we want to inject the log ref in the exception, such that it may be
        # transmitted to the webadmin which can then redirect to the appropriate
        # log page
        if isinstance(error, Exception) and not isinstance(
                error, YunohostValidationError):
            error.log_ref = self.name

        # Ignore double-close or close-before-start.
        if self.ended_at is not None or self.started_at is None:
            return
        if error is not None and not isinstance(error, str):
            error = str(error)

        self.ended_at = datetime.utcnow()
        self._error = error
        self._success = error is None

        # Detach our file handler so the log file is complete and closed.
        if self.logger is not None:
            self.logger.removeHandler(self.file_handler)
            self.file_handler.close()

        is_api = msettings.get("interface") == "api"
        desc = _get_description_from_name(self.name)
        if error is None:
            if is_api:
                msg = m18n.n("log_link_to_log", name=self.name, desc=desc)
            else:
                msg = m18n.n("log_help_to_get_log", name=self.name, desc=desc)
            logger.debug(msg)
        else:
            if is_api:
                msg = ("<strong>" + m18n.n(
                    "log_link_to_failed_log", name=self.name, desc=desc) +
                       "</strong>")
            else:
                msg = m18n.n("log_help_to_get_failed_log",
                             name=self.name,
                             desc=desc)
            logger.info(msg)
        self.flush()
        return msg

    def __del__(self):
        """
        Close the unit operation if it was never closed explicitly; the
        absence of the "unclosed properly" message in a log can hint at an
        abrupt shutdown (e.g. power loss).
        """
        if self.ended_at is not None or self.started_at is None:
            return
        else:
            self.error(m18n.n("log_operation_unit_unclosed_properly"))
Beispiel #22
0
class Log:
    """High level class for MPI-aware logging, with log save in hdf5."""

    def __init__(
        self,
        filename: str = invalidstr,
        level: str = "INFO",
        timeformat: str = "%H:%M:%S, %d/%m/%y",
    ):
        """Create a logging interface.

        The logger gets connected to a text file when a filename is
        provided.  Connection to a hdf5 file must be performed manually
        with self.setsaver.

        @param filename: name of log file
        @type filename: str
        @param level: logging level (INFO, DEBUG, etc.) (Default value = "INFO")
        @type level: str
        @param timeformat: format string for date/time output (Default value = "%H:%M:%S, %d/%m/%y")
        @type timeformat: str
        """
        # Connected flag (is the logger connected to a Handler?)
        self.connected: bool = False
        # Format string for date/time output.
        self.timeformat: str = timeformat
        self._timer: Timer = Timer()
        self._logger: Logger = getLogger("Metadynamic Log")
        self._handler: Handler
        # Logging level; assigned below by setlevel().
        self.level: str
        # Output log file; assigned below by connect().
        self.filename: str
        # Writer to hdf5 file; attached later through setsaver().
        self.writer: Optional[ResultWriter] = None
        self.setlevel(level)
        self.connect(filename)

    def setsaver(self, writer: ResultWriter) -> None:
        """Connect to a hdf5 writer.

        @param writer: hdf5 writer
        @type writer: ResultWriter
        """
        self.writer = writer

    def setlevel(self, level: str = "INFO") -> None:
        """Set logging level.

        Note: the debug message is emitted *before* the new level takes
        effect, so it is filtered according to the previous level.

        @param level: logging level (Default value = "INFO")
        @type level: str
        """
        self.debug(f"Switched to level {level}")
        self.level = level
        self._logger.setLevel(self.level)

    def connect(self, filename: str = "") -> None:
        """Direct logging to textfile 'filename'; if empty, log to standard output.

        @param filename: log file name
        @type filename: str
        """
        self.filename = filename or invalidstr
        target = filename if isvalid(filename) else "stream"
        if self.connected:
            self.disconnect("Disconnecting old handler before connection to new one.")
        if isvalid(self.filename):
            self._handler = FileHandler(self.filename)
        else:
            self._handler = StreamHandler()
        self._logger.addHandler(self._handler)
        self.debug(f"Connected to {self.filename}; reason: Logger directed to {target}")
        self.connected = True

    def disconnect(self, reason: str = "unknown") -> None:
        """Disconnect from the current handler and close it.

        @param reason: message to debug log (Default value = "unknown")
        @type reason: str
        """
        self.debug(f"Disconnecting; reason: {reason}")
        self._logger.removeHandler(self._handler)
        self._handler.close()
        self.connected = False

    def _format_msg(self, level: str, msg: str) -> str:
        """Decorate the message with level, MPI rank, run time and clock time.

        @param level: severity level
        @type level: str
        @param msg: log message
        @type msg: str
        @return: formatted message
        @rtype:str
        """
        return (
            f"{level}-{MPI_STATUS.rank} : {msg}   (rt={self.runtime}, t={self.time})"
        )

    def _savelog(self, level: int, msg: str) -> None:
        """Save the log message to hdf5, when a writer is attached.

        @param level: severity level
        @type level: int
        @param msg: log message
        @type msg: str
        """
        if self.writer is None:
            return
        self.writer.write_log(level, self.time, self.runtime, msg)

    def _emit(self, levelno: int, levelname: str, msg: str) -> None:
        """Save *msg* to hdf5, then pass it to the underlying Logger."""
        self._savelog(levelno, msg)
        log_method = getattr(self._logger, levelname.lower())
        log_method(self._format_msg(levelname, msg))

    def debug(self, msg: str) -> None:
        """Log message at severity 'DEBUG'.

        @param msg: log message
        @type msg: str
        """
        self._emit(10, "DEBUG", msg)

    def info(self, msg: str) -> None:
        """Log message at severity 'INFO'.

        @param msg: log message
        @type msg: str
        """
        self._emit(20, "INFO", msg)

    def warning(self, msg: str) -> None:
        """Log message at severity 'WARNING'.

        @param msg: log message
        @type msg: str
        """
        self._emit(30, "WARNING", msg)

    def error(self, msg: str) -> None:
        """Log message at severity 'ERROR'.

        @param msg: log message
        @type msg: str
        """
        self._emit(40, "ERROR", msg)

    @property
    def time(self) -> str:
        """Current wall-clock time, formatted with self.timeformat.

        @rtype time: str
        """
        return datetime.now().strftime(self.timeformat)

    @property
    def runtime(self) -> float:
        """Run time (in seconds), as measured by the internal Timer.

        @rtype time: float
        """
        return self._timer.time

    def reset_timer(self) -> None:
        """Reset run timer to 0."""
        self.debug("Will reset the timer")
        self._timer.reset()
Beispiel #23
0
class LogHandler(logging.Handler):
    """Multiprocessing log handler.

    This handler makes it possible for several processes to log to the
    same file by using a queue: records emitted in any process are
    pushed onto a multiprocessing.Queue, and a background thread in the
    creating process writes them to a single file handler.
    """
    def __init__(self, fname):
        """Create the file handler, the queue and the receiver thread.

        @param fname: base name of the log file; a unix timestamp and a
            '.log' extension are appended.
        """
        logging.Handler.__init__(self)

        self._handler = FH(self._make_filename(fname))
        self.queue = multiprocessing.Queue(-1)

        # Daemon thread so it does not prevent interpreter shutdown.
        thrd = threading.Thread(target=self.receive)
        thrd.daemon = True
        thrd.start()

    @classmethod
    def _make_filename(cls, fname):
        """Return '<fname>_<unix timestamp>.log'."""
        # Was a list-append chain with a misnamed 'self' first argument;
        # an f-string is the idiomatic equivalent.
        return f"{fname}_{int(time.time())}.log"

    def setFormatter(self, fmt):
        """Set the formatter on this handler and on the file handler."""
        logging.Handler.setFormatter(self, fmt)
        self._handler.setFormatter(fmt)

    def receive(self):
        """Consume records from the queue and emit them to the file.

        Runs in the daemon thread until the queue raises EOFError.
        Unexpected errors are printed but do not stop the loop.
        """
        while True:
            try:
                record = self.queue.get()
                self._handler.emit(record)
            except (KeyboardInterrupt, SystemExit):
                raise
            except EOFError:
                break
            except Exception:
                traceback.print_exc(file=sys.stderr)

    def send(self, s):
        """Push a record onto the inter-process queue without blocking."""
        self.queue.put_nowait(s)

    def _format_record(self, record):
        """Make *record* queue-safe: resolve %-args and exception info.

        Unpicklable members (args, exc_info) are folded into the message
        before the record crosses the process boundary.
        """
        if record.args:
            record.msg = record.msg % record.args
            record.args = None
        if record.exc_info:
            # Formatting caches the traceback text on the record.
            self.format(record)
            record.exc_info = None

        return record

    def emit(self, record):
        """Serialize the record and send it through the queue."""
        try:
            s = self._format_record(record)
            self.send(s)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            self.handleError(record)

    def close(self):
        """Close the underlying file handler, then this handler."""
        self._handler.close()
        logging.Handler.close(self)
Beispiel #24
0
        print("\t".join(res))
        return res


def main(fn):
    """Run the recognizer on screenshot file *fn* and return its data.

    Returns -1 when extraction reports failure, None when a ValueError
    occurs (the error is printed), otherwise the extracted data.
    """
    # Load configuration / log the target file name.
    logger.debug(fn)
    try:
        recognizer = Deresta_recognizer()
        extracted = recognizer.extract(fn)
        if extracted == -1:
            return -1
        recognizer.pprint()
    except ValueError as err:
        print(err)
        return None
    return extracted


if __name__ == '__main__':
    import argparse

    # Command-line interface: one positional argument, the screenshot
    # file to analyse (help text is kept in the original Japanese).
    arg_parser = argparse.ArgumentParser(
        description='デレステのリザルト画像からスコアなどの情報を抽出し、'
        + 'jsonで出力するスクリプト')
    arg_parser.add_argument('filename', metavar='fn', type=str,
                            help='リザルト画面をスクショしたファイルの名前')
    cli_args = arg_parser.parse_args()

    main(cli_args.filename)

handler.close()
class TestInfo(unittest.TestCase):
    """Tests for CmdServo.CmdInfo against the two servos (id 1 and 2)."""

    def setUp(self):
        """Open serial port and attach a file log handler."""
        self.ser = serial.Serial(com_port,
                                 com_boud,
                                 timeout=com_timeout)
        self.logger = getLogger(__name__)
        formatter = Formatter('%(asctime)s - '
                              '%(levelname)s - '
                              '%(filename)s:%(lineno)d - '
                              '%(funcName)s - '
                              '%(message)s')
        self.sh = FileHandler(log_file, delay=True)
        self.sh.setLevel(DEBUG)
        self.sh.setFormatter(formatter)
        self.logger.setLevel(DEBUG)
        self.logger.addHandler(self.sh)

    def tearDown(self):
        """Close serial port and release the log handler."""
        # Fixed typo ("Cosing") and a stray trailing semicolon.
        self.ser.close()
        self.sh.close()
        self.logger.removeHandler(self.sh)

    def test_info_return_short_packet_header(self):
        """Info replies start with packet header 'FDDF' and echo the servo id."""
        for servo_id in [1, 2]:
            with self.subTest(servo_id=servo_id):
                cmd = CmdServo.CmdInfo(self.logger)
                cmd.prepare(servo_id=servo_id, section=3)
                self.assertTrue(cmd.execute(self.ser))
                cmd.info()
                self.assertEqual(cmd.mem['packet_header'], 'FDDF')
                self.assertEqual(cmd.mem['servo_id'], servo_id)

    def test_info_section_3(self):
        """Model Number (L,H):(50H, 40H)(RS405CB) for servo 1 and 2."""
        for servo_id in [1, 2]:
            with self.subTest(servo_id=servo_id):
                cmd = CmdServo.CmdInfo(self.logger)
                cmd.prepare(servo_id=servo_id, section=3)
                self.assertTrue(cmd.execute(self.ser))
                # Return packet starts with the 0xFD 0xDF header and ends
                # with a checksum over the preceding bytes.
                self.assertEqual(cmd.recv[0], 0xfd)
                self.assertEqual(cmd.recv[1], 0xdf)
                self.assertEqual(cmd.get_checksum(cmd.recv[:-1]),
                                 cmd.recv[-1])
                self.assertTrue(cmd.check_return_packet(cmd.recv))
                cmd.info()
                self.assertEqual(cmd.mem['Model_Number_L'], 0x50)
                self.assertEqual(cmd.mem['Model_Number_H'], 0x40)
                self.assertEqual(cmd.mem['Servo_ID'], servo_id)
                self.assertEqual(cmd.mem['Reverse'], 0)

    def test_info_section_5(self):
        """Section 5 exposes torque/speed state; servo must be idle."""
        for servo_id in [1, 2]:
            with self.subTest(servo_id=servo_id):
                cmd = CmdServo.CmdInfo(self.logger)
                cmd.prepare(servo_id=servo_id, section=5)
                self.assertTrue(cmd.execute(self.ser))
                self.assertEqual(cmd.recv[0], 0xfd)
                self.assertEqual(cmd.recv[1], 0xdf)
                self.assertEqual(cmd.get_checksum(cmd.recv[:-1]),
                                 cmd.recv[-1])
                self.assertTrue(cmd.check_return_packet(cmd.recv))
                cmd.info()
                self.assertEqual(cmd.mem['Max_Torque'], 0x64)
                self.assertEqual(cmd.mem['Torque_Enable'], 0)
                self.assertEqual(cmd.mem['Present_Speed'], 0)
                # NOTE: 'Present_Posion' is the (misspelled) key used by
                # CmdServo's memory map; do not "fix" it here.
                self.logger.debug('Present_Posion:{0}'
                                  .format(cmd.mem['Present_Posion']/10))
Beispiel #26
0
def update_sqlite3db():
    """Rebuild the mitoproteome SQLite database end to end.

    Pipeline: mitoproteome ids -> Entrez gene ids -> UniProt ACs ->
    UniProt info / PDB ids / KEGG ids -> PDB entity+chain info -> chain
    details, each stage replacing one table via replace_table (project
    helper — presumably DROP + CREATE + INSERT; TODO confirm).
    Remote lookups go through get_with_sleep (presumably rate-limited;
    verify against its definition).  Exits the process with status 1 on
    any sqlite3 error.
    """
    global stream_logger
    global file_logger
    # Module-level loggers configured here (stdout + file); note this
    # function itself reports errors via sys.stderr, not these loggers.
    stream_logger = getLogger("stream")
    file_logger = getLogger("file")
    stream_handler = StreamHandler(stream=sys.stdout)
    file_handler = FileHandler(constants.logfile)
    logformat = Formatter("[%(asctime)s] %(message)s",
                          datefmt="%Y-%m-%d %H:%M:%S")
    stream_handler.setFormatter(logformat)
    file_handler.setFormatter(logformat)
    stream_logger.addHandler(stream_handler)
    file_logger.addHandler(file_handler)
    stream_logger.setLevel(INFO)
    file_logger.setLevel(INFO)

    mitoproteome_tuples = get_mito_id_gene_id_pairs()

    # Stage 1: (mito_id, gene_id) pairs -> table "mitoproteome".
    # NOTE: sqlite3's connection context manager commits/rolls back on
    # exit but does NOT close the connection, so the `results` cursor
    # remains readable after the `with` block.
    try:
        with sqlite3.connect(constants.sqlite3_dbpath) as conn:
            cursor = conn.cursor()
            table_name = "mitoproteome"
            schema_params = [
                ("mito_id", "TEXT"),
                ("gene_id", "INTEGER"),
            ]
            tuples = mitoproteome_tuples
            replace_table(cursor, table_name, schema_params, tuples)

            select_query = \
                    "SELECT DISTINCT gene_id \
                    FROM mitoproteome"

            results = cursor.execute(select_query)

            conn.commit()
    except sqlite3.Error as e:
        # print("ERROR at mitoproteome")
        sys.stderr.write("%s\n" % e)
        quit(1)

    # Stage 2: gene ids -> UniProt accessions -> table "gene_uniprot".
    gene_ids = map(lambda p: str(p[0]), results)
    gene_id_uniprot_ac_pairs = get_with_sleep(get_uniprot_acs, gene_ids)

    try:
        with sqlite3.connect(constants.sqlite3_dbpath) as conn:
            cursor = conn.cursor()
            table_name = "gene_uniprot"
            schema_params = [
                ("gene_id", "INTEGER"),
                ("uniprot_ac", "TEXT"),
            ]
            tuples = gene_id_uniprot_ac_pairs
            replace_table(cursor, table_name, schema_params, tuples)

            select_query = \
                    "SELECT DISTINCT uniprot_ac \
                    FROM gene_uniprot"

            results = cursor.execute(select_query)

            conn.commit()
    except sqlite3.Error as e:
        # print("ERROR at entrez_gene")
        sys.stderr.write("%s\n" % e)
        quit(1)

    # Stage 3: per-accession UniProt info, PDB ids and KEGG ids.
    # uniprot_ac_list is materialized because it is iterated three times.
    uniprot_ac_list = [p[0] for p in results]

    uniprot_infos = get_with_sleep(get_uniprot_info, uniprot_ac_list)
    uniprot_ac_pdb_id_pairs = get_with_sleep(get_pdb_ids, uniprot_ac_list)
    uniprot_ac_kegg_id_pairs = get_with_sleep(get_kegg_id, uniprot_ac_list)

    # Free the list before the next round of DB work.
    del uniprot_ac_list

    try:
        with sqlite3.connect(constants.sqlite3_dbpath) as conn:
            cursor = conn.cursor()
            table_name = "uniprot_info"
            schema_params = [
                ("uniprot_ac", "TEXT"),
                ("protein_names", "TEXT"),
                ("gene_names", "TEXT"),
                ("organism", "TEXT"),
            ]
            tuples = uniprot_infos
            replace_table(cursor, table_name, schema_params, tuples)

            table_name = "uniprot_pdb"
            schema_params = [
                ("uniprot_ac", "TEXT"),
                ("pdb_id", "TEXT"),
            ]
            tuples = uniprot_ac_pdb_id_pairs
            replace_table(cursor, table_name, schema_params, tuples)

            table_name = "uniprot_kegg"
            schema_params = [
                ("uniprot_ac", "TEXT"),
                ("kegg_id", "TEXT"),
            ]
            tuples = uniprot_ac_kegg_id_pairs
            replace_table(cursor, table_name, schema_params, tuples)

            select_query = \
                    "SELECT DISTINCT pdb_id \
                    FROM uniprot_pdb"

            results = cursor.execute(select_query)

            conn.commit()
    except sqlite3.Error as e:
        # print("ERROR at uniprot")
        sys.stderr.write("%s\n" % e)
        quit(1)

    # Stage 4: PDB entries -> flat (pdb_id, resolution, entity, chain)
    # tuples.  The nested maps below flatten the two list levels of
    # get_pdb_info's return value; see the inline structure comments.
    pdb_ids = map(lambda p: p[0], results)
    pdb_infos = get_with_sleep(get_pdb_info, pdb_ids)

    pdb_info_tuples = iterator_tools.concat_iterator(
        *iterator_tools.concat_iterator(
            *map(
                lambda t:
                # t = (PDB ID,
                #      resolution,
                #      [(entity ID, [chain ID ...]) ...])
                map(
                    lambda lt:  # l = (entity ID, [chain ID ...])
                    map(
                        lambda c:  # c is chain ID
                        (t[0], t[1], lt[0],
                         c),
                        # (PDB ID, resolution, entity ID, chain ID)
                        lt[1]  # [chain ID ...]
                    ),
                    # [(PDB ID, resolution, entity ID, chain ID) ...]
                    t[2]  # [(entity ID, [chain ID ...]) ...]
                ),
                # [[(PDB ID, resolution, entity ID, chain ID) ...] ...]
                pdb_infos
                # [(PDB ID,
                #   resolution,
                #   [(entity ID, [chain ID ...]) ...]) ...]
            )
            # [[[(PDB ID, resolution, entity ID, chain ID) ...] ...] ...]
        )  # [[(PDB ID, resolution, entity ID, chain ID) ...] ...]
    )  # [(PDB ID, resolution, entity ID, chain ID) ...]
    # pdb_info_tuples = [(PDB ID, resolution, entity ID, chain ID) ...]

    try:
        with sqlite3.connect(constants.sqlite3_dbpath) as conn:
            cursor = conn.cursor()
            table_name = "pdb_info"
            schema_params = [
                ("pdb_id", "TEXT"),
                ("resolution", "REAL"),
                ("entity_id", "INTEGER"),
                ("chain_id", "TEXT"),
            ]
            tuples = pdb_info_tuples
            replace_table(cursor, table_name, schema_params, tuples)

            select_query = \
                    "SELECT DISTINCT pdb_id, chain_id \
                    FROM pdb_info"

            results = cursor.execute(select_query)

            conn.commit()
    except sqlite3.Error as e:
        # print("ERROR at PDB")
        sys.stderr.write("%s\n" % e)
        quit(1)

    # Stage 5: per (pdb_id, chain_id) detail -> table "chain_info".
    chain_infos = get_with_sleep(get_chain_info, results)

    try:
        with sqlite3.connect(constants.sqlite3_dbpath) as conn:
            cursor = conn.cursor()
            table_name = "chain_info"
            schema_params = [
                ("pdb_id", "TEXT"),
                ("chain_id", "TEXT"),
                ("length", "INTEGER"),
                ("uniprot_ac", "TEXT"),
            ]
            tuples = chain_infos
            replace_table(cursor, table_name, schema_params, tuples)

            conn.commit()
    except sqlite3.Error as e:
        # print("ERROR at chain")
        sys.stderr.write("%s\n" % e)
        quit(1)

    print("Update finished.")
    # NOTE(review): only the file handler is closed and the stream
    # handler only flushed; neither is removed from its logger.
    stream_handler.flush()
    file_handler.close()
class TestMove(unittest.TestCase):
    """Movement tests for the two command servos (id 1 and 2)."""

    def setUp(self):
        """Open serial port and attach a file log handler."""
        self.ser = serial.Serial(com_port,
                                 com_boud,
                                 timeout=com_timeout)
        self.logger = getLogger(__name__)
        formatter = Formatter('%(asctime)s - '
                              '%(levelname)s - '
                              '%(filename)s:%(lineno)d - '
                              '%(funcName)s - '
                              '%(message)s')
        self.sh = FileHandler(log_file, delay=True)
        self.sh.setLevel(DEBUG)
        self.sh.setFormatter(formatter)
        self.logger.setLevel(DEBUG)
        self.logger.addHandler(self.sh)

    def tearDown(self):
        """Close serial port and release the log handler."""
        # Fixed typo ("Cosing") and a stray trailing semicolon.
        self.ser.close()
        self.sh.close()
        self.logger.removeHandler(self.sh)

    def no_test_move_slow(self):
        """Testing how slowly a move can be performed (disabled).

        Result: over 300 (3.00 sec) the servo lacks torque and cannot
        start moving — presumably the drive output is lowered for slow
        moves.  Cancelling a move mid-motion was also tested and failed
        (NG): assigning Present_Posion to Goal_Posison was expected to
        cancel the move, but Present_Posion appears to be updated only
        when the motion ends.  (Translated from the original Japanese
        notes.)
        """
        Sv = 2
        info = CmdServo.CmdInfo(self.logger)
        info.prepare(Sv, section=5)
        info.execute(self.ser)
        info.info()
        # 'Present_Posion' is the (misspelled) key used by CmdServo.
        self.logger.debug('Present_Posion:{0}'
                          .format(info.mem['Present_Posion']/10))
        trq = CmdServo.CmdTorque(self.logger)
        trq.prepare(Sv, 'on')
        trq.execute(self.ser)
        cmd = CmdServo.CmdAngle(self.logger)
        cmd.prepare(Sv, 900, 300)
        cmd.execute(self.ser)
        sleep(2)
        cmd.prepare(Sv, 300, 300)
        cmd.execute(self.ser)
        sleep(10)
        trq.prepare(Sv, 'off')
        trq.execute(self.ser)

    def test_kamae(self):
        """Testing 'kamae' form.
        0. Initial pos Sv1:90deg, Sv2:-90deg(both torque off)
        1. torque off CmdServo2.
        2. torque on CmdServo1.
        3. move CmdServo1 to 300(30.0 degree), speed 300
        4. torque on CmdServo2.(Beam and Arm -60deg)
        5. move CmdServo1 to -30
        6. stay hold and you see mjpg-stremaer shapshot.
        """
        c = CmdServo.CmdAngle(self.logger)
        t = CmdServo.CmdTorque(self.logger)
        t.prepare(2, 'off')
        t.execute(self.ser)
        t.prepare(1, 'on')
        t.execute(self.ser)
        c.prepare(1, 300, 300)
        c.execute(self.ser)
        sleep(3)
        t.prepare(2, 'on')
        t.execute(self.ser)
        c.prepare(1, -300, 300)
        c.execute(self.ser)

    def test_kamae3(self):
        """Center CmdServo1: torque on, then move to angle 0 at speed 0.

        (Docstring fixed: it previously duplicated test_kamae's steps,
        which this test does not perform.)
        """
        c = CmdServo.CmdAngle(self.logger)
        t = CmdServo.CmdTorque(self.logger)
        t.prepare(1, 'on')
        t.execute(self.ser)
        c.prepare(1, 0, 0)
        c.execute(self.ser)

    def test_beam(self):
        """Testing beam (CmdServo id 1): torque on, move to 300 at speed 300."""
        c = CmdServo.CmdAngle(self.logger)
        t = CmdServo.CmdTorque(self.logger)
        t.prepare(1, 'on')
        t.execute(self.ser)
        c.prepare(1, 300, 300)
        c.execute(self.ser)

    def test_arm(self):
        """Testing arm (CmdServo id 2): torque on, move to -300 at speed 0."""
        c = CmdServo.CmdAngle(self.logger)
        t = CmdServo.CmdTorque(self.logger)
        t.prepare(2, 'on')
        t.execute(self.ser)
        c.prepare(2, -300, 0)
        c.execute(self.ser)

    def test_naore(self):
        """Testing 'naore' (return to rest) form: move servo 1 back,
        then release torque on both servos."""
        c = CmdServo.CmdAngle(self.logger)
        t = CmdServo.CmdTorque(self.logger)
        c.prepare(1, 900, 0)
        c.execute(self.ser)
        sleep(3)
        t.prepare(2, 'off')
        t.execute(self.ser)
        t.prepare(1, 'off')
        t.execute(self.ser)

    def test_datsuryoku(self):
        """Testing 'datsuryoku' (torque release) on both servos.

        (Docstring fixed: it previously said 'naore'.)
        """
        t = CmdServo.CmdTorque(self.logger)
        t.prepare(2, 'off')
        t.execute(self.ser)
        t.prepare(1, 'off')
        t.execute(self.ser)
Beispiel #28
0
class SystemLogger(Component):
    """Component facade over a stdlib logging.Logger.

    Delegates the usual Logger API (debug/info/warning/...) to a wrapped
    logger, and can attach a single FileHandler via setFileHandler().
    """

    FORMAT_DEFAULT = "%(levelname)s : %(asctime)s : %(message)s"
    FILE_NAME_DEFAULT = "systemlog"
    FILE_EXTENSION_DEFAULT = "log"

    def __init__(self,
                 lname: Optional[str] = None,
                 level: Union[int, str] = logging.INFO,
                 name: Optional[str] = None) -> None:
        """Create the wrapped logger.

        @param lname: name passed to logging.getLogger (None = root logger)
        @param level: initial logging level (numeric or level name)
        @param name: name forwarded to the Component base class
        """
        super().__init__(name=name)
        self._logger: Logger = logging.getLogger(name=lname)
        self._logger.setLevel(level)
        # Set by setFileHandler(); flushed/closed by close().
        self.fhandler: Optional[FileHandler] = None

    def setLevel(self, level):
        """Set the threshold of the wrapped logger."""
        self._logger.setLevel(level)

    def debug(self, msg, *args, **kwargs):
        """Log msg at DEBUG severity."""
        self._logger.debug(msg, *args, **kwargs)

    def info(self, msg, *args, **kwargs):
        """Log msg at INFO severity."""
        self._logger.info(msg, *args, **kwargs)

    def warning(self, msg, *args, **kwargs):
        """Log msg at WARNING severity."""
        self._logger.warning(msg, *args, **kwargs)

    def warn(self, msg, *args, **kwargs):
        """Alias for warning().

        Delegates to Logger.warning because Logger.warn has been
        deprecated since Python 3.3.
        """
        self._logger.warning(msg, *args, **kwargs)

    def error(self, msg, *args, **kwargs):
        """Log msg at ERROR severity."""
        self._logger.error(msg, *args, **kwargs)

    def exception(self, msg, *args, exc_info: bool = True, **kwargs):
        """Log msg at ERROR severity with exception info attached."""
        self._logger.exception(msg, *args, exc_info=exc_info, **kwargs)

    def critical(self, msg, *args, **kwargs):
        """Log msg at CRITICAL severity."""
        self._logger.critical(msg, *args, **kwargs)

    def log(self, level, msg, *args, **kwargs):
        """Log msg at the given numeric severity."""
        self._logger.log(level, msg, *args, **kwargs)

    def findCaller(self, stack_info: bool = False):
        """Delegate to Logger.findCaller."""
        return self._logger.findCaller(stack_info)

    def makeRecord(self,
                   name,
                   level,
                   fn,
                   lno,
                   msg,
                   args,
                   exc_info,
                   func=None,
                   extra=None,
                   sinfo=None) -> LogRecord:
        """Build a LogRecord via the wrapped logger's factory."""
        return self._logger.makeRecord(name,
                                       level,
                                       fn,
                                       lno,
                                       msg,
                                       args,
                                       exc_info,
                                       func=func,
                                       extra=extra,
                                       sinfo=sinfo)

    def handle(self, record):
        """Pass a ready-made record to the wrapped logger."""
        self._logger.handle(record)

    def addHandler(self, hdlr):
        """Attach an additional handler to the wrapped logger."""
        self._logger.addHandler(hdlr)

    def removeHandler(self, hdlr):
        """Detach a handler from the wrapped logger."""
        self._logger.removeHandler(hdlr)

    def hasHandlers(self) -> bool:
        """Return True if any handler is reachable from the logger."""
        return self._logger.hasHandlers()

    def getEffectiveLevel(self) -> int:
        """Return the effective logging level."""
        return self._logger.getEffectiveLevel()

    def isEnabledFor(self, level) -> bool:
        """Return True if the logger processes messages at *level*."""
        return self._logger.isEnabledFor(level)

    def getChild(self, suffix) -> Logger:
        """Return a descendant of the wrapped logger."""
        return self._logger.getChild(suffix)

    def setFileHandler(self,
                       filename: Optional[str] = None,
                       level: int = logging.INFO,
                       fmt: Optional[str] = None,
                       mode: str = "a",
                       encoding: Optional[str] = None,
                       delay: bool = True):
        """Attach a FileHandler to the wrapped logger.

        @param filename: log file name; when None a timestamped default
            is generated (presumably by the project helper
            get_time_stamp — TODO confirm its output format)
        @param level: handler threshold (Default value = logging.INFO)
        @param fmt: format string; FORMAT_DEFAULT is used when None
        @param mode: file open mode (Default value = "a")
        @param encoding: file encoding (Default value = None)
        @param delay: postpone opening the file until the first emit
        """
        if filename is None:
            filename = get_time_stamp(self.FILE_NAME_DEFAULT,
                                      self.FILE_EXTENSION_DEFAULT)

        if fmt is not None:
            formatter = Formatter(fmt)
        else:
            formatter = Formatter(self.FORMAT_DEFAULT)

        self.fhandler = FileHandler(filename,
                                    mode=mode,
                                    encoding=encoding,
                                    delay=delay)
        self.fhandler.setLevel(level)
        self.fhandler.setFormatter(formatter)
        self._logger.addHandler(self.fhandler)

    def close(self):
        """Flush and close the attached file handler, if any.

        NOTE(review): the handler stays registered on the logger after
        closing; call removeHandler() too if further logging may occur.
        """
        if self.fhandler is not None:
            self.fhandler.flush()
            self.fhandler.close()
Beispiel #29
0
		pass_not_fail = False
		logger.error("cross-correlation peak not above 5-sigma ({0} at sample {1} in window {2})".format(signal/noise,abs(s_0x1[s_peaks.argmax(),:]).argmax(),r[s_peaks.argmax()]))
	
	logger.info("APHIDS clock is early by {0} microseconds".format(aphids_clock_early/1e-6))
	# write entry into delays file
	delay_file_path = "{0}/{1}.rx{rx}.sb{sb}.quad{qd}".format(path_out,filename_delay,rx=args.frequency_band,sb=args.sideband,qd=args.quad)
	try:
		fh = open(delay_file_path,"r+")
		while True:
			pos = fh.tell()
			line = fh.readline()
			if not line:
				break
			csv_reader = reader([line])
			timestamp = datetime.strptime(csv_reader.next()[0],"%Y-%m-%d %H:%M:%S").replace(tzinfo=UTC())
			if int((timestamp - meta.start_datetime).total_seconds()) == 0:
				fh.seek(pos,0)
				break
	except IOError:
		fh = open(delay_file_path,"w")
	fh.write("{0},{1},{2:10.6f}\r\n".format(meta.start_datetime.strftime("%Y-%m-%d %H:%M:%S"),meta.end_datetime.strftime("%Y-%m-%d %H:%M:%S"),aphids_clock_early/1e-6))
	fh.close()
	
	loghndl.close()
	
	if pass_not_fail:
		sys_exit(0)
	else:
		sys_exit(2)

Beispiel #30
0
                            target = os.path.join(job_path, obj)
                            destination = os.path.join(temp_job_dir, obj)
                            if os.path.exists(target):
                                if os.path.isdir(target):
                                    shutil.copytree(target, destination)
                                    globalLogger.info("Saving " + obj + "...")
                                else:
                                    shutil.copy2(target, temp_job_dir)
                                    read_data = ""
                                    with open(target, 'r') as f:
                                        read_data = f.read()
                                    globalLogger.info("File " + obj)
                                    globalLogger.info("*" * 40)
                                    globalLogger.info(read_data)
                                    globalLogger.info("*" * 40)
                        job_log_handler.close()
                        globalLogger.removeHandler(job_log_handler)

                        if not DEBUG_MODE:
                            # tar.gz everything in temp job directory
                            try:
                                globalLogger.info("Archiving files...")
                                shutil.make_archive(archive_path, "gztar", temp_job_dir)
                                globalLogger.info("Files archived in " + archive_path + ".tar.gz")
                                # delete job and clean-up
                                delete_job(job_path)
                                delete_job(temp_job_dir)
                            except:
                                globalLogger.error("Error archiving file!")
                    elif job_status == JOB_STATUS_MISSING:
                        globalLogger.warn("Empty or incomplete job, deleting")
Beispiel #31
0
        sb=args.sideband,
        qd=args.quad)
    try:
        fh = open(delay_file_path, "r+")
        while True:
            pos = fh.tell()
            line = fh.readline()
            if not line:
                break
            csv_reader = reader([line])
            timestamp = datetime.strptime(
                csv_reader.next()[0],
                "%Y-%m-%d %H:%M:%S").replace(tzinfo=UTC())
            if int((timestamp - meta.start_datetime).total_seconds()) == 0:
                fh.seek(pos, 0)
                break
    except IOError:
        fh = open(delay_file_path, "w")
    fh.write("{0},{1},{2:10.6f}\r\n".format(
        meta.start_datetime.strftime("%Y-%m-%d %H:%M:%S"),
        meta.end_datetime.strftime("%Y-%m-%d %H:%M:%S"),
        aphids_clock_early / 1e-6))
    fh.close()

    loghndl.close()

    if pass_not_fail:
        sys_exit(0)
    else:
        sys_exit(2)
Beispiel #32
0
 def close(self):
     """Write the closing HTML tags to the stream, then close the
     underlying FileHandler (explicit base-class call, bypassing any
     intermediate overrides in the MRO)."""
     self.stream.write('</body>\n</html>')
     FileHandler.close(self)
# Build Song objects from the collected paths, dropping failures
# (make_Song returns a falsy value when it cannot build a Song).
songs_list = [make_Song(path) for path in tqdm(song_paths)]
songs_list = [song for song in songs_list if song]

album_mapping = get_mapping(songs_list)

for entry in songs_list:
    ask_album(entry, album_mapping)

if added_songs:
    print('\n\nCollect information about additional songs.\n'
          '-------------------------------------------')

    # Extra songs are flagged with the second positional argument.
    songs_list += [make_Song(path, True) for path in tqdm(added_songs)]

print(
    '\n\nDownload and save songs on computer...\n'
    '--------------------------------------',
    end='')

for entry in songs_list:
    entry.to_disk()

# Remove leftover .webm downloads from the working directory.
for leftover in listdir():
    if leftover.endswith(".webm"):
        remove(leftover)

print('\n\nAll songs downloaded!')
logger.info('End of logging')
fh.close()
logger.removeHandler(fh)
                            target = os.path.join(job_path, obj)
                            destination = os.path.join(temp_job_dir, obj)
                            if os.path.exists(target):
                                if os.path.isdir(target):
                                    shutil.copytree(target, destination)
                                    globalLogger.info("Saving " + obj + "...")
                                else:
                                    shutil.copy2(target, temp_job_dir)
                                    read_data = ""
                                    with open(target, 'r') as f:
                                        read_data = f.read()
                                    globalLogger.info("File " + obj)
                                    globalLogger.info("*" * 40)
                                    globalLogger.info(read_data)
                                    globalLogger.info("*" * 40)
                        job_log_handler.close()
                        globalLogger.removeHandler(job_log_handler)

                        if not DEBUG_MODE:
                            # tar.gz everything in temp job directory
                            try:
                                globalLogger.info("Archiving files...")
                                shutil.make_archive(archive_path, "gztar", temp_job_dir)
                                globalLogger.info("Files archived in " + archive_path + ".tar.gz")
                                # delete job and clean-up
                                delete_job(job_path)
                                delete_job(temp_job_dir)
                            except:
                                globalLogger.error("Error archiving file!")
                    elif job_status == JOB_STATUS_MISSING:
                        globalLogger.warn("Empty or incomplete job, deleting")
class TestMove(unittest.TestCase):
    """Exercise servo motion commands over the serial link.

    These tests drive real hardware (two command servos, ids 1 and 2)
    and are verified by observation rather than by return values.
    """

    def setUp(self):
        """Open the serial port and attach a file log handler."""
        self.ser = serial.Serial(com_port, com_boud, timeout=com_timeout)
        self.logger = getLogger(__name__)
        log_format = Formatter(
            '%(asctime)s - %(levelname)s - %(filename)s:%(lineno)d - '
            '%(funcName)s - %(message)s')
        self.sh = FileHandler(log_file, delay=True)
        self.sh.setLevel(DEBUG)
        self.sh.setFormatter(log_format)
        self.logger.setLevel(DEBUG)
        self.logger.addHandler(self.sh)

    def tearDown(self):
        """Close the serial port and detach the log handler."""
        self.ser.close()
        self.sh.close()
        self.logger.removeHandler(self.sh)

    def no_test_move_slow(self):
        """Check how slowly a servo can be driven (disabled by prefix).

        Result: with a travel time above 300 (3.00 s) the servo lacked
        torque and never started moving -- presumably the drive output
        is reduced to achieve the slow motion.  Cancelling a move
        half-way was also tried and failed (NG): writing
        Present_Position back into Goal_Position does not stop the
        motion, because Present_Position only seems to be refreshed
        once the move has finished.
        """
        servo_id = 2
        status = CmdServo.CmdInfo(self.logger)
        status.prepare(servo_id, section=5)
        status.execute(self.ser)
        status.info()
        self.logger.debug('Present_Posion:{0}'.format(
            status.mem['Present_Posion'] / 10))
        torque = CmdServo.CmdTorque(self.logger)
        torque.prepare(servo_id, 'on')
        torque.execute(self.ser)
        angle = CmdServo.CmdAngle(self.logger)
        angle.prepare(servo_id, 900, 300)
        angle.execute(self.ser)
        sleep(2)
        angle.prepare(servo_id, 300, 300)
        angle.execute(self.ser)
        sleep(10)
        torque.prepare(servo_id, 'off')
        torque.execute(self.ser)

    def test_kamae(self):
        """Move into the 'kamae' (ready) form.

        0. initial pose servo 1: 90 deg, servo 2: -90 deg (both torque off)
        1. torque off servo 2
        2. torque on servo 1
        3. move servo 1 to 300 (30.0 deg) at speed 300
        4. torque on servo 2 (beam and arm at -60 deg)
        5. move servo 1 to -30 deg
        6. hold the pose and check the mjpg-streamer snapshot
        """
        angle = CmdServo.CmdAngle(self.logger)
        torque = CmdServo.CmdTorque(self.logger)
        for servo_id, state in ((2, 'off'), (1, 'on')):
            torque.prepare(servo_id, state)
            torque.execute(self.ser)
        angle.prepare(1, 300, 300)
        angle.execute(self.ser)
        sleep(3)
        torque.prepare(2, 'on')
        torque.execute(self.ser)
        angle.prepare(1, -300, 300)
        angle.execute(self.ser)

    def test_kamae3(self):
        """Torque on servo 1 and drive it to position 0 at full speed."""
        angle = CmdServo.CmdAngle(self.logger)
        torque = CmdServo.CmdTorque(self.logger)
        torque.prepare(1, 'on')
        torque.execute(self.ser)
        angle.prepare(1, 0, 0)
        angle.execute(self.ser)

    def test_beam(self):
        """Move servo id 1 to 300 (30.0 deg) at speed 300."""
        angle = CmdServo.CmdAngle(self.logger)
        torque = CmdServo.CmdTorque(self.logger)
        torque.prepare(1, 'on')
        torque.execute(self.ser)
        angle.prepare(1, 300, 300)
        angle.execute(self.ser)

    def test_arm(self):
        """Move the arm servo (id 2) to -300 (-30.0 deg) at full speed."""
        angle = CmdServo.CmdAngle(self.logger)
        torque = CmdServo.CmdTorque(self.logger)
        torque.prepare(2, 'on')
        torque.execute(self.ser)
        angle.prepare(2, -300, 0)
        angle.execute(self.ser)

    def test_naore(self):
        """Return to the 'naore' (rest) form, then release both servos."""
        angle = CmdServo.CmdAngle(self.logger)
        torque = CmdServo.CmdTorque(self.logger)
        angle.prepare(1, 900, 0)
        angle.execute(self.ser)
        sleep(3)
        for servo_id in (2, 1):
            torque.prepare(servo_id, 'off')
            torque.execute(self.ser)

    def test_datsuryoku(self):
        """Release ('datsuryoku') the torque on both servos."""
        torque = CmdServo.CmdTorque(self.logger)
        for servo_id in (2, 1):
            torque.prepare(servo_id, 'off')
            torque.execute(self.ser)
class TestInfo(unittest.TestCase):
    """Read back servo memory sections and validate the reply packets."""

    def setUp(self):
        """Open the serial port and attach a file log handler."""
        self.ser = serial.Serial(com_port, com_boud, timeout=com_timeout)
        self.logger = getLogger(__name__)
        log_format = Formatter(
            '%(asctime)s - %(levelname)s - %(filename)s:%(lineno)d - '
            '%(funcName)s - %(message)s')
        self.sh = FileHandler(log_file, delay=True)
        self.sh.setLevel(DEBUG)
        self.sh.setFormatter(log_format)
        self.logger.setLevel(DEBUG)
        self.logger.addHandler(self.sh)

    def tearDown(self):
        """Close the serial port and detach the log handler."""
        self.ser.close()
        self.sh.close()
        self.logger.removeHandler(self.sh)

    def test_info_return_short_packet_header(self):
        """Both servos answer a short packet with the 'FDDF' header."""
        for sid in (1, 2):
            with self.subTest(servo_id=sid):
                reply = CmdServo.CmdInfo(self.logger)
                reply.prepare(servo_id=sid, section=3)
                self.assertTrue(reply.execute(self.ser))
                reply.info()
                self.assertEqual(reply.mem['packet_header'], 'FDDF')
                self.assertEqual(reply.mem['servo_id'], sid)

    def test_info_section_3(self):
        """Model Number (L,H) is (50H, 40H) (RS405CB) for servos 1 and 2."""
        for sid in (1, 2):
            with self.subTest(servo_id=sid):
                reply = CmdServo.CmdInfo(self.logger)
                reply.prepare(servo_id=sid, section=3)
                self.assertTrue(reply.execute(self.ser))
                self.assertEqual(reply.recv[0], 0xfd)
                self.assertEqual(reply.recv[1], 0xdf)
                self.assertEqual(reply.get_checksum(reply.recv[:-1]),
                                 reply.recv[-1])
                self.assertTrue(reply.check_return_packet(reply.recv))
                reply.info()
                self.assertEqual(reply.mem['Model_Number_L'], 0x50)
                self.assertEqual(reply.mem['Model_Number_H'], 0x40)
                self.assertEqual(reply.mem['Servo_ID'], sid)
                self.assertEqual(reply.mem['Reverse'], 0)

    def test_info_section_5(self):
        """Section 5 reports torque/speed state for both idle servos."""
        for sid in (1, 2):
            with self.subTest(servo_id=sid):
                reply = CmdServo.CmdInfo(self.logger)
                reply.prepare(servo_id=sid, section=5)
                self.assertTrue(reply.execute(self.ser))
                self.assertEqual(reply.recv[0], 0xfd)
                self.assertEqual(reply.recv[1], 0xdf)
                self.assertEqual(reply.get_checksum(reply.recv[:-1]),
                                 reply.recv[-1])
                self.assertTrue(reply.check_return_packet(reply.recv))
                reply.info()
                self.assertEqual(reply.mem['Max_Torque'], 0x64)
                self.assertEqual(reply.mem['Torque_Enable'], 0)
                self.assertEqual(reply.mem['Present_Speed'], 0)
                self.logger.debug('Present_Posion:{0}'.format(
                    reply.mem['Present_Posion'] / 10))
Beispiel #37
0
class Logger:
    """Writes system state to log files and/or the console.

    Thin facade over the stdlib ``logging`` module: ``set_logs`` attaches
    a file or console handler to a named logger, and the record methods
    (``info``, ``debug``, ``warning``, ``error``, ``critical``) are
    exposed directly as instance attributes.
    """

    def __init__(self, name=None):
        """Create the wrapper.

        :param name: name of the underlying stdlib logger; ``None``
            selects the root logger (note: 'file' mode then writes to
            ``None.log``).
        """
        self.__name = name
        # Dispatch table: mode string -> handler factory method.
        self.__loggers = {
            'file': self.__file_logger,
            'console': self.__console_logger,
        }
        self.__log_level = {'info': INFO, 'debug': DEBUG}
        self.__modes = []          # modes currently attached
        self.__logs_path = ''      # directory used by 'file' mode
        self.__common_log_handler = None
        self.__console_log_handler = None
        self.__log_format = None

        self.__logger = getLogger(self.__name)

        # Expose the stdlib logger's record methods directly.
        self.info = self.__logger.info
        self.debug = self.__logger.debug
        self.warning = self.__logger.warning
        self.error = self.__logger.error
        self.critical = self.__logger.critical

    def set_logs(self, mode=None, message_level='info', logs_directory=None):
        """Attach a handler for *mode* and set the logger level.

        :param mode: 'file' or 'console'.
        :param message_level: 'info' or 'debug'.
        :param logs_directory: directory for the log file; required when
            ``mode == 'file'``.
        :raises ValueError: unknown mode, or missing ``logs_directory``
            in 'file' mode.
        :raises KeyError: unknown ``message_level``.
        """
        if mode not in self.__loggers:
            raise ValueError('Mode "{}" is not supported'.format(mode))
        self.__modes.append(mode)
        if mode == 'file':
            if not logs_directory:
                raise ValueError('"logs_directory" should not be None')
            # Bug fix: this was stored in self.__logs_directory while
            # __file_logger read self.__logs_path (still ''), so file
            # logs always landed in '/<name>.log'.
            self.__logs_path = logs_directory

        self.__logger.setLevel(self.__log_level[message_level])

        message_format = '%(levelname)-8s %(asctime)s (%(filename)s:%(lineno)d) %(message)-40s'
        self.__log_format = Formatter(fmt=message_format,
                                      datefmt="%y-%m-%d %H:%M:%S")
        # Plain call instead of .get(mode).__call__(); mode was validated above.
        self.__loggers[mode]()

    def __file_logger(self):
        """Create and attach the file handler, rewriting any old log."""
        log_file = os.path.join(self.__logs_path,
                                '{}.log'.format(self.__name))

        # Rewrite an existing log from a previous run.
        if os.path.exists(log_file):
            os.remove(log_file)

        self.__common_log_handler = FileHandler(log_file,
                                                mode='w',
                                                encoding='utf-8')
        self.__common_log_handler.setFormatter(self.__log_format)

        self.__logger.addHandler(self.__common_log_handler)

    def __console_logger(self):
        """Create and attach the console (stderr) stream handler."""
        self.__console_log_handler = StreamHandler()
        self.__console_log_handler.setFormatter(self.__log_format)
        self.__logger.addHandler(self.__console_log_handler)

    def close_logs(self, mode):
        """Detach and close the handler for *mode*; no-op if not attached."""
        if mode not in self.__modes:
            return
        if mode == 'file':
            self.__common_log_handler.close()
            self.__logger.removeHandler(self.__common_log_handler)
        elif mode == 'console':
            self.__console_log_handler.close()
            self.__logger.removeHandler(self.__console_log_handler)
        self.__modes.remove(mode)