class GroupingTail (object):

    def __init__(self, filepath, groupby):

        self.groupmatch = re.compile(groupby)

        # write an offset file so that we start near the end of the file
        self.offsetpath = "/tmp/" + str(uuid.uuid4())
        try:
            inode = os.stat(filepath).st_ino
            offset = os.path.getsize(filepath) - 1024
        except OSError:
            pass
        else:
            if offset > 0:
                foffset = open(self.offsetpath, "w")
                foffset.write("%s\n%s" % (inode, offset))
                foffset.close()

        self.fin = Pygtail(filepath, offset_file=self.offsetpath, copytruncate=True)

        self.match_definitions = []

    def update(self):
        for line in self.fin.readlines():
            mo = self.groupmatch.match(line)
            if mo is not None and mo.groups():
                groupname = mo.groups()[0].replace(".", "_").replace("-", "_")
                for match in self.match_definitions:
                    instrument = match["instrument"]
                    instrument.write(groupname, line)

    def add_match(self,  instance_name, valuetype, instrument):
        self.match_definitions.append(dict(
            instance_name=instance_name,
            valuetype=valuetype,
            instrument=instrument
        ))

    def read_metrics(self):
        for match in self.match_definitions:
            instance_name = match["instance_name"]
            instrument = match["instrument"]
            valuetype = match["valuetype"]

            for groupname, value in instrument.read():
                metric_name = "%s.%s" % (groupname, instance_name)
                yield (metric_name, valuetype, value)
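
A minimal usage sketch for the class above. The log path, regex, and CounterInstrument are illustrative only; the real plugin wires in its own instrument objects, which just need the write()/read() interface that update() and read_metrics() rely on.

# Hypothetical instrument: counts lines per group name.
class CounterInstrument(object):
    def __init__(self):
        self.counts = {}

    def write(self, groupname, line):
        self.counts[groupname] = self.counts.get(groupname, 0) + 1

    def read(self):
        return self.counts.items()


gt = GroupingTail("/var/log/myapp.log", r"^\S+ (\S+)")  # first capture group becomes the group name
gt.add_match("lines", "counter", CounterInstrument())
gt.update()  # consume lines appended since the stored offset
for metric_name, valuetype, value in gt.read_metrics():
    print("%s %s=%s" % (metric_name, valuetype, value))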
Example #2
def start_installation(target):
    installer = InstallHandler(target=target)
    if target == "install-no-wait":
        installer.target = "install"
        installer.timeout = 0

    installer.run()

    data = (None, None)  # last (title, status) from the queue; avoids a NameError if it is empty
    while installer.thread.is_alive():

        if not installer.queue.empty():
            data = installer.queue.get()

        logs = Pygtail("./setup.log", paranoid=True)
        for log in logs.readlines():
            emit("response", {"title": data[0], "log": log, "status": data[1]})

    if not installer.queue.empty():
        data = installer.queue.get()

    logs = Pygtail("./setup.log", paranoid=True)
    for log in logs.readlines():
        emit("response", {"title": data[0], "log": log, "status": data[1]})
Example #3
 def liveLog(self, logdir):
     """
     Method to tail log lines every 5 seconds.
     :return: It yields the log lines.
     """
     if logdir is None:
         pass
     else:
         if logdir == 'pgcli':
             logdir = PGC_LOGS
         log_file = Pygtail(logdir)
         ln = log_file.readlines()
         if ln:
             for log_line in ln:
                 yield self.session.publish('com.bigsql.log', log_line)
Example #4
 def liveLog(self, logdir):
     """
     Method to tail log lines every 5 seconds.
     :return: It yields the log lines.
     """
     if logdir is None:
         pass
     else:
         if logdir == 'pgcli':
             logdir = PGC_LOGS
         log_file = Pygtail(logdir)
         ln = log_file.readlines()
         if ln:
             for log_line in ln:
                 line = unicode(str(log_line),
                                sys.getdefaultencoding(),
                                errors='ignore').strip()
                 yield self.session.publish('com.bigsql.log', line)
Example #5
 def logIntLines(self, number, logdir):
     """
     Method to tail the selected number of lines from the selected log.
     :return: It yields the log lines.
     """
     if logdir is None:
         yield self.session.publish('com.bigsql.logError',
                                    "Log file does not exist")
     else:
         if logdir == 'pgcli':
             logdir = PGC_LOGS
         log_file = Pygtail(logdir)
         ln = log_file.readlines()
         read_file = open(logdir)
         _lines = read_file.readlines()[-number:]
         for _li in _lines:
             line = unicode(str(_li),
                            sys.getdefaultencoding(),
                            errors='ignore').strip()
             yield self.session.publish('com.bigsql.log', line)
Example #6
 def selectedLog(self, logdir):
     """
     Method to tail the last 1000 lines from PGC_LOGS to display by default.
     :return: It yields the log lines.
     """
     if logdir is None:
         yield self.session.publish('com.bigsql.logError',
                                    "Log file does not exist")
     else:
         if logdir == 'pgcli':
             logdir = PGC_LOGS
         self.session.publish('com.bigsql.pgcliDir', logdir)
         log_file = Pygtail(logdir)
         ln = log_file.readlines()
         read_file = open(logdir)
         _lines = read_file.readlines()[-1000:]
         for _li in _lines:
             line = unicode(str(_li),
                            sys.getdefaultencoding(),
                            errors='ignore').strip()
             yield self.session.publish('com.bigsql.log', line)
Example #7
    def updateLogWindow(self):
        self._mode = self.ID_UPDATE_LOG
        offset_file = "{logfile}.offset".format(logfile=self.logfile)
        if os.path.exists(offset_file) and os.path.isfile(offset_file):
            with open(offset_file, "r") as f:
                content = f.read()
            # fewer than two entries in the offset file - broken - remove it
            if len(content.split()) < 2:
                logger.info(
                    "offset file is incomplete: {offset_file}. Trying to remove it. "
                    .format(offset_file=offset_file))
                time.sleep(1)
                os.remove(offset_file)

        my_tail = Pygtail(self.logfile)
        while True:
            time.sleep(0.3)
            try:
                content = "".join(my_tail.readlines())
                if content:
                    self.valueLogTextArea.write(content)
            except ValueError as e:
                if os.path.exists(offset_file):
                    logger.error(
                        "Problem with reading offset file: {offset_file}. Trying to remove it. "
                        .format(offset_file=offset_file))
                    time.sleep(1)
                    os.remove(offset_file)
                    if not os.path.exists(offset_file):
                        logger.info(
                            "offset file: {offset_file} removal successful. Restarting update log thread."
                            .format(offset_file=offset_file))
                        self.updateLogWindow()
                    else:
                        logger.fatal(
                            "Failed to remove offset file: {offset_file}.".
                            format(offset_file=offset_file))
Example #8
 def test_readlines(self):
     pygtail = Pygtail(self.logfile.name)
     self.assertEqual(pygtail.readlines(), self.test_lines)
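
The test assumes a fixture that writes known lines to a temporary log file; the names below are illustrative, not Pygtail's actual test suite. Something along these lines would make it runnable:

import os
import tempfile
import unittest

from pygtail import Pygtail


class PygtailReadlinesTest(unittest.TestCase):
    def setUp(self):
        # Write known content to a temporary log file.
        self.test_lines = ["first line\n", "second line\n", "third line\n"]
        self.logfile = tempfile.NamedTemporaryFile(mode="w", delete=False)
        self.logfile.writelines(self.test_lines)
        self.logfile.close()

    def tearDown(self):
        # Remove the log file and the offset file Pygtail creates next to it.
        os.remove(self.logfile.name)
        offset_file = self.logfile.name + ".offset"
        if os.path.exists(offset_file):
            os.remove(offset_file)

    def test_readlines(self):
        pygtail = Pygtail(self.logfile.name)
        self.assertEqual(pygtail.readlines(), self.test_lines)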
Example #9
def do_main_menu():
    global DELAY_CHAR, DELAY, TYPESPEED, FAILSAFE, INPUT_FILE, TRADE_LIMIT, SHIP_HOLDS, logfile

    selection = None
    while True:
        print("==== MAIN MENU====")
        print()
        print("-- Trade Operations --")
        print("l)  Set input log file (Currently: {0})".format(INPUT_FILE))
        print(
            "tl) Set trade limit (Default: Stop when {0} turns left.)".format(
                TRADE_LIMIT))
        print("at) Auto-trade, NO haggling (CTRL-C to stop)")
        print("tav) Trade Advisor - View")
        print("tas) Trade Advisor - Save to file")
        print()
        print("-- Other Tasks --")
        print("mc) Move colonists from one commodity to another")
        print()
        print("-- Settings --")
        print(
            "b)  Begin following log file [Used for Debugging] (CTRL-C to stop)"
        )
        print("g)  Get next line in log file [Used for Debugging]")
        print("dc) Set macro delay char (Currently: {0})".format(DELAY_CHAR))
        print("dv) Set macro delay char value (Currently: {0} seconds)".format(
            DELAY))
        print("td) Set inter-character typing delay (Currently: {0} seconds)".
              format(TYPESPEED))
        print(
            "f)  Toggle FAILSAFE (aborts on mouse move to upper left corner): Currently {0}"
            .format(FAILSAFE))
        print()
        print("q)  Quit")
        print()
        selection = input("Selection: ").lower()

        if selection == "l":
            INPUT_FILE = input("File to monitor: ")
            if not os.path.isfile(INPUT_FILE):
                print("WARNING: File {0} does not exist (yet?).".format(
                    INPUT_FILE))
            else:
                logfile = Pygtail(INPUT_FILE,
                                  read_from_end=True,
                                  copytruncate=False,
                                  offset_file="NUL")
                logfile.readlines()  # Skip to end (should already happen, but just in case).
        elif selection == "q" or selection == "Q":
            sys.exit(0)
        elif selection == "dc":
            DELAY_CHAR = str(
                input("Enter character to represent delay in macro string: "))
        elif selection == "dv":
            DELAY = float(
                input(
                    "Enter value of delay character, in seconds (float value allowed): "
                ))
        elif selection == "td":
            TYPESPEED = float(
                input(
                    "Enter delay between keystrokes, in seconds (float value allowed): "
                ))
        elif selection == "f":
            FAILSAFE = not FAILSAFE
        elif selection == "tl":
            print()
            print("-- Set trade limit --")
            try:
                TRADE_LIMIT = int(input("Stop when this many turns left: "))
            except ValueError:
                print("Invalid value (number expected).  Restoring default.")
                TRADE_LIMIT = 40
                sleep(1)
            print()
        elif selection == "b":
            print()
            print("CTRL-C to stop")
            print()
            try:
                for line in logfile:
                    print(line)
            except KeyboardInterrupt:
                print()
                print("Stopped following file")
                print()
        elif selection == "g":
            print()
            print(next(logfile))

        elif selection == "at":
            try:
                auto_trade(False)
            except KeyboardInterrupt:
                print("User aborted trade routine...")
        elif selection == "tav":
            try:
                trade_advisor()
            except KeyboardInterrupt:
                print("User aborted trade advisor...")
        elif selection == "tas":
            try:
                fileName = input("Enter path and filename to save: ")

                with open(fileName, "w") as outFile:
                    trade_advisor(reportFile=outFile)

            except KeyboardInterrupt:
                print("User aborted trade advisor...")
        elif selection == "mc":
            try:
                move_colonists()
            except KeyboardInterrupt:
                print("User aborted colonist move routine...")
        else:
            print("Unknown selection '{0}'".format(selection))
Example #10
    TYPESPEED = 0.05
    DELAY_CHAR = "`"
    DELAY = 1

    FAILSAFE = True

    TRADE_LIMIT = 40

    # Holds a list of TradePairs
    TRADE_PAIRS = []

    commerceReportRe = "Fuel Ore +(?P<oreStatus>Buying|Selling) +(?P<oreAmt>[0-9]+) .+Organics +(?P<orgStatus>Buying|Selling) +(?P<orgAmt>[0-9]+) .+Equipment +(?P<equStatus>Buying|Selling) +(?P<equAmt>[0-9]+) "

    CIMPortReportRe = "^ +(?P<CIMPortSector>[0-9]+) +(?P<oreStatus>-)? +(?P<oreAmt>[0-9]+) +(?P<orePct>[0-9]+)% +(?P<orgStatus>-)? +(?P<orgAmt>[0-9]+) +(?P<orgPct>[0-9]+)% +(?P<equStatus>-)? +(?P<equAmt>[0-9]+) +(?P<equPct>[0-9]+)%"

    # Note: we pass NUL as the offset filename to Pygtail because we DON'T want a persistent
    # offset file; we want Pygtail to always start at the end of the terminal log file.  This is
    # especially important if the log file is overwritten with each new session.
    logfile = Pygtail(INPUT_FILE,
                      read_from_end=True,
                      copytruncate=False,
                      offset_file="NUL")
    logfile.readlines()

    SHIP_HOLDS = 50  # This will be updated once we get selfShip info later.

    print("***** STAK - Trade Tool {0} *****".format(VERSION))
    print()
    do_main_menu()
Example #11
 def test_readlines(self):
     pygtail = Pygtail(self.logfile.name)
     self.assertEqual(pygtail.readlines(), self.test_lines)
class GroupingTail(object):
    def __init__(self, filepath, groupby, groupname=None):
        self.groupmatch = re.compile(groupby)
        # write an offset file so that we start near the end of the file

        # either filepath is a path or a syslogd url
        (scheme, netloc, path, params, query, fragment) = urlparse.urlparse(filepath)
        if scheme == 'syslog':
            host, port = netloc.split(':')
            self.fin = QueueFile()
            self.server = SocketServer.UDPServer((host, int(port)), SyslogUDPHandler)
            self.server.queue = self.fin

            th = threading.Thread(target=lambda: self.server.serve_forever(poll_interval=0.5))
            th.daemon = True
            th.start()
        else:
            # Create a temporary file holding the offset info
            self.offsetpath = "/tmp/" + str(uuid.uuid4())
            try:
                inode = os.stat(filepath).st_ino
                offset = os.path.getsize(filepath) - 1024
            except OSError:
                pass
            else:
                if offset > 0:
                    foffset = open(self.offsetpath, "w")
                    foffset.write("%s\n%s" % (inode, offset))
                    foffset.close()

            self.fin = Pygtail(filepath, offset_file=self.offsetpath, copytruncate=True)

        # List of matchings
        self.match_definitions = []
        # Regex group name for grouping
        self.groupbygroup = groupname

    def __del__(self):
        if hasattr(self, 'server'):
            self.server.socket.close()

    # Process the lines appended since the last update
    def update(self):
        for line in self.fin.readlines():
            groupname = None
            mo = self.groupmatch.match(line)
            if mo is not None:
                if self.groupbygroup is None and mo.groups():
                    # No groupbygroup get first group name
                    groupname = mo.groups()[0]
                elif self.groupbygroup is not None:
                    # Get groupname from line
                    groupname = mo.groupdict().get(self.groupbygroup)

            if groupname is not None:
                # Normalize groupname
                groupname = groupname.replace(".", "_").replace("-", "_")
                # Check all possible matchings
                for match in self.match_definitions:
                    instrument = match["instrument"]
                    instrument.write(groupname, line)

    # Attach a match definition to the GroupingTail instance
    def add_match(self, instance_name, valuetype, instrument):
        self.match_definitions.append(dict(
            instance_name=instance_name,
            valuetype=valuetype,
            instrument=instrument
        ))

    # Get stored values from instrument
    def read_metrics(self):
        # For all matchings
        for match in self.match_definitions:
            instance_name = match["instance_name"]
            instrument = match["instrument"]
            valuetype = match["valuetype"]

            # Get metric info
            for groupname, value in instrument.read():
                # Construct grouping name for this metric value
                metric_name = "%s*%s" % (groupname, instance_name)
                # Send metric info
                yield (metric_name, valuetype, value)
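
The syslog branch of this variant can be exercised by passing a syslog:// URL instead of a file path. A hedged sketch, reusing the hypothetical CounterInstrument from the first sketch above; the bind address, port, regex, and group name are illustrative:

gt = GroupingTail("syslog://0.0.0.0:5140",
                  groupby=r"^.*?(?P<prog>[\w.-]+)\[\d+\]:",
                  groupname="prog")
gt.add_match("lines", "counter", CounterInstrument())

# Poll periodically: update() drains lines queued by the UDP syslog server,
# read_metrics() then yields one (metric_name, valuetype, value) per group.
gt.update()
for metric_name, valuetype, value in gt.read_metrics():
    print("%s %s=%s" % (metric_name, valuetype, value))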
class GroupingTail (object):

    def __init__(self, filepath, groupby, groupname=None):

        self.groupmatch = re.compile(groupby)

        # write an offset file so that we start near the end of the file

        # either filepath is a path or a syslogd url
        (scheme, netloc, path, params, query, fragment) = urlparse.urlparse(filepath)
        if scheme == 'syslog':
            host, port = netloc.split(':')
            self.fin = QueueFile()
            self.server = SocketServer.UDPServer((host, int(port)), SyslogUDPHandler)
            self.server.queue = self.fin

            th = threading.Thread(target=lambda: self.server.serve_forever(poll_interval=0.5))
            th.daemon = True
            th.start()

        else:

            self.offsetpath = "/tmp/" + str(uuid.uuid4())
            #print self.offsetpath
            try:
                inode = os.stat(filepath).st_ino
                offset = os.path.getsize(filepath) - 1024
                #print inode
                #print offset
            except OSError:
                pass
            else:
                if offset > 0:
                    #print 'write offset'
                    foffset = open(self.offsetpath, "w")
                    foffset.write ("%s\n%s" % (inode, offset))
                    foffset.close()

            self.fin = Pygtail(filepath, offset_file=self.offsetpath, copytruncate=True)
            #self.fin.readlines()

        self.match_definitions = []
        self.groupbygroup = groupname

    def __del__(self):
        if hasattr(self, 'server'):
            self.server.socket.close()

    def update(self):
        for line in self.fin.readlines():
            groupname = None
            mo = self.groupmatch.match(line)
            if mo is not None:
                if self.groupbygroup is None and mo.groups():
                    groupname = mo.groups()[0]
                elif self.groupbygroup is not None:
                    groupname = mo.groupdict().get(self.groupbygroup)
            if groupname is not None:
                groupname = groupname.replace(".", "_").replace("-", "_")
                for match in self.match_definitions:
                    instrument = match["instrument"]
                    instrument.write(groupname, line)

    def add_match(self,  instance_name, valuetype, instrument):
        self.match_definitions.append(dict(
            instance_name=instance_name,
            valuetype=valuetype,
            instrument=instrument
        ))

    def read_metrics(self):
        for match in self.match_definitions:
            instance_name = match["instance_name"]
            instrument = match["instrument"]
            valuetype = match["valuetype"]

            for groupname, value in instrument.read():
                metric_name = "%s.%s" % (groupname, instance_name)
                yield (metric_name, valuetype, value)
Example #14
displayLine("   TIPI   Waiting...")

logpath = "/home/tipi/log"
try:
    os.makedirs(logpath)
except OSError as exc:
    if exc.errno == errno.EEXIST and os.path.isdir(logpath):
        pass
    else:
        raise

logfile = "{}/tipi.log".format(logpath)
with open(logfile, 'a'):
    os.utime(logfile, None)

pygtail = Pygtail(logfile)
oldlines = [x for x in pygtail.readlines() if "oled" in x]

if oldlines:
    line = oldlines[-1]
else:
    line = ""

while True:
    for line in pygtail:
        if "oled" in line:
            m = pat.match(line)
            msg = m.group(1)
            displayLine(msg)
    time.sleep(0.100)