Example #1
def tailf(logfile):
    print("Starting to monitor {0} with pattern for rclone {1}".format(
        logfile, cfg['backend']))

    # Validate which backend we're using
    if cfg['backend'] == 'cache':
        # Use cache backend
        for line in tail("-Fn0", logfile, _iter=True):
            if re.match(
                    r".*(mkv:|mp4:|mpeg4:|avi:) received cache expiry notification",
                    line):
                f = re.sub(r"^(.*rclone\[[0-9]+\]: )([^:]*)(:.*)$", r'\2',
                           line)
                print("Detected new file: {0}".format(f))
                scan(os.path.dirname(f))

    elif cfg['backend'] == 'vfs':
        # Use vfs backend
        timePrev = ''
        for line in tail("-Fn0", logfile, _iter=True):
            if re.match(r".*: forgetting directory cache", line):
                f = re.sub(r"^.*\s:\s(.*):\sforgetting directory cache", r'\1',
                           line)
                timeCurr = re.sub(
                    r"^.*\s([0-9]+:[0-9]+:[0-9]+)\s.*\s:\s.*:\sforgetting directory cache",
                    r'\1', line)

                if timeCurr != timePrev:
                    print("Detected directory cache expiration: {0}".format(f))
                    scan(os.path.dirname(f))
                    timePrev = timeCurr
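Both branches rely on the same sh idiom: passing `_iter=True` turns the long-running `tail -F` into a generator that yields one log line at a time. A minimal, self-contained sketch of that idiom (the log path is a placeholder):

import sh

# Follow the file by name (-F survives rotation), starting from its current
# end (-n 0); each iteration blocks until tail emits a new line.
for line in sh.tail("-F", "-n", "0", "/tmp/app.log", _iter=True):
    print(line.rstrip("\n"))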
Example #2
 def _do(self):
     with tempfile.TemporaryFile('w+t') as tf:
         # Write tail of logfile into temporary file
         sh.tail(self.path, lines=self.nlines, _in=self.path, _out=tf)
         # Rewind temporary file
         tf.seek(0)
         # Overwrite old file
         sh.cat(_in=tf, _out=self.path)
     return True
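Two details are worth noting here: sh translates the keyword argument `lines=self.nlines` into the long option `--lines=<n>`, and the positional `self.path` makes `_in=self.path` redundant, since tail reads from the named file rather than stdin. A hedged standalone sketch of the same truncate-in-place idea:

import tempfile
import sh

def truncate_to_last(path, nlines):
    # Keep only the last `nlines` lines of `path` (sketch, not the original class).
    with tempfile.TemporaryFile("w+t") as tf:
        sh.tail("-n", nlines, path, _out=tf)  # write the tail into a temp file
        tf.seek(0)                            # rewind before re-reading
        sh.cat(_in=tf, _out=path)             # overwrite the original file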
Example #3
    def _execute(self, command_line):
        LOG.debug(
            'Begin executing command: %s',
            ' '.join("'" + arg + "'" for arg in command_line))

        self.status = "EXECUTING"
        self.exit_code = None

        try:
            sh.bash('-x', *command_line,
                    _out=self.write_stdout,
                    _err=self.write_stderr)

        except sh.ErrorReturnCode as error:
            exit_code = error.exit_code
            status = "FAILED"
            severity = logging.ERROR

        except Exception:
            exit_code = 1
            status = "ERROR"
            severity = logging.ERROR
            LOG.exception('Internal error.')

        except BaseException:
            exit_code = 1
            severity = logging.WARNING
            status = "INTERRUPTED"

        else:
            exit_code = 0
            status = 'SUCCESS'
            severity = logging.DEBUG

        self.exit_code = exit_code
        self.status = status

        if exit_code != 0 and self.log_level < logging.ERROR:
            stream = sys.stderr
            stream.write('=' * 79 + '\n')
            sh.tail('-n', '100', self.log_path + '.ansi', _out=stream)
            stream.write('=' * 79 + '\n')

        LOG.log(
            severity,
            'Finished executing command:\n'
            '    Command line: %s\n'
            '    Status: %s\n'
            '    Exit code: %s\n'
            '    Log file: %s\n',
            command_line, status, exit_code, self.log_path)

        return exit_code
Example #4
def xxd_patch(old, new, patch_file):
    tmp_old = '/var/tmp/old_hex'
    sh.xxd('-p', old, _out=tmp_old)
    sh.tail('-n', '+2', patch_file, _out=patch_file+'.nohead')
    sh.patch(tmp_old, patch_file+'.nohead')

    if new:
        sh.xxd('-r', '-p', tmp_old, _out=new)
    else:
        sh.xxd('-r', '-p', tmp_old, _out=old)

    sh.rm('-f', tmp_old)
    sh.rm('-f', patch_file+'.nohead')
Example #5
def containers():
    """
    Return the list of instantiated container names
    """
    from io import StringIO

    buf = StringIO()
    try:
        tail(awk(docker('ps', '-a'), '{print $NF}'), '-n+2', _out=buf)
    except Exception as err:
        log.error(err)

    containers = buf.getvalue().split()
    return containers
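The nesting works because sh pipes the inner command's stdout into the outer command's stdin, and `_out` accepts any file-like object, so the whole pipeline collects into the StringIO buffer. Roughly the shell equivalent, assuming `docker` is on PATH:

from io import StringIO
from sh import awk, docker, tail

buf = StringIO()
# Shell equivalent: docker ps -a | awk '{print $NF}' | tail -n +2
tail(awk(docker("ps", "-a"), "{print $NF}"), "-n+2", _out=buf)
names = buf.getvalue().split()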
Example #6
def max_file_hash(n=10, short=False):
    pack_path = glob('.git/objects/pack/*.idx')

    if not pack_path:
        git.gc()
        pack_path = glob('.git/objects/pack/*.idx')

    if short:
        return awk(
            tail(sort(git('verify-pack', '-v', pack_path), '-k', '3'), '-n',
                 '-{0:d}'.format(n)), '{print $1}')
    else:
        return tail(sort(git('verify-pack', '-v', pack_path), '-k', '3', '-n'),
                    '-{0:d}'.format(n))
Example #7
    def _get_data(self):
        try:
            data = {'produced': 0}
            search = []
            now = DT.datetime.now()
            last = now + relativedelta(seconds=-126)
            for line in tail("-n 500", self.log_path, _iter=True):
                if "Received block" in line or "Produced block" in line:
                    match = re.match(
                        r'.*info  (.*) thread.* signed by (.*) \[.*', line)
                    if match is not None:
                        info = {
                            'date': match.group(1),
                            'producer': match.group(2)
                        }
                        date = parse(info["date"])
                        offset = -self.configuration.get(
                            'timezone_offset') * 3600 if self.configuration.get(
                                'timezone_offset') else 0
                        date = date + relativedelta(seconds=offset)
                        #pdb.set_trace()
                        # filtering entries for the last 126 seconds
                        if date > last and date < now or date == now:
                            search.append(info)
                            if info['producer'] == self.configuration.get(
                                    'producer'):
                                data['produced'] += 1

            return data
        except (ValueError, AttributeError):
            return None
Example #8
def _get_keynames():
    global key_cache
    key_cache = []
    result = fgrep(tail(nova('keypair-list'), '-n', '+4'), '-v', '+')
    for line in result:
        (front, name, signature, back) = line.split('|')
        key_cache.append(name.strip())
Example #9
def _get_keynames():
    global key_cache
    key_cache = []
    result = fgrep(tail(nova('keypair-list'), '-n', '+4'), '-v', '+')
    for line in result:
        (front, name, signature, back) = line.split('|')
        key_cache.append(name.strip())
Example #10
def tail_and_follow(path, ansible_verbosity):
    """Follow and provide output

    :param path: tuple containing the path to the file to follow
    :param ansible_verbosity: the verbosity level
    """
    if len(path) > 0:  # pylint: disable=len-as-condition
        truncate = 1
        if ansible_verbosity:
            truncate = ansible_verbosity

        print_line = truncate
        playbook_started = False
        truncated = False

        # pylint: disable=no-member
        for line in sh.tail('-f', '-n', '+0', path, _iter=True):
            line = line.strip('\n')
            if line.startswith('TASK') or line.startswith('PLAY'):
                print(line)
                print_line = truncate
                playbook_started = True
                truncated = False
            elif print_line > 0:
                line_len = len(line)
                char_truncate = truncate * 100
                if line_len > char_truncate:
                    print(line[0:char_truncate] + '...')
                else:
                    print(line)
                print_line = print_line - 1
            elif print_line == 0 and not truncated and playbook_started:
                print(_('-- output truncated --'))
                truncated = True
Example #11
def agent_logs(amount):
    static = get_static()
    if not os.path.isfile(static.main_log):
        return jsonify(messages=[])

    lines = tail('-n {}'.format(amount), static.main_log).splitlines()
    return jsonify(messages=lines)
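The `.splitlines()` call works because sh's RunningCommand behaves like a string: attribute lookups that don't match the process object fall through to the decoded stdout. A small sketch (the path is a placeholder):

from sh import tail

result = tail("-n", "50", "/tmp/agent.log")  # placeholder log path
lines = result.splitlines()                  # delegates to str(result)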
Example #12
        def post_update():
            if not self._service._enabled:
                return

            lines = [ x for x in sh.tail("/var/log/syslog", "-n", "10", _iter=True) ]
            dns_query = None
            for line in lines[::-1]:
                if "CONNECT" in line and "dreampi" in line:
                    # Don't seek back past connection
                    break

                if "query[A]" in line:
                    # We did a DNS lookup, what was it?
                    remainder = line[line.find("query[A]") + len("query[A]"):].strip()
                    domain = remainder.split(" ", 1)[0].strip()
                    dns_query = sha256(domain).hexdigest()
                    break

            user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT), Dreamcast Now'
            header = { 'User-Agent' : user_agent }
            mac_address = self._service._mac_address
            data = {}
            if dns_query:
                data["dns_query"] = dns_query

            data = urllib.urlencode(data)
            req = urllib2.Request(API_ROOT + UPDATE_END_POINT.format(mac_address=mac_address), data, header)
            urllib2.urlopen(req) # Send POST update
Example #13
def tail_F(some_file):
    while True:
        try:
            for line in sh.tail("-f", some_file, _iter=True):
                yield line
        except sh.ErrorReturnCode_1:
            yield None
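A consumer of this generator has to treat `None` as a restart marker: sh raises `ErrorReturnCode_1` when tail exits with status 1 (for example, when the file disappears), the generator yields `None`, and the `while True` loop re-invokes tail. A hypothetical caller:

for line in tail_F("/var/log/app.log"):  # placeholder path
    if line is None:
        continue  # tail died with exit code 1; the generator restarts it
    print(line, end="")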
Example #14
    def get_log(self, request, format=None):
        def tail(file_path, num_lines=20000):
            if os.path.exists(file_path) is False:
                return "File does not exist."
            else:
                try:
                    if os.path.getsize(file_path) == 0:
                        return "File is empty."
                    else:
                        from sh import tail
                        return tail("-n", int(num_lines), file_path)
                except OSError as err:
                    return "Failed getting file size: {}".format(err)

        qp = request.query_params
        log_filename = str(
            qp["log_filename"]) if "log_filename" in qp else None
        download = "download" in qp

        file_path = os.path.join("/app/logs", os.path.basename(log_filename))
        response = HttpResponse(tail(file_path), content_type="text/plain")

        if download is True:
            response[
                "Content-Disposition"] = "attachment; filename=\"{}\"".format(
                    log_filename)
            response["Cache-Control"] = "no-cache"

        return response
Example #15
        def post_update():
            if not self._service._enabled:
                return

            lines = [
                x for x in sh.tail("/var/log/syslog", "-n", "10", _iter=True)
            ]
            dns_query = None
            for line in lines[::-1]:
                if "CONNECT" in line and "dreampi" in line:
                    # Don't seek back past connection
                    break

                if "query[A]" in line:
                    # We did a DNS lookup, what was it?
                    remainder = line[line.find("query[A]") +
                                     len("query[A]"):].strip()
                    domain = remainder.split(" ", 1)[0].strip()
                    dns_query = sha256(domain).hexdigest()
                    break

            user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT), Dreamcast Now'
            header = {'User-Agent': user_agent}
            mac_address = self._service._mac_address
            data = {}
            if dns_query:
                data["dns_query"] = dns_query

            data = urllib.urlencode(data)
            req = urllib2.Request(
                API_ROOT + UPDATE_END_POINT.format(mac_address=mac_address),
                data, header)
            urllib2.urlopen(req)  # Send POST update
Example #16
def get_appyters(appyter_path):
    for path in map(os.path.dirname,
                    glob.glob(os.path.join(appyter_path, '*',
                                           'appyter.json'))):
        appyter = json.load(open(os.path.join(path, 'appyter.json'), 'r'))
        yield dict(
            appyter,
            path=path,
            long_description=open(os.path.join(path, 'README.md'), 'r').read(),
            # find the oldest commit containing the appyter's appyter.json (follow for detecting renames)
            creation_timestamp=str(
                sh.tail(
                    sh.git.log(
                        '--follow',
                        r'--pretty=format:%aI',
                        '--',
                        os.path.join(path, 'appyter.json'),
                        _tty_out=False,
                    ), '-n1')).strip(),
            # find the most recent commit containing the appyter's directory
            update_timestamp=str(
                sh.head(
                    sh.git.log(
                        r'--pretty=format:%aI',
                        '--',
                        path,
                        _tty_out=False,
                    ), '-n1')).strip(),
        )
Example #17
def read_log(id: int):
    log = find_log_file_by_id(id)

    if not log:
        raise NotFound()

    filepath = log.filepath

    lines: str = sh.tail("-n200", filepath)

    lines = lines.split("\n")

    ansi_escape = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]')
    lines = [ansi_escape.sub('', line) for line in lines]

    # clrc.info("Splitted lines: ")
    # clrc.info(lines)

    data = [
        "2019-04-28 13:53:36.694 | INFO     | clrc.printers.loguru_printer:info:15 - SMS COOLDOWN FOR 79964716158 LEFT 0",
        "2019-04-28 12:07:47.668 | SUCCESS  | clrc.printers.loguru_printer:success:9 - Success sending typed email!",
        "2019-04-28 13:53:23.992 | ERROR    | clrc.printers.loguru_printer:error:21 - Traceback (most recent call last):"
    ]

    return jsonify(lines)
Example #18
def get_alert_sms(now_stm):
    alert_sms = []
    sms_data = []
    try:
        sms_data = list(sh.tail("-n5", "/var/log/sms.log"))
    except Exception:
        pass
Example #19
 def test_nonblocking_iter(self):
     import tempfile
     from sh import tail
     from errno import EWOULDBLOCK
     
     tmp = tempfile.NamedTemporaryFile()
     for line in tail("-f", tmp.name, _iter_noblock=True): break
     self.assertEqual(line, EWOULDBLOCK)
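With `_iter_noblock=True`, iteration never blocks: when no output is ready, sh yields `errno.EWOULDBLOCK` instead of a line, which is exactly what the test's first iteration captures on an empty file. A sketch of the polling loop this enables (path is a placeholder):

import time
from errno import EWOULDBLOCK

import sh

for chunk in sh.tail("-f", "/tmp/app.log", _iter_noblock=True):
    if chunk == EWOULDBLOCK:
        time.sleep(0.5)  # nothing new yet; sleep or do other work
        continue
    print(chunk, end="")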
Example #20
 def _doRollover(self):
     for filename in self.filenames:
         tmp_name = filename + '.tmp'
         _ = sh.tail('-n',
                     int(self.maxlen),
                     filename,
                     _out=open(tmp_name, 'w'))
         _ = sh.cp(tmp_name, filename)
Example #21
 def test_nonblocking_iter(self):
     import tempfile
     from sh import tail
     from errno import EWOULDBLOCK
     
     tmp = tempfile.NamedTemporaryFile()
     for line in tail("-f", tmp.name, _iter_noblock=True): break
     self.assertEqual(line, EWOULDBLOCK)
Example #22
def append_file(path, end):
    last_row = tail('-n', '1', path)

    date = [ json.loads(d)['timestamp'] for d in last_row ]

    print "Appending"
    with open(path, 'a') as fh:
        write_data(fh, date[0], end)
Example #23
 def run(self):
     log_info("Run thread LogParser")
     while True:
         try:
             for line in tail("-n", 1, "-f", eosio_log_file, _iter=True):
                 log_parse(line)
         except:
             log_err("eosio log file:" + eosio_log_file + " parse failed!")
         time.sleep(10)
Example #24
 def run(self):
     if is_docker == "true":
         client = docker.from_env()
         nodeos = client.containers.get("nodeos")
         for line in nodeos.logs(stream=True, tail=1):
             log_parse(line)
     else:
         for line in tail("-n", 1, "-f", log_file, _iter=True):
             log_parse(line)
Example #25
    def follow(self, process_line_func):
        def ignore(*args):
            pass

        def _process(line):
            return process_line_func({"name": self.cname, "msg": line, "pid": None, "t": datetime.now()})

        p = sh.tail("--follow=name", "-q", "--lines=3", self.filename, _out=_process, _err=ignore)
        p.wait()
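Passing a callable as `_out` makes sh run the command asynchronously and invoke the callback once per line; `p.wait()` then blocks until tail exits. A minimal version of the callback style, with a placeholder path:

import sh

def on_line(line):
    print("got:", line.rstrip("\n"))

# _out as a callable delivers output line by line in the background.
p = sh.tail("-f", "-n", "0", "/tmp/app.log", _out=on_line)
p.wait()  # block until tail terminates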
Example #26
def monitor_system(now_stm):
    system_pool = []
    system_log = []
    system_name = {'active': 30887, 'wservice': 30780, 'order': 31499, 'passport': 30592, 'pay': 31545,
                   'product': 30342, 'shopping': 29138, 'slist': 33027, 'track': 30779, 'tuan': 30778, 'wap_v5': 33008}
    try:
        system_log = list(sh.tail("-n5", "/var/log/shuffling.log"))
    except Exception:
        pass
Example #27
def _get_swarm_service_state(service_name):
    docker = sh.Command("docker")
    try:
        state = sh.awk(
            sh.tail(sh.head(docker(["service", "ps", service_name]), "-2"),
                    "-1"), "{print $6}").strip(' \t\n\r\'')
    except:
        state = "Not present"
    return state
Example #28
def check_deployments_status(deployments, exit_on_error=False):
    """Simple asynchronous checks if some deployment is finished"""

    # In single cluster deployment, logs are forwarded to stdout.
    if len(deployments) == 1:
        logging.info(
            "Only one cluster is being deployed. Redirecting logs to stdout.")
        sh.tail(
            "-n",
            "100000",
            "-f",  # pylint: disable=no-member
            deployments[0].log_file.name,
            _out=print_log_line,
            _bg=True,
            _new_session=False)
    else:
        logging.info(
            "More than one deployment is running, "
            "please check the log files for detailed deployment logs.")

    while not has_deployments_ended(deployments):
        for _, deployment in enumerate(deployments):
            if deployment.process.poll() is None or deployment.has_ended:
                pass
            elif deployment.process.poll() == 0:
                logging.info('%s %s: succeed.', deployment.cluster_name,
                             deployment.playbook_file_name)
                deployment.log_file.close()
                deployment.has_ended = True
                deployment.ended_successfully = True
            elif deployment.process.poll() != 0:
                logging.error('%s %s: failed. Please check the logs: %s',
                              deployment.cluster_name,
                              deployment.playbook_file_name,
                              os.path.realpath(deployment.log_file.name))
                deployment.log_file.close()
                deployment.has_ended = True
                deployment.ended_successfully = False
                if exit_on_error is True:
                    logging.info(
                        "--any-errors-fatal flag raised, terminating other deployments..."
                    )
                    kill_deployments(deployments)
        time.sleep(1)
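The `_bg=True` call returns immediately, streaming lines through the `_out` callback while the polling loop checks the deployment processes; `_new_session=False` keeps tail in the script's own session so it is cleaned up with the script. A reduced sketch with placeholder names:

import sh

proc = sh.tail(
    "-n", "0", "-f", "/tmp/deploy.log",   # placeholder log file
    _out=lambda line: print(line, end=""),
    _bg=True,            # return immediately; output flows via the callback
    _new_session=False,  # keep tail in our session so it dies with us
)
# ... poll other work here ...
proc.terminate()  # stop following when done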
Example #29
def load_kvs():
    with open(FLORFILE, "r", encoding="utf-8") as f:
        d = json.load(f)

    p = Path.home()
    p = p / ".flor"
    p = p / d["NAME"]  # type: ignore
    p = p / "replay_jsons"

    seq = []

    for q in p.iterdir():
        # q will contain the timestamp: 2022-02-07T20:42:25.json
        tstamp = q.stem
        # 2022-02-07T20:42:25
        with open(str(q), "r", encoding="utf-8") as f:
            d = json.load(f)

        _kvs = d["KVS"]

        for k in _kvs:
            z = k.split(".")
            e = z.pop(0)
            r = z.pop(0)
            n = ".".join(z)
            for s, x in enumerate(_kvs[k]):
                # pvresnx
                seq.append((d["NAME"], d["MEMO"], tstamp, r, e, s, n, x))

    df1 = pd.DataFrame(
        seq,
        columns=[
            "projid", "vid", "tstamp", "alpha", "epoch", "step", "name",
            "value"
        ],
        # dtype=(str, str, np.datetime64, str, int, int, str, object),
    ).astype({
        "projid": str,
        "vid": str,
        "tstamp": np.datetime64,
        "alpha": str,
        "epoch": int,
        "step": int,
        "name": str,
        "value": object,
    })

    # I want to build a mapper from FLORFILE to GIT HASH
    vid_mapper = dict()
    for path in df1["vid"].drop_duplicates().to_list():
        eof = json.loads(next(tail("-1", path, _iter=True)))
        vid_mapper[path] = eof["COMMIT_SHA"]

    df1["vid"] = df1["vid"].apply(lambda x: vid_mapper[x])

    return df1.sort_values(by=["tstamp", "epoch", "step"])
Example #30
def log():
    uwsgi_logfile = config.common['UWSGI_LOGFILE']
    def process_output(line):
        print(line.strip(), file=sys.stdout)
    p = sh.tail(uwsgi_logfile, follow=True, _out=process_output)
    try:
        p.wait()
    except KeyboardInterrupt:
        print(colored('\nleave log', 'red'))
        p.terminate()
Example #31
    def process_tmp_ta(self, filename="", is_standalone=True):
        if filename == "":
            log.info("INFO: No files to process now.")
        else:

            if not os.path.isfile(filename):
                log.error("ERROR: filename " + filename + " is not a file.")
                return False
            else:
                log.info("Processing: " + filename)
                self.iostatus[filename]['running'] = True
                self.iostatus[filename]['tail'] = sh.tail("-F",
                                                          filename,
                                                          _iter=True,
                                                          _bg_exc=False)

            while self.iostatus[filename]['running'] and self.thread_ctrl[
                    'continue']:
                try:
                    line = self.iostatus[filename]['tail'].next()
                    if is_standalone:
                        j = json.loads(line)
                        summary = get_summary_from_json(j)
                        if summary is not None:
                            self.influxdb.influxdb_queue.put({
                                'summary': summary,
                                'json': line
                            })
                        else:
                            log.warning(
                                "can't get summary from {0}".format(line))
                    else:
                        self.rabbitmq.rabbitmq_queue.put(
                            line)  # < --- sending raw lines for now

                except sh.ErrorReturnCode_1:  # as e:
                    log.info(
                        "process_tmp_ta: tail terminated {0}, (permission denied ?) "
                        .format(filename))
                    break
                except sh.SignalException_SIGKILL as e:
                    log.info("process_tmp_ta: tail terminated {0} {1}".format(
                        filename, e))
                    break
                except Exception as e:
                    exc_type, _, exc_tb = sys.exc_info()
                    fname = os.path.split(
                        exc_tb.tb_frame.f_code.co_filename)[1]
                    log.error("process_tmp_ta: {0} {1} {2} {4}".format(
                        exc_type, fname, exc_tb.tb_lineno, e))

            log.info('process_tmp_ta: exiting ' + filename)
            #with open(iostatus, 'wb') as handle:
            #  pickle.dump(iostatus, handle, protocol=pickle.HIGHEST_PROTOCOL)
            return True
Example #32
def transport_assembly_run( seed_name, tfilename, workdir='./build' ):

   # Push package: to-be processed packages are placed in 'put' directory.
   # Pulled package: finished work is in the 'get' directory.
   retry = 1
   # Credentials wide out in the open:
   ssh = createSSHClient(CL_SERVER_NAME, 22, CL_SERVER_USER, CL_SERVER_PW)
   while (retry < 10):
      try:
         scp = SCPClient(ssh.get_transport())
         scp.put(tfilename, 'run')
         # If it got this far, it succeeded
         break;
      except Exception as err:
         print '=============='
         print err[0]
         print '    Retry attempt #' + str(retry)
         print '=============='
         sleep(5)
         retry += 1

   # 
   # Server Processing Phase
   #
   waittime = 0
   while (waittime < 600):
      try:
         scp = SCPClient(ssh.get_transport())
         # Check the log file
         scp.get('get/'+seed_name+'_run.log', local_path=workdir)
         line  = tail("-1", os.path.join(workdir,seed_name+'_run.log') ).strip()
         print line
         if 'Assembly Complete' in line:
            break
         if 'ERROR' in line:
            exit(1)
      except Exception as err:
         print '=============='
         print err[0]
         print '=============='
         if 'No such file or directory' in err[0]:
            print "---- No status yet ----"
      sleep(2)
      waittime = waittime + 1
      scp.close()
    # Compile complete.  Download results.
   try:
      scp.get('get/'+seed_name+'_assembly.dat', local_path=workdir)
      os.rename( os.path.join(workdir, seed_name+'_assembly.dat'), os.path.join(workdir,'assembly.dat'))
   except Exception as err:
      print '=============='
      print err[0]
      print '=============='
      exit(1)

   return True
Example #33
 def __init__(self):
     global LOGFILE
     self.NETWORKSERVER = NetworkServer()
     self.LoadCellDataRecorder = DataRecorder(LoadCellDataRecorderKeys,
                                              "LoadCellVoltageData",
                                              network=self.NETWORKSERVER)
     self.LOGGER = logging.getLogger("TESTBED")
     self.Phypervisor = PressureHyperVisor(700, 700)
     init_gpio()
     self.LOGTAIL = sh.tail("-f", LOGFILE, _iter_noblock=True)
     self.report()
Example #34
    def __init__(self, path):
        if os.path.exists(path):
            self.path = path
        else:
            raise Exception("Path '{0}' does not exist.".format(path))

        self.dependencies = []

        with pushd(self.path):
            self.package_name = tail(python('setup.py', '--name'),
                                     '-1').rstrip()
Example #35
def get_chinamap_ping_data(now_stm):
    ping_dic = {}
    ping_data = list(sh.tail("-n5", "/var/log/map_ping.log"))
    for ptm in ping_data:
        if int(ptm.split()[0]) > now_stm:
            try:
                ping_dic[ptm.split()[2].split('_')[3]] = {'type': ptm.split()[1], 'loss': ptm.split()[4],
                                                          'rtt': ptm.split()[6]}
            except:
                pass
    return json.dumps(ping_dic)
Example #36
def agent_get_outputs_reporter_parallel(lock, handler, defaultfilepath):
    import sh
    if not os.path.exists(defaultfilepath) or not os.path.isfile(
            defaultfilepath):
        print('Oops! Not a file in reporter parallel (%s)' % (defaultfilepath))
        return
    print('Listening defaults (%s)' % (defaultfilepath))
    for line in sh.tail('-f', defaultfilepath, _iter=True):
        div = line.find(" ")
        service = line[:div]
        output = line[div + 1:]
        _agent_send_output(service, output)
Example #37
 def tail(file_path, num_lines=20000):
     if os.path.exists(file_path) is False:
         return "File does not exist."
     else:
         try:
             if os.path.getsize(file_path) == 0:
                 return "File is empty."
             else:
                 from sh import tail
                 return tail("-n", int(num_lines), file_path)
         except OSError as err:
             return "Failed getting file size: {}".format(err)
Example #38
def agent_get_file_outputs_parallel(lock, handler, filepath, defaultfilepath):
    import sh
    if not os.path.exists(filepath) or not os.path.exists(
            defaultfilepath) or not os.path.isfile(
                filepath) or not os.path.isfile(defaultfilepath):
        print('Wow! Invalid arguments in get outputs parallel (%s, %s)' %
              (filepath, defaultfilepath))
        return
    print('Listening (%s, %s)' % (filepath, defaultfilepath))
    for line in sh.tail('-f', filepath, _iter=True):
        with lock:
            with open(defaultfilepath, 'a') as defaultf:
                defaultf.write('%s\n' % (line))
Example #39
    def _tag_commit(self):
        sh.git.fetch("origin", "--tags")
        deploy_commit = sh.git("rev-parse", self.env.code_branch).strip()
        self._check_deploy_commit(deploy_commit, self.env.code_branch)
        pattern = "*{}*".format(self.env.environment)
        self.last_tag = sh.tail(sh.git.tag("-l", pattern), "-1").strip()

        tag_name = "{}-{}-deploy".format(self.timestamp, self.env.environment)
        # turn whatever `code_branch` is into a commit
        msg = "{} deploy at {}".format(self.env.environment, self.timestamp)
        sh.git.tag(tag_name, "-m", msg, deploy_commit)
        sh.git.push("origin", tag_name)
        self._deploy_ref = tag_name
Example #40
def output(job_id):
    lines = int(request.params.get('lines', 40))
    jobs_path = app.config.get('jobs.path')
    job_path = os.path.abspath(os.path.join(jobs_path, job_id))
    job_out = os.path.join(job_path, 'output')
    job_info = os.path.join(job_path, 'job.json')
    if not os.path.exists(job_out) or not os.path.exists(job_info):
        raise StopIteration
    with open(job_info) as f:
        info = json.load(f)
    args = ['--lines=%d' % lines, job_out] if "exit_code" in info else ['--lines=%d' % lines, '--pid=%d' % info['job_pid'], '-f', job_out]
    for line in sh.tail(*args, _iter=True):
        yield line
Example #41
def main(test):
    """Console script for docker-svtplay-dl"""

    try:
        command: sh.RunningCommand = tail('-f',
                                          '/var/log/system.log',
                                          _bg=True)
        time.sleep(2)
        command.process.terminate()
        print(command.stdout)
    except BaseException:
        pass
Example #42
 def log_parser(self):
     logger.info("Start LogParser")
     if not os.path.exists(eosio_log_file):
         logger.error("%s not exists", eosio_log_file)
         return
     while True:
         try:
             pre_line = ""
             for line in tail("-n", 1, "-f", eosio_log_file, _iter=True):
                 line = line.rstrip('\n')
                 pre_line = self.handle_line(pre_line, line)
         except Exception as e:
             logger.error("parse failed %s", e)
Example #43
def main(argv, settings):
	logging.basicConfig(filename = settings.LOG_FILE, level = logging.DEBUG, format='[%(asctime)s][%(levelname)s] %(message)s')

	first = True
	for line in sh.tail('-f', settings.WATCH_LOG, _iter=True):
		line = line.strip()
		if not first:
			match = re.search(settings.MATCH_LINES, line)
			logging.debug('Looking for "%s" in "%s" -> %r.', settings.MATCH_LINES, line, bool(match))
			if match:
				message = '[{stamp}][{host}] {match}'.format(stamp = getDateTime(), host = sh.hostname('-s').strip(), match = match.group(0))
				sendDm(settings.TWITTER_AUTH, settings.DM_RECIPIENTS, message)

		first = False
Example #44
def get_container_info():
    container_info = {}

    try:
        container_info['container_name'] = tail(sed(grep(cat('/proc/self/cgroup'), 'docker'), 's/^.*\///'), '-n1')
    except:
        container_info['container_name'] = 'unknown'

    try:
        container_info['app_version'] =  open('version', 'r').read()
    except:
        container_info['app_version'] = 'unknown'


    return container_info
Example #45
    def follow(self, process_line_func):
        def ignore(*args):
            pass

        def _process(line):
            return process_line_func({
                'name': self.cname,
                'msg': line,
                'pid': None,
                't': datetime.now()
            })

        p = sh.tail('--follow=name', '-q', '--lines=0', self.filename,
                    _out=_process, _err=ignore)
        p.wait()
Example #46
def populateImageParts(img):
    import sh, re
    from sh import tail
    logger = logging.getLogger('virtimg')
    df = sh.Command('virt-df')
    out = str(tail(df('-a', img.path), '-n+2'))
    logger.debug('%s: df-h: %s', img.name, out)
    regex = re.compile('^[\w.\-]+:/dev/sda(?P<partid>\d+)\s+(?P<size>\d+)\s+\d+\s+(?P<free>\d+)', re.M)
    res = re.findall(regex, out)
    img.parts = dict()
    for part in res:
        img.parts[part[0]] = dict()
        img.parts[part[0]]["size"] = part[1]
        img.parts[part[0]]["free"] = part[2]
    img.lastpart = max(img.parts.keys())
Example #47
def logs(bot, update):
    debug_arr = []
    imap_arr = []
    app_arr = []
    for line in tail(
            "-n 1",
            "/home/feyruz/sandbox/RideAway-AutoResponder/logs/app.verbose.log",
            _iter=False):
        debug_arr.append(line)
    for line in tail(
            "-n 3",
            "/home/feyruz/sandbox/RideAway-AutoResponder/logs/app.log",
            _iter=False):
        app_arr.append(line)
    for line in tail(
            "-n 3",
            "/home/feyruz/sandbox/RideAway-AutoResponder/logs/imap.live.log",
            _iter=False):
        line = line.rstrip()
        imap_arr.append(line[0:30] + '\n')
    update.message.reply_text('******* APP ********\n' + ''.join(app_arr) +
                              '\n******* DEBUG ********\n' +
                              ''.join(debug_arr) +
                              '\n******* IMAP ********\n' + ''.join(imap_arr))
Example #48
def _tag_commit():
    sh.git.fetch("origin", env.code_branch)
    deploy_time = datetime.datetime.utcnow()
    tag_name = "{:%Y-%m-%d_%H.%M}-{}-deploy".format(deploy_time, env.environment)
    pattern = "*{}*".format(env.environment)
    last_tag = sh.tail(sh.git.tag("-l", pattern), "-1").strip()
    branch = "origin/{}".format(env.code_branch)
    msg = getattr(env, "message", "")
    msg += "\n{} deploy at {}".format(env.environment, deploy_time.isoformat())
    sh.git.tag(tag_name, "-m", msg, branch)
    sh.git.push("origin", tag_name)
    diff_url = "https://github.com/dimagi/commcare-hq/compare/{}...{}".format(
        last_tag,
        tag_name
    )
    print "Here's a link to the changes you just deployed:\n{}".format(diff_url)
    return diff_url
Example #49
	def run(self):
		# checkThread = False  ## Ping the webApp
		threadLife = 1800
		try:
			timeSlot = int(time.time())
			n = datetime.datetime.now()
			dt = datetime.datetime(n.year, n.month, n.day, n.hour, n.minute, 0, tzinfo=tzlocal())
			ts = dt.isoformat(' ')
			for line in tail("-f", self.access_log, _iter=True):
				if int(time.time()) - timeSlot >= 60:
					self.investigate(ts)
					timeSlot = int(time.time())
					n = datetime.datetime.now()
					dt = datetime.datetime(n.year, n.month, n.day, n.hour, n.minute, 0, tzinfo=tzlocal())
					ts = dt.isoformat(' ')

				if self.utility.isJson(line):
					# checkThread = False  ## Ping the webApp
					threadLife = 1800
					res = json.loads(line)
					res["query"] = None if res["query"] == '' else res["query"]
					res["status"] = int(res["status"])
					res["size"] = 0	if res["size"] == "-" else int(res["size"])
					res["referer"] = None if res["referer"] == "-" else res["referer"]
					res["responseTime"] = int(res["responseTime"])

					self.addToQueue(res)

				else:
					if threadLife == 0:
						if not self.is_ssl:
							syslog.syslog('%s gatekeeper thread restarting...' % self.name)
						else:
							syslog.syslog('%s ssl gatekeeper thread restarting...' % self.name)
						break
					else:
						threadLife = threadLife - 30
						time.sleep(30)
						continue

		except IOError:
			syslog.syslog('Error(%s): can\'t find file (%s) or read data.' % (self.name, self.access_log))

		if threadLife == 0:
			self.run()
Example #50
def main(filename):
    """
    Open `filename` and start processing it line by line. If `filename` is
    none, process lines from `stdin`.
    """
    if filename:
        if not os.path.exists(filename):
            logger.error("'%s' doesn't exists!" % filename)
            sys.stderr.write("'%s' doesn't exists!\n" % filename)
            sys.exit(1)

        logger.info("Processing '%s'" % filename)
        for ir in process_log(sh.tail("-f", filename, _iter=True)):
            print ir
    else:
        logger.info("Processing stdin.")
        for ir in process_log(_read_stdin()):
            print ir
Example #51
    def run(self):
        import sh

        file_path = self.inputs['PATH'].receive()
        if file_path is EndOfStream:
            self.terminate()
            return

        self.log.debug(u'Tailing file: {}'.format(file_path))

        for line in sh.tail('-f', file_path, _iter=True):
            stripped_line = line.rstrip()
            self.log.debug(u'Tailed line: {}'.format(stripped_line))

            self.outputs['OUT'].send(stripped_line)

            if self.is_terminated():
                break
Example #52
    def body(self):
        """
        This method handles AMQP connection details and reacts to FTP events by
        sending messages to output queue.
        """
        self.connection = pika.BlockingConnection(self.connection_param)
        self.channel = self.connection.channel()

        print "Monitoring file '%s'." % self.ftp_extended_log

        file_iter = process_log(
            sh.tail("-f", self.ftp_extended_log, _iter=True)
        )
        for import_request in file_iter:
            self.sendMessage(
                exchange=self.output_exchange,
                routing_key=self.output_key,
                message=serializers.serialize(import_request),
                UUID=str(uuid.uuid1())
            )
Example #53
 def start_reading(self, params):
     for line in tail("-n", 0, "-f", params['file'], _iter=True):
         if not self.running:
             return
         line = line.replace('\n', '')
         # notify subscribers
         self.notify_subscribers(line)
         if self.log_level > 0:
             # add line to buffer
             self.buffer.append(line)
             if len(self.buffer) >= self.buffer_size:
                 logger_func = None
                 for line in self.buffer:
                     if re.match(self.filter_regex, line):
                         logger_func = self.get_logger_for_level_in_log_line(line)
                         break
                 if logger_func:
                     for buffered_line in self.buffer:
                         logger_func(buffered_line)
                 self.buffer = []
Example #54
    def extract(self, conf):
        merge = conf.get("should_line_merge", False)
        before = conf.get("break_only_before", None)
        if merge:
            before = re.compile(before)
        
        value = HashBuilder({
                "type": conf.get("type", "log"),
                "source_path": conf["path"],
                "tags": conf.get("tags", [])
            })

        if conf.get("host"):
            host = "%s@%s" % (conf["user"], conf["host"]) if conf.get("user") else conf["host"]
            remote_server = ssh.bake(host)
            mac = str(ssh(host, "ifconfig | grep 'eth0'"))
            value.shared["source_host"] = host
            value.shared["source_mac"] = re.findall("..:..:..:..:..:..", mac)[0]
            log = remote_server.tail("-f", conf["path"], _iter=True)
        else:
            touch(conf["path"])
            if conf.get("from", None) == "now":
                filesize = "-c0"
            else:
                filesize = "-c%s" % (int(stat("-nf" ,'%z', conf["path"])) * 2 + 10)
            log = tail("-F", filesize, conf["path"], _iter=True)

        for n, line in enumerate(log):
            if merge:
                if before.match(line):
                    if value.has_data():
                        yield value.dict()
                        # time.sleep(0.1)
                    value.build()
                value.log(line)
            else:
                value.build(line)
                yield value.dict()
Example #55
    def run(self):

        try:

            #Starting background thread which processes and cleans draft statistics periodically
            bg_thread = Thread(target=self._monitor_bg_process)
            bg_thread.daemon = True
            bg_thread.start()

            #Reading last line from tailing the log and appending it to draft statistics.
            for line in tail('-f', '-n 0', self._access_log_path, _iter=True):
                #logger.debug('Processing log line {0} ... '.format(line))
                sample = self._parser.parse(line)
                if (sample is not None):
                    #Adding even if request was for non-existing resource. Easier to write test.
                    self._draft_stats_lock.acquire()
                    self._draft_stats.add_sample(sample)
                    self._draft_stats_lock.release()

            bg_thread.join()

        except (KeyboardInterrupt, SystemExit):
            logger.error('Interrupted')
Example #56
    def resolve_from_dir(self, path):
        self._pip_install_opts.append('-e')
        if not os.path.exists(path):
            raise Exception("Path not found '{0}'".format(path))

        rm('-r', '-f', "/tmp/pip_build_{0}".format(getuser()))

        with pushd(path):
            print("")
            print("'git status' in '{0}':".format(path))
            print("------------")
            print(git('status'))
            print("------------")

            self.package_name = tail(python("setup.py", "--name"), "-1").rstrip()

            print("")
            print("Gathering package requirements ...")
            for line in pip('install', self._pip_install_opts, '.'):
                string = line.rstrip()
                match = re.search(
                    'Downloading/unpacking (.*?) \(from (.*?)\)',
                    string
                )
                if match:
                    self._add_pip_package(match.group(1), from_package=match.group(2))
                    continue

                match = re.search(
                    'Requirement already satisfied.*?: (.*?) in .*?\(from (.*?)\)',
                    string
                )
                if match:
                    self._add_pip_package(match.group(1), from_package=match.group(2))
                    continue
            print("Done. {0} records found.".format(len(self.entries)))
Example #57
def process():
    killer = GracefulKiller()

    dial_tone_enabled = "--disable-dial-tone" not in sys.argv

    # Make sure pppd isn't running
    with open(os.devnull, 'wb') as devnull:
        subprocess.call(["sudo", "killall", "pppd"], stderr=devnull)

    BAUD_SPEED = 57600

    device_and_speed, internet_connected = None, False

    ## Startup checks, make sure that we don't do anything until
    ## we have a modem and internet connection
    while True:
        logger.info("Detecting connection and modem...")
        internet_connected = check_internet_connection()
        device_and_speed = detect_device_and_speed()

        if internet_connected and device_and_speed:
            logger.info("Internet connected and device found!")
            break

        elif not internet_connected:
            logger.warn("Unable to detect an internet connection. Waiting...")
        elif not device_and_speed:
            logger.warn("Unable to find a modem device. Waiting...")

        time.sleep(5)

    modem = Modem(device_and_speed[0], device_and_speed[1], BAUD_SPEED, dial_tone_enabled)
    dreamcast_ip = autoconfigure_ppp(modem.device_name, modem.device_speed)

    mode = "LISTENING"

    modem.connect()
    if dial_tone_enabled:
        modem.start_dial_tone()

    time_digit_heard = None

    dcnow = DreamcastNowService()

    while True:
        if killer.kill_now:
            break
    
        now = datetime.now()

        if mode == "LISTENING":
            modem.update()
            char = modem._serial.read(1).strip()
            if not char:
                continue

            if ord(char) == 16:
                #DLE character
                try:
                    char = modem._serial.read(1)
                    digit = int(char)
                    logger.info("Heard: %s", digit)

                    mode = "ANSWERING"
                    modem.stop_dial_tone()
                    time_digit_heard = now
                except (TypeError, ValueError):
                    pass
        elif mode == "ANSWERING":
            if (now - time_digit_heard).total_seconds() > 8.0:
                time_digit_heard = None
                modem.answer()
                modem.disconnect()
                mode = "CONNECTED"

        elif mode == "CONNECTED":
            dcnow.go_online(dreamcast_ip)

            # We start watching /var/log/messages for the hang up message
            for line in sh.tail("-f", "/var/log/messages", "-n", "1", _iter=True):
                if "Modem hangup" in line:
                    logger.info("Detected modem hang up, going back to listening")
                    time.sleep(5) # Give the hangup some time
                    break

            dcnow.go_offline()

            mode = "LISTENING"
            modem = Modem(device_and_speed[0], device_and_speed[1], BAUD_SPEED, dial_tone_enabled)
            modem.connect()
            if dial_tone_enabled:
                modem.start_dial_tone()

    return 0
Example #58
 def read(self):
     for l in sh.tail("-f", self.path, _iter=True):
         try:
             yield l.replace('\r\n','')
         except sh.ErrorReturnCode:
             yield None
Example #59
        events.append(int(line))
# /eos/atlas/atlastier0/rucio/data15_cos/express_express/00266661/data15_cos.00266661.express_express.merge.RAW
files = [x[:-1] for x in list(eos.ls(args.eos, _iter=True))]

if args.bylb:
    for i, file in enumerate(["root://eosatlas/%s/%s" % (args.eos, x) for x in files]):
        lb = re_lb.search(file).group(1)
        run = re_run.search(file).group(1)
        output = "run%s_lb%s.RAW" % (run, lb)
        if os.path.exists(output):
            print("%d/%d Skip %s" % (i, len(files), file))
            continue
        else:
            print("%d/%d Process %s" % (i, len(files), file))

        file_events = set(int(x[:-1]) for x in list(cut(cut(tail(atl_list("-l", file), '-n+12'),
                                                            '-d', " ", '-f3'), '-d', '=', '-f2', _iter=True)) if len(str(x[:-1])) > 1)
        extract_events = file_events.intersection(events)
        if not extract_events:
            continue

        params = []
        for e in extract_events:
            params += ["-e", e]
        params += ["-o", output]
        params += [file]
        atl_copy(*params)
        print("Created %s file with events %s" % (output, str(extract_events)))
else:
    run = None
    params = []
Example #60
from sh import git, ssh, head, tail, wc, sort, grep, du


# Get list of files
grep('*')
# Get sizes of each
du('-hM', grep('*'))
# Sort, numerically
sort(du('-hM', grep('*')), '-n')
# And get the largest
tail(sort(du('-hM', grep('*')), '-n'), '-n', 5)
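Each nested call feeds its output to the enclosing command's stdin, so the last line is roughly the shell pipeline `grep '*' | du -hM | sort -n | tail -n 5`. The result is a RunningCommand whose captured stdout is available via `str()`:

# Print the five largest entries from the pipeline above.
largest = tail(sort(du('-hM', grep('*')), '-n'), '-n', 5)
print(str(largest))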