Example #1
def after_step(context, step):
    # if BEHAVE_DEBUG_ON_ERROR and step.status == "failed":
    if step.status == "failed":
        # import ipdb
        # ipdb.post_mortem(step.exc_traceback)

        from subprocess import Popen as sub_popen
        from subprocess import PIPE as sub_PIPE

        ft_name         =   context.scenario.feature.name
        scenario_type   =   context.scenario.keyword            # e.g., u'Scenario Outline'
        if ft_name == 'Verify Online Presence':
            active_outline  =   context.active_outline
            if context.timeout:
                msg         =   "Unit Test Failed: %s [%s] did not load in %s"%tuple(active_outline.cells)
            else:  msg      =   "Unit Test Failed: %s -- msg(%s),code(%s)"%(active_outline[0],context.resp_msg,context.resp_code)

            cmd             =   'echo "%s" | mail -s "Unit-Test Failure" [email protected]'%msg
            proc            =   sub_popen([''.join(cmd)], stdout=sub_PIPE, shell=True)
            (t, err)        =   proc.communicate()

        else:
            msg             =   'Unit Test Failed: %s'%ft_name
            cmd             =   'echo "%s" | mail -s "Unit-Test Failure" [email protected]'%msg
            proc            =   sub_popen([''.join(cmd)], stdout=sub_PIPE, shell=True)
            (t, err)        =   proc.communicate()
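
The example above splices msg into an `echo ... | mail` shell pipeline, so a double quote in the message would break (or inject into) the command. A minimal sketch, not from the original project, that feeds the body to mail over stdin instead; the address and subject defaults are placeholders:

from subprocess import Popen, PIPE

def mail_failure(msg, to_addr='[email protected]', subject='Unit-Test Failure'):
    # The body travels over stdin and the command is an argument list,
    # so no shell is involved and msg needs no quoting at all.
    proc = Popen(['mail', '-s', subject, to_addr], stdin=PIPE)
    proc.communicate(msg.encode('utf-8'))
    return proc.returncode
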
Example #2
def exec_cmds(cmds,cmd_host,this_worker):
    cmd                             =   ' '.join(cmds)
    if cmd_host==this_worker:
        p                           =   sub_popen(cmd,stdout=sub_PIPE,shell=True)
    else:
        cmd                         =   "ssh %s '%s'" % (cmd_host,cmd)
        p                           =   sub_popen(cmd,stdout=sub_PIPE,shell=True)
    return p.communicate()
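
Example #2 reuses one helper to run a command either locally or wrapped in `ssh host '...'`. A minimal sketch of the same dispatch that also captures stderr and checks the exit status; the helper name and the error handling are illustrative, not from the source:

from subprocess import Popen, PIPE

def exec_cmd(cmd, host=None):
    # Wrap in ssh when a remote host is given; the single quotes keep the
    # whole command as one argument on the remote side.
    if host:
        cmd = "ssh %s '%s'" % (host, cmd)
    p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
    out, err = p.communicate()
    if p.returncode != 0:
        raise RuntimeError('command failed (%s): %s' % (p.returncode, err))
    return out
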
Example #3
def after_step(context, step):

    ft_name         =   context.feature.name
    scenario_name   =   context.scenario.name
    step_name       =   step.name

    if step.status == "failed" and [it for it in context.tags].count('debug')>0:
        import ipdb
        ipdb.post_mortem(step.exc_traceback)

    elif step.status == "failed" and [BEHAVE_TXT_ON_ERROR].count(True)>0:
        msg             =   ['Failure: %s --'%ft_name,
                            'Feature: %s'%ft_name,
                            'Scenario: %s'%scenario_name,
                            'STEPS:']

        all_steps       =   context.scenario.steps
        for it in all_steps:
            s           =   it.name
            if s==step_name:
                msg.append('\tFAILED -->> '+s)
                break
            else:
                msg.append('\t'+s)

        msg.append('VARS:')

        std_vars        =   ['scenario', 'tags', 'text', 'stdout_capture', 'feature', 'log_capture', 'table', 'stderr_capture']
        current_vars    =   context._record.keys()
        for it in current_vars:
            if std_vars.count(it)==0:
                try:
                    msg.append('\t'+it+': '+str(context.__getattr__(it)))
                except:
                    pass

        msg.append('-- END --')
        pb_msg          =   '\n'.join(msg)
        pb_url          =   make_pastebin(pb_msg,msg[0],'2W',1)
        mail_msg        =   '\n'.join([msg[0],pb_url])

        cmd             =   'logger -t "Aprinto_Unit_Test" "%s"' % mail_msg.replace('\n',' ').replace('-','')
        proc            =   sub_popen([''.join(cmd)], stdout=sub_PIPE, shell=True)
        (t, err)        =   proc.communicate()

        if BEHAVE_TXT_ON_ERROR==True:

            cmd         =   'echo "%s" | mail -t [email protected]'%mail_msg
            proc        =   sub_popen([''.join(cmd)], stdout=sub_PIPE, shell=True)
            (t, err)    =   proc.communicate()
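
Example #3 assembles its logger and mail command lines by string interpolation. When the message is the only variable part, an argument list avoids shell quoting entirely; a minimal sketch (the function name is illustrative, not from the source):

from subprocess import Popen

def syslog_note(tag, message):
    # An argument list bypasses the shell, so the message needs no escaping.
    p = Popen(['logger', '-t', tag, message])
    p.communicate()
    return p.returncode
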
Example #4
def check_db_for_action():

    cmd =   """ select * from system_log
                where operation='rotate_pgdump'
                and stout='go';
            """
    df = pd.read_sql(                           cmd,SYS_R.T.sys_eng)
    if len(df):
        # from ipdb import set_trace as i_trace; i_trace()


        for idx,row in df.iterrows():
            logs                            =   row.parameters.split(',')
            cmds                            =   []
            for f in logs:
                cmds.append(                    'rm %s;' % f.strip())

            p                               =   sub_popen(' '.join(cmds),stdout=sub_PIPE,shell=True)   # join: with shell=True a list would run only cmds[0]
            (_out, _err)                    =   p.communicate()
            assert not _out
            assert _err is None

            cmd =   """ update system_log set ended=now()
                        where uid = %d;
                    """ % row.uid

            SYS_R.T.conn.set_isolation_level(   0)
            SYS_R.T.cur.execute(                cmd)

            SYS_R._growl(                       'pgdump logrotate process finished')
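
check_db_for_action shells out to rm for each file named in the row's parameters column. A sketch of the same cleanup done in-process with os.remove, assuming parameters is the comma-separated path list seen above:

import os

def remove_logs(parameters):
    # parameters: comma-separated file paths from the system_log row.
    for f in parameters.split(','):
        path = f.strip()
        if os.path.isfile(path):
            os.remove(path)
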
Example #5
    def wrapper(func):
        declared_args = getattr(func, ATTR_ARGS, [])
        run_qry = True
        if parsed_args:
            run_qry, qry_vars = False, []
            for it in parsed_args:
                if in_args.count(it):
                    idx = in_args.index(it)
                    qry_vars.append(in_args[idx + 1])

            if len(qry_vars) == len(parsed_args):
                query = query % tuple(qry_vars)
                run_qry = True
        if not run_qry or query.count('%s'):
            return []
        cmd = 'curl --get "http://%s:9999/qry" --data-urlencode "qry=%s" 2> /dev/null ;'
        _out, _err = sub_popen(cmd % (DB_HOST, query.replace('\n', '')),
                               stdout=sub_PIPE,
                               shell=True).communicate()
        assert _err is None
        try:
            j_res = eval(_out)
        except:
            print 'ERROR -- dropping to ipy'
            i_trace()

        # for it in ['action','default','choices']:
        #     if declared_args.has_key(it):
        #         del declared_args[it]

        setattr(func, 'res', sorted([it['res'] for it in j_res]))
        return func
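
The wrapper above parses the curl response with eval(_out), which executes whatever the endpoint returns. Assuming the service emits JSON, json.loads is the safer parse; a minimal sketch of just the fetch-and-parse step (the URL shape comes from the example, the function name does not):

import json
from subprocess import Popen, PIPE

def fetch_query_results(db_host, query):
    cmd = ('curl --get "http://%s:9999/qry" --data-urlencode "qry=%s"'
           % (db_host, query.replace('\n', ' ')))
    out, err = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True).communicate()
    # json.loads only parses data; eval() would run any code in the response.
    return json.loads(out)
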
Example #6
    def wrapper(func):
        declared_args = getattr(func, ATTR_ARGS, [])
        run_qry = True
        if parsed_args:
            run_qry, qry_vars = False, []
            for it in parsed_args:
                if in_args.count(it):
                    idx = in_args.index(it)
                    qry_vars.append(in_args[idx + 1])

            if len(qry_vars) == len(parsed_args):
                query = query % tuple(qry_vars)
                run_qry = True
        if not run_qry or query.count('%s'):
            return []
        cmd = 'curl --get "http://%s:9999/qry" --data-urlencode "qry=%s" 2> /dev/null ;'
        _out, _err = sub_popen(cmd % (DB_HOST, query.replace('\n', '')), stdout=sub_PIPE, shell=True).communicate()
        assert _err is None
        try:
            j_res = eval(_out)
        except:
            print 'ERROR -- dropping to ipy'
            i_trace()

        # for it in ['action','default','choices']:
        #     if declared_args.has_key(it):
        #         del declared_args[it]

        setattr(func, 'res', sorted([ it['res'] for it in j_res ]))
        return func
Example #7
def parse_choices_from_pgsql(query, parsed_args = []):
    global pgsql_queries
    query = re.sub('[\\s]{2,}', ' ', query).strip()
    if pgsql_queries.has_key(query):
        return pgsql_queries[query]
    from system_settings import DB_HOST
    run_qry = True
    if parsed_args:
        run_qry, qry_vars = False, []
        for it in parsed_args:
            if in_args.count(it):
                idx = in_args.index(it)
                qry_vars.append(in_args[idx + 1])

        if len(qry_vars) == len(parsed_args):
            query = query % tuple(qry_vars)
            run_qry = True
    if not run_qry or query.count('%s'):
        return []
    cmd = 'curl -s http://%s:9999/curl_query?qry="%s" 2> /dev/null;'
    cmd = re.sub('[\\s]{2,}', ' ', cmd % (DB_HOST, query)).strip()
    _out, _err = sub_popen(cmd, stdout=sub_PIPE, shell=True).communicate()
    assert _err is None
    try:
        j_res = eval(_out)
        this_res = sorted([ it['res'] for it in j_res ])
        pgsql_queries[query] = this_res
        return this_res
    except:
        print 'ERROR -- dropping to ipy'
        import ipdb as I; I.set_trace()
Example #8
    def wrapper(func):
        declared_args = getattr(func, ATTR_ARGS, [])
        run_cmd = True
        if parsed_args:
            run_cmd, cmd_vars = False, []
            for it in parsed_args:
                if in_args.count(it):
                    idx = in_args.index(it)
                    cmd_vars.append(in_args[idx + 1])

            if len(cmd_vars) == len(parsed_args):
                cmd = cmd % tuple(cmd_vars)
                run_cmd = True
        if not run_cmd:
            RES = []
        # os.system(                                     "logger -t 'PY_PARSE_EXEC_1' '%s'" % str(cmd))
        _out, _err = sub_popen(cmd.replace('\n', ''),
                               stdout=sub_PIPE,
                               shell=True).communicate()
        assert _err is None
        RES = _out
        # for it in ['action','default','choices']:
        #     if declared_args.has_key(it):
        #         del declared_args[it]
        setattr(func, 'res', RES)
        return func
Example #9
def parse_choices_from_pgsql(query, parsed_args=[]):
    global pgsql_queries
    query = re.sub('[\\s]{2,}', ' ', query).strip()
    if pgsql_queries.has_key(query):
        return pgsql_queries[query]
    from system_settings import DB_HOST
    run_qry = True
    if parsed_args:
        run_qry, qry_vars = False, []
        for it in parsed_args:
            if in_args.count(it):
                idx = in_args.index(it)
                qry_vars.append(in_args[idx + 1])

        if len(qry_vars) == len(parsed_args):
            query = query % tuple(qry_vars)
            run_qry = True
    if not run_qry or query.count('%s'):
        return []
    cmd = 'curl -s http://%s:9999/curl_query?qry="%s" 2> /dev/null;'
    cmd = re.sub('[\\s]{2,}', ' ', cmd % (DB_HOST, query)).strip()
    _out, _err = sub_popen(cmd, stdout=sub_PIPE, shell=True).communicate()
    assert _err is None
    try:
        j_res = eval(_out)
        this_res = sorted([it['res'] for it in j_res])
        pgsql_queries[query] = this_res
        return this_res
    except:
        print 'ERROR -- dropping to ipy'
        import ipdb as I
        I.set_trace()
Example #10
def check_db_for_action():

    cmd = """ select * from system_log
                where operation='rotate_pgdump'
                and stout='go';
            """
    df = pd.read_sql(cmd, SYS_R.T.sys_eng)
    if len(df):
        # from ipdb import set_trace as i_trace; i_trace()

        for idx, row in df.iterrows():
            logs = row.parameters.split(',')
            cmds = []
            for f in logs:
                cmds.append('rm %s;' % f.strip())

            p = sub_popen(' '.join(cmds), stdout=sub_PIPE, shell=True)  # join: with shell=True a list would run only cmds[0]
            (_out, _err) = p.communicate()
            assert not _out
            assert _err is None

            cmd = """ update system_log set ended=now()
                        where uid = %d;
                    """ % row.uid

            SYS_R.T.conn.set_isolation_level(0)
            SYS_R.T.cur.execute(cmd)

            SYS_R._growl('pgdump logrotate process finished')
Example #11
def run_cmd(cmd):
    p = sub_popen(cmd,stdout=sub_PIPE,
                  shell=True,
                  executable='/bin/bash')
    (_out,_err) = p.communicate()
    assert _err is None
    return _out.rstrip('\n')
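
run_cmd above asserts that _err is None, which is always true here because stderr was never piped, and the exit status goes unchecked. A sketch of the same helper built on check_output, which raises CalledProcessError on a non-zero exit:

from subprocess import check_output

def run_cmd(cmd):
    # check_output raises CalledProcessError if the command exits non-zero.
    out = check_output(cmd, shell=True, executable='/bin/bash')
    return out.decode().rstrip('\n')
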
Example #12
def get_console_output():
    global tty
    if not tty:
        cmd = 'env ps -o tty,cmd -e | grep \'logrotate_console.py\' | grep -v grep | column -t | cut -d \' \' -f1;'
        p                                   =   sub_popen(cmd,stdout=sub_PIPE,shell=True)
        (_out, _err)                        =   p.communicate()
        tty                                 =   '/dev/%s' % _out.strip()
    return tty
Example #13
def get_console_output():
    global tty
    if not tty:
        cmd = 'env ps -o tty,cmd -e | grep \'logrotate_console.py\' | grep -v grep | column -t | cut -d \' \' -f1;'
        p = sub_popen(cmd, stdout=sub_PIPE, shell=True)
        (_out, _err) = p.communicate()
        tty = '/dev/%s' % _out.strip()
    return tty
Example #14
def post_to_wordpress(args):
    """convert markdown, clean up html, post to wordpress"""
    convert_cmd                             =   "grip --gfm --export --wide %s 2>&1"
    post_cmd                                =   "wp post create %s --post_title='%s'"

    if hasattr(args,'input'):
        (_out,_err)                         =   sub_popen(convert_cmd % args.input,stdout=sub_PIPE,
                                                    shell=True).communicate()
        assert _out.count('Exporting to ')
        args.html_file                      =   _out.replace('Exporting to ','').strip('\n')

    res                                     =   cleanup_grip(args.html_file)
    assert res==True

    (_out,_err)                             =   sub_popen(post_cmd % (args.html_file,args.title),
                                                    stdout=sub_PIPE,shell=True).communicate()

    print 'Posted!'
Example #15
def print_to_console(msg):
    tty                                     =   get_console_output(obj_type)

    cmd                                     =   'printf \'\n%s\n\n\' "%s" > %s 2>&1' % ('%s',msg,tty)
    p                                       =   sub_popen(cmd,stdout=sub_PIPE,shell=True)
    (_out, _err)                            =   p.communicate()
    assert _out                            ==   ''
    assert _err                            is   None
    return
Example #16
def print_to_console(msg):
    tty = get_console_output(obj_type)

    cmd = 'printf \'\n%s\n\n\' "%s" > %s 2>&1' % ('%s', msg, tty)
    p = sub_popen(cmd, stdout=sub_PIPE, shell=True)
    (_out, _err) = p.communicate()
    assert _out == ''
    assert _err is None
    return
Example #17
def post_to_wordpress(args):
    """convert markdown, clean up html, post to wordpress"""
    convert_cmd = "grip --gfm --export --wide %s 2>&1"
    post_cmd = "wp post create %s --post_title='%s'"

    if hasattr(args, 'input'):
        (_out, _err) = sub_popen(convert_cmd % args.input,
                                 stdout=sub_PIPE,
                                 shell=True).communicate()
        assert _out.count('Exporting to ')
        args.html_file = _out.replace('Exporting to ', '').strip('\n')

    res = cleanup_grip(args.html_file)
    assert res == True

    (_out, _err) = sub_popen(post_cmd % (args.html_file, args.title),
                             stdout=sub_PIPE,
                             shell=True).communicate()

    print 'Posted!'
Example #18
def parse_choices_from_exec(cmd, parsed_args = []):
    run_cmd = True
    if parsed_args:
        run_cmd, cmd_vars = False, []
        for it in parsed_args:
            if in_args.count(it):
                idx = in_args.index(it)
                cmd_vars.append(in_args[idx + 1])

        if len(cmd_vars) == len(parsed_args):
            cmd = cmd % tuple(cmd_vars)
            run_cmd = True
    if not run_cmd:
        return []
    # os.system(                                     "logger -t 'PY_PARSE_EXEC_1' '%s'" % str(cmd))
    _out, _err = sub_popen(cmd.replace('\n', ''), stdout=sub_PIPE, shell=True).communicate()
    assert _err is None
    return _out
Example #19
def parse_choices_from_exec(cmd, parsed_args=[]):
    run_cmd = True
    if parsed_args:
        run_cmd, cmd_vars = False, []
        for it in parsed_args:
            if in_args.count(it):
                idx = in_args.index(it)
                cmd_vars.append(in_args[idx + 1])

        if len(cmd_vars) == len(parsed_args):
            cmd = cmd % tuple(cmd_vars)
            run_cmd = True
    if not run_cmd:
        return []
    # os.system(                                     "logger -t 'PY_PARSE_EXEC_1' '%s'" % str(cmd))
    _out, _err = sub_popen(cmd.replace('\n', ''), stdout=sub_PIPE,
                           shell=True).communicate()
    assert _err is None
    return _out
Example #20
def before_scenario(context, scenario):

    celery_tails = ['A manager prints a Client Contract',
                    'After a manager prints a Client Contract, the Client prints the same Contract',
                    'Check In Requests & Document Post Attempts Made to Aporo']

    if scenario.name.find('Check In Requests')>=0:
        t                           =   str(scenario.steps[1])
        t                           =   t[t.find('"')+1:t.rfind('"')]
        cred,machine_id             =   re_findall(r'\"(.+?)\"',t)

        if cred=='manager':
            t=pd_read_sql("""   INSERT INTO aprinto_access (date_created,machine_id,ip_addr,vend_name,known_user,type_admin)
                                VALUES ('now'::timestamp with time zone,'%s','%s','tmp','true','true')
                                RETURNING uid
                                """ % (machine_id,THIS_IP),aprinto_engine)['uid'][0]
            context.row_created     =   t

        elif cred=='vendor':
            t=pd_read_sql("""   INSERT INTO aprinto_access (date_created,machine_id,ip_addr,vend_name,known_user,type_vendor)
                                VALUES ('now'::timestamp with time zone,'%s','%s','tmp','true','true')
                                RETURNING uid
                                """ % (machine_id,THIS_IP),aprinto_engine)['uid'][0]
            context.row_created     =   t

    if celery_tails.count(scenario.name)>0:
        context.celery_tail         =   '/tmp/aprinto_celery_tail'
        cmd                         =   PY_PATH + '/tests/files/tail_celery.bash'
        proc                        =   sub_popen([''.join(cmd)], stdout=sub_PIPE, shell=True)
        (t, err)                    =   proc.communicate()

        if hasattr(context,'processes'):
            context.processes.append(str(t))
        else:
            context.processes       =   [str(t)]
        # from ipdb import set_trace as i_trace; i_trace()
        delay(2)
Example #21
    def wrapper(func):
        declared_args = getattr(func, ATTR_ARGS, [])
        run_cmd = True
        if parsed_args:
            run_cmd, cmd_vars = False, []
            for it in parsed_args:
                if in_args.count(it):
                    idx = in_args.index(it)
                    cmd_vars.append(in_args[idx + 1])

            if len(cmd_vars) == len(parsed_args):
                cmd = cmd % tuple(cmd_vars)
                run_cmd = True
        if not run_cmd:
            RES = []
        # os.system(                                     "logger -t 'PY_PARSE_EXEC_1' '%s'" % str(cmd))
        _out, _err = sub_popen(cmd.replace('\n', ''), stdout=sub_PIPE, shell=True).communicate()
        assert _err is None
        RES = _out
        # for it in ['action','default','choices']:
        #     if declared_args.has_key(it):
        #         del declared_args[it]
        setattr(func, 'res', RES)
        return func
Example #22
from subprocess import Popen as sub_popen
from subprocess import PIPE as sub_PIPE
from os.path import isdir

folders = ['/Volumes/mb/Users/admin',
           '/Volumes/mbp1/Users/admin',
           '/Volumes/mbp2/Users/admin']

for it in folders:
    if isdir(it):
        cmd         =   ['cp -R ~/.alias_shared %s/'%it]
        proc        =   sub_popen(cmd, stdout=sub_PIPE, shell=True)
        (t, err)    =   proc.communicate()
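
The same copy can be done without a shell at all. A sketch using shutil, assuming ~/.alias_shared is a directory as the `cp -R` above implies:

import os
import shutil

src = os.path.expanduser('~/.alias_shared')
for it in folders:                      # the folders list defined above
    if os.path.isdir(it):
        dst = os.path.join(it, '.alias_shared')
        if not os.path.isdir(dst):
            # copytree refuses to overwrite an existing tree, so skip those.
            shutil.copytree(src, dst)
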
Example #23
def main(args,kwargs):
    if args.count('requests'):              import requests
    if args.count('urllib'):                from urllib import quote_plus,unquote

    import                                  datetime                as DT
    import                                  time
    delay                                   =   time.sleep
    from dateutil                           import parser           as DU               # e.g., DU.parse('some date as str') --> obj(datetime.datetime)
    from re                                 import findall          as re_findall
    from re                                 import sub              as re_sub           # re_sub('patt','repl','str','cnt')
    from re                                 import search           as re_search        # re_search('patt','str')
    import json
    from subprocess                         import Popen            as sub_popen
    from subprocess                         import PIPE             as sub_PIPE
    from traceback                          import format_exc       as tb_format_exc
    from sys                                import exc_info         as sys_exc_info
    from types                              import NoneType
    from uuid                               import uuid4            as get_guid
    from py_classes                         import To_Class,To_Class_Dict,To_Sub_Classes
    import                                  pandas                  as pd
    pd.set_option(                          'expand_frame_repr', False)
    pd.set_option(                          'display.max_columns', None)
    pd.set_option(                          'display.max_colwidth', 250)
    pd.set_option(                          'display.max_rows', 1000)
    pd.set_option(                          'display.width', 1500)
    pd.set_option(                          'display.colheader_justify','left')
    np                                      =   pd.np
    np.set_printoptions(                    linewidth=1500,threshold=np.nan)
    import logging
    logger = logging.getLogger(             'sqlalchemy.dialects.postgresql')
    logger.setLevel(logging.INFO)

    if args.count('pgsql'):              
        from sqlalchemy                     import create_engine
        from psycopg2                       import connect          as pg_connect
        try:
            eng                             =   create_engine(r'postgresql://%(DB_USER)s:%(DB_PW)s@%(DB_HOST)s:%(DB_PORT)s/%(DB_NAME)s'
                                                              % self.T.pgsql,
                                                              encoding='utf-8',
                                                              echo=False)
            conn                            =   pg_connect("dbname='%(DB_NAME)s' host='%(DB_HOST)s' port=%(DB_PORT)s \
                                                           user='******' password='******' "
                                                           % self.T.pgsql);
            cur                             =   conn.cursor()

        except:
            from getpass import getpass
            pw = getpass('Root password (to create DB:"%(DB_NAME)s" via CL): ' % self.T.pgsql)
            p = sub_popen(" ".join(["echo '%s' | sudo -S prompt='' " % pw,
                                    'su postgres -c "psql --cluster 9.4/main -c ',
                                    "'create database %(DB_NAME)s;'" % self.T.pgsql,
                                    '"']),
                          stdout=sub_PIPE,
                          shell=True)
            (_out, _err) = p.communicate()
            assert _err is None

    import inspect, os
    D                                       =   {'guid'                 :   str(get_guid().hex)[:7],
                                                 'pg_classes_pwd'       :   os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))),
                                                }
    D.update(                                   {'tmp_tbl'              :   'tmp_'+D['guid'],
                                                'current_filepath'     :   inspect.getfile(inspect.currentframe())})



    T                                       =   To_Class_Dict(  self,
                                                            dict_list=[D,locals()],
                                                            update_globals=True)    
Example #24
def run_method(illustrate=True):
    """
        Every time log files are evaluated, keep a log file if:
            (1) the log file falls on a marked day as graphed above, or
            (2) there is no log file for a marked day "A" and
                a log file "F" is the closest in time
                between and including
                the day before "A"
                and
                the day after the next oldest marked day "B",
                i.e., B+1<= F <=A-1

        GOAL: find log closest to ILD
    """

    check_db_for_action()

    df = make_ideal_log_spectrum()

    # Drop non-log dates for now (and in practice, but will re-include for illustration)
    idx = df[df.log_dates.isnull()].index
    df = df.drop(idx, axis=0).reset_index(drop=True)

    # Conform Data Type
    df['log_dates'] = df.log_dates.map(lambda D: pd.to_datetime(D))

    ideal_log_dates = df.log_dates.tolist()

    # Get Logs
    p = sub_popen(['ls ~/.pg_dump'], stdout=sub_PIPE, shell=True)
    (_out, _err) = p.communicate()
    logs = _out.strip('\n').split('\n')

    log_dates = map(
        lambda X: dt.datetime.strptime(
            "%s/%s/%s" % re_findall(r'(\d{4})[_\.](\d{2})[_\.](\d{2})\.', X)[
                0], "%Y/%m/%d"), logs)
    log_dict = dict(zip(map(lambda D: pd.to_datetime(D), log_dates), logs))
    lf = pd.DataFrame(data={'logs': pd.unique(log_dates)})

    # Find Intersecting Values
    initial_matches = pd.Series(
        pd.np.intersect1d(df.log_dates.values, lf.logs.values))

    # (1) the log file falls on a marked day
    lf['keep'] = lf.logs.where(lf.logs.isin(initial_matches))
    df['paired'] = df.log_dates.where(df.log_dates.isin(initial_matches))

    # (2) What is left?
    #    A. Check by getting date bounds of unclaimed logs,
    #       then counting how many remaining ILDs are not yet paired with a log.
    #    B. Iterate these remaining ILDs to match up with log,
    #       then discard any unmatched logs.

    #   (A)
    to_check = lf[lf.keep.isnull()]
    oldest_log, latest_log = to_check.logs.min(), to_check.logs.max()

    older_dates, earlier_dates = df[df.log_dates < oldest_log], df[
        latest_log < df.log_dates]
    assert len(older_dates) + len(earlier_dates) >= 2

    next_older_date, prev_earlier_date = older_dates.iloc[
        0, :], earlier_dates.iloc[-1, :]
    idl_dates = df.ix[prev_earlier_date.name:next_older_date.name, :]

    #   (B)

    pt, last_idx = 0, idl_dates.index.tolist()[-1]
    for idx, row in idl_dates.iterrows():
        if idx == last_idx:
            break

        if pd.isnull(row.paired):
            A, B = row.log_dates, idl_dates.iloc[pt + 1, :].log_dates
            possible_logs = lf[(lf.logs < A) & (B < lf.logs)]
            if len(possible_logs):
                res = possible_logs.sort('logs',
                                         ascending=False).iloc[0, :].logs
                # Assign on the original frame: df[df.index == idx] returns a
                # copy, so .update() on it is silently lost.
                df.loc[idx, 'paired'] = res

        pt += 1

    # Find Intersecting Values b/t Paired IDLs and Remaining Logs
    final_matches = pd.Series(
        pd.np.intersect1d(idl_dates.paired.values, lf.logs.values))

    lf.keep.update(lf.logs.where(lf.logs.isin(final_matches)))

    if illustrate:

        # SHOW ME THE RESULTS: [ what did we want, what did we get, what did we do ]
        # Plot these "IDLs" as blue vertical bars
        # Then Overlay all logs in yellow
        # Then Overlay all logs to be deleted in red

        start, end = lf.logs.max(), lf.logs.min()
        one_day = dt.timedelta(days=+1)
        res = pd.DataFrame({
            'dates':
            [start - (i * one_day) for i in range((start - end).days)]
        })
        res['days'] = res.dates.map(lambda D: D.dayofyear)
        ndf = make_ideal_log_spectrum()
        ndf['log_dates'] = ndf.log_dates.map(lambda D: pd.to_datetime(D))
        all_log_dates = ndf.log_dates.tolist()
        res['IDLs'] = res.dates.map(lambda D: 0
                                    if not all_log_dates.count(D) else 3)
        logs_to_keep = lf[lf.keep.notnull()].logs.tolist()
        logs_to_delete = lf[lf.keep.isnull()].logs.tolist()
        res['Keep'] = res.dates.map(lambda D: 0
                                    if not logs_to_keep.count(D) else 2)
        res['Delete'] = res.dates.map(lambda D: 0
                                      if not logs_to_delete.count(D) else 1)

        # Make Plot
        import pylab as plt

        fig = plt.figure()
        axes = fig.add_subplot(1, 1, 1)

        res.plot(x='days',
                 y='IDLs',
                 ylim=(0, 6),
                 ax=axes,
                 kind='bar',
                 figsize=(107, 8),
                 color='b')
        res.plot(x='days',
                 y='Keep',
                 ylim=(0, 6),
                 ax=axes,
                 kind='bar',
                 figsize=(107, 8),
                 color='y')
        res.plot(x='days',
                 y='Delete',
                 ylim=(0, 6),
                 ax=axes,
                 kind='bar',
                 figsize=(107, 8),
                 color='r')
        axes.invert_xaxis()
        fig.savefig('/Volumes/mbp2/Users/admin/Desktop/plot.png')

        log_files = res[res.Delete == 1].dates.map(log_dict).tolist()

        cmd = """ insert into system_log (operation,started,parameters)
                    values ('rotate_pgdump',now(),'%s')
                """ % str(log_files).strip('[]').replace("'", '').replace(
            ' ', '')

        SYS_R.T.conn.set_isolation_level(0)
        SYS_R.T.cur.execute(cmd)

        SYS_R._growl(
            'check desktop for intended logrotate actions and update pgsql')
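
Stripped of the pandas frames and plotting, the docstring's retention rule reduces to: keep a log that falls exactly on a marked day, otherwise keep the unclaimed log nearest to that day. A simplified sketch (it ignores the B+1..A-1 bounding window that the full function enforces):

import datetime as dt

def pick_logs_to_keep(marked_days, log_days):
    # Keep exact matches first; otherwise pair the marked day with the
    # nearest log date that no other marked day has claimed yet.
    keep = set()
    for day in marked_days:
        if day in log_days:
            keep.add(day)
            continue
        candidates = [d for d in log_days if d not in keep]
        if candidates:
            keep.add(min(candidates, key=lambda d: abs(d - day)))
    return keep

marked = [dt.date(2015, 3, d) for d in (1, 8, 15)]
logs   = [dt.date(2015, 3, d) for d in (1, 9, 14)]
print(pick_logs_to_keep(marked, logs))  # Mar 1 exact; Mar 9 and Mar 14 nearest
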
Example #25
def run_method(illustrate=True):
    """
        Every time log files are evaluated, keep a log file if:
            (1) the log file falls on a marked day as graphed above, or
            (2) there is no log file for a marked day "A" and
                a log file "F" is the closest in time
                between and including
                the day before "A"
                and
                the day after the next oldest marked day "B",
                i.e., B+1<= F <=A-1

        GOAL: find log closest to ILD
    """

    check_db_for_action()

    df = make_ideal_log_spectrum()

    # Drop non-log dates for now (and in practice, but will re-include for illustration)
    idx = df[df.log_dates.isnull()].index
    df = df.drop(idx,axis=0).reset_index(drop=True)

    # Conform Data Type
    df['log_dates'] = df.log_dates.map(lambda D: pd.to_datetime(D))

    ideal_log_dates = df.log_dates.tolist()

    # Get Logs
    p                                       =   sub_popen(['ls ~/.pg_dump'],stdout=sub_PIPE,shell=True)
    (_out, _err)                            =   p.communicate()
    logs                                    =   _out.strip('\n').split('\n')

    log_dates = map(lambda X: dt.datetime.strptime("%s/%s/%s" %
                                       re_findall(r'(\d{4})[_\.](\d{2})[_\.](\d{2})\.',X)[0],
                                       "%Y/%m/%d"),logs)
    log_dict                                =   dict(zip(map(lambda D: pd.to_datetime(D),log_dates),logs))
    lf = pd.DataFrame(data={'logs':pd.unique(log_dates)})

    # Find Intersecting Values
    initial_matches = pd.Series(pd.np.intersect1d(df.log_dates.values,lf.logs.values))

    # (1) the log file falls on a marked day
    lf['keep'] = lf.logs.where(lf.logs.isin(initial_matches))
    df['paired'] = df.log_dates.where(df.log_dates.isin(initial_matches))

    # (2) What is left?
    #    A. Check by getting date bounds of unclaimed logs,
    #       then counting how many remaining ILDs are not yet paired with a log.
    #    B. Iterate these remaining ILDs to match up with log,
    #       then discard any unmatched logs.


    #   (A)
    to_check = lf[lf.keep.isnull()]
    oldest_log,latest_log = to_check.logs.min(),to_check.logs.max()

    older_dates,earlier_dates = df[df.log_dates<oldest_log],df[latest_log<df.log_dates]
    assert len(older_dates)+len(earlier_dates)>=2

    next_older_date,prev_earlier_date = older_dates.iloc[0,:],earlier_dates.iloc[-1,:]
    idl_dates = df.ix[prev_earlier_date.name:next_older_date.name,:]

    #   (B)

    pt,last_idx = 0,idl_dates.index.tolist()[-1]
    for idx,row in idl_dates.iterrows():
        if idx==last_idx:
            break

        if pd.isnull(row.paired):
            A,B=row.log_dates,idl_dates.iloc[pt+1,:].log_dates
            possible_logs = lf[(lf.logs<A)&(B<lf.logs)]
            if len(possible_logs):
                res = possible_logs.sort('logs',ascending=False).iloc[0,:].logs
            # Assign on the original frame: df[df.index==idx] returns a copy,
            # so .update() on it is silently lost.
            df.loc[idx,'paired'] = res

        pt+=1

    # Find Intersecting Values b/t Paired IDLs and Remaining Logs
    final_matches = pd.Series(pd.np.intersect1d(idl_dates.paired.values,lf.logs.values))

    lf.keep.update(lf.logs.where(lf.logs.isin(final_matches)))

    if illustrate:

        # SHOW ME THE RESULTS: [ what did we want, what did we get, what did we do ]
        # Plot these "IDLs" as blue vertical bars
        # Then Overlay all logs in yellow
        # Then Overlay all logs to be deleted in red

        start,end=lf.logs.max(),lf.logs.min()
        one_day = dt.timedelta(days=+1)
        res = pd.DataFrame({'dates':[start-(i*one_day) for i in range( (start-end).days )] })
        res['days'] = res.dates.map(lambda D: D.dayofyear)
        ndf = make_ideal_log_spectrum()
        ndf['log_dates'] = ndf.log_dates.map(lambda D: pd.to_datetime(D))
        all_log_dates = ndf.log_dates.tolist()
        res['IDLs'] = res.dates.map(lambda D: 0 if not all_log_dates.count(D) else 3)
        logs_to_keep = lf[lf.keep.notnull()].logs.tolist()
        logs_to_delete = lf[lf.keep.isnull()].logs.tolist()
        res['Keep'] = res.dates.map(lambda D: 0 if not logs_to_keep.count(D) else 2)
        res['Delete'] = res.dates.map(lambda D: 0 if not logs_to_delete.count(D) else 1)

        # Make Plot
        import pylab as plt

        fig = plt.figure()
        axes = fig.add_subplot(1,1,1)

        res.plot(x='days',y='IDLs',ylim=(0,6),ax=axes,kind='bar',figsize=(107,8),color='b')
        res.plot(x='days',y='Keep',ylim=(0,6),ax=axes,kind='bar',figsize=(107,8),color='y')
        res.plot(x='days',y='Delete',ylim=(0,6),ax=axes,kind='bar',figsize=(107,8),color='r')
        axes.invert_xaxis()
        fig.savefig('/Volumes/mbp2/Users/admin/Desktop/plot.png')

        log_files                           =   res[res.Delete==1].dates.map(log_dict).tolist()

        cmd =   """ insert into system_log (operation,started,parameters)
                    values ('rotate_pgdump',now(),'%s')
                """ % str(log_files).strip('[]').replace("'",'').replace(' ','')

        SYS_R.T.conn.set_isolation_level(               0)
        SYS_R.T.cur.execute(                            cmd)

        SYS_R._growl('check desktop for intended logrotate actions and update pgsql')
Example #26
def run_cmd(cmd):
    p = sub_popen(cmd,stdout=sub_PIPE,shell=True,executable='/bin/zsh')
    (_out,_err) = p.communicate()
    assert _err is None
    return _out.rstrip('\n')
Example #27
def run_simulation(client_socket=None,run_cfgs=''):
    global end_test

    def signal_handler(signal,frame):
        global end_test
        print                                   '\nSimulation Ending...\n'
        end_test                            =   True

    SIGNAL.signal(SIGNAL.SIGINT,signal_handler)

    if not run_cfgs:
        run_cfgs                            =   get_configs(client_socket=None,print_cfgs=False)
    run_dir                                 =   os.environ['PWD'] + '/runtime'
    cmds                                    =   [
                                                'which logrotate;',
                                                'env ps -ef -o tty,comm= | grep test_logrotate | column -t | cut -d \' \' -f1;',
                                                'mkdir -p %s;' % run_dir,
                                                ]

    p                                       =   sub_popen(' '.join(cmds),stdout=sub_PIPE,shell=True)
    (_out, _err)                            =   p.communicate()
    assert _out
    assert _err                            is   None

    _out                                    =   _out.split('\n')
    T                                       =   {'_dir'             :   os.environ['PWD'] + '/mock_log_dir',
                                                 'f_path'           :   os.environ['PWD'] + '/mock_log_dir/transactions.log',
                                                 'lr_path'          :   _out[0],
                                                 'tty'              :   '/dev/%s' % _out[1],
                                                 'lr_cfg'           :   '%s/log_r_config' % run_dir,
                                                 'lr_status'        :   '%s/log_r_status' % run_dir,
                                                 'cfg_insert'       :   ''.join(['\t%s\n' % it for it in run_cfgs]),
                                                 }

    runtime_cfg                             =   [
                                                '"%(_dir)s/*.log" {' % T,
                                                T['cfg_insert'],
                                                '}'
                                                ]

    with open(T['lr_cfg'],'w') as f:
        f.write(                                '\n'.join(runtime_cfg))

    logrotate_prep="""
        #!/bin/bash

        mkdir -p %(_dir)s
        rm -f %(_dir)s/*
        :> %(f_path)s

        """ % T

    ## Setup Simulation
    args                                    =   shlex.split('bash -lc \'%s\'' % logrotate_prep)
    lp                                      =   sub_popen(args)
    print                                       'Press ctrl+c to end simulation\n'
    time.sleep(                                 1)

    list_dir_cmd                            =   shlex.split('bash -lc "ls -lAtr %(_dir)s | tail -n +2"' % T)
    lr_cmd                                  =   shlex.split('bash -lc " %(lr_path)s --state %(lr_status)s %(lr_cfg)s"' % T)

    while not end_test:
        with open(T['f_path'],"a+") as f:
            f.write(                            "TEST\n")
        p                                   =   sub_popen(list_dir_cmd,stdout=sub_PIPE)
        (_out,_err)                         =   p.communicate()
        print                                   '\n',_out.strip('\n'),'\n'
        p                                   =   sub_popen(lr_cmd,stdout=sub_PIPE)
        (_out,_err)                         =   p.communicate()

        time.sleep(                             1)

        if end_test:                            break

    end_test                                =   False
    return
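
The heart of the simulation is the repeated logrotate invocation with a private --state file, which keeps the test's rotation bookkeeping away from the system's. A minimal sketch of one such run (the paths are placeholders):

from subprocess import Popen, PIPE

def rotate_once(logrotate_bin, state_file, config_file):
    # logrotate records when each log was last rotated in the --state file.
    p = Popen([logrotate_bin, '--state', state_file, config_file],
              stdout=PIPE)
    p.communicate()
    return p.returncode
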
Example #28
def run_simulation(client_socket=None, run_cfgs=''):
    global end_test

    def signal_handler(signal, frame):
        global end_test
        print '\nSimulation Ending...\n'
        end_test = True

    SIGNAL.signal(SIGNAL.SIGINT, signal_handler)

    if not run_cfgs:
        run_cfgs = get_configs(client_socket=None, print_cfgs=False)
    run_dir = os.environ['PWD'] + '/runtime'
    cmds = [
        'which logrotate;',
        'env ps -ef -o tty,comm= | grep test_logrotate | column -t | cut -d \' \' -f1;',
        'mkdir -p %s;' % run_dir,
    ]

    p = sub_popen(' '.join(cmds), stdout=sub_PIPE, shell=True)
    (_out, _err) = p.communicate()
    assert _out
    assert _err is None

    _out = _out.split('\n')
    T = {
        '_dir': os.environ['PWD'] + '/mock_log_dir',
        'f_path': os.environ['PWD'] + '/mock_log_dir/transactions.log',
        'lr_path': _out[0],
        'tty': '/dev/%s' % _out[1],
        'lr_cfg': '%s/log_r_config' % run_dir,
        'lr_status': '%s/log_r_status' % run_dir,
        'cfg_insert': ''.join(['\t%s\n' % it for it in run_cfgs]),
    }

    runtime_cfg = ['"%(_dir)s/*.log" {' % T, T['cfg_insert'], '}']

    with open(T['lr_cfg'], 'w') as f:
        f.write('\n'.join(runtime_cfg))

    logrotate_prep = """
        #!/bin/bash

        mkdir -p %(_dir)s
        rm -f %(_dir)s/*
        :> %(f_path)s

        """ % T

    ## Setup Simulation
    args = shlex.split('bash -lc \'%s\'' % logrotate_prep)
    lp = sub_popen(args)
    print 'Press ctrl+c to end simulation\n'
    time.sleep(1)

    list_dir_cmd = shlex.split('bash -lc "ls -lAtr %(_dir)s | tail -n +2"' % T)
    lr_cmd = shlex.split(
        'bash -lc " %(lr_path)s --state %(lr_status)s %(lr_cfg)s"' % T)

    while not end_test:
        with open(T['f_path'], "a+") as f:
            f.write("TEST\n")
        p = sub_popen(list_dir_cmd, stdout=sub_PIPE)
        (_out, _err) = p.communicate()
        print '\n', _out.strip('\n'), '\n'
        p = sub_popen(lr_cmd, stdout=sub_PIPE)
        (_out, _err) = p.communicate()

        time.sleep(1)

        if end_test: break

    end_test = False
    return
Example #29
    def __init__(self,**kwargs):
        """

            pgSQL(db_settings=[DB_NAME, DB_USER, DB_PW, DB_HOST, DB_PORT])

        """

        def run_cmd(cmd):
            p = sub_popen(cmd,stdout=sub_PIPE,
                          shell=True,
                          executable='/bin/bash')
            (_out,_err) = p.communicate()
            assert _err is None
            return _out.rstrip('\n')

        def download_file(url,save_path):
            import os
            _dir = save_path[:save_path.rfind('/')]
            if not os.path.exists(_dir):
                os.makedirs(_dir)

            with open(save_path, 'wb') as handle:
                response = self.T.requests.get( url, stream=True)

                if not response.ok:
                    # Something went wrong
                    print 'error'

                for block in response.iter_content(1024):
                    if not block:
                        break

                    handle.write(block)
                    handle.flush()
            return True

        def read_json_from_url_response(url):
            r = self.T.requests.get(url)
            assert r.status_code == 200   # status_code is an int; comparing to the string '200' always fails
            # print r.text
            g = r.text
            g = g.replace('true',"'true'")
            a = eval(g)
            return a

        def to_sql(cmd):
            self.T.conn.set_isolation_level(    0)
            self.T.cur.execute(                 cmd)

        def redirect_logs_to_file(file_desc='/dev/pts/0',msg_form="%(asctime)s - %(levelname)s - %(message)s"):
            # print T.logger.__dict__
            # print T.logger.manager.__dict__

            # for it in dir(logger):
            #     print it,getattr(logger,it)

            for it in self.T.logger.handlers:
                self.T.logger.removeHandler(it)

            for it in self.T.logger.parent.handlers:
                self.T.logger.parent.removeHandler(it)

            for it in self.T.logger.root.handlers:
                self.T.logger.root.removeHandler(it)

            # print logger.manager.__dict__
            del_these                       =   ['IPKernelApp','basic_logger']
            for it in del_these:
                if self.T.logger.manager.__dict__['loggerDict'].has_key(it):
                    del self.T.logger.manager.__dict__['loggerDict'][it]

            for k in self.T.logger.manager.__dict__['loggerDict'].keys():
                if k.count('sqlalchemy') or k.count('pandas'):
                    del self.T.logger.manager.__dict__['loggerDict'][k]

            self.T.logging.basicConfig(filename=file_desc, level=self.T.logging.DEBUG, format=msg_form)
            return

        def custom_geoseries_plot(s,figsize=(8,8)):
            # s=T.gd.GeoSeries(A)
            colormap='Set1'
            axes=None
            linewidth=1.0

            import matplotlib.pyplot as plt
            if axes is None:
                fig, ax = plt.subplots(figsize=figsize)
                ax.set_aspect('equal')
            else:
                ax = axes
            ax.get_xaxis().get_major_formatter().set_scientific(False)
            ax.get_xaxis().get_major_formatter().set_useOffset(False)
            plt.xticks(rotation='vertical')
            ax.get_yaxis().get_major_formatter().set_scientific(False)
            ax.get_yaxis().get_major_formatter().set_useOffset(False)
            color = T.gd.plotting.gencolor(len(s), colormap=colormap)
            for geom in s:
                if geom.type == 'Polygon' or geom.type == 'MultiPolygon':
                    T.gd.plotting.plot_multipolygon(ax, geom, facecolor=next(color), linewidth=linewidth)
                elif geom.type == 'LineString' or geom.type == 'MultiLineString':
                    T.gd.plotting.plot_multilinestring(ax, geom, color=next(color), linewidth=linewidth)
                elif geom.type == 'Point':
                    T.gd.plotting.plot_point(ax, geom)
            plt.ticklabel_format(style='plain')
            plt.grid()
            plt.draw()

        def _load_connectors():
            eng                             =   create_engine(r'postgresql://%(DB_USER)s:%(DB_PW)s@%(DB_HOST)s:%(DB_PORT)s/%(DB_NAME)s'
                                                              % T,
                                                              encoding='utf-8',
                                                              echo=False)
            conn                            =   pg_connect("dbname='%(DB_NAME)s' host='%(DB_HOST)s' port=%(DB_PORT)s \
                                                           user='******' password='******' "
                                                           % T);
            cur                             =   conn.cursor()
            return eng,conn,cur

        import                                  datetime                as DT
        dt = DT
        from dateutil                           import parser           as DU               # e.g., DU.parse('some date as str') --> obj(datetime.datetime)
        import                                  time
        delay                               =   time.sleep
        from urllib                             import quote_plus,unquote
        import re
        from re                                 import findall          as re_findall
        from re                                 import sub              as re_sub           # re_sub('patt','repl','str','cnt')
        from re                                 import search           as re_search        # re_search('patt','str')
        import json
        from subprocess                         import Popen            as sub_popen
        from subprocess                         import PIPE             as sub_PIPE
        from traceback                          import format_exc       as tb_format_exc
        from sys                                import exc_info         as sys_exc_info
        from types                              import NoneType
        from time                               import sleep            as delay
        from uuid                               import uuid4            as get_guid
        import                                  requests

        from py_classes.py_classes              import To_Sub_Classes,To_Class,To_Class_Dict
        T                                   =   To_Class()
        T.config                            =   To_Class(kwargs,recursive=True)
        if hasattr(T,'config') and hasattr(T.config,'pgsql'): 
            T.update(                           T.config.pgsql.__dict__)
        else:
            T.update(                           T.config.__dict__)
        
        db_vars = ['DB_NAME','DB_HOST','DB_PORT','DB_USER','DB_PW']
        db_vars = [it for it in db_vars if not T._has_key(it)]
        
        if not db_vars:
            pass

        elif locals().keys().count('db_settings'):
            DB_NAME,DB_USER,DB_PW,DB_HOST,DB_PORT = db_settings
            for it in db_vars:
                eval('T["%s"] = %s' % (it,it))
            
        else:
            z = eval("__import__('db_settings')")
            for it in db_vars:
                T[it] = getattr(z,it)
        
        import                                  pandas                  as pd
        pd.set_option(                          'expand_frame_repr', False)
        pd.set_option(                          'display.max_columns', None)
        pd.set_option(                          'display.max_colwidth', 250)
        pd.set_option(                          'display.max_rows', 1000)
        pd.set_option(                          'display.width', 1500)
        pd.set_option(                          'display.colheader_justify','left')
        np                                  =   pd.np
        np.set_printoptions(                    linewidth=1500,threshold=np.nan)
        # import                                  geopandas               as gd
        import logging
        logger = logging.getLogger(             'sqlalchemy.dialects.postgresql')
        logger.setLevel(logging.INFO)
        from sqlalchemy                         import create_engine
        from psycopg2                           import connect          as pg_connect
        try:
            eng,conn,cur                    =   _load_connectors()

        except:
            from getpass import getpass
            pw = getpass('Root password (to create DB:"%(DB_NAME)s" via CL): ' % T)   # T holds the DB_* settings; pgsql is undefined here
            p = sub_popen(" ".join(["echo '%s' | sudo -S prompt='' " % pw,
                                    'su postgres -c "psql --cluster 9.4/main -c ',
                                    "'create database %(DB_NAME)s;'" % T,
                                    '"']),
                          stdout=sub_PIPE,
                          shell=True)
            (_out, _err) = p.communicate()
            assert _err is None
            eng,conn,cur                    =   _load_connectors()


        import inspect, os
        D                                   =   {'guid'                 :   str(get_guid().hex)[:7],
                                                 'pg_classes_pwd'       :   os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))),
                                                }
        D.update(                               {'tmp_tbl'              :   'tmp_'+D['guid'],
                                                 'current_filepath'     :   inspect.getfile(inspect.currentframe())})

        self.T                              =   To_Class_Dict(  self,
                                                                dict_list=[T.__dict__,D,locals()],
                                                                update_globals=True)

        self.Functions                      =   pgSQL_Functions(self)
        self.Triggers                       =   pgSQL_Triggers(self)
        self.Tables                         =   pgSQL_Tables(self)
        self.Databases                      =   pgSQL_Databases(self)
        self.Types                          =   pgSQL_Types(self)

        # if hasattr(T,'project_sql_files') and T.project_sql_files:
        #     self.F.functions_create_from_command_line(one_directory=T.project_sql_files)
        # if hasattr(T,'base_sql_files') and T.base_sql_files:
        #     self.F.functions_create_from_command_line(one_directory=T.base_sql_files)  
        if hasattr(T,'initial_check') and T.initial_check:
            self.__initial_check__()
        if hasattr(T,'temp_options') and T.temp_options:
            self.__temp_options__()
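
_load_connectors above builds a SQLAlchemy engine and a raw psycopg2 connection from the same settings. A standalone sketch, assuming the dict carries the DB_* keys used throughout this example (the masked user/password fields above would come from the same settings):

from sqlalchemy import create_engine
from psycopg2 import connect as pg_connect

def load_connectors(cfg):
    # cfg supplies DB_USER, DB_PW, DB_HOST, DB_PORT and DB_NAME.
    eng = create_engine(
        'postgresql://%(DB_USER)s:%(DB_PW)s@%(DB_HOST)s:%(DB_PORT)s/%(DB_NAME)s' % cfg,
        echo=False)
    conn = pg_connect("dbname='%(DB_NAME)s' host='%(DB_HOST)s' "
                      "port=%(DB_PORT)s user='%(DB_USER)s' password='%(DB_PW)s'" % cfg)
    return eng, conn, conn.cursor()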