Example #1
def register_pid(pid_file,
                 pid=None,
                 kill_if_running=False,
                 clean_atexit=True,
                 exit_queue=None):
    "Write the PID file, handling any already-running instance first."
    import atexit, os, signal
    from .diskio import write_file

    pid = pid or os.getpid()
    cleanup_pid(pid_file, kill_if_running)
    write_file(pid_file, str(pid))

    # Translate SIGTERM into a message on the exit queue so the caller's
    # main loop can shut down cleanly.
    signal_exit = lambda signum, frame: exit_queue.put(True)
    if clean_atexit:
        if exit_queue:
            # signal.signal(signal.SIGINT, signal_exit)
            signal.signal(signal.SIGTERM, signal_exit)
        atexit.register(cleanup_pid, pid_file, kill_if_running=True)
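For context, here is a self-contained, standard-library-only sketch of the same PID-file pattern; the path and helper name below are illustrative, not part of xutil:

import atexit
import os
import signal

def register_pid_sketch(pid_file):
    # Write the current PID, then remove the file on normal interpreter exit.
    with open(pid_file, 'w') as f:
        f.write(str(os.getpid()))

    def cleanup():
        if os.path.exists(pid_file):
            os.remove(pid_file)

    atexit.register(cleanup)

    # Convert SIGTERM into a normal exit so the atexit hook still runs.
    def on_sigterm(signum, frame):
        raise SystemExit(0)

    signal.signal(signal.SIGTERM, on_sigterm)

register_pid_sketch('/tmp/example.pid')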
Example #2
def profile_request(sid, data, *args, **kwargs):
    if sid not in valid_SIDs: return {'error': 'Invalid SID'}

    data2 = dict(completed=False)
    try:
        if data['type'] == 'load':
            data2['text'] = load_profile(raw_text=True)
            data2['completed'] = True
        elif data['type'] == 'save':
            # safe_load validates the YAML without constructing
            # arbitrary Python objects from user input
            yaml.safe_load(data['text'])
            path = os.getenv('PROFILE_YAML')
            write_file(path, data['text'], echo=True)
            data2['completed'] = True
    except Exception as E:
        log(E)
        data2['error'] = get_error_str(E)
    return data2
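A minimal sketch of the same validate-then-write flow, assuming PyYAML is installed; the path and function name are hypothetical:

import yaml

def save_profile_sketch(text, path='/tmp/profile.yaml'):
    # safe_load parses the YAML without constructing arbitrary Python
    # objects: it raises yaml.YAMLError on malformed input but never
    # executes anything from it.
    yaml.safe_load(text)
    with open(path, 'w') as f:
        f.write(text)

save_profile_sketch('name: demo\nenvironment:\n  APP_ENV: dev\n')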
Example #3
def load_profile(raw_text=False, create_if_missing=False, def_profl_path=None):
    if not os.getenv('PROFILE_YAML'):
        def_profl_path = def_profl_path or get_home_path() + '/profile.yaml'
        templ_path = get_dir_path(__file__) + '/database/templates/profile.def.yaml'
        if not file_exists(def_profl_path) and create_if_missing:
            write_file(def_profl_path, read_file(templ_path))
        os.environ['PROFILE_YAML'] = def_profl_path
        # raise Exception("Env Var PROFILE_YAML is not set!")

    if raw_text:
        return read_file(os.getenv('PROFILE_YAML'))

    dict_ = read_yaml(os.getenv('PROFILE_YAML'))
    if 'environment' in dict_:
        # Export any environment variables defined in the profile.
        for key, val in dict_['environment'].items():
            os.environ[key] = val

    return dict_
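The pattern above (an env var overriding a home-directory default, seeded from a template on first use) can be sketched with the standard library alone; every name and path here is illustrative:

import os

def resolve_config_path_sketch(env_var='PROFILE_YAML',
                               default_path='/tmp/profile.yaml',
                               template_text='name: default\n'):
    # Prefer the env var; otherwise fall back to a default location,
    # creating the file from a template the first time around.
    path = os.getenv(env_var)
    if not path:
        path = default_path
        if not os.path.exists(path):
            with open(path, 'w') as f:
                f.write(template_text)
        os.environ[env_var] = path
    return path

print(resolve_config_path_sketch())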
Example #4
def alias_cli():
    "Install alias"
    from xutil.helpers import get_home_path, get_dir_path, log
    from xutil.diskio import read_file, write_file
    from shutil import copyfile
    ans = input("Install 'alias.sh' in home directory (Y to proceed)? ")
    if ans.lower() != 'y':
        return

    src_path = get_dir_path() + '/alias.sh'
    dst_path = get_home_path() + '/.xutil.alias.sh'
    bash_profile_path = get_home_path() + '/.bashrc'

    # log('src_path -> ' + src_path)
    # log('dst_path -> ' + dst_path)
    copyfile(src_path, dst_path)

    bash_prof_text = read_file(bash_profile_path)

    if dst_path not in bash_prof_text:
        bash_prof_text = '{}\n\n. {}\n'.format(bash_prof_text, dst_path)
        write_file(bash_profile_path, bash_prof_text)
        log('+Updated ' + bash_profile_path)
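A stdlib-only sketch of the idempotent install step, i.e. appending a source line to a shell profile only once; the paths are illustrative:

import os

def source_once_sketch(profile_path, alias_path):
    # Append a '. file' line only when it is not already present,
    # so repeated installs do not stack duplicate lines.
    text = ''
    if os.path.exists(profile_path):
        with open(profile_path) as f:
            text = f.read()
    if alias_path not in text:
        with open(profile_path, 'a') as f:
            f.write('\n. {}\n'.format(alias_path))

source_once_sketch('/tmp/.bashrc', '/tmp/.xutil.alias.sh')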
Example #5
def set_dbquery_state(**kws):
    """Save the DB Query state.

  Args:
    kws: the record fields as keyword arguments.
  """
    dbquery_data = kws['data']
    db_name = dbquery_data['db_name']
    # Carry the existing meta_last_updated value through the record replace.
    meta_last_updated = sqlx('databases').select_one(fwa(db_name=db_name),
                                                     field='meta_last_updated')
    sqlx('databases').replace_rec(
        db_name=db_name,
        state_json=jdumps(dbquery_data),
        meta_last_updated=meta_last_updated,
    )

    # Save all sessions
    for sess_name in dbquery_data['sessions']:
        json_fpath = '{}/{}.{}.json'.format(SESS_FOLDER, db_name, sess_name)
        sql_fpath = '{}/{}.{}.sql'.format(SESS_FOLDER, db_name, sess_name)

        write_file(
            json_fpath,
            jdumps(dbquery_data['sessions'][sess_name]),
            echo=True,
        )
        write_file(
            sql_fpath,
            dbquery_data['sessions'][sess_name]['editor_text'],
            echo=True,
        )

    sess_name = dbquery_data['_session']['name']
    json_fpath = '{}/{}.{}.json'.format(SESS_FOLDER, db_name, sess_name)
    sql_fpath = '{}/{}.{}.sql'.format(SESS_FOLDER, db_name, sess_name)
    return dict(sql_fpath=sql_fpath, json_fpath=json_fpath)
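A self-contained sketch of the per-session dump: one JSON file (full state) and one SQL file (editor text) per session, using the same '<db>.<session>.<ext>' naming; the folder is hypothetical:

import json
import os

SESS_FOLDER = '/tmp/sessions'

def dump_sessions_sketch(db_name, sessions):
    os.makedirs(SESS_FOLDER, exist_ok=True)
    for sess_name, sess in sessions.items():
        base = '{}/{}.{}'.format(SESS_FOLDER, db_name, sess_name)
        with open(base + '.json', 'w') as f:
            json.dump(sess, f)  # full session state
        with open(base + '.sql', 'w') as f:
            f.write(sess.get('editor_text', ''))  # just the SQL text

dump_sessions_sketch('demo_db', {'main': {'editor_text': 'SELECT 1;'}})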
Example #6
    def start_sql(sql, id, limit, options, sid):
        rows, fields = [], []
        get_fields = lambda r: r.__fields__ if hasattr(r, '__fields__') else r._fields
        s_t = epoch()
        cache_used = False
        limit = int(options['limit']) if 'limit' in options else limit

        try:

            def exec_sql(sql, limit_def=5000):
                log('\n------------SQL-START------------\n{}\n------------SQL-END------------\n'
                    .format(sql),
                    color='blue')
                log('LIMIT: ' + str(limit), color='blue')
                cache_used = False
                if sql in worker_sql_cache:
                    for fields, rows in list(worker_sql_cache[sql]['results']):
                        # if limit is above limit_def, then refresh
                        if limit > limit_def: break

                        # if limit is the same and this is not a CSV call, then refresh
                        if (limit == worker_sql_cache[sql]['limit']
                                and 'csv' not in options):
                            break

                        # if the cached result is older than 10 minutes, then refresh
                        if now_minus(minutes=10) > worker_sql_cache[sql]['timestamp']:
                            del worker_sql_cache[sql]
                            break

                        if len(fields) > 0:
                            cache_used = True  # must return data/fields
                            worker_sql_cache[sql]['limit'] = limit
                            log('+Cache Used')

                        yield fields, rows, cache_used

                if not cache_used:
                    worker_sql_cache[sql] = dict(timestamp=now(),
                                                 results=[],
                                                 limit=limit)
                    rows = conn.query(
                        sql.replace('%', '%%'),
                        dtype='tuple',
                        limit=limit if limit > limit_def else limit_def)
                    fields = conn._fields
                    worker_sql_cache[sql]['results'].append((fields, rows))
                    yield fields, rows, cache_used

            if 'meta' in options:
                # call a metadata accessor on the connection (e.g. get_schemas)
                meta_func = options['meta']
                rows = getattr(conn, meta_func)(**options['kwargs'])
                rows = [tuple(r) for r in rows]
                fields = conn._fields

            elif 'special' in options:
                pass  # reserved; no special handling implemented

            else:
                for fields, rows, cache_used in exec_sql(sql):
                    rows = rows[:limit] if len(rows) > limit else rows

            if rows is None: rows = []

            if 'email_address' in options or 'csv' in options:
                file_name = '{}-{}-{}.csv'.format(database, options['name'],
                                                  data_dict['id'])
                file_path = '{}/{}'.format(CSV_FOLDER, file_name)
                write_csv(file_path, fields, rows)
                # gzip results larger than 20 MB before serving the file
                if os.path.getsize(file_path) > 20 * (1024**2):
                    rc = os.system('gzip -f ' + file_path)
                    file_name = file_name + '.gz' if rc == 0 else file_name
                    file_path = '{}/{}'.format(CSV_FOLDER, file_name)

                url = 'http://{base_url}:{port}/csv/{name}'.format(
                    base_url=socket.gethostname(),
                    port=WEBAPP_PORT,
                    name=file_name,
                )
                options['url'] = url

            if 'email_address' in options:
                subj = 'DbNet -- Result for Query {}'.format(data_dict['id'])
                body_text = 'URL: {url}\n\nROWS: {rows}\n\nSQL:\n{sql}'.format(
                    url=url, rows=len(rows), sql=sql)
                to_address = options['email_address']
                email_template = os.getenv("SMTP_TEMPLATE")
                if email_template == 'exchange_server':
                    email_func = send_email_exchange
                elif email_template == 'outlook':
                    email_func = send_from_outlook
                elif email_template == 'gmail':
                    email_func = send_from_gmail
                else:
                    raise Exception('Email method not implemented!')

                email_func(to_address, subj, body_text)

                # trim the payload returned to the browser; the full
                # result set has already been written to CSV and emailed
                if len(rows) > 100:
                    rows = rows[:100]

            e_t = epoch()
            secs = e_t - s_t

            # Add query
            store.sqlx('queries').add(
                task_id=data_dict['id'],
                database=database,
                sql_text=sql,
                exec_date=s_t,
                duration_sec=secs,
                row_count=len(rows),
                limit_val=limit,
                cached=cache_used,
                sql_md5=hashlib.md5(sql.encode('utf-8')).hexdigest(),
                last_updated=epoch(),
            )

            if sql.strip():
                sql_fpath = '{}/{}.{}.sql'.format(SQL_FOLDER, database,
                                                  data_dict['id'])
                sql_text = '-- Completed @ {} in {} seconds.\n\n{}'.format(
                    now_str(), secs, sql)
                write_file(sql_fpath, sql_text)

            # time.sleep(0.5)
            data = dict(
                id=data_dict['id'],
                payload_type='query-data',
                database=database,
                rows=rows,
                headers=fields,
                start_ts=s_t,
                end_ts=e_t,
                execute_time=round(secs, 2),
                completed=True,
                cache_used=cache_used,
                options=options,
                pid=worker_pid,
                orig_req=data_dict,
                sid=sid,
            )

        except Exception as E:
            secs = epoch() - s_t
            err_msg_long = get_exception_message()
            err_msg = get_error_str(E)

            worker.log(E)
            data = dict(id=id,
                        payload_type='query-data',
                        database=database,
                        rows=[],
                        headers=[],
                        execute_time=round(secs, 2),
                        completed=False,
                        error='ERROR:\n' + err_msg,
                        options=options,
                        pid=worker_pid,
                        orig_req=data_dict,
                        sid=sid)

        finally:
            # worker.pipe.send_to_parent(data)
            worker.put_parent_q(data)
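The worker_sql_cache handling above boils down to a time-bounded, SQL-keyed result cache. A minimal, self-contained sketch of that idea (names are illustrative, not dbnet's API):

import time

class QueryCacheSketch:
    """A tiny in-memory cache keyed by SQL text, expiring after max_age_sec."""

    def __init__(self, max_age_sec=600):
        self.max_age_sec = max_age_sec
        self._cache = {}

    def get(self, sql):
        entry = self._cache.get(sql)
        if entry and time.time() - entry['timestamp'] <= self.max_age_sec:
            return entry['results']
        self._cache.pop(sql, None)  # drop expired (or missing) entries
        return None

    def put(self, sql, results):
        self._cache[sql] = dict(timestamp=time.time(), results=results)

cache = QueryCacheSketch()
cache.put('SELECT 1', ([('one',)], [(1,)]))
print(cache.get('SELECT 1'))  # -> ([('one',)], [(1,)])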
Example #7
import os, random, string

from xutil.helpers import jdumps, jtrans, log, get_error_str, get_script_path, get_dir_path, get_home_path, load_profile, file_exists
from xutil.diskio import read_file, write_file, read_csv
from dbnet.store import store_func
from flask import render_template
import yaml, apprise

DBNET_FOLDER = os.getenv('DBNET_FOLDER', default=get_home_path() + '/dbnet')
CSV_FOLDER = DBNET_FOLDER + '/csv'
AUTH_PATH = DBNET_FOLDER + '/.authorized'
app = WebApp('dbnet', root_path=get_dir_path(__file__))  # WebApp comes from elsewhere in this module (import not shown)
SID = None
last_perf_data = {}

get_authorized = lambda: read_file(AUTH_PATH).splitlines() \
    if file_exists(AUTH_PATH) else []
add_authorized = lambda new_id: write_file(AUTH_PATH, new_id + '\n', append=True)
app_password = os.getenv('DBNET_PASSWD', default=None)
# 16-character random application token
app_token = ''.join(
    random.SystemRandom().choice(string.ascii_uppercase + string.digits +
                                 string.ascii_lowercase) for _ in range(16))
valid_SIDs = set()
cookie_to_sid = {}
sid_to_sid = {}


@app.route('/logo.ico')
def favicon():
    return app.send_from_directory(os.path.join(app.flask_app.root_path,
                                                'templates'),
                                   'logo.ico',
                                   mimetype='image/vnd.microsoft.icon')
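Since Python 3.6, the same 16-character token can be generated with the secrets module, which is the idiomatic choice for security-sensitive randomness; a drop-in sketch:

import secrets
import string

alphabet = string.ascii_uppercase + string.ascii_lowercase + string.digits
app_token = ''.join(secrets.choice(alphabet) for _ in range(16))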