Example #1
0
 def hsetj(self, key, obj, hkey=None, pipe=False):
     """Store *obj* as a JSON string under field *key* of a hash.

     Args:
       key: the field name inside the hash.
       obj: any JSON-serializable object.
       hkey: hash name; falls back to ``self.hkey`` when falsy.
       pipe: when True, queue the command on the pipeline instead of
         executing it on the connection immediately.
     """
     target = self.pipe if pipe else self.conn
     return target.hset(hkey or self.hkey, key, jdumps(obj))
Example #2
0
def store_request(sid, data, *args, **kwargs):
    """Dispatch a store operation for a session and return its result.

    Args:
      sid: session id; must be present in ``valid_SIDs``.
      data: request dict with ``store_func`` (operation name) and
        ``kwargs`` (arguments for that operation).

    Returns:
      dict with ``payload`` and ``completed=True`` on success, or
      ``completed=False`` plus ``error``/``orig_req`` on failure;
      ``{'error': 'Invalid SID'}`` for an unknown session.
    """
    if sid not in valid_SIDs:
        return {'error': 'Invalid SID'}

    data['sid'] = sid

    # Log a copy; large kwargs payloads are summarized as a byte count.
    logged_req = copy.deepcopy(data)
    if logged_req['store_func'] == 'set_dbquery_state':
        logged_req['kwargs'] = '{} bytes'.format(len(jdumps(logged_req['kwargs'])))
    app.log('+Got Store Req => {}'.format(logged_req))

    try:
        response = dict(
            payload=store_func[data['store_func']](**data['kwargs']),
            completed=True,
        )
    except Exception as err:
        app.log(err)
        response = dict(
            payload={},
            completed=False,
            error=get_error_str(err),
            orig_req=data,
        )

    # Same summarization trick for the response payload in the log.
    logged_resp = copy.deepcopy(response)
    logged_resp['payload'] = '{} bytes'.format(len(jdumps(logged_resp['payload'])))
    app.log('-Resp Data => {}'.format(logged_resp))
    return response
Example #3
0
def start_worker_mon():
    """Spawn the monitoring worker, register it, and record it in the store.

    Returns:
      The started monitor ``Worker`` (also kept in ``workers['mon']``).
    """
    name = '{}-mon'.format(WORKER_PREFIX)
    mon = Worker(
        name,
        'monitor',
        fn=mon_worker.run,
        kwargs={},
        log=log,
        kill_if_running=True,
        pid_folder=DBNET_FOLDER,
    )
    mon.start()
    log('Monitor Loop PID is {}'.format(mon.pid))

    workers['mon'] = mon
    # The monitor tracks itself as a child process too.
    mon.put_child_q(dict(name=name, pid=mon.pid))

    # Persist the worker's RUNNING state in the shared store.
    store.sqlx('workers').replace_rec(
        hostname=mon.hostname,
        worker_name=mon.name,
        worker_type=mon.type,
        worker_pid=mon.pid,
        status='RUNNING',
        task_id=-1,
        task_function=mon.fn.__name__,
        task_start_date=now(),
        task_args=jdumps(mon.args),
        task_kwargs=jdumps(mon.kwargs),
        progress=None,
        queue_length=0,
        last_updated=epoch(),
    )
    return mon
Example #4
0
def start_worker_webapp():
    """Spawn the web-app worker, register it with the monitor, and record it.

    Returns:
      The started web-app ``Worker`` (also kept in ``workers['webapp']``).
    """
    name = '{}-webapp'.format(WORKER_PREFIX)
    webapp = Worker(
        name,
        'web-app',
        fn=webapp_worker.run,
        log=log,
        kill_if_running=True,
        args=(WEBAPP_HOST, WEBAPP_PORT),
        kwargs={'mon_worker': workers['mon']},
        pid_folder=DBNET_FOLDER,
    )
    webapp.start()

    # Hand the new PID to the monitor so it is supervised.
    workers['mon'].put_child_q(dict(name=name, pid=webapp.pid))
    workers['webapp'] = webapp

    # Persist the worker's RUNNING state in the shared store.
    store.sqlx('workers').replace_rec(
        hostname=webapp.hostname,
        worker_name=webapp.name,
        worker_type=webapp.type,
        worker_pid=webapp.pid,
        status='RUNNING',
        task_id=-1,
        task_function=webapp.fn.__name__,
        task_start_date=now(),
        task_args=jdumps(webapp.args),
        task_kwargs=jdumps(webapp.kwargs),
        progress=None,
        queue_length=0,
        last_updated=epoch(),
    )
    return webapp
Example #5
0
def send_to_webapp(data, host='localhost', port=WEBAPP_PORT):
    """Send data to the Web App via an HTTP POST to its API endpoint.

    Args:
      data: the payload dict; ``data['payload_type']`` selects the API route.
      host: the webapp host (defaults to localhost).
      port: the port of the webapp.
    """
    payload_type = data['payload_type']
    headers = {'Content-type': 'application/json'}
    # BUG FIX: os.getenv returns a *string*, so values like "false" or "0"
    # were truthy and wrongly selected https. Parse common falsy spellings.
    ssl_flag = str(os.getenv('SECURE_SSL_REDIRECT', default=False)).strip().lower()
    scheme = 'http' if ssl_flag in ('', 'false', '0', 'none', 'no') else 'https'
    url = '{}://{}:{}/api/{}'.format(scheme, host, port, payload_type)
    # NOTE(review): verify=False disables TLS certificate validation —
    # acceptable for localhost, but confirm before pointing at remote hosts.
    requests.post(url, data=jdumps(data), headers=headers, verify=False)
Example #6
0
def set_dbquery_state(**kws):
    """Persist the DB Query state to the store and each session to disk.

    Args:
      kws: record fields; ``kws['data']`` holds the dbquery state dict.

    Returns:
      dict with ``sql_fpath`` and ``json_fpath`` for the active session.
    """
    state = kws['data']
    db_name = state['db_name']

    def _sess_paths(sess):
        # File pair holding one session's JSON state and raw SQL text.
        json_fp = '{}/{}.{}.json'.format(SESS_FOLDER, db_name, sess)
        sql_fp = '{}/{}.{}.sql'.format(SESS_FOLDER, db_name, sess)
        return json_fp, sql_fp

    # Keep the existing meta_last_updated value when replacing the record.
    prev_meta = sqlx('databases').select_one(fwa(db_name=db_name),
                                             field='meta_last_updated')
    sqlx('databases').replace_rec(
        db_name=db_name,
        state_json=jdumps(state),
        meta_last_updated=prev_meta,
    )

    # Write every session's JSON state and editor text to disk.
    for sess_name, sess_data in state['sessions'].items():
        json_fpath, sql_fpath = _sess_paths(sess_name)
        write_file(json_fpath, jdumps(sess_data), echo=True)
        write_file(sql_fpath, sess_data['editor_text'], echo=True)

    json_fpath, sql_fpath = _sess_paths(state['_session']['name'])
    return dict(sql_fpath=sql_fpath, json_fpath=json_fpath)
Example #7
0
 def publish(self, channel, payload):
     """Serialize *payload* to JSON and publish it on *channel*."""
     self.conn.publish(channel, jdumps(payload))
Example #8
0
 def setj(self, key, obj, pipe=False):
     """Store *obj* as a JSON string under *key*.

     When *pipe* is true the command is queued on the pipeline
     instead of executing on the connection immediately.
     """
     target = self.pipe if pipe else self.conn
     return target.set(key, jdumps(obj))
Example #9
0
 def stop_listener(self, channel):
     """Signal the listener on *channel* to shut down via an 'exit' message."""
     log('Stopping listener on channel: ' + channel)
     self.conn.publish(channel, jdumps('exit'))
Example #10
0
 def lsetj(self, name, index, obj):
     """LSET: overwrite position *index* of list *name* with *obj* as JSON."""
     return self.conn.lset(name, index, jdumps(obj))
Example #11
0
def run(db_prof, conf_queue: Queue, worker: Worker):
    """Launch the database worker and await requests.

  Main event loop of a DB worker process: polls the child queue for
  incoming requests, enqueues recognized ones as tasks, then pops and
  executes queued tasks one at a time, syncing worker/task state to the
  store before and after each run. Exits only on KeyboardInterrupt or
  SystemExit.

  Args:
    db_prof: the db profile
    conf_queue: a multiprocessing Queue
    worker: the respective worker.
  """

    global worker_name, worker_status
    log = worker.log
    worker_name = worker.name
    worker_status = 'IDLE'
    set_worker_idle()
    # NOTE(review): `worker_db_prof` is assigned as a *local* here (it is not
    # in the `global` statement above) and never read below — likely intended
    # to set a module global; confirm against the rest of the module.
    worker_db_prof = db_prof

    while True:
        try:
            time.sleep(0.05)  # brings down CPU loop usage
        except (KeyboardInterrupt, SystemExit):
            # Interrupt delivered during sleep ends the worker loop cleanly.
            return
        # data_dict = worker.pipe.recv_from_parent(timeout=0)
        data_dict = worker.get_child_q()
        if data_dict:
            conf_data = {'payload_type': 'confirmation'}
            # Only requests with a known handler get queued; others are dropped.
            if data_dict['req_type'] in func_map:
                worker_queue.append(data_dict)
                sync_queue()
                conf_data['queued'] = True

                # Add task
                store.sqlx('tasks').add(
                    task_id=data_dict['id'],
                    function=func_map[data_dict['req_type']].__name__,
                    queue_date=now(),
                    start_date=None,
                    end_date=None,
                    args=jdumps([]),
                    kwargs=jdumps(data_dict),
                    error=None,
                    worker_name=worker_name,
                    # NOTE(review): `worker_pid` is not defined in this
                    # function — presumably a module-level global; verify.
                    worker_pid=worker_pid,
                    last_updated=epoch(),
                )

                log('+({}) Queued task: {}'.format(len(worker_queue),
                                                   data_dict))

            # Send receipt confirmation?
            # with worker.lock:
            #   worker.pipe.send_to_parent(conf_data)

        # Run the next queued task, but only when not already busy.
        if len(worker_queue) and worker_status == 'IDLE':
            data_dict = worker_queue.popleft()
            sync_queue()
            worker_status = 'BUSY'
            func = func_map[data_dict['req_type']]

            # Sync worker
            store.sqlx('workers').update_rec(
                hostname=worker.hostname,
                worker_name=worker.name,
                status=worker_status,
                task_id=data_dict['id'],
                task_function=func.__name__,
                task_start_date=now(),
                task_args=jdumps([]),
                task_kwargs=jdumps(data_dict),
                last_updated=epoch(),
            )

            # Sync task
            store.sqlx('tasks').update_rec(
                task_id=data_dict['id'],
                start_date=now(),
                last_updated=epoch(),
            )

            try:
                error_data = None
                func(worker, data_dict)
            except Exception as E:
                # Task failure is reported to the parent, not fatal to the loop.
                log(E)
                error_data = dict(
                    id=data_dict['id'],
                    sid=data_dict['sid'],
                    payload_type='task-error',
                    error=get_error_str(E),
                )
                # worker.pipe.send_to_parent(error_data)
                worker.put_parent_q(error_data)
            finally:

                # Sync worker
                worker_status = 'IDLE'
                set_worker_idle()

                # Sync task
                store.sqlx('tasks').update_rec(
                    task_id=data_dict['id'],
                    end_date=now(),
                    error=jdumps(error_data) if error_data else None,
                    last_updated=epoch(),
                )