Example #1
	def __init__(self,dbname,user,host,password,port=None):
		self.dbname = dbname
		self.user = user
		self.host = host
		self.password = password
		self.status=0
		if port is None:
			parameters = "dbname='"+dbname+"' user='******' host='"+host+"' password='******'"
		else:
			parameters = "dbname='"+dbname+"' user='******' host='"+host+"' password='******' port='"+str(port)+"'"
		self.debug=''
		try:    
			#for unicode
			pg_extensions.register_type(pg_extensions.UNICODE)
			#connecting to the database
			self.conn = pg_connect(parameters)
			#Useful for operation such as drop and insert
			self.conn.set_isolation_level(0)
			#enabling utf8 encode
			self.conn.set_client_encoding('UNICODE')
			self.cur = self.conn.cursor()
			self.debug='connected to db!\t'
		except: 
			self.debug='connection failed!\t'
			self.status=1
			self.conn = None
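The excerpt uses the aliases pg_connect and pg_extensions without showing their imports; a minimal sketch of the imports it implies:

from psycopg2 import connect as pg_connect
from psycopg2 import extensions as pg_extensions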
Example #2
def _get_postgres_con(connection_parms):
    ''' Return a psycopg2 connection for the given connection parameters.

    Examples
    --------

    .. code-block::

        con = _get_postgres_con(connection_parms)

    Parameters
    ----------
    connection_parms : dict
        connection parameters with the keys ``host``, ``schema``, ``user`` and ``pw``

    Returns
    -------
    psycopg2 connection
    '''
    host = connection_parms.get('host')
    db = connection_parms.get('schema')
    user = connection_parms.get('user')
    pw = connection_parms.get('pw')
    con = pg_connect(host=host, dbname=db, user=user, password=pw)

    logger.info(f"Connection: {host}, user:{user}")

    return con
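For reference, a sketch of the dictionary this helper expects; the keys mirror the .get() calls above, and the values are placeholders rather than anything from the original source:

# Hypothetical values for illustration only.
connection_parms = {
    "host": "db.example.org",
    "schema": "analytics",   # passed to psycopg2 as dbname
    "user": "report_user",
    "pw": "secret",
}
con = _get_postgres_con(connection_parms)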
Example #3
def connect(request):
    conn_info = {
        'host': environ['DB_HOST'],
        'user': request.environ.get('DB_USER', 'anonymous'),
        'database': environ['DB_NAME'],
    }

    return pg_connect(cursor_factory=NamedTupleCursor, **conn_info)
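The snippet assumes imports along these lines (not shown in the excerpt); the names match those used above:

from os import environ

from psycopg2 import connect as pg_connect
from psycopg2.extras import NamedTupleCursor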
Example #4
def connect(*args, **kwargs):
    """Create database connection, use TraceCursor as the cursor_factory."""
    _tracer = execution_context.get_opencensus_tracer()
    kwargs['cursor_factory'] = TraceCursor
    if _tracer is not None:
        with _tracer.span('PostgreSQL connect') as span:
            try:
                conn = pg_connect(*args, **kwargs)
                return conn
            except Exception as exc:
                logger.error('Could not connect to PostgreSQL.', exc_info=True)
                status = status_module.Status.from_exception(exc)
                span.set_status(status)
                raise
    else:
        conn = pg_connect(*args, **kwargs)
        return conn
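TraceCursor is not defined in this example. A minimal sketch of what such a cursor_factory could look like, assuming the standard psycopg2 cursor-subclass pattern and the same OpenCensus tracer; this is an illustration, not the example's actual implementation:

from opencensus.trace import execution_context
from psycopg2.extensions import cursor as pg_cursor


class TraceCursor(pg_cursor):
    """Hypothetical cursor that wraps each execute() call in a tracing span."""

    def execute(self, query, vars=None):
        tracer = execution_context.get_opencensus_tracer()
        if tracer is None:
            return super().execute(query, vars)
        with tracer.span('PostgreSQL query'):
            return super().execute(query, vars)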
Example #5
 def __init__(self, db_info, rows_per_insert):
     # Set up connection to database
     self.connection = pg_connect(database=db_info["database"],
                                  user=db_info["username"],
                                  password=db_info["password"],
                                  host=db_info["host"],
                                  port=db_info["port"])
     self.rows_per_insert = rows_per_insert
Example #6
def connect(request):
    conn_info = {
        "host": environ["DB_HOST"],
        "user": request.environ.get("DB_USER", "guest"),
        "database": environ["DB_NAME"],
        "port": environ["DB_PORT"],
    }

    return pg_connect(cursor_factory=NamedTupleCursor, **conn_info)
Example #7
def _load_connectors():
    eng                             =   create_engine(r'postgresql://%(DB_USER)s:%(DB_PW)s@%(DB_HOST)s:%(DB_PORT)s/%(DB_NAME)s'
                                                      % T,
                                                      encoding='utf-8',
                                                      echo=False)
    conn                            =   pg_connect("dbname='%(DB_NAME)s' host='%(DB_HOST)s' port=%(DB_PORT)s user='%(DB_USER)s' password='%(DB_PW)s' "
                                                   % T)
    cur                             =   conn.cursor()
    return eng,conn,cur
Example #8
 def _get_db_conn(self):
     return pg_connect(
         "dbname='{0}' user='******' host='{2}' password='******' port='{4}'".
         format(
             self.conn.database,
             self.conn.user,
             self.local_bind_address[0],
             self.conn.password,
             self.local_bind_address[1],
         ))
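Formatting credentials into a DSN string breaks if any value contains a quote or space. A sketch of the same connection using psycopg2 keyword arguments, with the attribute names taken from the example above:

 def _get_db_conn(self):
     # Same fields as the DSN above, passed as keyword arguments.
     return pg_connect(
         dbname=self.conn.database,
         user=self.conn.user,
         host=self.local_bind_address[0],
         password=self.conn.password,
         port=self.local_bind_address[1],
     )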
Example #9
 def _load_connectors():
     eng                             =   create_engine(r'postgresql://%(DB_USER)s:%(DB_PW)s@%(DB_HOST)s:%(DB_PORT)s/%(DB_NAME)s'
                                                       % T,
                                                       encoding='utf-8',
                                                       echo=False)
     conn                            =   pg_connect("dbname='%(DB_NAME)s' host='%(DB_HOST)s' port=%(DB_PORT)s \
                                                     user='%(DB_USER)s' password='%(DB_PW)s' "
                                                    % T);
     cur                             =   conn.cursor()
     return eng,conn,cur
Example #10
 def _create_conn(self):
     return pg_connect(
         "dbname='{0}' user='******' host='{2}' password='******' port='{4}'".
         format(
             self.credentials.schema,
             self.credentials.login,
             self.credentials.host,
             self.credentials.password,
             self.credentials.port,
         ))
Example #11
    def __init__(self,
                 host='localhost',
                 port=5432,
                 dbname=None,
                 user='******',
                 password=None):
        """Connect to a PostgreSQL server using the module wide connection and
        set the isolation level.

        :param str host: PostgreSQL Host
        :param port: PostgreSQL port
        :param str dbname: Database name
        :param str user: PostgreSQL user name
        :param str password: User's password

        """
        # Set the DSN
        dsn = "host='%s' port=%i dbname='%s' user='******'" % \
              (host, port, dbname, user)

        # Add the password if specified
        if password:
            dsn += " password='******'" % password

        # Generate a connection hash to keep a global instance of this
        # connection with
        self._connection_hash = _generate_connection_hash(dsn)

        # Attempt to get a cached connection from our module level pool
        self._pgsql = _get_cached_connection(self._connection_hash)

        # If we got a result, just log our success in doing so
        if self._pgsql:
            logger.debug("Re-using cached connection: %s",
                         self._connection_hash)

        # Create a new PostgreSQL connection and cache it
        else:
            # Connect to a PostgreSQL daemon
            logger.info("Connecting to %s:%i:%s as %s", host, port, dbname,
                        user)
            self._pgsql = pg_connect(dsn)

            # Commit after every command
            self._pgsql.set_isolation_level(
                extensions.ISOLATION_LEVEL_AUTOCOMMIT)
            logger.info('Connected to PostgreSQL')

            # Add the connection to our module level pool
            _add_cached_connection(self._connection_hash, self._pgsql)

        # Always get a new cursor
        self._cursor = self._pgsql.cursor(cursor_factory=extras.DictCursor)
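The helpers _generate_connection_hash, _get_cached_connection and _add_cached_connection are referenced but not shown. A minimal sketch of what they might look like, assuming a module-level dict keyed by a digest of the DSN (note the DSN can contain the password):

import hashlib

# Hypothetical module-level pool; the example's real helpers are not shown.
_CONNECTION_CACHE = {}


def _generate_connection_hash(dsn):
    return hashlib.sha1(dsn.encode('utf-8')).hexdigest()


def _get_cached_connection(connection_hash):
    return _CONNECTION_CACHE.get(connection_hash)


def _add_cached_connection(connection_hash, connection):
    _CONNECTION_CACHE[connection_hash] = connection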
Example #12
def postgres_access(query, extra_parameters):
    with pg_connect(host=settings.PG_HOST,
                    dbname=settings.PG_BASE,
                    user=settings.PG_USER,
                    password=settings.PG_PASSWORD) as conn:
        with conn.cursor() as curs:
            try:
                curs.execute(query, extra_parameters)
                return curs.fetchall()
            except PG_Error as e:
                logger.error("Error on query: " + str(e))
                raise QueryError(str(e)) from e
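Note that psycopg2's "with pg_connect(...) as conn" block only wraps a transaction (commit on success, rollback on error); it does not close the connection. A sketch that also closes it, under the same settings module, shown as a separate hypothetical function:

from contextlib import closing


def postgres_access_and_close(query, extra_parameters):
    # closing() guarantees conn.close(); "with conn:" below still handles
    # commit/rollback of the transaction as in the original example.
    with closing(pg_connect(host=settings.PG_HOST,
                            dbname=settings.PG_BASE,
                            user=settings.PG_USER,
                            password=settings.PG_PASSWORD)) as conn:
        with conn, conn.cursor() as curs:
            curs.execute(query, extra_parameters)
            return curs.fetchall()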
Example #13
    def __init__(
        self,
        table,
        db_type,
        db_name,
        db_user=None,
        db_password=None,
        db_host=None,
        db_port=None,
    ):
        super().__init__()

        if re.match(r"^[\w_]+$", table):
            self.table = table
        else:
            raise ValueError("Invalid database table name")

        self.db_type = db_type
        if db_type == "postgres":
            kwargs = {
                "dbname": db_name,
                "host": db_host,
                "password": db_password,
                "port": db_port or 5432,
                "user": db_user,
            }
            self.connection = pg_connect(
                **{k: v
                   for k, v in kwargs.items() if v})
        elif db_type in ("mariadb", "mysql"):
            kwargs = {
                "database": db_name,
                "host": db_host,
                "password": db_password,
                "port": db_port or 3306,
                "user": db_user,
            }
            self.connection = my_connect(
                **{k: v
                   for k, v in kwargs.items() if v})
        elif db_type == "influxdb":
            kwargs = {
                "database": db_name,
                "host": db_host,
                "password": db_password,
                "port": db_port or 8086,
                "username": db_user,
            }
            self.connection = influx_connect(
                **{k: v
                   for k, v in kwargs.items() if v})
        else:
            raise NotImplementedError()
Example #14
def handle_databases(scenario, outline, steps):
    world.initial_dir = os.getcwd()
    world.result = {}
    world.envvars = {}

    info = requests.post(
        'http://localhost:8889/replica_sets',
        data=json.dumps({
            'id': 'rs0',
            'members': [{}]
        })
    )

    assert info.status_code == 200, \
        "Impossible to create replicat set: {}".format(info.status_code)

    world.mongo_server = info.json().get('server_id')
    world.mongo_uri = info.json().get('mongodb_uri')
    world.mongo_conn = MongoClient(world.mongo_uri, w=1, j=True)

    world.pg_server = world.Postgresql()
    world.pg_uri = world.pg_server.url()
    world.pg_conn = pg_connect(world.pg_uri)

    try:
        yield

    finally:
        world.mongo_conn.close()
        world.pg_conn.close()

        if hasattr(world, 'mongo_connector'):
            world.mongo_connector.terminate()
            world.mongo_connector.wait()

        ret = requests.delete(
            'http://localhost:8889/replica_sets/rs0'
        )
        assert ret.status_code == 204, \
            "Impossible to delete replica set: {}".format(ret.status_code)

        del world.mongo_server
        del world.mongo_uri

        world.pg_server.stop()

        os.chdir(world.initial_dir)
        del world.result
        del world.envvars
Example #15
    def __init__(self, host="localhost", port=5432, dbname=None, user="******", password=None):
        """Connect to a PostgreSQL server using the module wide connection and
        set the isolation level.

        :param str host: PostgreSQL Host
        :param port: PostgreSQL port
        :param str dbname: Database name
        :param str user: PostgreSQL user name
        :param str password: User's password

        """
        # Set the DSN
        dsn = "host='%s' port=%i dbname='%s' user='******'" % (host, port, dbname, user)

        # Add the password if specified
        if password:
            dsn += " password='******'" % password

        # Generate a connection hash to keep a global instance of this
        # connection with
        self._connection_hash = _generate_connection_hash(dsn)

        # Attempt to get a cached connection from our module level pool
        self._pgsql = _get_cached_connection(self._connection_hash)

        # If we got a result, just log our success in doing so
        if self._pgsql:
            logger.debug("Re-using cached connection: %s", self._connection_hash)

        # Create a new PostgreSQL connection and cache it
        else:
            # Connect to a PostgreSQL daemon
            logger.info("Connecting to %s:%i:%s as %s", host, port, dbname, user)
            self._pgsql = pg_connect(dsn)

            # Commit after every command
            self._pgsql.set_isolation_level(extensions.ISOLATION_LEVEL_AUTOCOMMIT)
            logger.info("Connected to PostgreSQL")

            # Add the connection to our module level pool
            _add_cached_connection(self._connection_hash, self._pgsql)

        # Always get a new cursor
        self._cursor = self._pgsql.cursor(cursor_factory=extras.DictCursor)
Example #16
def insert_match_to_postgresql(properties: Sequence[Optional[str]],
                               start_time: datetime, end_time: datetime,
                               game_mode: str, map_name: str,
                               frags: List[Tuple[datetime, Any]]) -> str:
    """Insert Game Session Data to PostgreSQL Database.

    This function inserts a new record into the table `match` with the
    arguments start_time, end_time, game_mode, and map_name, using an
    INSERT statement.

    This function inserts all the frags into the table `match_frag`.

    Args:
        properties: A tuple of the following form:
                    (hostname, database_name, username, password)
                    where:
                        - hostname: hostname of the PostgreSQL server to
                        connect to;
                        - database_name: name of the database to use;
                        - username: username of the database account on which
                        the connection is being made;
                        - password: password of the database account.
        start_time: A datetime.datetime object with time zone information
                    corresponding to the start of the game session;
        end_time:   A datetime.datetime object with time zone information
                    corresponding to the end of the game session;
        game_mode:  Multi-player mode of the game session;
        map_name:   Name of the map that was played;
        frags:      A list of tuples in the following form:
                    (frag_time, killer_name[, victim_name, weapon_code])
                    where:
                        - frag_time (required): datetime.datetime with time
                        zone when the frag occurred;
                        - killer_name (required): username of the player who
                        fragged another or killed himself;
                        - victim_name (optional): username of the player who
                        has been fragged;
                        - weapon_code (optional): code of the weapon that was
                        used to frag.

    Returns: The identification of the match that has been inserted.

    """
    connection_string = "host={} dbname={} user={} password={}" \
        .format(*(i if i else "''" for i in properties))
    try:
        with pg_connect(connection_string) as conn:
            # Open a cursor to perform database operations
            with conn.cursor() as curs:
                # Inserts a new record into the table `match`:
                curs.execute(
                    """INSERT INTO match (start_time, end_time, 
                game_mode, map_name) VALUES (%s, %s, %s, %s)
                RETURNING match_id""",
                    (start_time, end_time, game_mode, map_name))
                match_id = curs.fetchone()[0]
                if match_id:
                    insert_frags_to_postgresql(conn, match_id, frags)
                return match_id
    except (pg_DatabaseError, TypeError) as exc:
        error("{}: {}".format(exc.__class__.__name__, exc))
        raise
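insert_frags_to_postgresql is called above but not shown. A sketch of how it could be written with psycopg2.extras.execute_batch, assuming a match_frag table whose columns follow the frag tuples described in the docstring; the column list is an assumption:

from psycopg2.extras import execute_batch


def insert_frags_to_postgresql(connection, match_id, frags):
    """Hypothetical helper: bulk-insert the frag tuples of one match."""
    with connection.cursor() as curs:
        execute_batch(
            curs,
            """INSERT INTO match_frag
                   (match_id, frag_time, killer_name, victim_name, weapon_code)
               VALUES (%s, %s, %s, %s, %s)""",
            # Pad the optional victim_name / weapon_code fields with None.
            [(match_id, *frag, *([None] * (4 - len(frag)))) for frag in frags])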
Example #17
def connect(*args, **kwargs):
    """Create database connection, use TraceCursor as the cursor_factory."""
    kwargs['cursor_factory'] = TraceCursor
    conn = pg_connect(*args, **kwargs)
    return conn
Example #18
 def connect(self):
     self.connection = pg_connect(**self.connection_params)
Example #19
import tornado.web
import tornado.websocket
from tornado.options import define, options
from hashlib import sha512
from settings import *
from app.tasks import switchboard, delete_job
from push import MessageHandler
from app.utils import MetaAnalysisData
from psycopg2 import connect as pg_connect
from psycopg2.extras import DictCursor
#following only needed for filehandler
from os.path import splitext
from random import randint

try:
    postgres = pg_connect("dbname='qiita' user='******' \
        password='******' host='localhost'")
except:
    raise RuntimeError("ERROR: unable to connect to the POSTGRES database.")

define("port", default=443, help="run on the given port", type=int)

metaAnalysis = MetaAnalysisData()


class BaseHandler(tornado.web.RequestHandler):
    def get_current_user(self):
        '''Overrides default method of returning the user currently connected'''
        user = self.get_secure_cookie("user")
        if user is None:
            self.clear_cookie("user")
            return ''
Example #20
def connect(url):
    return pg_connect(url)
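psycopg2.connect accepts a libpq connection string or URI, so this thin wrapper can be used as follows; the URI is a placeholder, not from the original source:

conn = connect("postgresql://app_user:secret@db.example.org:5432/app_db")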
Example #21
async def connect(dsn=None,
                  connection_factory=None,
                  cursor_factory=None,
                  **kwargs):
    """Open a connection to the database server and return a
    :class:`connection <psycaio.AioConnection>` object.

    The parameters are the same as for the :py:func:`psycopg2.connect` function
    with a few exceptions:

    * The  *async* or *async\_* argument will have no effect. A connection will
      always be in asynchronous mode.

    * If set, the *connection_factory* must return an instance of both an
      :class:`AioConnMixin <psycaio.AioConnMixin>` and a
      psycopg2 :py:class:`psycopg2:connection`.
      The default is the :class:`AioConnection <psycaio.AioConnection>`
      class which just inherits from both. The
      :class:`AioConnMixin <psycaio.AioConnMixin>` type must be located
      before the psycopg2
      :py:class:`psycopg2:connection` type in the class hierarchy when
      performing a method lookup.

    * If set, the *cursor_factory* must return an instance of both an
      :class:`AioCursorMixin <psycaio.AioCursorMixin>` and a psycopg2
      :py:class:`psycopg2:cursor`.
      The default is the :class:`AioCursor <psycaio.AioCursor>`
      class which just inherits from both. The
      :class:`AioCursorMixin <psycaio.AioCursorMixin>` type must be
      located before the psycopg2
      :py:class:`psycopg2:cursor` type in the class
      hierarchy when performing a method lookup.

      For example, to create an asynchronous version of the psycopg2
      :py:class:`DictCursor <psycopg2.extras.DictCursor>` type, the following
      code can be used:

      .. code-block:: python

         from psycopg2.extras import DictCursor
         from psycaio import AioCursorMixin

         class AioDictCursor(AioCursorMixin, DictCursor):
            pass

      The *AioDictCursor* can then be used as the *cursor_factory* argument for
      the :py:func:`connect <psycaio.connect>` function or the
      :py:meth:`AioConnMixin.cursor <psycaio.AioConnMixin.cursor>` method.

    * The connect_timeout is ignored in asynchronous mode by psycopg2 (or
      actually libpq).
      Therefore timeout functionality is implemented in this function. When
      multiple hosts are provided, or a host resolves to multiple IP addresses,
      it will apply the timeout per single host, just like libpq in
      synchronous/blocking mode.

    Asynchronous DNS lookups are performed by this function as well, if
    necessary, because that part of the functionality is always blocking in
    libpq.

    """
    if connection_factory is None:
        connection_factory = AioConnection
    if cursor_factory is None:
        cursor_factory = AioCursor

    if dsn:
        conn_kwargs = parse_dsn(dsn)
    else:
        conn_kwargs = {}

    conn_kwargs.update(kwargs)
    conn_kwargs.update({'async_': True, 'client_encoding': 'UTF8'})

    # Two issues with non-blocking libpq:
    # * libpq and therefore psycopg2 do not respect connect_timeout in non
    #   blocking mode
    # * DNS lookups by libpq are blocking even in non blocking mode.
    #
    # Here we try to solve those two issues. If host(s) are provided, and
    # hostaddres(ses) are not, do the DNS lookup here using the asyncio version
    # of getaddrinfo.
    #
    # Also split the hosts or recognize that a single host name might have
    # multiple addresses, for example IPv4 and IPv6, so later we can apply
    # the timeout per address. Just like libpq is doing in synchronous mode.
    # This solves the issue where the first host drops the traffic (client will
    # not notice) and a second connection attempt will never be undertaken
    # because the first attempt uses up the entire timeout.
    #
    # Note: hostname(s) can be set using a service file. These are not
    # recognized here and the issues mentioned above are not solved in that
    # case.

    # first get the timeout
    timeout = conn_kwargs.get('connect_timeout')
    if timeout is not None:
        timeout = int(timeout)
        # mimic libpq behavior
        if timeout == 1:
            timeout = 2
        if timeout <= 0:
            timeout = None

    loop = get_running_loop()

    if not conn_kwargs.get("service"):

        def parse_multi(param_name):
            param = (conn_kwargs.get(param_name)
                     or os.environ.get(f"PG{param_name.upper()}"))
            return str(param).split(',') if param else []

        hostaddrs = parse_multi("hostaddr")
        hosts = parse_multi("host")
        ports = parse_multi("port")

        # same logic as in libpq
        num_host_entries = len(hostaddrs) or len(hosts) or 1

        # Build up three lists for hosts, hostaddrs and ports of equal length.
        # Lists can contain None for any value
        if not hostaddrs:
            hostaddrs = [None] * num_host_entries

        if hosts:
            # number of hosts must be the same as number of hostaddrs
            if len(hosts) != num_host_entries:
                raise OperationalError(
                    f"could not match {len(hosts)} host names to "
                    f"{num_host_entries} hostaddr values")
        else:
            hosts = [None] * num_host_entries

        if ports:
            num_ports = len(ports)
            # number of ports must be the same as number of host(addr)s or 1
            if num_ports != num_host_entries:
                if num_ports != 1:
                    raise OperationalError(
                        f"could not match {num_ports} port numbers to "
                        f"{num_host_entries} hosts")
                # Multiple host(addr) values, but just one port. That is ok.
                # Stretch the ports list to equal length
                ports *= num_host_entries
        else:
            ports = [None] * num_host_entries

        # Now we have three lists of equal length. Loop through them and add
        # a tuple for each host entry that we find
        host_entries = []
        for host, hostaddr, port in zip(hosts, hostaddrs, ports):
            if hostaddr or not host or host.startswith('/'):
                # host address is already provided, host is empty or is a unix
                # socket address. Just add it to the list
                host_entries.append((host, hostaddr, port))
            else:
                # perform async DNS lookup
                for addrinfo in await loop.getaddrinfo(
                        host, None, proto=socket.IPPROTO_TCP):
                    host_entries.append((host, addrinfo[4][0], port))
    else:
        # A service name is used. Just let libpq handle it.
        host_entries = [(
            conn_kwargs.get("host"),
            conn_kwargs.get("hostaddr"),
            conn_kwargs.get("port"),
        )]

    exceptions = []
    for host, hostaddr, port in host_entries:
        # Try to connect for each host entry. The timeout applies
        # to each attempt separately
        conn_kwargs.update(host=host, hostaddr=hostaddr, port=port)
        cn = pg_connect(connection_factory=connection_factory,
                        cursor_factory=cursor_factory,
                        **conn_kwargs)

        # Check base type and order. Psycopg2 already checked if it is a valid
        # psycopg2 connection.
        mro = type(cn).__mro__
        try:
            mixin_pos = mro.index(AioConnMixin)
        except ValueError:
            raise OperationalError(
                "connection_factory must return an instance of AioConnMixin")
        if mro.index(PGConnection) < mixin_pos:
            raise OperationalError(
                "AioConnMixin must be present before "
                "psycopg2.extensions.connection in method resolution order. "
                "Maybe base classes should be switched.")

        try:
            await wait_for(cn._start_connect_poll(), timeout)
            return cn
        except CancelledError:
            cn.close()
            # we got cancelled, do not try next entry
            raise
        except Exception as ex:
            cn.close()
            exceptions.append(ex)
    if len(exceptions) == 1:
        raise exceptions[0]
    raise OperationalError(exceptions)
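A minimal usage sketch for this coroutine, with placeholder connection parameters; the per-host timeout handling described above applies to the connect_timeout value:

import asyncio


async def main():
    cn = await connect(host="db.example.org", dbname="app_db", connect_timeout=5)
    try:
        cur = cn.cursor()  # returns the AioCursor-based cursor_factory
        ...                # issue queries here
    finally:
        cn.close()


asyncio.run(main())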
Example #22
from app.celery import celery
from celery import signature, group
from time import sleep
from json import dumps
from random import randint
from redis import Redis
from psycopg2 import connect as pg_connect

try:
    r_server = Redis()
except:
      raise RuntimeError("ERROR: unable to connect to the REDIS database.")

try:
    postgres = pg_connect("dbname='qiita' user='******' \
        password='******' host='localhost'")
except:
    raise RuntimeError("ERROR: unable to connect to the POSTGRES database.")

def push_notification(user, job, analysis, msg, files=[], done=False):
    '''Creates JSON and takes care of push notification'''
    jsoninfo = {
        'job': job,
        'analysis': analysis,
        'msg': msg,
        'results': files,
    }
    if done:
        jsoninfo['done'] = 1
    else:
        jsoninfo['done'] = 0
Example #23
def run_sql(query, database='postgres'):
    conn = pg_connect(database=database)
    conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    cur = conn.cursor()
    cur.execute(query)
    conn.close()
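The snippet assumes imports like these (not shown in the excerpt), and autocommit is what makes statements such as CREATE DATABASE work here:

from psycopg2 import connect as pg_connect
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT

run_sql("CREATE DATABASE example_db")  # example usage; the database name is a placeholder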
Example #24
def main(args,kwargs):
    if args.count('requests'):              import requests
    if args.count('urllib'):                from urllib import quote_plus,unquote

    import                                  datetime                as DT
    import                                  time
    delay                                   =   time.sleep
    from dateutil                           import parser           as DU               # e.g., DU.parse('some date as str') --> obj(datetime.datetime)
    from re                                 import findall          as re_findall
    from re                                 import sub              as re_sub           # re_sub('patt','repl','str','cnt')
    from re                                 import search           as re_search        # re_search('patt','str')
    import json
    from subprocess                         import Popen            as sub_popen
    from subprocess                         import PIPE             as sub_PIPE
    from traceback                          import format_exc       as tb_format_exc
    from sys                                import exc_info         as sys_exc_info
    from types                              import NoneType
    from uuid                               import uuid4            as get_guid
    from py_classes                         import To_Class,To_Class_Dict,To_Sub_Classes
    import                                  pandas                  as pd
    pd.set_option(                          'expand_frame_repr', False)
    pd.set_option(                          'display.max_columns', None)
    pd.set_option(                          'display.max_colwidth', 250)
    pd.set_option(                          'display.max_rows', 1000)
    pd.set_option(                          'display.width', 1500)
    pd.set_option(                          'display.colheader_justify','left')
    np                                      =   pd.np
    np.set_printoptions(                    linewidth=1500,threshold=np.nan)
    import logging
    logger = logging.getLogger(             'sqlalchemy.dialects.postgresql')
    logger.setLevel(logging.INFO)

    if args.count('pgsql'):              
        from sqlalchemy                     import create_engine
        from psycopg2                       import connect          as pg_connect
        try:
            eng                             =   create_engine(r'postgresql://%(DB_USER)s:%(DB_PW)s@%(DB_HOST)s:%(DB_PORT)s/%(DB_NAME)s'
                                                              % self.T.pgsql,
                                                              encoding='utf-8',
                                                              echo=False)
            conn                            =   pg_connect("dbname='%(DB_NAME)s' host='%(DB_HOST)s' port=%(DB_PORT)s \
                                                           user='%(DB_USER)s' password='%(DB_PW)s' "
                                                           % self.T.pgsql);
            cur                             =   conn.cursor()

        except:
            from getpass import getpass
            pw = getpass('Root password (to create DB:"%(DB_NAME)s" via CL): ' % self.T.pgsql)
            p = sub_popen(" ".join(["echo '%s' | sudo -S prompt='' " % pw,
                                    'su postgres -c "psql --cluster 9.4/main -c ',
                                    "'create database %(DB_NAME)s;'" % self.T.pgsql,
                                    '"']),
                          stdout=sub_PIPE,
                          shell=True)
            (_out, _err) = p.communicate()
            assert _err is None

    import inspect, os
    D                                       =   {'guid'                 :   str(get_guid().hex)[:7],
                                                 'pg_classes_pwd'       :   os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))),
                                                }
    D.update(                                   {'tmp_tbl'              :   'tmp_'+D['guid'],
                                                'current_filepath'     :   inspect.getfile(inspect.currentframe())})



    T                                       =   To_Class_Dict(  self,
                                                            dict_list=[D,locals()],
                                                            update_globals=True)