Example No. 1
    def register_composite(cls, connection):
        klass = cls()
        db_type = klass.db_type(connection)
        if db_type:
            try:
                cls.python_type = register_composite(
                    str(db_type),
                    connection.cursor().cursor,
                    globally=True,
                    factory=klass.factory_class()
                ).type
            except psycopg2.ProgrammingError:
                _missing_types[db_type] = cls
            else:
                def adapt_composite(composite):
                    # For safety, `composite_python_class` must have the same
                    # attributes as the namedtuple `python_type`'s fields, so
                    # that those can be escaped rather than relying on
                    # `__str__`.
                    return AsIs("(%s)::%s" % (
                        ", ".join([
                            adapt(getattr(composite, field)).getquoted().decode('utf-8')
                            for field in cls.python_type._fields
                        ]), db_type
                    ))

                register_adapter(cls.composite_python_class, adapt_composite)
Example No. 2
	def __init__(self, connection_string, data_quality=None):
		super(PostgresConsoleDumper, self).__init__(connection_string, data_quality)
		register_adapter(list, SQL_LIST)
		try:
			self.stdoutw = sys.stdout.buffer.write
		except AttributeError:
			self.stdoutw = sys.stdout.write
Example No. 3
def register_ipaddress(conn_or_curs=None):
    """
    Register conversion support between `ipaddress` objects and `network types`__.

    :param conn_or_curs: the scope where to register the type casters.
        If `!None` register them globally.

    After the function is called, PostgreSQL :sql:`inet` values will be
    converted into `~ipaddress.IPv4Interface` or `~ipaddress.IPv6Interface`
    objects, :sql:`cidr` values into `~ipaddress.IPv4Network` or
    `~ipaddress.IPv6Network`.

    .. __: https://www.postgresql.org/docs/current/static/datatype-net-types.html
    """
    global ipaddress
    import ipaddress

    global _casters
    if _casters is None:
        _casters = _make_casters()

    for c in _casters:
        register_type(c, conn_or_curs)

    for t in [ipaddress.IPv4Interface, ipaddress.IPv6Interface,
              ipaddress.IPv4Network, ipaddress.IPv6Network]:
        register_adapter(t, adapt_ipaddress)
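A minimal usage sketch for the function above (the DSN and values are illustrative; assumes psycopg2 >= 2.7 and a reachable server):

import ipaddress
import psycopg2
import psycopg2.extras

conn = psycopg2.connect("dbname=test")  # illustrative DSN
psycopg2.extras.register_ipaddress(conn)
cur = conn.cursor()
cur.execute("SELECT %s::inet", [ipaddress.ip_interface("192.168.0.1/24")])
print(cur.fetchone()[0])  # IPv4Interface('192.168.0.1/24')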
Example No. 4
def register_psycopg2_composite(dbapi_connection, composite):
    psycopg2.extras.register_composite(
        composite.name,
        dbapi_connection,
        globally=True,
        factory=composite.caster
    )

    def adapt_composite(value):
        adapted = [
            adapt(
                getattr(value, column.name)
                if not isinstance(column.type, TypeDecorator)
                else column.type.process_bind_param(
                    getattr(value, column.name),
                    PGDialect_psycopg2()
                )
            )
            for column in
            composite.columns
        ]
        for value in adapted:
            if hasattr(value, 'prepare'):
                value.prepare(dbapi_connection)
        values = [
            value.getquoted().decode(dbapi_connection.encoding)
            if six.PY3
            else value.getquoted()
            for value in adapted
        ]
        return AsIs("(%s)::%s" % (', '.join(values), composite.name))

    register_adapter(composite.type_cls, adapt_composite)
Example No. 5
def register():
    """Register adapters for numpy types."""

    # Simple numeric types need only be converted by ``AsIs``
    for numpy_type in (
        numpy.int_,
        numpy.intc,
        numpy.intp,
        numpy.int8,
        numpy.int16,
        numpy.int32,
        numpy.int64,
        numpy.uint8,
        numpy.uint16,
        numpy.uint32,
        numpy.uint64,
        numpy.float_,
        numpy.float16,
        numpy.float32,
        numpy.float64,
    ):
        register_adapter(numpy_type, AsIs)

    # Booleans have to be converted
    register_adapter(numpy.bool_, lambda v: AsIs(bool(v)))
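A quick sketch of what the AsIs registration buys you; adapt() works standalone for these types, no connection required:

import numpy
from psycopg2.extensions import adapt, register_adapter, AsIs

register_adapter(numpy.int64, AsIs)
print(adapt(numpy.int64(42)).getquoted())  # b'42'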
Example No. 6
    def register_uuid(oids=None, conn_or_curs=None):
        """Create the UUID type and an uuid.UUID adapter."""
        if not oids:
            oid1 = 2950
            oid2 = 2951
        elif isinstance(oids, list):
            oid1, oid2 = oids
        else:
            oid1 = oids
            oid2 = 2951

        def parseUUIDARRAY(data, cursor):
            if data is None:
                return None
            elif data == "{}":
                return []
            else:
                return [((len(x) > 0 and x != "NULL") and uuid.UUID(x) or None) for x in data[1:-1].split(",")]

        _ext.UUID = _ext.new_type((oid1,), "UUID", lambda data, cursor: data and uuid.UUID(data) or None)
        _ext.UUIDARRAY = _ext.new_type((oid2,), "UUID[]", parseUUIDARRAY)

        _ext.register_type(_ext.UUID, conn_or_curs)
        _ext.register_type(_ext.UUIDARRAY, conn_or_curs)
        _ext.register_adapter(uuid.UUID, UUID_adapter)

        return _ext.UUID
Example No. 7
    def __call__(self, cls):
        cls.REGISTERED = True
        table = cls.TABLE
        for attr, prop in cls.__dict__.items():
            if isinstance(prop, ForeignKeyProperty):
                target_table, target_name, target_model = prop.reference
                REF = (table, attr, target_table, target_name, target_model)
                BACKREF = (target_table, target_name, table, attr, cls)
                if table in self.REFERENCES:
                    self.REFERENCES[table].append(REF)
                else: self.REFERENCES[table] = [REF]
                if target_table in self.BACKREFS:
                    self.BACKREFS[target_table].append(BACKREF)
                else: self.BACKREFS[target_table] = [BACKREF]

        cls.all = staticmethod(lambda: Query(self, cls))
        cls._save = self.save
        register_adapter(cls, ForeignKeyProperty.adapt)
        if table not in self.TABLES:
            self._create_table(cls)
            # if the model also defines an index (which, by implication, is not yet in the database)
            for field in cls.INDEXES_DEFINED:
                if isinstance(cls.__dict__[field], GeoProperty): self._create_geo_index(cls, field)
                else: self._create_index(cls, field)
        # if there is a model in the database that is not defined: disregard
        return cls
Example No. 8
def register_uuid(oids=None, conn_or_curs=None):
    """Create the UUID type and an uuid.UUID adapter.

    :param oids: oid for the PostgreSQL :sql:`uuid` type, or 2-items sequence
        with oids of the type and the array. If not specified, use PostgreSQL
        standard oids.
    :param conn_or_curs: where to register the typecaster. If not specified,
        register it globally.
    """

    import uuid

    if not oids:
        oid1 = 2950
        oid2 = 2951
    elif isinstance(oids, (list, tuple)):
        oid1, oid2 = oids
    else:
        oid1 = oids
        oid2 = 2951

    _ext.UUID = _ext.new_type((oid1, ), "UUID",
            lambda data, cursor: data and uuid.UUID(data) or None)
    _ext.UUIDARRAY = _ext.new_array_type((oid2,), "UUID[]", _ext.UUID)

    _ext.register_type(_ext.UUID, conn_or_curs)
    _ext.register_type(_ext.UUIDARRAY, conn_or_curs)
    _ext.register_adapter(uuid.UUID, UUID_adapter)

    return _ext.UUID
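In practice this is usually reached via psycopg2.extras.register_uuid. A usage sketch (the DSN is illustrative):

import uuid
import psycopg2
from psycopg2.extras import register_uuid

register_uuid()  # registers the typecaster and adapter globally
conn = psycopg2.connect("dbname=test")
cur = conn.cursor()
cur.execute("SELECT %s::uuid", [uuid.uuid4()])
print(type(cur.fetchone()[0]))  # <class 'uuid.UUID'>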
Example No. 9
def register_inet(oid=None, conn_or_curs=None):
    """Create the INET type and an Inet adapter."""
    if not oid: oid = 869
    _ext.INET = _ext.new_type((oid, ), "INET",
            lambda data, cursor: data and Inet(data) or None)
    _ext.register_type(_ext.INET, conn_or_curs)
    _ext.register_adapter(Inet, lambda x: x)
    return _ext.INET
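A usage sketch for the Inet wrapper (table name and DSN are illustrative; note that register_inet is deprecated in recent psycopg2 releases in favour of register_ipaddress):

import psycopg2
from psycopg2.extras import Inet, register_inet

register_inet()
conn = psycopg2.connect("dbname=test")
cur = conn.cursor()
cur.execute("INSERT INTO hosts (addr) VALUES (%s)", [Inet("10.0.0.1")])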
Example No. 10
    def test_no_mro_no_joy(self):
        from psycopg2.extensions import adapt, register_adapter, AsIs

        class A: pass
        class B(A): pass

        register_adapter(A, lambda a: AsIs("a"))
        self.assertRaises(psycopg2.ProgrammingError, adapt, B())
Example No. 11
def register_uuid(oid=None, conn_or_curs=None):
    """Create the UUID type and a uuid.UUID adapter."""
    if not oid: oid = 2950
    _ext.UUID = _ext.new_type((oid, ), "UUID",
            lambda data, cursor: data and uuid.UUID(data) or None)
    _ext.register_type(_ext.UUID, conn_or_curs)
    _ext.register_adapter(uuid.UUID, UUID_adapter)
    return _ext.UUID
Example No. 12
def use_pendulum_for_time_types():
    register_cast(OID_TIMESTAMP, "TIMESTAMP", cast_timestamp)
    register_cast(OID_TIMESTAMPTZ, "TIMESTAMPTZ", cast_timestamptz)
    register_cast(OID_DATE, "DATE", cast_date)
    register_cast(OID_TIME, "TIME", cast_time)
    register_cast(OID_INTERVAL, "INTERVAL", cast_interval)

    register_adapter(datetime, adapt_datetime)
    register_adapter(relativedelta, adapt_relativedelta)
Example No. 13
def __init__(self):
    self.dir = r"D:\Twitter_data_collect\*.json"
    self.host = r"localhost"
    self.database = "twitter"
    self.user = "******"
    self.password = "******"  # unix passwd?
    self.data_dict = {}
    self.list_twitts = []
    register_adapter(dict, Json)
Example No. 14
    def get_db_prep_value(self, value, connection, prepared=False):
        """Return a UUID object. Also, ensure that psycopg2 is
        aware how to address that object.
        """
        # Register the UUID type with psycopg2.
        register_adapter(uuid.UUID, UUIDAdapter)

        # Run the normal functionality.
        return super(UUIDField, self).get_db_prep_value(value, connection, prepared=prepared)
Example No. 15
def configure(database_name, port):
  from psycopg2.extras import Json
  from psycopg2.extensions import register_adapter

  global CONNECTION_STRING

  CONNECTION_STRING = "host=localhost port={} user=postgres dbname={}".format(port, database_name)

  register_adapter(dict, lambda d: Json(d))
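With that adapter in place, plain dicts can be passed as parameters and arrive as json/jsonb. A sketch (the table is illustrative, and configure() must have run first):

import psycopg2

conn = psycopg2.connect(CONNECTION_STRING)
cur = conn.cursor()
cur.execute("INSERT INTO events (payload) VALUES (%s)", [{"type": "click", "n": 1}])
conn.commit()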
Example No. 16
    def test_adapt_most_specific(self):
        from psycopg2.extensions import adapt, register_adapter, AsIs

        class A(object): pass
        class B(A): pass
        class C(B): pass

        register_adapter(A, lambda a: AsIs("a"))
        register_adapter(B, lambda b: AsIs("b"))
        self.assertEqual(b'b', adapt(C()).getquoted())
Example No. 17
    def test_adapt_subtype_3(self):
        from psycopg2.extensions import adapt, register_adapter, AsIs

        class A: pass
        class B(A): pass

        register_adapter(A, lambda a: AsIs("a"))
        try:
            self.assertEqual(b"a", adapt(B()).getquoted())
        finally:
            del psycopg2.extensions.adapters[A, psycopg2.extensions.ISQLQuote]
Example No. 18
    def test_no_mro_no_joy(self):
        from psycopg2.extensions import adapt, register_adapter, AsIs

        class A: pass
        class B(A): pass

        register_adapter(A, lambda a: AsIs("a"))
        try:
            self.assertRaises(psycopg2.ProgrammingError, adapt, B())
        finally:
            del psycopg2.extensions.adapters[A, psycopg2.extensions.ISQLQuote]
Example No. 19
def register_macaddr_type():
    from psycopg2.extensions import register_adapter, new_type, register_type, new_array_type
    import psycopg2

    oid = get_type_oid("NULL::macaddr")
    PGTYPE = new_type((oid,), "macaddr", cast_macaddr)
    register_type(PGTYPE)
    register_adapter(MacAddr, adapt_macaddr)

    mac_array_oid = get_type_oid("'{}'::macaddr[]")
    array_of_mac = new_array_type((mac_array_oid, ), 'macaddr', psycopg2.STRING)
    psycopg2.extensions.register_type(array_of_mac)
Example No. 20
def register_hstore(conn_or_curs, globally=False, unicode=False, oid=None):
    """Register adapter and typecaster for `!dict`\-\ |hstore| conversions.

    :param conn_or_curs: a connection or cursor: the typecaster will be
        registered only on this object unless *globally* is set to `!True`
    :param globally: register the adapter globally, not only on *conn_or_curs*
    :param unicode: if `!True`, keys and values returned from the database
        will be `!unicode` instead of `!str`. The option is not available on
        Python 3
    :param oid: the OID of the |hstore| type if known. If not, it will be
        queried on *conn_or_curs*

    The connection or cursor passed to the function will be used to query the
    database and look for the OID of the |hstore| type (which may be different
    across databases). If querying is not desirable (e.g. with
    :ref:`asynchronous connections <async-support>`) you may specify it in the
    *oid* parameter (it can be found using a query such as :sql:`SELECT
    'hstore'::regtype::oid;`).

    Note that, when passing a dictionary from Python to the database, both
    strings and unicode keys and values are supported. Dictionaries returned
    from the database have keys/values according to the *unicode* parameter.

    The |hstore| contrib module must be already installed in the database
    (executing the ``hstore.sql`` script in your ``contrib`` directory).
    Raise `~psycopg2.ProgrammingError` if the type is not found.

    .. versionchanged:: 2.4
        added the *oid* parameter. If not specified, the typecaster is
        installed also if |hstore| is not installed in the :sql:`public`
        schema.
    """
    if oid is None:
        oid = HstoreAdapter.get_oids(conn_or_curs)
        if oid is None or not oid[0]:
            raise psycopg2.ProgrammingError(
                "hstore type not found in the database. "
                "please install it from your 'contrib/hstore.sql' file")
        else:
            oid = oid[0]  # for the moment we don't have a HSTOREARRAY

    if isinstance(oid, int):
        oid = (oid,)

    # create and register the typecaster
    if sys.version_info[0] < 3 and unicode:
        cast = HstoreAdapter.parse_unicode
    else:
        cast = HstoreAdapter.parse

    HSTORE = _ext.new_type(oid, "HSTORE", cast)
    _ext.register_type(HSTORE, not globally and conn_or_curs or None)
    _ext.register_adapter(dict, HstoreAdapter)
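Typical use, per the docstring above (a sketch; assumes the hstore extension is installed and the DSN is illustrative):

import psycopg2
from psycopg2.extras import register_hstore

conn = psycopg2.connect("dbname=test")
register_hstore(conn)
cur = conn.cursor()
cur.execute("SELECT %s::hstore", [{"a": "1", "b": None}])
print(cur.fetchone()[0])  # {'a': '1', 'b': None}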
Example No. 21
    def test_mro_required(self):
        import psycopg2
        from psycopg2.extensions import adapt, register_adapter, AsIs

        # Intentionally old-style, they don't expose their MRO.
        class A:
            pass
        class B(A):
            pass

        register_adapter(A, lambda a: AsIs("a"))
        with self.assertRaises(psycopg2.ProgrammingError):
            adapt(B())
Example No. 22
    def test_adapt_most_specific(self):
        from psycopg2.extensions import adapt, register_adapter, AsIs

        class A(object): pass
        class B(A): pass
        class C(B): pass

        register_adapter(A, lambda a: AsIs("a"))
        register_adapter(B, lambda b: AsIs("b"))
        try:
            self.assertEqual(b'b', adapt(C()).getquoted())
        finally:
            del psycopg2.extensions.adapters[A, psycopg2.extensions.ISQLQuote]
            del psycopg2.extensions.adapters[B, psycopg2.extensions.ISQLQuote]
Example No. 23
def register_numpy_types():
    """Register the AsIs adapter for following types from numpy:
      - numpy.int8
      - numpy.int16
      - numpy.int32
      - numpy.int64

      - numpy.float16
      - numpy.float32
      - numpy.float64
      - numpy.float128
    """
    for typ in ['int8', 'int16', 'int32', 'int64',
                'float16', 'float32', 'float64', 'float128']:
        register_adapter(getattr(np, typ), AsIs)
Example No. 24
def setup_connection(conn):
    # We want to use unicode everywhere
    register_type(UNICODE, conn)
    register_type(UNICODEARRAY, conn)
    cur = conn.cursor()
    cur.execute("SELECT NULL::numeric")
    oid = cur.description[0][1]
    NUMERIC = new_type((oid,), "NUMERIC", numeric_converter)
    register_type(NUMERIC, conn)
    register_adapter(Integer, AsIs)
    register_adapter(RealNumber, RealEncoder)
    register_adapter(list, Json)
    register_adapter(tuple, Json)
    register_adapter(dict, Json)
    register_json(conn, loads=Json.loads)
Example No. 25
def configure_postgres(args):
  from psycopg2.extras import Json
  from psycopg2.extensions import register_adapter

  from bidon.db.core import get_pg_core

  port = args.port or 5432

  CONFIG.update(core=get_pg_core("dbname=bidon_test user=postgres host=localhost port={}".format(port)),
                test_rowcount=True,
                test_callproc=True,
                is_pg=True)
  CONFIG.freeze()

  register_adapter(dict, lambda d: Json(d))
Example No. 26
def getDBConnection():
    global con
    if con is None:
        con = psycopg2.connect( host = config.DB_HOST,
                                database = config.DB_DATABASE,
                                user = config.DB_USERNAME,
                                password = config.DB_PASSWORD )

        psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
        psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
        
        con.set_isolation_level( psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT )

        register_adapter( PostgresEntityType, __adapt_entity )
        query = """ SELECT pg_type.oid
                    FROM pg_type JOIN pg_namespace
                           ON typnamespace = pg_namespace.oid
                    WHERE typname = 'entity_sync' AND nspname = 'public'"""
        cur = con.cursor()
        cur.execute( query )
        record = cur.fetchone()
        if record is not None:
            entity_oid = record[0]
            ENTITY = psycopg2.extensions.new_type( ( entity_oid, ), "entity_sync", __cast_entity )
            psycopg2.extensions.register_type( ENTITY )

        query = ''' SELECT pg_type.oid
                    FROM pg_type JOIN pg_namespace
                           ON typnamespace = pg_namespace.oid
                    WHERE typname = '_entity_sync'
                     AND nspname = 'public'
                    '''
        cur = con.cursor()

        cur.execute( query )
        record = cur.fetchone()
        if record is not None:
            entities_oid = record[0]
            ENTITIES = psycopg2.extensions.new_type( ( entities_oid, ), "entity_sync[]", __cast_entities )
            psycopg2.extensions.register_type( ENTITIES )

        cur.close()

    return con
Example No. 27
def register_hstore(conn_or_curs, globally=False, unicode=False,
        oid=None, array_oid=None):
    from psycopg2.extras import HstoreAdapter
    from psycopg2 import extensions as _ext
    import psycopg2
    import sys
    import re as regex
    from .fields import HStoreDict

    def cast(s, cur, _bsdec=regex.compile(r"\\(.)")):
        if sys.version_info[0] < 3 and unicode:
            result = HstoreAdapter.parse_unicode(s, cur)
        else:
            result = HstoreAdapter.parse(s, cur, _bsdec)
        return HStoreDict(result)

    if oid is None:
        oid = HstoreAdapter.get_oids(conn_or_curs)
        if oid is None or not oid[0]:
            raise psycopg2.ProgrammingError(
                "hstore type not found in the database. "
                "please install it from your 'contrib/hstore.sql' file")
        else:
            array_oid = oid[1]
            oid = oid[0]

    if isinstance(oid, int):
        oid = (oid,)

    if array_oid is not None:
        if isinstance(array_oid, int):
            array_oid = (array_oid,)
        else:
            array_oid = tuple([x for x in array_oid if x])

    HSTORE = _ext.new_type(oid, str("HSTORE"), cast)
    _ext.register_type(HSTORE, not globally and conn_or_curs or None)
    _ext.register_adapter(dict, HstoreAdapter)

    if array_oid:
        HSTOREARRAY = _ext.new_array_type(array_oid, str("HSTOREARRAY"), HSTORE)
        _ext.register_type(HSTOREARRAY, not globally and conn_or_curs or None)
Example No. 28
    def register_composite(cls):
        db_type = cls().db_type(connection)
        if db_type:
            try:
                cls.python_type = register_composite(
                    db_type,
                    connection.cursor().cursor,
                    globally=True
                ).type
            except ProgrammingError:
                _missing_types[db_type] = cls
            else:
                def adapt_composite(composite):
                    return AsIs("(%s)::%s" % (
                        ", ".join([
                            adapt(getattr(composite, field)).getquoted() for field in composite._fields
                        ]), db_type
                    ))

                register_adapter(cls.python_type, adapt_composite)
Example No. 29
    def register_composite(cls, connection):
        """
        Register this CompositeType with Postgres.

        If the CompositeType does not yet exist in the database, this will
        fail.  Hopefully a migration will come along shortly and create the
        type in the database. If `retry` is True, this CompositeType will try
        to register itself again after the type is created.
        """

        LOGGER.debug("Registering composite type %s on connection %s",
                     cls.__name__, connection)
        cls.registered_connection = connection

        with connection.temporary_connection() as cur:
            # This is what to do when the type is coming out of the database
            register_composite(cls._meta.db_type, cur, globally=True,
                               factory=cls.Caster)
            # This is what to do when the type is going in to the database
            register_adapter(cls, QuotedCompositeType)
Example No. 30
def register_range_type(pgrange, pyrange, conn):
    """
    Register a new range type as a PostgreSQL range.

        >>> register_range_type("int4range", intrange, conn)

    The above will make sure intrange is regarded as an int4range for queries
    and that int4ranges will be cast into intrange when fetching rows.

    pgrange should be the full name including schema for the custom range type.

    Note that adaptation is global: if a range type value is passed to a regular
    psycopg2 connection it will be adapted to its proper range type. Parsing of
    rows from the database, however, is not global and is set on a
    per-connection basis.
    """

    register_adapter(pyrange, partial(adapt_range, pgrange))
    register_range_caster(
        pgrange, pyrange, *query_range_oids(pgrange, conn), scope=conn)
Example No. 31
    def adapt_date(date):
        if date.tz:
            return AsIs("'%s'::timestamptz" % str(date.date))
        else:
            return AsIs("'%s'::timestamp" % str(date.date))

    def adapt_range(_range):
        if _range.start.tz:
            return AsIs("tstzrange('%s', '%s')" %
                        (str(_range.start.date), str(_range.end.date)))
        else:
            return AsIs("tsrange('%s', '%s')" %
                        (str(_range.start.date), str(_range.end.date)))

    register_adapter(Date, adapt_date)
    register_adapter(Range, adapt_range)

except ImportError:
    pass


def findall(text):
    """Find all the timestrings within a block of text.

    >>> timestring.findall("once upon a time, about 3 weeks ago, there was a boy whom was born on august 15th at 7:20 am. epic.")
    [
     ('3 weeks ago,', <timestring.Date 2014-02-09 00:00:00 4483019280>),
     ('august 15th at 7:20 am', <timestring.Date 2014-08-15 07:20:00 4483019344>)
    ]
    """
Example No. 32
    database_user = config.database_user

    #Create bitmask used to remove duplicate rows and rows without a CIN.
    dupemask = make_duplicates_bitmask(medical_file)

    #column_names and column_specifications are used by pandas.read_fwf to
    #read Medi-Cal file.
    with open(config.db_load_info) as f:
        column_names, column_specifications, _ = zip(*json.load(f))

    #Create an iterator to read chunks of the fixed width Medi-Cal file.
    chunksize = config.chunk_size
    chunked_data_iterator = pd.read_fwf(
        medical_file,
        colspecs=column_specifications,
        names=column_names,
        converters={name: str
                    for name in column_names},
        iterator=True,
        chunksize=chunksize)

    with psycopg2.connect(database=database_name, user=database_user) as conn:
        register_adapter(float, nan_to_null)
        with conn.cursor() as cur:
            params = ((x[0], x[1], chunksize, dupemask)
                      for x in enumerate(chunked_data_iterator))
            if args.multi_process:
                multi_process_run(params)
            else:
                single_process_run(params)
Example No. 33
import numpy as np
from psycopg2.extensions import register_adapter, AsIs
from psycopg2.extras import Json


def addapt_numpy_float64(numpy_float64):
    return AsIs(numpy_float64)


def addapt_numpy_int64(numpy_int64):
    return AsIs(numpy_int64)


def addapt_numpy_float32(numpy_float32):
    return AsIs(numpy_float32)


def addapt_numpy_int32(numpy_int32):
    return AsIs(numpy_int32)


def addapt_numpy_array(numpy_array):
    return AsIs(tuple(numpy_array))


register_adapter(np.float64, addapt_numpy_float64)
register_adapter(np.int64, addapt_numpy_int64)
register_adapter(np.float32, addapt_numpy_float32)
register_adapter(np.int32, addapt_numpy_int32)
register_adapter(np.ndarray, addapt_numpy_array)
register_adapter(dict, Json)


class PostgresClient:
    """
    PostgresClient for DB related operations
    1. Load Configeration
    2. execute select
    3. execute update
    4. execute batch updates
    """
Example No. 34
def rdc(*args, **kwargs):
    kwargs['connection_factory'] = psycopg2.extras.RealDictConnection
    # this is to let everything pass through as strings
    psycopg2.extensions.string_types.clear()
    register_adapter(list, SQL_IN)
    return psycopg2.connect(*args, **kwargs)
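SQL_IN renders a Python list as a parenthesized value list, so "IN %s" works directly. A sketch (table name and DSN are illustrative):

conn = rdc("dbname=test")
cur = conn.cursor()
cur.execute("SELECT * FROM items WHERE id IN %s", ([1, 2, 3],))  # list renders as (1, 2, 3)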
Example No. 35
# http://initd.org/psycopg/docs/advanced.html#adapting-new-python-types-to-sql-syntax
# and
# http://pyopengl.sourceforge.net/pydoc/numpy.core.numerictypes.html
#
# http://numpy.sourceforge.net/numdoc/HTML/numdoc.htm

''' numpy data types:
int8 int16 int32 int64 int128
uint8 uint16 uint32 uint64 uint128
float16 float32 float64 float96 float128 float256
complex32 complex64 complex128 complex192 complex256 complex512
'''

def adapt_numpy_int8(numpy_int8):
	return AsIs(numpy_int8)
register_adapter(numpy.int8, adapt_numpy_int8)

def adapt_numpy_int16(numpy_int16):
	return AsIs(numpy_int16)
register_adapter(numpy.int16, adapt_numpy_int16)

def adapt_numpy_int32(numpy_int32):
	return AsIs(numpy_int32)
register_adapter(numpy.int32, adapt_numpy_int32)

def adapt_numpy_int64(numpy_int64):
	return AsIs(numpy_int64)
register_adapter(numpy.int64, adapt_numpy_int64)

#def adapt_numpy_int128(numpy_int128):
#	return AsIs(numpy_int128)
Example No. 36
import numpy
import psycopg2.extensions
from psycopg2.extensions import register_adapter, AsIs


def addapt_numpy_float64(numpy_float64):
    return AsIs(numpy_float64)


def addapt_numpy_int64(numpy_int64):
    return AsIs(numpy_int64)


def addapt_numpy_float32(numpy_float32):
    return AsIs(numpy_float32)


def addapt_numpy_int32(numpy_int32):
    return AsIs(numpy_int32)


def addapt_numpy_array(numpy_array):
    return AsIs(tuple(numpy_array))


register_adapter(numpy.float64, addapt_numpy_float64)
register_adapter(numpy.int64, addapt_numpy_int64)
register_adapter(numpy.float32, addapt_numpy_float32)
register_adapter(numpy.int32, addapt_numpy_int32)
register_adapter(numpy.ndarray, addapt_numpy_array)


#This handles nans (which present as floats)!
def nan_to_null(f):
    if not numpy.isnan(f) and not numpy.isinf(f):
        return psycopg2.extensions.Float(f)
    return AsIs('NULL')


register_adapter(float, nan_to_null)
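What the float adapter produces, sketched with adapt() after the registration above (no connection needed):

from psycopg2.extensions import adapt

print(adapt(1.5).getquoted())           # b'1.5'
print(adapt(float("nan")).getquoted())  # b'NULL'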
Example No. 37
from enum import Enum
import io
import json
import logging
import os
import pandas as pd
import psycopg2
from psycopg2 import pool
from psycopg2.extras import register_json, Json
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, register_adapter
import sys
from typing import List, Tuple, Dict

logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)

register_adapter(dict, Json)


class PostgresType(Enum):
    TEXT = "TEXT"
    JSONB = "JSONB"
    INT = "INTEGER"
    DEC = "DECIMAL(15, 2)"
    SMALL_DEC = "DECIMAL(2,1)"
    BIGINT = "BIGINT"
    TIMESTAMP = "TIMESTAMP"
    DATE = "DATE"


def to_date(x):
    try:
Example No. 38
)
import random
import pandas as pd
import numpy
from psycopg2.extensions import register_adapter, AsIs


def addapt_numpy_float64(numpy_float64):
    return AsIs(numpy_float64)


def addapt_numpy_int64(numpy_int64):
    return AsIs(numpy_int64)


register_adapter(numpy.float64, addapt_numpy_float64)
register_adapter(numpy.int64, addapt_numpy_int64)

author = 'Your name here'

doc = """
Your app description
"""


class Constants(BaseConstants):
    name_in_url = 'p55l50_reverse'
    players_per_group = None
    num_rounds = 64

Example No. 39
import flask
import pickle
import pandas as pd
from flask import Flask, render_template, request, redirect, url_for
from flask import *
from datetime import datetime
from dbModel import *
import numpy
from psycopg2.extensions import register_adapter, AsIs


def adapt_numpy_int64(numpy_int64):
    return AsIs(numpy_int64)


register_adapter(numpy.int64, adapt_numpy_int64)

# Use pickle to load in the pre-trained model
with open(f'model/ckd_random_red.pkl', 'rb') as f:
    model = pickle.load(f)

# Initialise the Flask app
app = flask.Flask(__name__, template_folder='templates')


@app.route('/')
def index():
    return render_template('login.html')


# Route for handling the login page logic
Example No. 40
def register_hstore(conn_or_curs,
                    globally=False,
                    unicode=False,
                    oid=None,
                    array_oid=None):
    r"""Register adapter and typecaster for `!dict`\-\ |hstore| conversions.

    :param conn_or_curs: a connection or cursor: the typecaster will be
        registered only on this object unless *globally* is set to `!True`
    :param globally: register the adapter globally, not only on *conn_or_curs*
    :param unicode: if `!True`, keys and values returned from the database
        will be `!unicode` instead of `!str`. The option is not available on
        Python 3
    :param oid: the OID of the |hstore| type if known. If not, it will be
        queried on *conn_or_curs*.
    :param array_oid: the OID of the |hstore| array type if known. If not, it
        will be queried on *conn_or_curs*.

    The connection or cursor passed to the function will be used to query the
    database and look for the OID of the |hstore| type (which may be different
    across databases). If querying is not desirable (e.g. with
    :ref:`asynchronous connections <async-support>`) you may specify it in the
    *oid* parameter, which can be found using a query such as :sql:`SELECT
    'hstore'::regtype::oid`. Analogously you can obtain a value for *array_oid*
    using a query such as :sql:`SELECT 'hstore[]'::regtype::oid`.

    Note that, when passing a dictionary from Python to the database, both
    strings and unicode keys and values are supported. Dictionaries returned
    from the database have keys/values according to the *unicode* parameter.

    The |hstore| contrib module must be already installed in the database
    (executing the ``hstore.sql`` script in your ``contrib`` directory).
    Raise `~psycopg2.ProgrammingError` if the type is not found.
    """
    if oid is None:
        oid = HstoreAdapter.get_oids(conn_or_curs)
        if oid is None or not oid[0]:
            raise psycopg2.ProgrammingError(
                "hstore type not found in the database. "
                "please install it from your 'contrib/hstore.sql' file")
        else:
            array_oid = oid[1]
            oid = oid[0]

    if isinstance(oid, int):
        oid = (oid, )

    if array_oid is not None:
        if isinstance(array_oid, int):
            array_oid = (array_oid, )
        else:
            array_oid = tuple([x for x in array_oid if x])

    # create and register the typecaster
    if _sys.version_info[0] < 3 and unicode:
        cast = HstoreAdapter.parse_unicode
    else:
        cast = HstoreAdapter.parse

    HSTORE = _ext.new_type(oid, "HSTORE", cast)
    _ext.register_type(HSTORE, not globally and conn_or_curs or None)
    _ext.register_adapter(dict, HstoreAdapter)

    if array_oid:
        HSTOREARRAY = _ext.new_array_type(array_oid, "HSTOREARRAY", HSTORE)
        _ext.register_type(HSTOREARRAY, not globally and conn_or_curs or None)
Example No. 41
from psycopg2.extensions import adapt, AsIs, register_adapter

# This determines how many releases for a given color are fetched and then randomly chosen from.
# If COUNT releases are requested, we use COUNT * INTERMEDIARY_COUNT_MULTIPLIER to create
# the possible set to choose from. If this number is too large, we may get too much color variability
# in the results and if it is too small, we get no variability at all.
INTERMEDIARY_COUNT_MULTIPLIER = 4


def adapt_cube(cube):
    """ Function required by Postgres for inserting/searching cube extension colors """
    return AsIs("'(%s, %s, %s)'" %
                (adapt(cube.red), adapt(cube.green), adapt(cube.blue)))


register_adapter(ColorCube, adapt_cube)


def get_releases_for_color(red: int, green: int, blue: int,
                           count: int) -> List[ColorResult]:
    """ Fetch matching releases, their euclidian distance in RGB space and the
        release_name and artist_name for the returned releases.

        Args:
          red, green, blue: ints for each of the red, green and blue color components.
          count: int -- the number of matches to return
        Returns:
          A list of ColorResult objects.
    """

    query = """SELECT release_mbid
Example No. 42
from psycopg2._psycopg import DateFromTicks, TimeFromTicks, TimestampFromTicks

from psycopg2._psycopg import Error, Warning, DataError, DatabaseError, ProgrammingError
from psycopg2._psycopg import IntegrityError, InterfaceError, InternalError
from psycopg2._psycopg import NotSupportedError, OperationalError

from psycopg2._psycopg import _connect, apilevel, threadsafety, paramstyle
from psycopg2._psycopg import __version__

from psycopg2 import tz


# Register default adapters.

import psycopg2.extensions as _ext
_ext.register_adapter(tuple, _ext.SQL_IN)
_ext.register_adapter(type(None), _ext.NoneAdapter)

# Register the Decimal adapter here instead of in the C layer.
# This way a new class is registered for each sub-interpreter.
# See ticket #52
try:
    from decimal import Decimal
except ImportError:
    pass
else:
    from psycopg2._psycopg import Decimal as Adapter
    _ext.register_adapter(Decimal, Adapter)
    del Decimal, Adapter

import re
Example No. 43
    fanficsme = 18
    royalroadl = 19
    wavesarisen = 20
    sugarquill = 21
    bulbagarden = 22
    thefanfictionforum = 23
    fanficparadisesfw = 24
    fanficparadisensfw = 25
    wanderinginn = 26


def adaptFicType(ftype: FicType) -> AsIs:
    return AsIs(int(ftype))


register_adapter(FicType, adaptFicType)

# map FicType => Adapter
adapters: Dict[FicType, Optional['Adapter']] = {
    FicType.broken: None,
}


def getAdapter(ficType: FicType) -> 'Adapter':
    adapter = adapters[ficType]
    if adapter is not None:
        return adapter
    raise Exception(f'missing adapter for {ficType}')


class FicId:
Example No. 44
import os
import luigi
from datetime import date
from psycopg2.extensions import register_adapter
from utils import PSQLConn,create_hourly_counts_table,create_pca_input_table,find_principal_components,extract_large_pca_components
from utils import QuotedIdentifier
from utils import initialize_user_defined_functions, test_for_nulls

cred = PSQLConn(os.getenv("GPDB_DATABASE"),
                os.getenv("GPDB_USER"),
                os.getenv("GPDB_PASSWORD"),
                os.getenv("GPDB_HOST"),
                os.getenv("GPDB_PORT")
                )

register_adapter(QuotedIdentifier, lambda x: x)

class DatabaseConfig(luigi.Config):
    base_table=luigi.Parameter()
    feature_input_table=luigi.Parameter()
    pca_input_base_table=luigi.Parameter()
    pca_output_base_table=luigi.Parameter()
    outlier_base_table=luigi.Parameter()

class ModelConfig(luigi.Config):
    user_col=luigi.Parameter()
    percentage_val=luigi.FloatParameter()
    threshold = luigi.FloatParameter()

#some default params
TARGET_PATH=os.path.join(os.path.dirname(__file__),'target/{feature}_{date}'.format(
Example No. 45
def database(env, tell_sentry):
    dburl = env.database_url
    maxconn = env.database_maxconn
    try:
        db = DB(dburl, maxconn=maxconn, cursor_factory=SimpleRowCursor)
    except psycopg2.OperationalError as e:
        tell_sentry(e, {}, allow_reraise=False)
        db = NoDB()

    itemgetter0 = itemgetter(0)

    def back_as_Object(cols, vals):
        return Object(zip(map(itemgetter0, cols), vals))

    db.back_as_registry[Object] = db.back_as_registry[
        'Object'] = back_as_Object

    models = (
        _AccountElsewhere,
        AccountElsewhere,
        _Community,
        Community,
        Encrypted,
        ExchangeRoute,
        Participant,
        Payin,
        Repository,
        Tip,
    )
    for model in models:
        db.register_model(model)
        setattr(db, model.__name__, model)
    liberapay.billing.payday.Payday.db = db

    def adapt_set(s):
        return adapt(tuple(s))

    register_adapter(set, adapt_set)

    def adapt_money(m):
        return AsIs('(%s,%s)::currency_amount' %
                    (adapt(m.amount), adapt(m.currency)))

    register_adapter(Money, adapt_money)

    def cast_currency_amount(v, cursor):
        return None if v in (None, '(,)') else Money(*v[1:-1].split(','))

    try:
        oid = db.one("SELECT 'currency_amount'::regtype::oid")
        register_type(
            new_type((oid, ), 'currency_amount', cast_currency_amount))
    except (psycopg2.ProgrammingError, NeedDatabase):
        pass

    def adapt_money_basket(b):
        return AsIs("_wrap_amounts('%s'::jsonb)" % json.dumps(
            {k: str(v)
             for k, v in b.amounts.items() if v}).replace("'", "''"))

    register_adapter(MoneyBasket, adapt_money_basket)

    def cast_currency_basket(v, cursor):
        if v is None:
            return None
        parts = v[1:-1].split(',', 2)
        if len(parts) == 2:
            eur, usd = parts
            obj = None
        else:
            eur, usd, obj = parts
        if obj:
            amounts = json.loads(obj[1:-1].replace('""', '"') if obj[0] ==
                                 '"' else obj)
            amounts = {k: Decimal(str(v)) for k, v in amounts.items()}
        else:
            amounts = {}
            if eur:
                amounts['EUR'] = Decimal(eur)
            if usd:
                amounts['USD'] = Decimal(usd)
        return MoneyBasket(**amounts)

    try:
        oid = db.one("SELECT 'currency_basket'::regtype::oid")
        register_type(
            new_type((oid, ), 'currency_basket', cast_currency_basket))
    except (psycopg2.ProgrammingError, NeedDatabase):
        pass

    use_qc = not env.override_query_cache
    qc1 = QueryCache(db, threshold=(1 if use_qc else 0))
    qc5 = QueryCache(db, threshold=(5 if use_qc else 0))

    return {'db': db, 'db_qc1': qc1, 'db_qc5': qc5}
Example No. 46
            # objects.
            lower = adapt(r.lower).getquoted().decode('ascii')
        else:
            lower = ''

        if not r.upper_inf:
            upper = adapt(r.upper).getquoted().decode('ascii')
        else:
            upper = ''

        return ("'%s%s,%s%s'" %
                (r._bounds[0], lower, upper, r._bounds[1])).encode('ascii')


# TODO: probably won't work with infs, nans and other tricky cases.
register_adapter(NumericRange, NumberRangeAdapter)

# Register globally typecasters and adapters for builtin range types.

# note: the adapter is registered more than once, but this is harmless.
int4range_caster = RangeCaster(NumberRangeAdapter,
                               NumericRange,
                               oid=3904,
                               subtype_oid=23,
                               array_oid=3905)
int4range_caster._register()

int8range_caster = RangeCaster(NumberRangeAdapter,
                               NumericRange,
                               oid=3926,
                               subtype_oid=20,
Example No. 47
    def _register(self, scope=None):
        register_type(self.typecaster, scope)
        if self.array_typecaster is not None:
            register_type(self.array_typecaster, scope)

        register_adapter(self.range, self.adapter)
Example No. 48
# -*- coding: utf-8 -*-
import psycopg2
from psycopg2.extensions import register_adapter
from huesound import config
from huesound.utils import render_template, render_json, expose, validate_url, url_for
from huesound import cube

register_adapter(cube.Cube, cube.adapt_cube)


def get_images(color, count):

    try:
        conn = psycopg2.connect(config.PG_CONNECT)
        cur = conn.cursor()
    except psycopg2.OperationalError as err:
        print "Cannot connect to database: %s" % err
        exit()

    red = int(color[0:2], 16)
    green = int(color[2:4], 16)
    blue = int(color[4:6], 16)

    query = '''SELECT album_key, icon_url, embed_url 
                     FROM color_rdio_cube 
                 ORDER BY cube_distance(color, %s) 
                    LIMIT %s'''
    data = (cube.Cube(red, green, blue), count)
    cur.execute(query, data)

    result = []
Example No. 49
# psycopg2 plumbing to get automatic adaptation
def adaptLabel(label):
    return AsIs("(%s, %s)::label" %
                (adapt(label.type_), adapt(label.labelValue())))


def adaptSecuritAttribute(label):
    return AsIs("(%s, %s)::security_attribute" %
                (adapt(label.type_), adapt(label.value)))


def adaptDatetime(dt):
    return AsIs("%s" % adapt(dt.isoformat()))


register_adapter(nsa.Label, adaptLabel)
register_adapter(nsa.SecurityAttribute, adaptSecuritAttribute)
register_adapter(datetime.datetime, adaptDatetime)


class LabelComposite(CompositeCaster):
    def make(self, values):
        return nsa.Label(*values)


class SecuritAttributeComposite(CompositeCaster):
    def make(self, values):
        return nsa.SecurityAttribute(*values)


def castDatetime(value, cur):
Example No. 50
                           backref=db.backref('detections', lazy='dynamic'))

    @property
    def url(self):
        return url_for("detect", id=self.id)

    def __repr__(self):
        return model_debug(self)


# Convenience routine simple repr implementation of models.
def model_debug(m):
    id = m.id
    c = dict.copy(m.__dict__)
    del c['_sa_instance_state']
    if 'id' in c.keys():
        del c['id']
    return type(m).__name__ + "#" + str(id) + ":" + str(c)


# This adapts (1-dimensional) numpy arrays to Postgres
# We should make it do n-dimensionals eventually.
from psycopg2.extensions import register_adapter, AsIs


def adapt_numpy(arr):
    return AsIs(",".join([str(f) for f in arr]))


register_adapter(np.ndarray, adapt_numpy)
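The adapter emits a bare comma-separated list (no braces), so it is only meaningful inside a surrounding literal. A sketch of the rendering after the registration above:

import numpy as np
from psycopg2.extensions import adapt

print(adapt(np.array([1.0, 2.5])).getquoted())  # b'1.0,2.5'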
Example No. 51
class dataParser(object):
    # Defined without `self`: psycopg2 calls the adapter with the ndarray as
    # its only argument, so a bound method would raise a TypeError here.
    def addapt_numpy_array(numpy_array):
        return AsIs(tuple(numpy_array))

    register_adapter(np.ndarray, addapt_numpy_array)

    def start(self, filePath, file):
        tempDir = str(
            pathlib.Path().absolute()) + '/temp_uploaded_patient_data/'
        fileNameNoExtension = file._name.split('.')[0]
        dataParser.getData(self, fileNameNoExtension, tempDir)

    # TODO: Parse MIT sample data
    def getData(self, fileNameNoExtension, filePath):

        # build path to specific record
        tempFilePath = filePath + fileNameNoExtension
        df_columns = [
            "record_name", "n_sig", "fs", "counter_freq", "base_counter",
            "sig_len", "base_time", "base_date", "comments", "sig_name",
            "d_signal", "e_p_signal", "file_name", "fmt", "samps_per_frame",
            "skew", "byte_offset", "adc_gain", "baseline", "units", "adc_res",
            "adc_zero", "init_value", "checksum", "block_size",
            "has_annotations"
        ]

        formatted_patient_DF = pd.DataFrame(index=np.arange(1),
                                            columns=df_columns)

        # get specific record from MIT data set
        record = wfdb.rdrecord(tempFilePath)

        # get all attributes of record object and store in attributes
        attributes = vars(record)

        # initialize record value for later use
        recordValue = 0

        # will hold time series data for patient signal and mv
        p_signal_time = []
        signal_name = ""

        # iterate through attributes
        for attr in attributes:
            # example of selecting specific data segment from record
            # if attr == "checksum":
            if attr == "record_name":
                recordValue = getattr(record, attr)
                formatted_patient_DF['record_name'].values[0] = getattr(
                    record, attr)

            if attr == "n_sig":
                formatted_patient_DF['n_sig'].values[0] = getattr(record, attr)

            if attr == "fs":
                formatted_patient_DF['fs'].values[0] = getattr(record, attr)

            if attr == "counter_freq":
                formatted_patient_DF['counter_freq'].values[0] = getattr(
                    record, attr)

            if attr == "base_counter":
                formatted_patient_DF['base_counter'].values[0] = getattr(
                    record, attr)

            if attr == "sig_len":
                formatted_patient_DF['sig_len'].values[0] = getattr(
                    record, attr)

            if attr == "base_time":
                formatted_patient_DF['base_time'].values[0] = getattr(
                    record, attr)

            if attr == "base_date":
                formatted_patient_DF['base_date'].values[0] = getattr(
                    record, attr)

            if attr == "comments":
                formatted_patient_DF['comments'].values[0] = getattr(
                    record, attr)

            if attr == "sig_name":
                formatted_patient_DF['sig_name'].values[0] = getattr(
                    record, attr)
                signal_name = getattr(record, attr)

            if attr == "p_signal":
                p_signal_time = getattr(record, attr)

            if attr == "d_signal":
                formatted_patient_DF['d_signal'].values[0] = getattr(
                    record, attr)

            if attr == "e_p_signal":
                formatted_patient_DF['e_p_signal'].values[0] = getattr(
                    record, attr)

            if attr == "file_name":
                formatted_patient_DF['file_name'].values[0] = getattr(
                    record, attr)

            if attr == "fmt":
                formatted_patient_DF['fmt'].values[0] = getattr(record, attr)

            if attr == "samps_per_frame":
                formatted_patient_DF['samps_per_frame'].values[0] = getattr(
                    record, attr)

            if attr == "skew":
                formatted_patient_DF['skew'].values[0] = getattr(record, attr)

            if attr == "byte_offset":
                formatted_patient_DF['byte_offset'].values[0] = getattr(
                    record, attr)

            if attr == "adc_gain":
                formatted_patient_DF['adc_gain'].values[0] = getattr(
                    record, attr)

            if attr == "baseline":
                formatted_patient_DF['baseline'].values[0] = getattr(
                    record, attr)

            if attr == "units":
                formatted_patient_DF['units'].values[0] = getattr(record, attr)

            if attr == "adc_res":
                formatted_patient_DF['adc_res'].values[0] = getattr(
                    record, attr)

            if attr == "adc_zero":
                formatted_patient_DF['adc_zero'].values[0] = getattr(
                    record, attr)

            if attr == "init_value":
                formatted_patient_DF['init_value'].values[0] = getattr(
                    record, attr)

            if attr == "checksum":
                formatted_patient_DF['checksum'].values[0] = getattr(
                    record, attr)

            if attr == "block_size":
                formatted_patient_DF['block_size'].values[0] = getattr(
                    record, attr)

        timeDF = dataParser.extractTimeData(self, p_signal_time, signal_name,
                                            recordValue)
        dataParser.extractAnnotations(self, filePath, timeDF,
                                      fileNameNoExtension)
        dataParser.writePatientCSV(self, formatted_patient_DF, recordValue,
                                   'All_Attributes_', 0)
        dataParser.insertInDatabase(self, filePath, fileNameNoExtension)

    def insertInDatabase(self, filePath, recordNum):
        dbConn = psycopg2.connect(host="localhost",
                                  port="5432",
                                  dbname="patient_db",
                                  user="******",
                                  password="******")

        dbCursor = dbConn.cursor()

        attrFile = filePath + "All_Attributes_" + recordNum + ".csv"
        signalFile = filePath + "TimeData_" + recordNum + ".csv"
        patientAttributes = [
            "record_name", "n_sig", "fs", "counter_freq", "base_counter",
            "sig_len", "base_time", "base_date", "comments", "sig_name",
            "d_signal", "e_p_signal", "file_name", "fmt", "samps_per_frame",
            "skew", "byte_offset", "adc_gain", "baseline", "units", "adc_res",
            "adc_zero", "init_value", "checksum", "block_size",
            "has_annotations"
        ]

        signalAttributes = [
            "time", "mlii", "v5", "signal_record_name_id", "annotation"
        ]

        # try to open file containing patients attribute data
        try:
            attrFileData = open(attrFile, 'r')

        # check for error opening attribute file
        except psycopg2.Error as e:
            print("\nerror opening file %s: %s\n" % (attrFile, e))
            return

        # Insert attribute data into patientdb_patient table
        try:
            print(
                "\ninserting patient %s's attributes into patientdb_patient...............\n"
                % recordNum)
            dbCursor.copy_from(attrFileData,
                               "patientdb_patient",
                               columns=patientAttributes,
                               sep="|")
            dbConn.commit()

        # Check for errors inserting attribute data
        except psycopg2.Error as e:
            print("\nerror inserting into patientdb_patient: %s\n" % e)
            return

        # check for error opening signals file
        try:
            signalFileData = open(signalFile, 'r')

        # check for error opening attribute file
        except psycopg2.Error as e:
            print("\nerror opening file %s: %s\n" % (signalFile, e))
            return

        # Insert attribute data into patientdb_signals table
        try:
            print(
                "\ninserting patient %s's signal data into patientdb_signals...............\n"
                % recordNum)
            dbCursor.copy_from(signalFileData,
                               "patientdb_signals",
                               columns=signalAttributes,
                               sep=",")
            dbConn.commit()

        # Check for errors inserting signal data
        except psycopg2.Error as e:
            print("\nerror inserting into patientdb_signals inserting: %s\n" %
                  e)
            return

        # Close database connection
        dbCursor.close()
        dbConn.close()

    def extractAnnotations(self, filePath, timeDF, recordNumber):
        # build path to annotations
        newFilePath = filePath + recordNumber

        # get specific record from MIT data set
        ann = wfdb.rdann(newFilePath, 'atr')

        # get all attributes of record object and store in attributes
        attributes = vars(ann)

        dfCounter = 0
        annoCounter = 0
        lenAnno = len(attributes["sample"])

        while annoCounter < lenAnno:
            annoIndex = attributes["sample"][annoCounter]
            tempTime = 0.0027777777777777777777777777 * annoIndex
            annoTime = round(tempTime, 3)

            if annoTime == timeDF["time"][dfCounter]:
                # print("\nannoTime: %f\ttimeDF time: %f\n" % (annoTime, timeDF["time"][dfCounter]))
                timeDF["annotation"][dfCounter] = attributes["symbol"][
                    annoCounter]
                annoCounter += 1

            dfCounter += 1

        dataParser.writePatientCSV(self, timeDF, recordNumber, "TimeData_", 1)

    def extractTimeData(self, p_signal_time, signal_name, record_value):
        #df_columns = dataParser.buildTimeDFCol(self, signal_name)
        df_columns = [
            'time', 'mlii', 'v5', 'signal_record_name_id', 'annotation'
        ]
        formatted_pt_time_DF = pd.DataFrame(index=np.arange(650000),
                                            columns=df_columns)
        time = 0.0
        tempTime = 0.0
        count = 0

        for element in p_signal_time:
            formatted_pt_time_DF["signal_record_name_id"].values[
                count] = record_value
            formatted_pt_time_DF["time"].values[count] = time

            formatted_pt_time_DF[df_columns[1]].values[count] = element[0]

            formatted_pt_time_DF[df_columns[2]].values[count] = float(
                element[1])
            tempTime += 0.0027777777777777777777777777
            time = round(tempTime, 3)
            count += 1
        return formatted_pt_time_DF

    # will dynamically build list of columns for signals of patient record
    def buildTimeDFCol(self, signalNames):
        df_columns = ["time"]
        count = 1

        for element in signalNames:
            df_columns.append(element)
            count += 1

        df_columns.append("signal_record_name_id")
        df_columns.append("annotation")
        i = 0
        while i < len(df_columns):
            df_columns[i] = df_columns[i].lower()
            i += 1

        for x in df_columns:
            print('\nx: %s\n' % x)

        return df_columns

    def writePatientCSV(self, patientData_DF, rec_num, fileName,
                        signalOrPatient):

        # getting current working directory where patient data will be written
        tempDir = str(
            pathlib.Path().absolute()) + '/temp_uploaded_patient_data/'

        # filename
        ptFormattedFile = tempDir + fileName + str(rec_num) + ".csv"

        # when signalOrPatient is equal to 1, then it is the signal data frame so use comma as delimiter
        if signalOrPatient == 1:
            # write formatted patient data to new csv file
            patientData_DF.to_csv(ptFormattedFile,
                                  sep=',',
                                  encoding='utf-8',
                                  index=False,
                                  header=False,
                                  quoting=0)

        # when signalOrPatient is equal to 0, then it is the patient attributes data frame so use | as delimiter
        if signalOrPatient == 0:
            # write formatted signal data to new csv file
            patientData_DF.to_csv(ptFormattedFile,
                                  sep='|',
                                  encoding='utf-8',
                                  index=False,
                                  header=False,
                                  quoting=0)
Example No. 52
import numpy
from psycopg2.extensions import register_adapter, AsIs


def addapt_numpy_float32(numpy_float32):
    return AsIs(numpy_float32)


register_adapter(numpy.float32, addapt_numpy_float32)


def addapt_numpy_float64(numpy_float64):
    return AsIs(numpy_float64)


register_adapter(numpy.float64, addapt_numpy_float64)
Example No. 53
import datetime
import logging
from config_decider import config as settings
import psycopg2
from psycopg2.extensions import SQL_IN, register_adapter
from dashboard.exceptions import ShortRetryException

register_adapter(list, SQL_IN)

insert_article_sql = 'insert into article (article_identifier) values (%s) returning article_id'

#store_message_sql = 'insert into message(message_id,timestamp) values (%s, %s)'

store_message_sql = 'insert into message(message_id,timestamp) ' \
    'select %s, %s where not EXISTS ' \
    '(select message_id from message where message_id = %s)'

store_event_sql = 'insert into event (version, run, type, status, timestamp, message, article_id) ' \
                  'select %s, %s, %s, %s, %s, %s, %s where not exists ' \
                  '(select message_id from message where message_id = %s)'


store_property_sql = 'insert into property (property_type, name, int_value, date_value, text_value, article_id, ' \
                     ' version) select %s, %s, %s, %s, %s, %s, %s where not exists ' \
                     '(select message_id from message where message_id = %s)'

update_property_sql = 'update property set property_type=%s, int_value=%s, date_value=%s, text_value = %s, ' \
                      ' version=%s where property_id = %s'

property_id_sql = 'select property_id  from property where name=%s and article_id=%s and version=%s '
Example No. 54
from trac.util.compat import close_fds
from trac.util.html import Markup
from trac.util.text import empty, exception_to_unicode, to_unicode
from trac.util.translation import _

try:
    import psycopg2 as psycopg
    import psycopg2.extensions
    from psycopg2 import DataError, ProgrammingError
    from psycopg2.extensions import register_type, UNICODE, \
                                    register_adapter, AsIs, QuotedString
except ImportError:
    raise DistributionNotFound('psycopg2>=2.0 or psycopg2-binary', ['Trac'])
else:
    register_type(UNICODE)
    register_adapter(Markup, lambda markup: QuotedString(str(markup)))
    register_adapter(type(empty), lambda empty: AsIs("''"))
    psycopg2_version = get_pkginfo(psycopg).get('version', psycopg.__version__)
    _libpq_pathname = None
    if not hasattr(psycopg, 'libpq_version'):
        # search path of libpq only if it is dynamically linked
        _f = _match = None
        try:
            with open(psycopg._psycopg.__file__, 'rb') as _f:
                if os.name != 'nt':
                    _match = re.search(
                        r'''
                            \0(
                            (?:/[^/\0]+)*/?
                            libpq\.(?:so\.[0-9]+|[0-9]+\.dylib)
                            )\0
Example No. 55
    def __init__(self, field, items):
        self.field = field
        self.items = items
        super(_Array, self).__init__()


def adapt_array(arr):
    conn = arr.field.model_class._meta.database.get_conn()
    items = adapt(arr.items)
    items.prepare(conn)
    return AsIs(
        '%s::%s%s' %
        (items, arr.field.get_column_type(), '[]' * arr.field.dimensions))


register_adapter(_Array, adapt_array)


class IndexedFieldMixin(object):
    default_index_type = 'GiST'

    def __init__(self, index_type=None, *args, **kwargs):
        kwargs.setdefault('index', True)  # By default, use an index.
        super(IndexedFieldMixin, self).__init__(*args, **kwargs)
        self.index_type = index_type or self.default_index_type


class ArrayField(IndexedFieldMixin, Field):
    default_index_type = 'GIN'

    def __init__(self,
Example No. 56
        """Return a UUID object."""
        if isinstance(value, self._coerce_to) or not value:
            return value
        return self._coerce_to(value)

    @property
    def _auto_add_str(self):
        """Return a dot path, as a string, of the `_auto_add` callable.
        If `_auto_add` is a boolean, return it unchanged.
        """
        if isinstance(self._auto_add, bool):
            return self._auto_add
        return "%s:%s" % (self._auto_add.__module__, self._auto_add.__name__)


class UUIDAdapter(object):
    def __init__(self, value):
        if not isinstance(value, UUID):
            raise TypeError("UUIDAdapter only understands UUID objects.")
        self.value = value

    def getquoted(self):
        return ("'%s'" % self.value).encode("utf8")


if hasattr(models, "SubfieldBase"):
    UUIDField = six.add_metaclass(models.SubfieldBase)(UUIDField)

# Register the UUID type with psycopg2.
register_adapter(UUID, UUIDAdapter)
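With the adapter registered, adapt() quotes UUIDs as string literals. A sketch:

from uuid import UUID
from psycopg2.extensions import adapt

u = UUID("12345678-1234-5678-1234-567812345678")
print(adapt(u).getquoted())  # b"'12345678-1234-5678-1234-567812345678'"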
Example No. 57
    return Enum(map=dict([(arg, unicode(arg)) for arg in args]), **kwargs)


class _UUIDVariable(Variable):
    """an UUID column kind"""
    __slots__ = ()

    def parse_set(self, value, from_db):
        """parse the data"""
        if isinstance(value, str):
            value = uuid.UUID(value)
        elif not isinstance(value, uuid.UUID):
            raise TypeError("Expected UUID, found %r: %r"
                            % (type(value), value))
        return value


class StormUUID(SimpleProperty):
    """A property type for handling UUIDs in Storm.

    >>> class Foo(object):
    >>>   id = StormUUID(primary=True)
    """
    variable_class = _UUIDVariable


def adapt_uuid(uu):
    """what to do when an uuid is found"""
    return AsIs("'%s'" % str(uu))
register_adapter(uuid.UUID, adapt_uuid)
Example No. 58
# -*- coding: utf-8 -*-
import json
import logging
import os
import time
import psycopg2
import psycopg2.extras as pg_extras
import psycopg2.extensions as pg_extensions
import kafka
import kafka.errors

# Register jsonb extras to convert jsonb data to dict transparently
pg_extras.register_default_jsonb(globally=True)
pg_extensions.register_adapter(dict, psycopg2.extras.Json)

DB_DSN = os.getenv('DB_DSN', 'postgresql://*****:*****@localhost/meetup')
KAFKA_SERVER = os.getenv('KAFKA_SERVER', 'localhost:9092')
TOPIC = os.getenv('KAFKA_TOPIC', 'my_topic')

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s %(levelname)s [%(name)s] %(message)s',
)
logger = logging.getLogger(__name__)


def get_consumer():
    """Factory method to get KafkaConsumer instance with retries logic

    :return: KafkaConsumer instance
    """
Example No. 59
        return JSONAllKeyExist(Cast(column, 'jsonb'), keys)

    def json_contains(self, column, json):
        return JSONContains(Cast(column, 'jsonb'), Cast(json, 'jsonb'))


register_type(UNICODE)
if PYDATE:
    register_type(PYDATE)
if PYDATETIME:
    register_type(PYDATETIME)
if PYTIME:
    register_type(PYTIME)
if PYINTERVAL:
    register_type(PYINTERVAL)
register_adapter(float, lambda value: AsIs(repr(value)))
register_adapter(Decimal, lambda value: AsIs(str(value)))


def convert_json(value):
    from trytond.protocols.jsonrpc import JSONDecoder
    return json.loads(value, object_hook=JSONDecoder())


register_default_json(loads=convert_json)
register_default_jsonb(loads=convert_json)

if is_gevent_monkey_patched():
    from psycopg2.extensions import set_wait_callback
    from psycopg2.extras import wait_select
    set_wait_callback(wait_select)
Example No. 60
from .utils.currency import Balance
from psycopg2.extras import register_composite
from psycopg2.extensions import register_adapter, adapt, AsIs
from . import managers

MoneyValue = register_composite('money_value',
                                connection.cursor().cursor,
                                globally=True).type


def moneyvalue_adapter(value):
    return AsIs("(%s,%s)::money_value" %
                (adapt(value.amount), adapt(value.currency.code)))


register_adapter(Money, moneyvalue_adapter)


class MoneyValueField(models.Field):
    description = "wrapper for money_value composite type in postgres"

    def from_db_value(self, value, expression, connection):
        if value is None:
            return value
        return Money(value.amount, value.currency)

    def to_python(self, value):
        if isinstance(value, Money):
            return value
        if value is None:
            return value
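For reference, moneyvalue_adapter above renders a Money value as a composite literal, roughly (10.00,'USD')::money_value. A sketch, assuming a py-moneyed-style Money class:

from decimal import Decimal
from moneyed import Money
from psycopg2.extensions import adapt

m = Money(Decimal("10.00"), "USD")
print(adapt(m).getquoted())  # approximately b"(10.00,'USD')::money_value"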