def get_table(self, table=None, except_=True, as_cls=False, reflect=True,
              schema=None):
    '''Look up a sqlalchemy Table, optionally reflecting it from the db.

    :param table: table name or an existing Table instance; defaults
                  to the configured table
    :param except_: raise if no Table can be found
    :param as_cls: return a declarative class bound to the table
                   instead of the Table itself
    :param reflect: attempt autotable()/meta_reflect() lookups when
                    the table is not already in metadata
    :param schema: schema definition used to build the full Table
                   (including custom column types); defaults to the
                   configured schema
    '''
    schema = schema or self.config.get('schema')
    if table is None:
        table = self.config.get('table')
    if isinstance(table, Table):
        # this is already the table we're looking for...
        _table = table
    else:
        is_defined(table, 'table must be defined!')
        _table = self.meta_tables.get(table)
        if reflect:
            if _table is None and schema:
                # if we have a schema, try to load the full
                # Table class definition
                _table = self.autotable(schema=schema)
            if _table is None:
                # this provides us ONLY with the SQL defined Table,
                # which might not include custom Types, etc (JSONType,
                # UTCEpoch, etc) but rather only the underlying SQL
                # types (Text, Float, etc); would need to call autotable...
                self.meta_reflect()
                _table = self.meta_tables.get(table)
    if except_:
        is_true(isinstance(_table, Table),
                'table (%s) not found! Got: %s' % (table, _table))
    if isinstance(_table, Table) and as_cls:
        # wrap the Table in a declarative class; autoload pulls the
        # column definitions from the live db
        defaults = dict(__tablename__=table, autoload=True)
        _table = type(str(table), (self.Base,), defaults)
    # NOTE: the original trailing `elif _table is not None: pass /
    # else: _table = None` was a pure no-op and has been removed
    return _table
def autotable(self, name=None, schema=None, objects=None, create=True,
              except_=False, **kwargs):
    '''Build (and optionally create in the db) a Table from a schema.

    Loads a sqla.Table into metadata so sessions act as expected,
    unless it's already there, of course.

    :param name: table name; defaults to the configured table
    :param schema: schema dict; autoschema()'d from objects if None
    :param objects: objects used to derive a schema when none given
    :param create: issue CREATE TABLE when the table isn't in the db
    :param except_: re-raise create failures / lookup failures
    '''
    name = name or self.config.get('table')
    schema = schema or self.config.get('schema')
    is_defined(name, 'table name must be defined')
    if name not in self.meta_tables:
        if schema is None:
            schema = self.autoschema(objects=objects, **kwargs)
        table = schema2table(name=name, schema=schema, Base=self.Base,
                             type_map=self.type_map,
                             exclude_keys=self.RESTRICTED_KEYS)
        try:
            if create and name not in self.db_tables:
                table.__table__.create()
        except Exception as e:
            # single consolidated failure log (was logged twice);
            # re-raise only when the caller asked for it
            logger.error('Create Table %s: FAIL (%s)' % (name, e))
            if except_:
                raise
        else:
            # success is not an error condition; log at debug level
            logger.debug('Create Table %s: OK' % name)
    # always (re)fetch through get_table so the caller gets the
    # canonical metadata-backed Table (or None, per except_)
    table = self.get_table(name, except_=except_)
    return table
def _sqlite_path(self):
    '''Return the absolute path of this config's sqlite db file.'''
    db = self.config.get('db')
    is_defined(db, "db can not be null!")
    cache_dir = self.config.get('cache_dir')
    # sqlite files live under cache_dir as <db>.sqlite
    filename = '%s.sqlite' % db
    return os.path.join(cache_dir, filename)
def db_columns(self, table=None):
    '''Return the sorted column names of the given table.

    Falls back to the configured table when none is passed.
    '''
    table = table or self.config.get('table')
    is_defined(table, 'table name required; got %s' % table)
    dsn = self.config.get('db_schema')
    inspected = self.inspector.get_columns(table, dsn)
    return sorted(col['name'] for col in inspected)
def proxy_init(self):
    # Instantiate the backend data-source proxy for this container,
    # passing through the container's proxy_config.
    is_defined(self.name, "name can not be null!")
    config = self.proxy_config
    # make sure we pass along the current schema definition
    config['schema'] = self.schema
    if self._proxy is None:
        self._proxy = self._proxy_cls
    # else: _proxy is a proxy_cls
    # NOTE(review): _proxy is expected to hold a proxy *class* at this
    # point (either the default _proxy_cls or one assigned by the
    # caller); calling it below instantiates the proxy. If _proxy were
    # already an instance this call would misbehave -- confirm callers
    # only ever assign classes.
    self._proxy = self._proxy(**config)
def user_disable(self, username, table=None):
    '''Disable new connections to the given user's database.

    Executes, e.g.::

        update pg_database set datallowconn = false
        where datname = 'applogs';

    :param username: name of the database (user) to disable
    :param table: unused; accepted for API consistency
    '''
    table = self.get_table(table)
    is_defined(username, 'username required')
    logger.info('Disabling existing user %s' % username)
    u = update('pg_database')
    # bind the username as a parameter instead of interpolating it
    # into raw SQL, to avoid SQL injection
    sql = u.where('datname = :datname').values({'datallowconn': 'false'})
    return self.session_auto.execute(sql, {'datname': username})
def user_disable(self, username, table=None):
    '''Block further connections to the named user's database.

    Equivalent SQL::

        update pg_database set datallowconn = false
        where datname = 'applogs';

    :param username: name of the database (user) to disable
    :param table: unused; accepted for API consistency
    '''
    table = self.get_table(table)
    is_defined(username, 'username required')
    logger.info('Disabling existing user %s' % username)
    u = update('pg_database')
    # use a bound parameter rather than string interpolation to
    # avoid SQL injection via username
    sql = u.where('datname = :datname').values({'datallowconn': 'false'})
    return self.session_auto.execute(sql, {'datname': username})
def get_last_field(self, field, table=None):
    '''Shortcut for querying to get the last field value for
    a given owner, cube.

    :param field: field name to query
    '''
    # normalize to a list of field names
    if not is_array(field):
        field = [field]
    table = self.get_table(table, except_=False)
    if table is None:
        last = None
    else:
        is_defined(field, 'field must be defined!')
        # newest value wins: sort descending, take one row
        last = self.find(table=table, fields=field, scalar=True,
                         sort=field, limit=1, descending=True,
                         date='~', default_fields=False)
    logger.debug("last %s.%s: %s" % (table, list2str(field), last))
    return last
def _index_default_name(self, columns, name=None):
    '''Derive a default index name (ix_<table>_<columns>) when
    none is given explicitly.'''
    table = self.config.get('table')
    is_defined(table, 'table must be defined!')
    if name:
        ix = name
    elif isinstance(columns, basestring):
        # single column: <table>_<column>
        ix = '%s_%s' % (table, columns)
    elif is_array(columns, except_=False):
        # multiple columns: <table>_<col1>_<col2>...
        ix = '%s_%s' % (table, '_'.join(tuple(columns)))
    else:
        raise ValueError(
            "unable to get default name from columns: %s" % columns)
    # prefix ix_ to all index names (strip any existing prefix first)
    return 'ix_%s' % re.sub('^ix_', '', ix)
def test_is_defined():
    '''is_defined() is True for any non-empty/non-None value (including
    0, False and negatives) and False for None and empty containers;
    with except_=True it raises RuntimeError instead of returning
    False.'''
    from metrique.utils import is_defined
    defined = [0, -1, 1, 0.1, False, True, 'h']
    not_defined = ['', None, [], {}]
    for x in defined:
        true = is_defined(x, except_=False)
        # print() form works under both py2 and py3
        print('%s is defined? %s' % (repr(x), true))
        assert true is True
    for x in not_defined:
        true = is_defined(x, except_=False)
        print('%s is defined? %s' % (repr(x), true))
        assert true is False
        # every not-defined value must raise with except_=True
        # (previously only the leaked last loop value was checked)
        try:
            is_defined(x, except_=True)
        except RuntimeError:
            pass
        else:
            assert False
def _index_default_name(self, columns, name=None):
    '''Build the default index name, always prefixed with ix_.'''
    table = self.config.get('table')
    is_defined(table, 'table must be defined!')
    if name:
        base = name
    elif isinstance(columns, basestring):
        base = '%s_%s' % (table, columns)
    elif is_array(columns, except_=False):
        # join table and every column name with underscores
        base = '_'.join((table,) + tuple(columns))
    else:
        raise ValueError("unable to get default name from columns: %s"
                         % columns)
    # normalize: strip any leading ix_ then re-apply the prefix
    base = re.sub('^ix_', '', base)
    return 'ix_%s' % base
def get_engine_uri(db, host='127.0.0.1', port=5432, dialect='sqlite',
                   driver=None, username=None, password=None,
                   connect_args=None, cache_dir=None):
    '''Build a sqlalchemy engine URI string.

    :param db: database name (for sqlite, basename of the db file)
    :param host: server host (ignored for sqlite)
    :param port: server port (ignored for sqlite)
    :param dialect: one of None, postgresql, sqlite, teiid
    :param driver: optional DBAPI driver, appended as dialect+driver
    :param username: optional credential (non-sqlite only)
    :param password: optional credential (non-sqlite only)
    :param connect_args: optional dict appended as ?k=v&... args
    :param cache_dir: where sqlite db files live; defaults to CACHE_DIR
    '''
    cache_dir = cache_dir or CACHE_DIR
    is_defined(db, 'db can not be null')
    is_true(bool(dialect in [None, 'postgresql', 'sqlite', 'teiid']),
            'invalid dialect: %s' % dialect)
    if dialect and driver:
        dialect = '%s+%s' % (dialect, driver)
    elif not dialect:
        # removed the dead `elif dialect: pass` branch
        dialect = 'sqlite'
    dialect = dialect.replace('://', '')
    if dialect == 'sqlite':
        # db is expected to be an absolute path to where
        # sqlite db will be saved
        db = os.path.join(cache_dir, '%s.sqlite' % db)
        uri = '%s:///%s' % (dialect, db)
    else:
        if username and password:
            u_p = '%s:%s@' % (username, password)
        elif username:
            u_p = '%s@' % username
        else:
            u_p = ''
        uri = '%s://%s%s:%s/%s' % (dialect, u_p, host, port, db)
    if connect_args:
        # items() works on py2 and py3; iteritems() was py2-only
        args = ['%s=%s' % (k, v) for k, v in connect_args.items()]
        uri += '?%s' % '&'.join(args)
    # mask the password before logging
    _uri = re.sub(':[^:]+@', ':***@', uri)
    logger.info("Engine URI: %s" % _uri)
    return uri
def get_table(self, table=None, except_=True, as_cls=False, reflect=True,
              schema=None):
    '''Fetch a sqlalchemy Table for `table`, reflecting if needed.

    :param table: table name or Table instance; defaults to config
    :param except_: raise when no Table is found
    :param as_cls: return a declarative class instead of the Table
    :param reflect: try autotable()/meta_reflect() when not cached
    :param schema: schema used to build the full (custom-typed) Table
    '''
    schema = schema or self.config.get('schema')
    if table is None:
        table = self.config.get('table')
    if isinstance(table, Table):
        # this is already the table we're looking for...
        _table = table
    else:
        is_defined(table, 'table must be defined!')
        _table = self.meta_tables.get(table)
        if reflect:
            if _table is None and schema:
                # if we have a schema, try to load the full
                # Table class definition
                _table = self.autotable(schema=schema)
            if _table is None:
                # reflection provides ONLY the SQL-defined Table,
                # which might not include custom Types (JSONType,
                # UTCEpoch, etc) but only the underlying SQL types
                # (Text, Float, etc); autotable would be needed
                self.meta_reflect()
                _table = self.meta_tables.get(table)
    if except_:
        is_true(isinstance(_table, Table),
                'table (%s) not found! Got: %s' % (table, _table))
    if isinstance(_table, Table) and as_cls:
        # wrap in a declarative class; autoload reflects columns
        defaults = dict(__tablename__=table, autoload=True)
        _table = type(str(table), (self.Base, ), defaults)
    # NOTE: the former `elif _table is not None: pass / else:
    # _table = None` tail was a no-op and has been dropped
    return _table
def schema2table(name, schema, Base=None, type_map=None, exclude_keys=None):
    '''Build a declarative Table class from a metrique schema dict.

    :param name: table name
    :param schema: dict of {field: {'type': ..., 'container': ...}}
    :param Base: optional existing declarative Base to bind to
    :param type_map: python type -> sqlalchemy type mapping
    :param exclude_keys: schema keys to ignore (system keys always are)
    '''
    is_defined(name, "table name must be defined!")
    is_defined(schema, "schema must be defined!")
    # proper statement instead of a conditional-expression statement
    if Base:
        logger.debug('Reusing existing Base (%s)' % Base)
    Base = Base or declarative_base()
    schema = copy(schema)
    type_map = copy(type_map or TYPE_MAP)
    logger.debug("Attempting to create Table class: %s..." % name)
    logger.debug(" ... Schema: %s" % schema)
    logger.debug(" ... Type Map: %s" % type_map)

    def __repr__(s):
        # items() works on py2 and py3; iteritems() was py2-only
        attrs = ['%s=%s' % (k, v) for k, v in s.__dict__.items()
                 if k != '_sa_instance_state']
        return '%s(%s)' % (s.__tablename__, ', '.join(attrs))

    # always exclude the following 'system' keys, as they are
    # hard coded and should remain consistent across containers
    exclude_keys = list(exclude_keys or [])
    exclude_keys.extend(['id', '_id', '_hash', '_start', '_end',
                         '_v', '__v__', '_e'])
    exclude_keys = sorted(set(exclude_keys))
    defaults = {
        '__tablename__': name,
        '__table_args__': ({'extend_existing': True}),
        'id': Column('id', Integer, primary_key=True),
        '_id': Column(CoerceUTF8, nullable=False, unique=True, index=True),
        '_oid': Column(BigInteger, nullable=False, index=True, unique=False),
        '_hash': Column(CoerceUTF8, nullable=False, index=True),
        '_start': Column(type_map[datetime], index=True, nullable=False),
        '_end': Column(type_map[datetime], index=True),
        '_v': Column(Integer, default=0, nullable=False),
        '__v__': Column(CoerceUTF8, default=__version__, nullable=False),
        '_e': Column(type_map[dict]),
        '__repr__': __repr__,
    }
    for k, v in schema.items():
        if k in exclude_keys:
            warnings.warn('restricted schema key detected %s; ignoring!' % k)
            continue
        __type = v.get('type')
        if __type is None:
            __type = type(None)
        _type = type_map.get(__type)
        if v.get('container', False):
            # container fields are stored as lists; pg.ARRAY needs
            # the element type specified explicitly
            _list_type = type_map[list]
            if _list_type is pg.ARRAY:
                _list_type = _list_type(_type)
            defaults[k] = Column(_list_type)
        elif k == '_oid':
            # in case _oid is defined in the schema, make sure it
            # stays indexed and non-unique (as in the defaults above)
            defaults[k] = Column(_type, nullable=False, index=True,
                                 unique=False)
        else:
            defaults[k] = Column(_type, name=k)
    logger.debug(" ... Table Schema Final: %s" % defaults)
    _table = type(str(name), (Base,), defaults)
    return _table
def __init__(self, db=None, table=None, debug=None, config=None,
             dialect=None, driver=None, host=None, port=None,
             username=None, password=None, connect_args=None,
             batch_size=None, cache_dir=None, db_schema=None,
             log_file=None, log_dir=None, log2file=None, log2stdout=None,
             log_format=None, schema=None, retries=None, **kwargs):
    '''
    Accept additional kwargs, but ignore them.
    '''
    is_true(HAS_SQLALCHEMY, '`pip install sqlalchemy` required')
    # use copy of class default value
    self.RESERVED_USERNAMES = copy(SQLAlchemyProxy.RESERVED_USERNAMES)
    self.type_map = copy(SQLAlchemyProxy.type_map)
    # default _start, _end is epoch timestamp
    options = dict(
        batch_size=batch_size, cache_dir=cache_dir,
        connect_args=connect_args, db=db, db_schema=db_schema,
        default_fields=None, debug=debug, dialect=dialect,
        driver=driver, host=host, log_dir=log_dir, log_file=log_file,
        log_format=log_format, log2file=log2file,
        log2stdout=log2stdout, password=password,
        # BUG FIX: was hard-coded `port=None`, which silently ignored
        # the caller's port argument (the 5432 default always won)
        port=port,
        retries=retries, schema=schema, table=table, username=username)
    defaults = dict(
        batch_size=999, cache_dir=CACHE_DIR, connect_args=None,
        db=None, db_schema=None,
        default_fields={'_start': 1, '_end': 1, '_oid': 1},
        debug=logging.INFO, dialect='sqlite', driver=None,
        host='127.0.0.1', log_file='metrique.log', log_dir=LOG_DIR,
        log_format=None, log2file=True, log2stdout=False,
        password=None, port=5432, retries=1, schema=None, table=None,
        username=getuser())
    self.config = copy(config or self.config or {})
    # FIXME: config expected to come from caller as kwarg or defaults
    # will be used. This is because loading from file causes problems
    # at the moment such as when container is loaded, it tries to
    # load top-level 'proxy' key from config_file, which is incorrect,
    # since that config key is meant for the data source proxy rather
    # than container proxy.
    self.config = configure(options, defaults, section_only=True,
                            update=self.config)
    # db is required; default db is db username else local username
    self.config['db'] = self.config['db'] or self.config['username']
    is_defined(self.config.get('db'), 'db can not be null')
    # setup sqlalchemy logging; redirect to metrique logger
    self._debug_setup_sqlalchemy_logging()
    if not self._object_cls:
        from metrique.core_api import metrique_object
        self._object_cls = metrique_object
def __init__(self, db=None, table=None, debug=None, config=None,
             dialect=None, driver=None, host=None, port=None,
             username=None, password=None, connect_args=None,
             batch_size=None, cache_dir=None, db_schema=None,
             log_file=None, log_dir=None, log2file=None, log2stdout=None,
             log_format=None, schema=None, retries=None, **kwargs):
    '''
    Accept additional kwargs, but ignore them.
    '''
    is_true(HAS_SQLALCHEMY, '`pip install sqlalchemy` required')
    # use copy of class default value
    self.RESERVED_USERNAMES = copy(SQLAlchemyProxy.RESERVED_USERNAMES)
    self.type_map = copy(SQLAlchemyProxy.type_map)
    # default _start, _end is epoch timestamp
    options = dict(batch_size=batch_size,
                   cache_dir=cache_dir,
                   connect_args=connect_args,
                   db=db,
                   db_schema=db_schema,
                   default_fields=None,
                   debug=debug,
                   dialect=dialect,
                   driver=driver,
                   host=host,
                   log_dir=log_dir,
                   log_file=log_file,
                   log_format=log_format,
                   log2file=log2file,
                   log2stdout=log2stdout,
                   password=password,
                   # BUG FIX: previously `port=None`; the caller's
                   # port argument was silently discarded
                   port=port,
                   retries=retries,
                   schema=schema,
                   table=table,
                   username=username)
    defaults = dict(batch_size=999,
                    cache_dir=CACHE_DIR,
                    connect_args=None,
                    db=None,
                    db_schema=None,
                    default_fields={'_start': 1, '_end': 1, '_oid': 1},
                    debug=logging.INFO,
                    dialect='sqlite',
                    driver=None,
                    host='127.0.0.1',
                    log_file='metrique.log',
                    log_dir=LOG_DIR,
                    log_format=None,
                    log2file=True,
                    log2stdout=False,
                    password=None,
                    port=5432,
                    retries=1,
                    schema=None,
                    table=None,
                    username=getuser())
    self.config = copy(config or self.config or {})
    # FIXME: config expected to come from caller as kwarg or defaults
    # will be used. This is because loading from file causes problems
    # at the moment such as when container is loaded, it tries to
    # load top-level 'proxy' key from config_file, which is incorrect,
    # since that config key is meant for the data source proxy rather
    # than container proxy.
    self.config = configure(options, defaults, section_only=True,
                            update=self.config)
    # db is required; default db is db username else local username
    self.config['db'] = self.config['db'] or self.config['username']
    is_defined(self.config.get('db'), 'db can not be null')
    # setup sqlalchemy logging; redirect to metrique logger
    self._debug_setup_sqlalchemy_logging()
    if not self._object_cls:
        from metrique.core_api import metrique_object
        self._object_cls = metrique_object
def schema2table(name, schema, Base=None, type_map=None, exclude_keys=None):
    '''Create a declarative Table class named `name` from `schema`.

    :param name: table name
    :param schema: dict of {field: {'type': ..., 'container': ...}}
    :param Base: optional existing declarative Base to bind to
    :param type_map: python type -> sqlalchemy type mapping
    :param exclude_keys: schema keys to skip (system keys always are)
    '''
    is_defined(name, "table name must be defined!")
    is_defined(schema, "schema must be defined!")
    # replaced conditional-expression-as-statement with a plain if
    if Base:
        logger.debug('Reusing existing Base (%s)' % Base)
    Base = Base or declarative_base()
    schema = copy(schema)
    type_map = copy(type_map or TYPE_MAP)
    logger.debug("Attempting to create Table class: %s..." % name)
    logger.debug(" ... Schema: %s" % schema)
    logger.debug(" ... Type Map: %s" % type_map)

    def __repr__(s):
        # items() is py2/py3 compatible; iteritems() was py2-only
        pairs = ['%s=%s' % (k, v) for k, v in s.__dict__.items()
                 if k != '_sa_instance_state']
        return '%s(%s)' % (s.__tablename__, ', '.join(pairs))

    # always exclude the following 'system' keys, as they are
    # hard coded and should remain consistent across containers
    exclude_keys = list(exclude_keys or [])
    exclude_keys.extend(
        ['id', '_id', '_hash', '_start', '_end', '_v', '__v__', '_e'])
    exclude_keys = sorted(set(exclude_keys))
    defaults = {
        '__tablename__': name,
        '__table_args__': ({'extend_existing': True}),
        'id': Column('id', Integer, primary_key=True),
        '_id': Column(CoerceUTF8, nullable=False, unique=True, index=True),
        '_oid': Column(BigInteger, nullable=False, index=True, unique=False),
        '_hash': Column(CoerceUTF8, nullable=False, index=True),
        '_start': Column(type_map[datetime], index=True, nullable=False),
        '_end': Column(type_map[datetime], index=True),
        '_v': Column(Integer, default=0, nullable=False),
        '__v__': Column(CoerceUTF8, default=__version__, nullable=False),
        '_e': Column(type_map[dict]),
        '__repr__': __repr__,
    }
    for k, v in schema.items():
        if k in exclude_keys:
            warnings.warn('restricted schema key detected %s; ignoring!' % k)
            continue
        __type = v.get('type')
        if __type is None:
            __type = type(None)
        _type = type_map.get(__type)
        if v.get('container', False):
            # container fields map to list storage; pg.ARRAY needs an
            # explicit element type
            _list_type = type_map[list]
            if _list_type is pg.ARRAY:
                _list_type = _list_type(_type)
            defaults[k] = Column(_list_type)
        elif k == '_oid':
            # keep a schema-supplied _oid indexed and non-unique,
            # matching the hard-coded default above
            defaults[k] = Column(_type, nullable=False, index=True,
                                 unique=False)
        else:
            defaults[k] = Column(_type, name=k)
    logger.debug(" ... Table Schema Final: %s" % defaults)
    _table = type(str(name), (Base, ), defaults)
    return _table