def test_connection_add(connection):
    password = "******"
    statement = Insert(User).values(name='John', fullname="John Doe", password=password)
    connection.execute(statement)
    assert len(xray_recorder.current_segment().subsegments) == 1
    sql_meta = xray_recorder.current_segment().subsegments[0].sql
    assert sql_meta['sanitized_query'].startswith('INSERT INTO users')
    assert sql_meta['url'] == 'sqlite:///:memory:'
    assert password not in sql_meta['sanitized_query']
Example 2
from sqlalchemy.orm.session import Session
from sqlalchemy.pool import Pool
from sqlalchemy.sql import func, select
from sqlalchemy.sql.expression import Insert


@compiles(Insert, 'mysql')
def on_duplicate(insert, compiler, **kw):
    """Custom MySQL insert on_duplicate support."""
    stmt = compiler.visit_insert(insert, **kw)
    my_var = insert.dialect_kwargs.get('mysql_on_duplicate', None)
    if my_var:
        stmt += ' ON DUPLICATE KEY UPDATE %s' % my_var
    return stmt

Insert.argument_for('mysql', 'on_duplicate', None)
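
# Usage sketch (hypothetical `score` table, SQLAlchemy 1.x-style dialect kwargs):
# the 'mysql_on_duplicate' argument registered above is appended verbatim by the
# @compiles hook, turning the INSERT into an upsert.
from sqlalchemy import Column, Integer, MetaData, Table

score = Table(
    'score', MetaData(),
    Column('userid', Integer, primary_key=True),
    Column('value', Integer, nullable=False),
)
upsert = score.insert(mysql_on_duplicate='value = value + VALUES(value)')
# -> INSERT INTO score (userid, value) VALUES (...)
#    ON DUPLICATE KEY UPDATE value = value + VALUES(value)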


def configure_db(uri, _db=None):
    """
    Configure and return a :class:`~ichnaea.db.Database` instance.

    :param _db: Test-only hook to provide a pre-configured db.
    """
    if _db is not None:
        return _db
    return Database(uri)


# the request db_ro_session and db_tween_factory are inspired by
# pyramid_tm to provide lazy session creation, session closure and
Example 3
from sqlalchemy.dialects.postgresql import insert
from db.utils import hashstring


metadata = MetaData()

@compiles(Insert, 'sqlite')
def compile_replace(insert, compiler, **kw):
    # Compile the INSERT normally, then upgrade it to INSERT OR REPLACE
    # when the 'sqlite_replace' dialect argument is set.
    stmt = compiler.visit_insert(insert, **kw)
    if not insert.kwargs['sqlite_replace']:
        return stmt
    return re.sub(r'^INSERT', 'INSERT OR REPLACE', stmt)

Insert.argument_for("sqlite", "replace", None)

hash_body = lambda context: hashstring(context.current_parameters['body'])

history = Table(
    'history'
    , metadata
    , Column('history_id', String(32), primary_key=True, nullable=False, default=hash_body)
    , Column('body', String(100), nullable=False)
)
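
# Usage sketch: setting the 'sqlite_replace' flag registered above makes the
# @compiles hook rewrite this statement as INSERT OR REPLACE.
replace_stmt = history.insert(sqlite_replace=True).values(body='first entry')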

authors = Table(
    'authors'
    , metadata
    , Column('author_id', Integer(), primary_key=True)
    , Column('email', String(100), nullable=False, unique=True)
Example 4
@compiles(Insert)
def _append_string(insert: Insert, compiler: SQLCompiler,
                   **kwargs: Any) -> str:
    """append a string to insert"""
    append_string: str = compiler.visit_insert(insert, **kwargs)
    if insert.kwargs['postgresql_append_string']:
        if append_string.rfind("RETURNING") == -1:
            return "%s %s" % (append_string,
                              insert.kwargs['postgresql_append_string'])
        return append_string.replace(
            "RETURNING",
            " " + insert.kwargs['postgresql_append_string'] + " RETURNING ")
    return append_string


Insert.argument_for("postgresql", "append_string", None)


def insert_or_update(session: Session,
                     entry: declarative_base,
                     column: str,
                     update_fields: Optional[Set[str]] = None,
                     exclude_fields: Optional[Set[str]] = None,
                     flush: bool = False) -> ResultProxy:
    # pylint: disable=too-many-arguments
    """postgresql specific insert or update logic"""
    if exclude_fields is None:
        exclude_fields = set()

    model_dict = _get_model_dict(entry)
    if update_fields is None:
Example 5
cursors.Cursor.execute = execute
cursors.Cursor.executemany = executemany
cursors.Cursor._do_execute_many = _do_execute_many


@compiles(Insert, "mysql")
def on_duplicate(insert, compiler, **kw):
    """Custom MySQL insert on_duplicate support."""
    stmt = compiler.visit_insert(insert, **kw)
    my_var = insert.dialect_kwargs.get("mysql_on_duplicate", None)
    if my_var:
        stmt += " ON DUPLICATE KEY UPDATE %s" % my_var
    return stmt


Insert.argument_for("mysql", "on_duplicate", None)


def configure_db(uri, _db=None):
    """
    Configure and return a :class:`~ichnaea.db.Database` instance.

    :param _db: Test-only hook to provide a pre-configured db.
    """
    if _db is not None:
        return _db
    return Database(uri)


# the request db_sessions and db_tween_factory are inspired by pyramid_tm
# to provide lazy session creation, session closure and automatic
Example 6
@inject('mysql_url')
def sql_engine(mysql_url):
  from sqlalchemy import create_engine
  return create_engine(mysql_url)

@inject('sql_engine')
def sql_meta_data(sql_engine):
  from sqlalchemy import MetaData
  return MetaData(sql_engine)

@inject('mongo_url')
def mongo_client(mongo_url):
  from pymongo import MongoClient
  return MongoClient(mongo_url)

@inject('mongo_client')
def mongo_db(mongo_client):
  return mongo_client['stock_dev']

from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.expression import Insert

@compiles(Insert, 'mysql')
def replace_into(insert, compiler, **kw):
  # Note: this hook rewrites every MySQL INSERT as REPLACE INTO, regardless
  # of the 'mysql_replace_into' argument registered below.
  s = compiler.visit_insert(insert, **kw)
  s = s.replace("INSERT INTO", "REPLACE INTO")
  return s

Insert.argument_for("mysql", "replace_into", None)
Example 7
BLOCK_STORAGE_DIR = nuus.app.config.get('BLOCK_STORAGE_DIR')
BLOCK_FILE_REGEX = re.compile(nuus.app.config.get('BLOCK_FILE_REGEX'))

with open('patterns.txt') as f:
    PATTERNS = [
        re.compile(line, re.I | re.U)
        for line in (x.strip() for x in f)
        if line and not line.startswith('#')
    ]

@compiles(Insert, 'mysql')
def suffix_insert(insert, compiler, **kw):
    stmt = compiler.visit_insert(insert, **kw)
    if 'mysql_on_duplicate_key_update_cols' in insert.dialect_kwargs:
        my_var = insert.kwargs['mysql_on_duplicate_key_update_cols']
        if my_var is not None:
            stmt += ' ON DUPLICATE KEY UPDATE %s=%s' % (my_var, my_var)
    return stmt

Insert.argument_for("mysql", "on_duplicate_key_update_cols", None)

def dump_unmatched():
    lines_per_file = 1000000
    lines = 0
    utf8_w = codecs.getwriter('UTF-8')
    def gen_dumpfile():
        fcnt = 0
        while True:
            yield gzip.open('unmatched.%s.dump' % fcnt, 'w')
            #yield utf8_w(f)
            fcnt += 1
    dumpfile = gen_dumpfile()
    ouf = next(dumpfile)
    for fn in os.listdir(os.path.join(BLOCK_STORAGE_DIR, 'complete')):
        m = BLOCK_FILE_REGEX.match(fn)
Example 8
def getSingletonEntity(selectStatement: Select,
                       insertStatement: Insert,
                       dbEngine: sqlalchemy.engine.Engine,
                       allowParallel: bool = False,
                       _lock: bool = False):
    '''
        Get a singleton entity using the given selectStatement or insertStatement.

        :param selectStatement: A select statement to execute, used to check whether
                                the entity exists and to query it again if the insert
                                does not populate `returned_defaults`.
        :type selectStatement: :class:`sqlalchemy.sql.expression.select`
        :param insertStatement: An insert statement to execute to fill the database
                                with the missing values.
        :type insertStatement: :class:`sqlalchemy.sql.expression.insert`
        :param allowParallel: Whether inserting without a lock is allowed. May raise
                                :class:`sqlalchemy.exc.IntegrityError` on duplicate keys.
        :type allowParallel: bool
        :param _lock: Whether to acquire a lock while querying. If set, allowParallel
                        is ignored and the lock is always used.
        :type _lock: bool
        :param dbEngine: A SQLAlchemy database engine.
        :type dbEngine: :class:`sqlalchemy.engine.Engine`
    '''

    # return_defaults() is generative and returns a new statement, so keep the result.
    insertStatement = insertStatement.return_defaults()

    with dbEngine.connect() as dbConnection:
        if _lock:
            with _globalLock:

                entity = dbConnection.execute(selectStatement).first()

                if not entity:
                    stmt = dbConnection.execute(insertStatement)

                    if stmt.returned_defaults:
                        return dict(stmt.returned_defaults)
                    else:
                        return dict(
                            dbConnection.execute(selectStatement).first())
                else:
                    return dict(entity)
        else:
            entity = dbConnection.execute(selectStatement).first()

            if not entity:
                if allowParallel:
                    stmt = dbConnection.execute(insertStatement)

                    if stmt.returned_defaults:
                        return dict(stmt.returned_defaults)
                    else:
                        return dict(
                            dbConnection.execute(selectStatement).first())
                else:
                    # Entity not found. We have to insert and return it (inside a lock to avoid duplicate keys)
                    return getSingletonEntity(selectStatement,
                                              insertStatement,
                                              dbEngine,
                                              allowParallel=False,
                                              _lock=True)
            else:
                return dict(entity)
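
# Usage sketch (hypothetical `tags` table and `engine`, SQLAlchemy 1.x-style select):
# fetch the row keyed by name, inserting it first if it does not exist.
tag = getSingletonEntity(
    select([tags]).where(tags.c.name == 'news'),
    tags.insert().values(name='news'),
    engine,
)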
Example 9
File: db.py Project: ingle/ichnaea
from sqlalchemy.pool import Pool
from sqlalchemy.sql import func, select
from sqlalchemy.sql.expression import Insert


@compiles(Insert, 'mysql')
def on_duplicate(insert, compiler, **kw):
    """Custom MySQL insert on_duplicate support."""
    stmt = compiler.visit_insert(insert, **kw)
    my_var = insert.dialect_kwargs.get('mysql_on_duplicate', None)
    if my_var:
        stmt += ' ON DUPLICATE KEY UPDATE %s' % my_var
    return stmt


Insert.argument_for('mysql', 'on_duplicate', None)


def configure_db(uri, _db=None):
    """
    Configure and return a :class:`~ichnaea.db.Database` instance.

    :param _db: Test-only hook to provide a pre-configured db.
    """
    if _db is not None:
        return _db
    return Database(uri)


# the request db_ro_session and db_tween_factory are inspired by
# pyramid_tm to provide lazy session creation, session closure and
Example 10
from ..models import Novel, Chapter, Article
import redis
import traceback
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.expression import Insert


@compiles(Insert)
def append_string(insert, compiler, **kw):
    # Append an arbitrary trailing clause when an 'append_string' kwarg was
    # passed to the insert construct (the 'mysql_append_string' argument
    # registered below is not consulted here).
    s = compiler.visit_insert(insert, **kw)
    if 'append_string' in insert.kwargs:
        return s + " " + insert.kwargs['append_string']
    return s


Insert.argument_for("mysql", "append_string", None)

main = Blueprint('main', __name__)
rdb = None


@main.errorhandler(404)
def page_not_found(error):
    if request.accept_mimetypes.accept_json and \
            not request.accept_mimetypes.accept_html:
        response = jsonify({'error': 'not found'})
        response.status_code = 404
        return response
    return render_template('404.html'), 404

Example 11
At the time of creation, attributes with ottermatics validators will be created as columns in the component table, whereas only numeric key-value pairs will be added to the component's Vertical Attribute Mapping (VAM) table

'''

log = logging.getLogger('otterlib-report')


#ignore duplicates
@compiles(Insert, 'postgresql')
def ignore_duplicates(insert, compiler, **kw):
    s = compiler.visit_insert(insert, **kw)
    ignore = insert.kwargs.get('postgresql_ignore_duplicates', False)
    return s if not ignore else s + ' ON CONFLICT DO NOTHING'

Insert.argument_for('postgresql', 'ignore_duplicates', None)
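
# Usage sketch (hypothetical `results` table): the registered flag appends
# ON CONFLICT DO NOTHING, so rows that collide on a unique constraint are skipped.
stmt = results.insert(postgresql_ignore_duplicates=True).values(run_id=42, score=0.93)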

# TODO: make a new SQLAlchemy base; don't mix other bases into our special fancy db

# These mixins and base table classes define common bases for tables

class MappedDictMixin:
    """Adds obj[key] access to a mapped class.

    This class basically proxies dictionary access to an attribute
    called ``_proxied``.  The class which inherits this class
    should have an attribute called ``_proxied`` which points to a dictionary.

    """

    def __len__(self):
Example 12
class GtfsZip(object):
    def __init__(self, filename):
        self.filename = filename
        self._zipfile = zipfile.ZipFile(filename)

    def rows(self, item):
        name = "{}.txt".format(item)
        return csv.DictReader(self._zipfile.open(name))


@compiles(Insert, 'mysql')
def replace(insert, compiler, **kw):
    s = compiler.visit_insert(insert, **kw)
    if insert.kwargs.get('mysql_replace'):
        s = s.replace('INSERT', 'REPLACE')
    return s


Insert.argument_for('mysql', 'replace', False)


def etl_directory(path, session):
    for filename in filter(lambda s: s.endswith('.zip'), listdir(path)):
        logging.info("Reading GTFS from %s.", filename)
        archive = GtfsZip(os.path.join(path, filename))
        logging.info("Saving created objects...")
        for name, cls in model_map:
            for row in ifilter(None, archive.rows(name)):
                stmt = (cls.__table__.insert(mysql_replace=True))
                session.execute(stmt, row)
Example 13
from sqlalchemy.util import safe_reraise

from dq.config import Config
from dq.errors import ModelError
from dq.funcutil import safe_invoke
from dq.logging import error

logger = logging.getLogger(__name__)

engine = create_engine(
    Config.get('sql.url'),
    pool_recycle=600,
    pool_pre_ping=True,
)
if Config.get('sql.flavor') == 'mysql':
    Insert.argument_for('mysql', 'replace_insert', None)


def session_maker():
    """Generate a scoped session. In general, use the Session global variable.

    :returns Session: A new SQLAlchemy session.
    """
    return scoped_session(sessionmaker(bind=engine))


Session = session_maker()
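
# Usage sketch: the scoped Session is thread-local; remove() releases the
# session owned by the current thread once work is done.
from sqlalchemy import text

session = Session()
try:
    session.execute(text('SELECT 1'))
finally:
    Session.remove()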


@compiles(Insert, 'mysql')
def replace_insert(insert, compiler, **kw):