from sqlalchemy.dialects.postgresql import insert from db.utils import hashstring metadata = MetaData() @compiles(Insert, 'sqlite') def compile_replace(insert, compiler, **kw): stmt = compiler.sql_compiler.visit_insert(insert) if not insert.kwargs['sqlite_replace']: return stmt else: return re.sub(r'^INSERT', 'INSERT OR REPLACE', stmt) Insert.argument_for("sqlite", "replace", None) hash_body = lambda context:hashstring(context.current_parameters['body']) history = Table( 'history' , metadata , Column('history_id', String(32), primary_key=True, nullable=False, default=hash_body) , Column('body', String(100), nullable=False) ) authors = Table( 'authors' , metadata , Column('author_id', Integer(), primary_key=True) , Column('email', String(100), nullable=False, unique=True)
from sqlalchemy.orm.session import Session
from sqlalchemy.pool import Pool
from sqlalchemy.sql import func, select
from sqlalchemy.sql.expression import Insert


@compiles(Insert, 'mysql')
def on_duplicate(insert, compiler, **kw):
    """Compile a MySQL INSERT, appending an ON DUPLICATE KEY UPDATE
    clause when the statement carries a ``mysql_on_duplicate`` kwarg.
    """
    compiled = compiler.visit_insert(insert, **kw)
    update_clause = insert.dialect_kwargs.get('mysql_on_duplicate', None)
    if not update_clause:
        return compiled
    return compiled + ' ON DUPLICATE KEY UPDATE %s' % update_clause


Insert.argument_for('mysql', 'on_duplicate', None)


def configure_db(uri, _db=None):
    """Configure and return a :class:`~ichnaea.db.Database` instance.

    :param uri: Database connection URI.
    :param _db: Test-only hook to provide a pre-configured db.
    """
    if _db is None:
        return Database(uri)
    return _db


# the request db_ro_session and db_tween_factory are inspired by
# pyramid_tm to provide lazy session creation, session closure and
# Install the instrumented cursor methods defined earlier in this module.
cursors.Cursor.execute = execute
cursors.Cursor.executemany = executemany
cursors.Cursor._do_execute_many = _do_execute_many


@compiles(Insert, "mysql")
def on_duplicate(insert, compiler, **kw):
    """Render a MySQL INSERT, adding ON DUPLICATE KEY UPDATE support
    driven by the ``mysql_on_duplicate`` dialect kwarg.
    """
    rendered = compiler.visit_insert(insert, **kw)
    clause = insert.dialect_kwargs.get("mysql_on_duplicate", None)
    return rendered + " ON DUPLICATE KEY UPDATE %s" % clause if clause else rendered


Insert.argument_for("mysql", "on_duplicate", None)


def configure_db(uri, _db=None):
    """Configure and return a :class:`~ichnaea.db.Database` instance.

    :param uri: Database connection URI.
    :param _db: Test-only hook to provide a pre-configured db.
    """
    return _db if _db is not None else Database(uri)


# the request db_sessions and db_tween_factory are inspired by pyramid_tm
# to provide lazy session creation, session closure and automatic
@compiles(Insert) def _append_string(insert: Insert, compiler: SQLCompiler, **kwargs: Any) -> str: """append a string to insert""" append_string: str = compiler.visit_insert(insert, **kwargs) if insert.kwargs['postgresql_append_string']: if append_string.rfind("RETURNING") == -1: return "%s %s" % (append_string, insert.kwargs['postgresql_append_string']) return append_string.replace( "RETURNING", " " + insert.kwargs['postgresql_append_string'] + " RETURNING ") return append_string Insert.argument_for("postgresql", "append_string", None) def insert_or_update(session: Session, entry: declarative_base, column: str, update_fields: Optional[Set[str]] = None, exclude_fields: Optional[Set[str]] = None, flush: bool = False) -> ResultProxy: # pylint: disable=too-many-arguments """postgresql specific insert or update logic""" if exclude_fields is None: exclude_fields = set() model_dict = _get_model_dict(entry) if update_fields is None:
BLOCK_STORAGE_DIR = nuus.app.config.get('BLOCK_STORAGE_DIR') BLOCK_FILE_REGEX = re.compile(nuus.app.config.get('BLOCK_FILE_REGEX')) with open('patterns.txt') as f: PATTERNS = [re.compile(p, re.I | re.U) for p in [x for x in [x.strip() for x in f.readlines()] if not (x == '' or x.startswith('#'))]] @compiles(Insert, 'mysql') def suffix_insert(insert, compiler, **kw): stmt = compiler.visit_insert(insert, **kw) if 'mysql_on_duplicate_key_update_cols' in insert.dialect_kwargs: my_var = insert.kwargs['mysql_on_duplicate_key_update_cols'] if my_var is not None: stmt += ' ON DUPLICATE KEY UPDATE %s=%s' % (my_var, my_var) return stmt Insert.argument_for("mysql", "on_duplicate_key_update_cols", None) def dump_unmatched(): lines_per_file = 1000000 lines = 0 utf8_w = codecs.getwriter('UTF-8') def gen_dumpfile(): fcnt = 0 while True: yield gzip.open('unmatched.%s.dump' % fcnt, 'w') #yield utf8_w(f) fcnt += 1 dumpfile = gen_dumpfile() ouf = next(dumpfile) for fn in os.listdir(os.path.join(BLOCK_STORAGE_DIR, 'complete')): m = BLOCK_FILE_REGEX.match(fn)
@inject('mysql_url')
def sql_engine(mysql_url):
    """Create the SQLAlchemy engine from the injected MySQL URL."""
    from sqlalchemy import create_engine
    return create_engine(mysql_url)


@inject('sql_engine')
def sql_meta_data(sql_engine):
    """MetaData bound to the shared SQL engine."""
    from sqlalchemy import MetaData
    return MetaData(sql_engine)


@inject('mongo_url')
def mongo_client(mongo_url):
    """MongoDB client for the injected URL."""
    from pymongo import MongoClient
    return MongoClient(mongo_url)


@inject('mongo_client')
def mongo_db(mongo_client):
    """The application's MongoDB database handle."""
    return mongo_client['stock_dev']


from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.expression import Insert


@compiles(Insert, 'mysql')
def replace_into(insert, compiler, **kw):
    """Compile a MySQL INSERT as ``REPLACE INTO`` when requested.

    BUG FIX: the original rewrote *every* MySQL INSERT into REPLACE INTO,
    even though a ``replace_into`` dialect argument is registered below and
    was never consulted.  Only rewrite when the statement was built with
    ``mysql_replace_into=<truthy>`` (dialect kwargs are namespaced as
    ``<dialect>_<argname>``).
    """
    s = compiler.visit_insert(insert, **kw)
    if insert.dialect_kwargs.get('mysql_replace_into'):
        s = s.replace("INSERT INTO", "REPLACE INTO")
    return s


Insert.argument_for("mysql", "replace_into", None)
from sqlalchemy.pool import Pool
from sqlalchemy.sql import func, select
from sqlalchemy.sql.expression import Insert


@compiles(Insert, 'mysql')
def on_duplicate(insert, compiler, **kw):
    """MySQL-only INSERT compiler hook: when the statement was constructed
    with the ``mysql_on_duplicate`` dialect kwarg, append the corresponding
    ON DUPLICATE KEY UPDATE clause to the rendered SQL.
    """
    sql = compiler.visit_insert(insert, **kw)
    extra = insert.dialect_kwargs.get('mysql_on_duplicate', None)
    if extra:
        sql = sql + ' ON DUPLICATE KEY UPDATE %s' % extra
    return sql


Insert.argument_for('mysql', 'on_duplicate', None)


def configure_db(uri, _db=None):
    """Configure and return a :class:`~ichnaea.db.Database` instance.

    :param uri: Database connection URI.
    :param _db: Test-only hook to provide a pre-configured db.
    """
    db = _db
    if db is None:
        db = Database(uri)
    return db


# the request db_ro_session and db_tween_factory are inspired by
# pyramid_tm to provide lazy session creation, session closure and
from ..models import Novel, Chapter, Article
import redis
import traceback
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.expression import Insert


@compiles(Insert)
def append_string(insert, compiler, **kw):
    """Append a free-form suffix to a compiled INSERT when the statement
    carries an ``append_string`` kwarg.

    NOTE(review): dialect kwargs are normally namespaced (e.g.
    ``mysql_append_string``) — confirm the bare ``append_string`` key is
    actually present at runtime under the SQLAlchemy version in use.
    """
    compiled = compiler.visit_insert(insert, **kw)
    if 'append_string' not in insert.kwargs:
        return compiled
    return compiled + " " + insert.kwargs['append_string']


Insert.argument_for("mysql", "append_string", None)


main = Blueprint('main', __name__)
rdb = None


@main.errorhandler(404)
def page_not_found(error):
    """Render a 404 as JSON for API-only clients, as HTML otherwise."""
    wants_json = (request.accept_mimetypes.accept_json
                  and not request.accept_mimetypes.accept_html)
    if wants_json:
        response = jsonify({'error': 'not found'})
        response.status_code = 404
        return response
    return render_template('404.html'), 404
At the time of creation attributes with ottermatics validators will be created as columns in the component table, where as only numeric key value pairs will be added to the components Vertical Attribute Mapping table (VAM) ''' log = logging.getLogger('otterlib-report') #ignore duplicates @compiles(Insert, 'postgresql') def ignore_duplicates(insert, compiler, **kw): s = compiler.visit_insert(insert, **kw) ignore = insert.kwargs.get('postgresql_ignore_duplicates', False) return s if not ignore else s + ' ON CONFLICT DO NOTHING' Insert.argument_for('postgresql', 'ignore_duplicates', None) #TODO: make new sqalchemy base, dont mixin other bases into our special fancy db #These Mixins & Base Table Classes Help Us By Defining Common Bases For tables class MappedDictMixin: """Adds obj[key] access to a mapped class. This class basically proxies dictionary access to an attribute called ``_proxied``. The class which inherits this class should have an attribute called ``_proxied`` which points to a dictionary. """ def __len__(self):
class GtfsZip(object):
    """Thin wrapper exposing the CSV members of a GTFS zip archive."""

    def __init__(self, filename):
        self.filename = filename
        self._zipfile = zipfile.ZipFile(filename)

    def rows(self, item):
        """Iterate dict rows from the archive member ``<item>.txt``."""
        member = "{}.txt".format(item)
        return csv.DictReader(self._zipfile.open(member))


@compiles(Insert, 'mysql')
def replace(insert, compiler, **kw):
    """Compile INSERT as REPLACE when the ``mysql_replace`` kwarg is set."""
    compiled = compiler.visit_insert(insert, **kw)
    if insert.kwargs.get('mysql_replace'):
        compiled = compiled.replace('INSERT', 'REPLACE')
    return compiled


Insert.argument_for('mysql', 'replace', False)


def etl_directory(path, session):
    """Load every GTFS ``.zip`` archive under *path* into the database
    through *session*, using REPLACE semantics for each row.
    """
    for filename in filter(lambda s: s.endswith('.zip'), listdir(path)):
        logging.info("Reading GTFS from %s.", filename)
        archive = GtfsZip(os.path.join(path, filename))
        logging.info("Saving created objects...")
        for name, cls in model_map:
            for row in ifilter(None, archive.rows(name)):
                session.execute(cls.__table__.insert(mysql_replace=True), row)
from sqlalchemy.util import safe_reraise from dq.config import Config from dq.errors import ModelError from dq.funcutil import safe_invoke from dq.logging import error logger = logging.getLogger(__name__) engine = create_engine( Config.get('sql.url'), pool_recycle=600, pool_pre_ping=True, ) if Config.get('sql.flavor') == 'mysql': Insert.argument_for('mysql', 'replace_insert', None) def session_maker(): """Generate a scoped session. In general, use the Session global variable. :returns Session: A new SQLAlchemy session. """ return scoped_session(sessionmaker(bind=engine)) Session = session_maker() @compiles(Insert, 'mysql') def replace_insert(insert, compiler, **kw):