def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL and not an Engine,
    though an Engine is acceptable here as well. By skipping the Engine
    creation we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the script
    output.
    """
    app = create_app()
    url = None
    with app.app_context():
        # db.get_engine() returns an Engine; offline mode only needs the
        # database URL, so extract it (previously the Engine itself was
        # passed as `url`).
        url = str(db.get_engine().url)
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def fetch_source_reliability(api_client):
    """Fetch 'Source reliability (binary)' annotations and persist them.

    For each annotated source, sets Source.is_reliable to True/False
    according to its 'reliable'/'unreliable' label.

    :param api_client: client passed to annotations_iterable for paging
    """
    def extractor(item):
        # Map the textual label onto the boolean column value.
        mapping = {'reliable': True, 'unreliable': False}
        source_id = item['entity_id']
        label = item['value']['value']
        return source_id, mapping[label]

    iterable = annotations_iterable(
        api_client,
        annotation_category='label',
        annotation_type='Source reliability (binary)',
        method='Expert-based source reliability evaluation',
        extractor=extractor)

    # begin() yields a Connection inside a transaction committed on exit;
    # renamed from `engine` which misdescribed the object, and `id` no
    # longer shadows the builtin.
    with get_engine().begin() as conn:
        for i, (source_id, is_reliable) in enumerate(iterable):
            print(f'[source-reliability] batch #{i}')  # fixed 'relability' typo
            update_query = update(Source) \
                .where(Source.id == source_id) \
                .values(is_reliable=is_reliable)
            conn.execute(update_query)
def fetch_all_sources(api_client):
    """Upsert every source returned by the API, one batch at a time."""
    for batch_no, raw_batch in enumerate(sources_iterator(api_client)):
        print(f'[fetch-all-sources] batch #{batch_no}')
        mapped_sources = [map_source(source) for source in raw_batch]
        with get_engine().begin() as connection:
            connection.execute(
                Source.upsert_query(),
                [source.__dict__ for source in mapped_sources])
def get_database_session():
    """Create the app's SQLite database if needed and return a new session.

    Ensures the per-user application directory exists, builds the schema,
    echoes the database path, and returns a fresh session instance.
    """
    app_dir = click.get_app_dir("mydeploy")
    os.makedirs(app_dir, exist_ok=True)
    db_path = os.path.join(app_dir, 'database.db')
    engine, session_factory = get_engine(db_path)
    Base.metadata.create_all(engine)
    click.echo(db_path)
    return session_factory()
def main():
    """Create the database schema and seed it with initial data."""
    engine = db.get_engine()
    # print() function form: the Python-2 `print` statements were a syntax
    # error under Python 3, which the rest of this codebase targets
    # (f-strings are used elsewhere).
    print("Creating database...")
    db.create(engine)
    init_data(engine)
    print("Successfully set up.")
async def load_sample_data(filename):
    """Load sample data into the DB by executing SQL read from *filename*.

    :param filename: file name under settings.SQL_DIR, e.g. 'data.sql'
    """
    sql_text = (settings.SQL_DIR / filename).read_text()
    async with get_engine() as engine:
        async with engine.acquire() as connection:
            await connection.execute(sql_text)
def get_session(self, bind='{{cookiecutter.app_name}}', autocommit=True):
    """Return a scoped (thread-local by default) session for *bind*.

    Uses scoped_session, so each thread gets its own session by default.

    :param bind: SQLAlchemy bind key used to look up the engine
    :param autocommit: forwarded to sessionmaker
    :return: the session, which is also stored on self.session
    """
    target_engine = db.get_engine(app=db.get_app(), bind=bind)
    factory = sessionmaker(bind=target_engine, autocommit=autocommit)
    self.session = scoped_session(factory)()
    return self.session
def main():
    """Parse options, wire up the DB session, and run the HTTP server.

    Outside debug mode the server binds the port and forks 4 worker
    processes; in debug mode it listens single-process.
    """
    tornado.options.parse_command_line()
    engine = db.get_engine()
    initialize_sessionmaker(engine=engine)
    http_server = tornado.httpserver.HTTPServer(Application())
    register_shutdown_handler(http_server, engine)
    if DEBUG:
        http_server.listen(options.port)
        tornado.ioloop.IOLoop.instance().start()
    else:
        http_server.bind(options.port)
        http_server.start(4)
        tornado.ioloop.IOLoop.current().start()
def get_acc_id(user, region):
    """Return the accountId for *user*, registering the user if unknown.

    :param user: summoner name (existence check is case-insensitive,
        but the final lookup matches exactly)
    :param region: region forwarded to db.update_user when registering
    """
    engine = db.get_engine()
    acc_df = pd.read_sql(sql="SELECT t.*, CTID FROM public.lol_summoners t",
                         con=engine)
    known_names = {name.lower() for name in acc_df["name"]}
    if user.lower() not in known_names:
        db.update_user(user=user, region=region)
    # Parameterized query: the previous '%'-interpolated form was
    # vulnerable to SQL injection through *user*.
    acc_df = pd.read_sql(
        sql="SELECT t.*, CTID FROM public.lol_summoners t WHERE name=%(name)s",
        con=engine,
        params={"name": user},
    )
    # NOTE(review): the existence check is case-insensitive but this lookup
    # is exact — an empty result is possible for case-mismatched names.
    return acc_df["accountId"][0]
def fetch_article_veracity(api_client):
    """Fetch 'Article veracity' predictions and upsert them in batches."""
    def extractor(item):
        value = item['value']
        return ArticleVeracity(article_id=item['entity_id'],
                               veracity=value['value'],
                               claims=value['claims'])

    batches = annotations_iterable(api_client,
                                   annotation_category='prediction',
                                   annotation_type='Article veracity',
                                   extractor=extractor,
                                   flatten=False)
    for batch_no, batch in enumerate(batches):
        print(f'[article-veracity] batch #{batch_no}')
        with get_engine().begin() as connection:
            connection.execute(ArticleVeracity.upsert_query(),
                               [record.__dict__ for record in batch])
def fetch_new_articles(api_client, last_id, max_count):
    """Fetch articles newer than *last_id* and persist them batch by batch.

    For each batch: insert sources (ignoring conflicts), upsert authors,
    then merge-save the articles themselves.

    :param api_client: client used by new_articles_iterator
    :param last_id: only articles with id greater than this are fetched
    :param max_count: upper bound on the number of articles fetched
    """
    from db import Author, Source
    # Renamed from `iter`, which shadowed the builtin.
    batches = new_articles_iterator(api_client=api_client,
                                    last_id=last_id,
                                    max_count=max_count)
    for i, batch in enumerate(batches):
        print(f'batch_no={i}')
        batch = [map_article(a) for a in batch]
        with get_engine().begin() as engine:
            engine.execute(
                insert(Source).on_conflict_do_nothing(),
                [art.source.__dict__ for art in batch
                 if art.source is not None])
            engine.execute(
                Author.upsert_query(),
                [art.author.__dict__ for art in batch
                 if art.author is not None])
        _save_all(batch, merge=True)
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine and associate a
    connection with the context.
    """
    def process_revision_directives(context, revision, directives):
        # Alembic autogenerate hook: if the would-be revision contains no
        # schema changes, clear the directives so no empty revision file
        # is written.
        if config.cmd_opts.autogenerate:
            script = directives[0]
            if script.upgrade_ops.is_empty():
                directives[:] = []
                print("No Changes detected")
    connectable = None
    # Instantiate application context in order to load all the configs needed
    # without static variables
    app = create_app()
    with app.app_context():
        # Engine obtained from the Flask-SQLAlchemy extension while the
        # application context is active.
        connectable = db.get_engine()
    if not connectable:
        # If there is an error in getting the engine from the configs - Fallback
        # to building an engine straight from the alembic .ini section.
        connectable = engine_from_config(
            config.get_section(config.config_ini_section),
            prefix="sqlalchemy.",
            poolclass=pool.NullPool,
        )
    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            process_revision_directives=process_revision_directives
        )
        with context.begin_transaction():
            context.run_migrations()
from models import Album, Artist
from db import get_engine, session_scope, Session

if __name__ == "__main__":
    # Demo script: print two albums, then every album of the first artist.
    engine = get_engine(echo=True)
    with session_scope(engine) as session:
        print("Pierwsze dwa albumy:")
        first_two = session.query(Album).limit(2)
        for album in first_two:
            print(f"{album.artist.name}: {album.title}")
        artist = session.query(Artist).first()
        print(f"Albumy artysty {artist.name}:")
        for album in artist.albums:
            print(f"- {album.title}")
# Restored: this import was commented out although Column is used below,
# which would raise NameError at import time. Duplicate DECIMAL removed.
from sqlalchemy import Column, ForeignKey
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.dialects.mysql import BIGINT, BINARY, BIT, BLOB, BOOLEAN, CHAR, \
    DATE, DATETIME, DECIMAL, DOUBLE, ENUM, FLOAT, INTEGER, LONGBLOB, \
    LONGTEXT, MEDIUMBLOB, MEDIUMINT, MEDIUMTEXT, NCHAR, NUMERIC, NVARCHAR, \
    REAL, SET, SMALLINT, TEXT, TIME, TIMESTAMP, TINYBLOB, TINYINT, TINYTEXT, \
    VARBINARY, VARCHAR, YEAR
from apimethods.utils import get_uuid_string_from_bytes, get_ip_str_from_bytes
import db

Base = declarative_base(bind=db.get_engine(database='alienvault_siem'))


class Ac_Acid_Event(Base):
    """ORM mapping for the ac_acid_event aggregate table (SIEM database)."""
    __tablename__ = 'ac_acid_event'

    cnt = Column('cnt', INTEGER, primary_key=False)
    ctx = Column('ctx', BINARY(16), primary_key=True, index=True)
    src_net = Column('src_net', BINARY(16), primary_key=True, index=True)
    day = Column('day', DATETIME, primary_key=True, index=True)
    dst_host = Column('dst_host', BINARY(16), primary_key=True, index=True)
    dst_net = Column('dst_net', BINARY(16), primary_key=True, index=True)
    plugin_id = Column('plugin_id', INTEGER, primary_key=True,
                       autoincrement=False, index=True)
# Restored: this import was commented out although Column is used below,
# which would raise NameError at import time. Duplicate DECIMAL removed.
from sqlalchemy import Column, ForeignKey
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.dialects.mysql import BIGINT, BINARY, BIT, BLOB, BOOLEAN, CHAR, \
    DATE, DATETIME, DECIMAL, DOUBLE, ENUM, FLOAT, INTEGER, LONGBLOB, \
    LONGTEXT, MEDIUMBLOB, MEDIUMINT, MEDIUMTEXT, NCHAR, NUMERIC, NVARCHAR, \
    REAL, SET, SMALLINT, TEXT, TIME, TIMESTAMP, TINYBLOB, TINYINT, TINYTEXT, \
    VARBINARY, VARCHAR, YEAR
from apimethods.utils import get_uuid_string_from_bytes, get_ip_str_from_bytes
import db

Base_siem = declarative_base(bind=db.get_engine(database='alienvault_siem'))


class Ac_Acid_Event(Base_siem):
    """ORM mapping for the ac_acid_event aggregate table (SIEM database)."""
    __tablename__ = 'ac_acid_event'

    cnt = Column('cnt', INTEGER, primary_key=False)
    ctx = Column('ctx', BINARY(16), primary_key=True, index=True)
    src_net = Column('src_net', BINARY(16), primary_key=True, index=True)
    day = Column('day', DATETIME, primary_key=True, index=True)
    dst_host = Column('dst_host', BINARY(16), primary_key=True, index=True)
    dst_net = Column('dst_net', BINARY(16), primary_key=True, index=True)
    plugin_id = Column('plugin_id', INTEGER, primary_key=True,
                       autoincrement=False, index=True)
    device_id = Column('device_id', INTEGER, primary_key=True,
                       autoincrement=False, index=True)
    plugin_sid = Column('plugin_sid', INTEGER, primary_key=True,
                        autoincrement=False, index=True)
    src_host = Column('src_host', BINARY(16), primary_key=True, index=True)
    #
from sqlalchemy import Table, Column, ForeignKey
from sqlalchemy.sql import func
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy.orm.collections import mapped_collection
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.dialects.mysql import BIGINT, BINARY, BIT, BLOB, BOOLEAN, CHAR, \
    DATE, DATETIME, DECIMAL, DECIMAL, DOUBLE, ENUM, FLOAT, INTEGER, LONGBLOB, \
    LONGTEXT, MEDIUMBLOB, MEDIUMINT, MEDIUMTEXT, NCHAR, NUMERIC, NVARCHAR, \
    REAL, SET, SMALLINT, TEXT, TIME, TIMESTAMP, TINYBLOB, TINYINT, TINYTEXT, \
    VARBINARY, VARCHAR, YEAR
from apimethods.utils import get_uuid_string_from_bytes,get_ip_str_from_bytes
import db

# Declarative base bound to the 'alienvault_api' database engine.
Base = declarative_base(bind=db.get_engine(database='alienvault_api'))


class Celery_Job (Base):
    # ORM mapping for the celery_job table.
    __tablename__='celery_job'
    info = Column('info',BLOB,primary_key=False)
    last_modified = Column('last_modified',TIMESTAMP,primary_key=False)
    id = Column('id',BINARY(16),primary_key=True)
    #
    # Relations:
    #
    @property
    def serialize(self):
        # NOTE(review): `uuid` is used below but not imported in this chunk —
        # presumably imported elsewhere; verify or add `import uuid`.
        # NOTE(review): the dict literal is truncated here — the rest of
        # serialize lies outside the visible region.
        return {
            'info':self.info,
            'last_modified':self.last_modified,
            'id':str(uuid.UUID(bytes=self.id)) if self.id else '',
from typing import Any, Generator

import pytest
from sqlalchemy.orm import sessionmaker
from fastapi import FastAPI

from app import app as test_app
from db import Base, get_engine

# In-memory SQLite keeps the test database isolated and fast.
engine = get_engine(uri='sqlite:///:memory:')
Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)


@pytest.fixture(autouse=True)
def app() -> Generator[FastAPI, Any, None]:
    """Create the schema before each test and drop it afterwards."""
    Base.metadata.create_all(bind=engine)
    _app = test_app
    _app.debug = True
    yield _app
    Base.metadata.drop_all(engine)


@pytest.fixture
def db_session(app: FastAPI) -> Generator[Session, Any, None]:
    """Yield a session bound to a dedicated connection.

    Fix: the connection checked out from the pool was never closed,
    leaking one connection per test.
    """
    connection = engine.connect()
    session = Session(bind=connection)
    try:
        yield session
    finally:
        session.close()
        connection.close()  # previously leaked
        # NOTE(review): this chunk starts mid-function — the enclosing
        # `def parse_args(args)` header and earlier option definitions are
        # outside the visible region.
        help='The port number the server uses')
    return parser.parse_args(args)


if __name__ == '__main__':
    import sys, logging
    from pyamf.remoting.gateway.wsgi import WSGIGateway
    from wsgiref import simple_server

    # Verbose logging: every gateway call is logged at DEBUG level.
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(asctime)s %(levelname)-5.5s [%(name)s] %(message)s')

    options = parse_args(sys.argv[1:])[0]
    # Expose SoftwareService under the 'service' target name.
    service = {'service': SoftwareService(db.get_engine())}

    host = options.host
    port = int(options.port)

    gw = WSGIGateway(service, debug=True, logger=logging)

    httpd = simple_server.WSGIServer(
        (host, port),
        simple_server.WSGIRequestHandler,
    )
    httpd.set_app(gw)

    logging.info('Started RecordSet example server on http://%s:%s' % (host, str(port)))
    # NOTE(review): this chunk starts mid-function — the enclosing
    # `def parse_args(args)` header is outside the visible region.
    parser = OptionParser()
    parser.add_option('--host', dest='host',
        default='localhost',
        help='The host address for the AMF gateway')
    parser.add_option('-p', '--port', dest='port',
        default=8000,
        help='The port number the server uses')
    return parser.parse_args(args)


if __name__ == '__main__':
    import sys
    from pyamf.remoting.gateway.wsgi import WSGIGateway
    from wsgiref import simple_server

    options = parse_args(sys.argv[1:])[0]
    # Expose SoftwareService under the 'service' target name.
    service = {'service': SoftwareService(db.get_engine())}

    host = options.host
    port = int(options.port)

    gw = WSGIGateway(service)

    httpd = simple_server.WSGIServer(
        (host, port),
        simple_server.WSGIRequestHandler,
    )
    httpd.set_app(gw)

    # NOTE(review): Python 2 print statement — this script targets Python 2
    # and will not parse under Python 3.
    print 'Started RecordSet example server on http://%s:%s' % (host, str(port) )
from models import Playlist
from db import Session, session_scope, get_engine

if __name__ == "__main__":
    # Demo script: show the first playlist and list its tracks.
    with session_scope(get_engine()) as session:
        first_playlist = session.query(Playlist).first()
        print(f"Playlist {first_playlist.name}")
        for track in first_playlist.tracks:
            print(" - ", track.name)
import db
import model
from model import Users
from sqlalchemy.orm import sessionmaker

if __name__ == '__main__':
    engine = db.get_engine()
    model.Base.metadata.create_all(engine)

    # Open a session bound to the engine.
    session = sessionmaker(bind=engine)()
    # Create a new Users row, stage it, commit, then release the session.
    session.add(Users(id='6', name='Bob'))
    session.commit()
    session.close()
def __init__(self, policies_yml, db_url=""):
    """Set up the database engine and the policy queue.

    :param policies_yml: path to the policies YAML consumed by PolicyQueue
    :param db_url: explicit database URL; when empty, the URL is read
        from the environment instead
    """
    url = db_url if db_url else db.get_db_url_from_env()
    self.db_engine = db.get_engine(url)
    self.policy_queue = policyqueue.PolicyQueue(policies_yml)
def get_session():
    """Return the request-scoped DB session, creating it on first access.

    The session is cached on flask.g, so repeated calls within a single
    request share one session.
    """
    if hasattr(g, 'db'):
        return g.db
    g.db = db.get_session(db.get_engine(app.config))
    return g.db
# import uuid from sqlalchemy import Column, ForeignKey from sqlalchemy.orm import sessionmaker, relationship, deferred from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.dialects.mysql import BIGINT, BINARY, BIT, BLOB, BOOLEAN, CHAR, \ DATE, DATETIME, DECIMAL, DECIMAL, DOUBLE, ENUM, FLOAT, INTEGER, LONGBLOB, \ LONGTEXT, MEDIUMBLOB, MEDIUMINT, MEDIUMTEXT, NCHAR, NUMERIC, NVARCHAR, \ REAL, SET, SMALLINT, TEXT, TIME, TIMESTAMP, TINYBLOB, TINYINT, TINYTEXT, \ VARBINARY, VARCHAR, YEAR import db Base = declarative_base(bind=db.get_engine('avcenter')) class Current_Local (Base): __tablename__='current_local' hostname = Column('hostname',VARCHAR(50),primary_key=False) database_ossim = Column('database_ossim',VARCHAR(50),primary_key=False) ocs_db = Column('ocs_db',VARCHAR(50),primary_key=False) ha_password = deferred(Column('ha_password',VARCHAR(50),primary_key=False)) ntp_server = Column('ntp_server',VARCHAR(50),primary_key=False) admin_ip = Column('admin_ip',VARCHAR(15),primary_key=False) netflow = Column('netflow',VARCHAR(50),primary_key=False) ha_heartbeat_comm = deferred(Column('ha_heartbeat_comm',VARCHAR(50),primary_key=False)) upgrade = Column('upgrade',VARCHAR(50),primary_key=False) uuid = Column('uuid',VARCHAR(36),primary_key=False) server_license = Column('server_license',VARCHAR(50),primary_key=False)