def get_db() -> Database:
    """Return the Database object appropriate for the current mode.

    In test mode the TEST_DB_URL setting is required; the process is
    aborted if it is missing. Otherwise the regular DB_URL is used.
    """
    if not settings.TESTING:  # pragma: no cover
        return Database(settings.DB_URL)
    if settings.TEST_DB_URL is None:  # pragma: no cover
        logger.critical('Test database not set')
        sys.exit(1)
    return Database(settings.TEST_DB_URL)
def _create_database(self, db_name):
    """Create a new local MySQL database named *db_name*.

    Connects to the MySQL server (no schema selected) via the records
    ``Database`` class and issues ``CREATE DATABASE``.

    :param db_name: name of the database to create. Must be a plain
        identifier; DDL statements cannot take bound parameters, so the
        name is validated instead of being interpolated blindly.
    :raises ValueError: if *db_name* is not a safe identifier.
    """
    # CREATE DATABASE cannot be parameterized -- reject anything that is
    # not a plain Python-style identifier to prevent SQL injection.
    if not db_name.isidentifier():
        raise ValueError(f'Unsafe database name: {db_name!r}')
    # Build the URL in one piece: the original used a backslash line
    # continuation inside the f-string, which can silently embed the
    # continuation line's leading whitespace into the credentials.
    url = f'mysql+pymysql://{MYSQL_ID}:{MYSQL_PW}@localhost/?charset=utf8'
    connection = Database(url)
    try:
        connection.query(f'CREATE DATABASE {db_name} CHARACTER SET "utf8"')
    finally:
        # Always release the server connection, even if the query fails.
        connection.close()
def database_connection():
    """Open a connection to the local MySQL database.

    Reads the database name from 'db_name.txt' and connects via
    records.Database.

    :returns: a ``Database`` object wrapping the open connection.
    """
    with open('db_name.txt', "r") as f:
        # strip() guards against a trailing newline in the file, which
        # would otherwise end up inside the connection URL.
        db_name = f.read().strip()
    url = (f'mysql+pymysql://{MYSQL_ID}:{MYSQL_PW}@localhost/'
           f'{db_name}?charset=utf8')
    return Database(url)
def cli():
    """Entry point: run a SQL file against a database and upload the
    result as CSV to Box."""
    # docopt parses this usage string at runtime, so its layout is
    # load-bearing -- do not reflow it.
    cli_docs = """Box Exporter: Take that data and put it in a box.

    Usage:
        boxex <filepath> <filename> [--url=<url>]
        boxex (-h | --help)

    Options:
        -h --help     Show this screen
        --url=<url>   The database URL to use. Defaults to $DATABASE_URL

    Notes:
        - While you may specify a database connection string with --url,
          box-exporter will automatically default to the value of
          $DATABASE_URL, if available.
        - filepath is intended to be the path of a SQL file.
        - All box credentials are set via environmental variables.
          Make sure you have the following environment variables set or
          a KeyError will occur:
            $BOX_CLIENT_ID
            $BOX_CLIENT_SECRET
            $BOX_ENTERPRISE_ID
            $BOX_RSA_PRIVATE_KEY_PASS
            $BOX_RSA_PRIVATE_KEY_PATH
            $BOX_JWT_KEY_ID
            $BOX_FOLDER_ID
    """
    arguments = docopt(cli_docs)
    # Create the database object
    # NOTE(review): when --url is omitted this passes None; presumably
    # records.Database then falls back to $DATABASE_URL -- confirm.
    db = Database(arguments['--url'])
    # Authenticate the box client
    client = BoxClient()
    queryfile = arguments['<filepath>']
    filename = arguments['<filename>']
    # Execute the query, if it is found.
    if os.path.isfile(queryfile):
        rows = db.query_file(queryfile).all()
        if rows:
            # grab the first row and use keys as fieldnames
            fieldnames = rows[0].as_dict().keys()
            client.upload(to_csv(fieldnames, rows), filename)
    else:
        print('There was no query file that was found')
def __init__(self):
    """Wire up the application's dependency graph."""
    config = Configuration()
    self.configuration = config
    self.database = Database(config.database.url)

    # Game side: repository feeds both the command and query handlers.
    game_repo: GameRepository = MySQLGameRepository(self.database)
    self.game_repository = game_repo
    self.create_game_command_handler = CreateGameCommandHandler(
        GameCreator(game_repo))
    self.search_game_query_handler = SearchGameQueryHandler(game_repo)

    # Query bus dispatches SearchGameQuery to its handler by class name.
    self.query_bus: QueryBus = SimpleQueryBus({
        SearchGameQuery.__name__: self.search_game_query_handler,
    })

    # Guess side: the creator needs the query bus to look up games.
    guess_repo: GuessRepository = MySQLGuessRepository(self.database)
    self.guess_repository = guess_repo
    self.create_guess_command_handler = CreateGuessCommandHandler(
        GuessCreator(guess_repo, self.query_bus))
def db_connection(db_config: dict):
    """Yield a Database connection built from *db_config*.

    Generator-style fixture/dependency: yields the open connection and
    guarantees it is closed afterwards.

    :param db_config: mapping containing the 'public-transport-stops'
        connection URL.
    """
    connection = Database(db_config['public-transport-stops'])
    try:
        yield connection
    finally:
        # The original skipped close() if the consumer raised while the
        # generator was suspended at yield; try/finally fixes the leak.
        connection.close()
import os

import responder
from records import Database

# Fail fast if the database URL is not configured.
DATABASE_URL = os.environ["DATABASE_URL"]

# Pass the URL explicitly: the original called Database() and silently
# relied on records' implicit $DATABASE_URL fallback, leaving the
# variable above unused.
db = Database(DATABASE_URL)
api = responder.API()


def migrate(db):
    """Run the schema/seed SQL against the given database."""
    db.query_file("../contacts.sql")


@api.route("/")
async def greet_world(req, resp):
    """Render the index template with every row from contacts."""
    contacts = db.query("SELECT * FROM contacts")
    resp.text = api.template("index.html", contacts=contacts)


if __name__ == "__main__":
    migrate(db=db)
    api.run()
from json import load

from records import Database

# Use a context manager so the file handle is closed promptly;
# the original open() call leaked it.
with open('./reviews.json') as f:
    reviews = load(f)

db = Database('postgres:///pitchfork-reviews')

for review in reviews:
    album = review['album']
    # Insert the album first so the review row can reference its id.
    album_id = db.query_file('./insert_album.sql', **album).first().id
    db.query_file("insert_review.sql", album_id=album_id)
from urllib.parse import quote_plus

from records import Database

from common.config import DATABASE


def _build_database_url(
    database=DATABASE,
    server=r'.\SQLEXPRESS',  # raw string: '\S' was an invalid escape
    driver='SQL Server',
):
    """Build an mssql+pyodbc URL from an ODBC connection string.

    :param database: database name (defaults to the configured DATABASE).
    :param server: SQL Server instance name.
    :param driver: ODBC driver name.
    :returns: SQLAlchemy-style URL with the ODBC parameters URL-quoted.
    """
    params = quote_plus(
        f'DRIVER={driver};SERVER={server};DATABASE={database};')
    database_url = f'mssql+pyodbc:///?odbc_connect={params}'
    return database_url


database_url = _build_database_url()
db = Database(database_url)
    # NOTE(review): these two statements are the tail of a function whose
    # `def` line lies before this chunk; presumably it receives a `row`
    # mapping with a 'github' key ('owner/name') -- confirm upstream.
    file_name = '__'.join(row['github'].split('/'))
    return os.path.exists('data/readmes/' + file_name)


def path(github):
    # Local cache path for a repo's README: 'owner/name' -> 'owner__name'.
    file_name = '__'.join(github.split('/'))
    return 'data/readmes/' + file_name


def has_file(row):
    # True when the README for this row's repo has already been downloaded.
    return os.path.exists(path(row.github))


if not os.path.isfile('data/data.pkl'):
    db = Database(os.environ['DATABASE'])
    rows = db.query('select github, stars, time_alive from libs')
    # Keep only repos whose README is cached locally.
    filtered = filter(has_file, rows)
    data = {
        'y': [],
        'x1': [],
        'x2': [],
    }
    for lib in filtered:
        data['y'].append(lib.stars)
        data['x1'].append(parse_readme(lib.github))
        # time_alive is divided by 1000 -- presumably stored in
        # milliseconds and converted to seconds; TODO confirm units.
        data['x2'].append(int(lib.time_alive / 1000))
import os

from records import Database
from flask import Flask

# Application and database are created at import time so that the
# routes module can import them from this package.
app = Flask(__name__)
db = Database(os.environ["DATABASE_URL"])

# Imported last on purpose (standard Flask pattern): routes needs the
# `app`/`db` objects above, so a top-of-file import would be circular.
from app import routes
def __str__(self):
    """Readable representation showing the record's column/value mapping."""
    return "<Record {}>".format(self.as_dict())


# Monkey-patch records: give connections/databases batched iteration and
# make Record print readably.
# NOTE(review): Connection gets `iquery` while Database gets `i2query`
# -- confirm the asymmetry is intentional.
Connection.iquery = iquery
Database.iquery = i2query
Record.__str__ = __str__
Record.__repr__ = __str__


if __name__ == '__main__':
    import os

    import psutil

    url = "your-url-engine"

    def current_memory_mb():
        """Resident set size of this process, in MiB."""
        return psutil.Process(os.getpid()).memory_info().rss / 1024 / 1024

    print(current_memory_mb())
    database = Database(db_url=url)
    r = database.iquery("select * from your-schema.your-table", batches=100)
    print(current_memory_mb())
    for obj in r:
        # batch-wise computation
        # do_something_partial_by_batch(...)
        pass
    print(current_memory_mb())
def __init__(self):
    """Load configuration and open the database connection."""
    cfg = load_config()
    # Assemble the SQLAlchemy-style URL from the individual settings.
    url = (
        f"{cfg['sql_type']}://{cfg['sql_user']}:{cfg['sql_pass']}"
        f"@{cfg['sql_host']}/{cfg['sql_db']}"
    )
    self._db = Database(url)