def main():
    dbc = DBController()
    dec = Decoder([TLEDecoder(), TLEListDecoder()])

    dlc = None
    try:
        dlc = Downloader()
    except DownloaderError as e:
        print("failed to initialize downloader: " + str(e))
        sys.exit(1)

    for esat in dlc.get_data():
        sats = []
        try:
            sats = dec.decode(esat.fmt, esat.data)
        except DecoderError as e:
            print("failed to decode: " + str(e))

        try:
            for sat in sats:
                dbc.add(sat)
            dbc.sync()
        except DBError as e:
            print("failed to insert into db: " + str(e))
def start(update, context):
    """/start command"""
    context.user_data["database"] = DBController(config["database"]["path"])
    bot_info = str(context.bot.get_me())
    message = "Select an action"
    update.message.reply_text(message, reply_markup=reply_markup)
    return SELECTING_ACTION
def main():
    parser = OptionParser()
    logger_options(parser)
    parser.add_option(
        "--skip-connection-check", dest='skip_connection_check',
        default=False, action="store_true",
        help="Don't check open connections.")
    parser.add_option(
        "--kill-connections", dest='kill_connections',
        default=False, action="store_true",
        help="Kill non-system connections instead of reporting an error.")
    parser.add_option(
        '--pgbouncer', dest='pgbouncer',
        default='host=localhost port=6432 user=pgbouncer',
        metavar='CONN_STR',
        help="libpq connection string to administer pgbouncer")
    parser.add_option(
        '--dbname', dest='dbname', default='launchpad_prod',
        metavar='DBNAME', help='Database name we are updating.')
    parser.add_option(
        '--dbuser', dest='dbuser', default='postgres',
        metavar='USERNAME', help='Connect as USERNAME to databases')
    (options, args) = parser.parse_args()
    if args:
        parser.error("Too many arguments")
    if options.kill_connections and options.skip_connection_check:
        parser.error(
            "--skip-connection-check conflicts with --kill-connections")

    log = logger(options)
    controller = DBController(
        log, options.pgbouncer, options.dbname, options.dbuser)

    if options.kill_connections:
        preflight_check = KillConnectionsPreflight(log, controller)
    elif options.skip_connection_check:
        preflight_check = NoConnectionCheckPreflight(log, controller)
    else:
        preflight_check = DatabasePreflight(log, controller)

    if preflight_check.check_all():
        log.info('Preflight check succeeded. Good to go.')
        return 0
    else:
        log.error('Preflight check failed.')
        return 1
def main():
    parser = OptionParser()
    parser.add_option(
        '--pgbouncer', dest='pgbouncer',
        default='host=localhost port=6432 user=pgbouncer',
        metavar='CONN_STR',
        help="libpq connection string to administer pgbouncer")
    parser.add_option(
        '--dbname', dest='dbname', default='launchpad_prod',
        metavar='DBNAME', help='Database name we are updating.')
    parser.add_option(
        '--dbuser', dest='dbuser', default='postgres',
        metavar='USERNAME', help='Connect as USERNAME to databases')
    logger_options(parser, milliseconds=True)
    (options, args) = parser.parse_args()
    if args:
        parser.error("Too many arguments")

    # In case we are connected as a non-standard superuser, ensure we
    # don't kill our own connections.
    SYSTEM_USERS.add(options.dbuser)

    log = logger(options)
    controller = DBController(
        log, options.pgbouncer, options.dbname, options.dbuser)

    try:
        # Master connection, not running in autocommit to allow us to
        # rollback changes on failure.
        master_con = psycopg2.connect(str(controller.master))
    except Exception as x:
        log.fatal("Unable to open connection to master db (%s)", str(x))
        return 94
def setUp(self):
    self.dbController = DBController("db_name", "db_user", "db_pass",
                                     "db_host", 5432)
    self.mock_conn = MockConnection()
    self.dbController.conn = self.mock_conn
    self.mock_cursor = self.dbController.conn.cursor()
class TestDBController(unittest.TestCase):
    """ Test cases to test class DBController. """

    def setUp(self):
        self.dbController = DBController("db_name", "db_user", "db_pass",
                                         "db_host", 5432)
        self.mock_conn = MockConnection()
        self.dbController.conn = self.mock_conn
        self.mock_cursor = self.dbController.conn.cursor()

    def test_insert_author(self):
        """ Test insert author. """
        author = create_author(123)
        self.dbController.insert_author(author)
        self.mock_cursor.execute.assert_called_with(
            "INSERT INTO authors (author_id, author_name, aliases) VALUES (%s, %s, %s)",
            (123, 'frank guo', 'F. G| S.G|Shiqiang Guo'))

    def test_insert_paper(self):
        """ Test insert paper. """
        paper = create_paper(123)
        self.dbController.insert_paper(paper)
        self.mock_cursor.execute.assert_called_with(
            "INSERT INTO papers (title, author_ids, abstract, published_year, venue) VALUES (%s, %s, %s, %s, %s) RETURNING paper_id",
            ("title of paper 123", "1234|5678", "abstract", 2008, "venue"))

    def test_getAuthorInfo_happycase(self):
        """ Test get author info in the happy case. """
        get_author_result = [(123, "frank guo", "F. G| S.G|Shiqiang Guo")]
        get_papers_result = [
            (1000, "title of paper 1000", "1234|5678", "abstract", 2008, "venue"),
            (1001, "title of paper 1001", "1234|5678", "abstract", 2008, "venue")
        ]
        self.mock_cursor.fetchall.side_effect = [
            get_author_result, get_papers_result
        ]
        author = self.dbController.getAuthorInfo(123)
        self.assertEqual(author.author_id, 123)
        self.assertEqual(len(author.papers), 2)
        self.assertEqual(author.papers[0].paper_id, 1000)
        self.assertEqual(author.papers[1].paper_id, 1001)

    def test_getAuthorInfo_not_found(self):
        """ Test get author info when the author is not in the database. """
        self.mock_cursor.fetchall.side_effect = [None]
        author = self.dbController.getAuthorInfo(123)
        self.assertTrue(author is None)

    def test_getAuthorInfo_with_error(self):
        """ Test get author info when a DB error is raised. """
        self.mock_cursor.fetchall.side_effect = psycopg2.DataError()
        author = self.dbController.getAuthorInfo(123)
        self.mock_cursor.execute.assert_called_with("ROLLBACK")
        self.assertTrue(author is None)

    @patch('psycopg2.connect')
    def test_connect(self, connect_call):
        """ Test connect to database. """
        connect_call.return_value = self.mock_conn
        self.dbController.connect_to_db()
        connect_call.assert_called_with(database="db_name",
                                        user="******",
                                        password="******",
                                        host="db_host",
                                        port=5432)
        self.assertTrue(self.dbController.conn is not None)
        self.assertEqual(self.dbController.conn, self.mock_conn)
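These tests rely on a MockConnection test double that is not shown in the excerpt. A minimal sketch that would satisfy them, assuming the double only needs to hand back the same MagicMock cursor on every cursor() call so that assert_called_with and side_effect line up:

from unittest.mock import MagicMock


class MockConnection:
    """Hypothetical stand-in for a psycopg2 connection (not from the source)."""

    def __init__(self):
        # One shared MagicMock cursor, so the cursor grabbed in setUp is the
        # same object DBController uses inside insert_author/getAuthorInfo.
        self._cursor = MagicMock()

    def cursor(self):
        return self._cursor

    def commit(self):
        pass

    def close(self):
        pass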
import os
import pickle

import redis
# flask / flask_restful / dbcontroller imports assumed from the Flask, Api,
# fields, and DBController usage below; they are not part of the excerpt.
from flask import Flask
from flask_restful import Api, fields
from dbcontroller import DBController

CACHE_TTL_SECONDS = 60

app = Flask(__name__)
api = Api(app)

# Get DB configurations.
DB_HOST = 'db'
DB_USER = os.environ['POSTGRES_USER']
DB_PASS = os.environ['POSTGRES_PASSWORD']
DB_NAME = os.environ['POSTGRES_DB']
DB_PORT = '5432'

dbController = DBController(DB_NAME, DB_USER, DB_PASS, DB_HOST, DB_PORT)
cache = redis.Redis(host='paper_cache', port=6379)

""" Define how to render Author instance. """
author_list_fields = {
    'id': fields.Integer(attribute='author_id'),
    'name': fields.String(attribute='author_name'),
    'aliases': fields.List(fields.String)
}

""" Define how to render Paper instance. """
paper_list_fields = {
    'paper_id': fields.Integer,
    'title': fields.String,
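A sketch of how the Redis client, pickle, and CACHE_TTL_SECONDS defined in the excerpt could be combined into a cache-aside lookup; the helper name, the key format, and the call to getAuthorInfo are illustrative assumptions, not part of the source.

# Hypothetical cache-aside helper built on the objects defined above.
def get_author_cached(author_id):
    key = 'author:{}'.format(author_id)
    cached = cache.get(key)
    if cached is not None:
        # Cache hit: rebuild the Author object from the pickled bytes.
        return pickle.loads(cached)
    author = dbController.getAuthorInfo(author_id)
    if author is not None:
        # Cache miss: store the result for CACHE_TTL_SECONDS.
        cache.set(key, pickle.dumps(author), ex=CACHE_TTL_SECONDS)
    return author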
import os
import csv

from domain_classes import Author, Paper
from dbcontroller import DBController

AUTHORS_CSV_FILE = 'authors.csv'
PAPERS_CSV_FILE = 'papers.csv'

author_ids_set = set()

DB_HOST = 'db'
DB_USER = os.environ['POSTGRES_USER']
DB_PASS = os.environ['POSTGRES_PASSWORD']
DB_NAME = os.environ['POSTGRES_DB']
DB_PORT = '5432'

dbController = DBController(DB_NAME, DB_USER, DB_PASS, DB_HOST, DB_PORT)
dbController.connect_to_db()


def backfill_authors():
    """ Backfill authors into author table. """
    with open(AUTHORS_CSV_FILE) as csv_file:
        csv_reader = csv.reader(csv_file, quotechar='"', delimiter=',',
                                quoting=csv.QUOTE_ALL, skipinitialspace=True)
        line_count = 0
        for row in csv_reader:
today = datetime.date.today().strftime('%Y-%m-%d')
current_year = today.split('-')[0]

parser = argparse.ArgumentParser()
parser.add_argument('-t', '--team', default='NYY')
parser.add_argument('-d', '--date', default=today)
args = parser.parse_args()

year = args.date.split('-')[0]

# print("Gathering game previews...")
# scrape.game_previews()
# scrape.espn_preview_text(args.date, args.team)

# Create database controller object
dbc = DBController()

# print("Gathering game previews...")
# scrape.game_previews()

# Query upcoming game and populate data
game = dbc.get_team_game_preview(team=args.team, date=args.date)
game_data = extract_game_data(game)

if game_data:
    home = game_data['home']
    away = game_data['away']
    state = game_data['preview'][0]['gameData']['status']['detailedState']
else:
    raise ValueError("NO GAME FOUND")
import sqlite3
import argparse

from dbcontroller import DBController
from settings import DB_FILENAME
from models import Admin

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("id")
    args = parser.parse_args()
    new_id = args.id

    db = DBController(DB_FILENAME)
    db.add_admin(Admin(new_id))
    print("OK! New admin successfully added.")
# Bot and Dispatcher come from aiogram; this import is assumed, it is not
# shown in the excerpt.
from aiogram import Bot, Dispatcher
from aiogram.utils import executor
from aiogram.dispatcher.filters import IsReplyFilter

from dbcontroller import DBController
from updater import update
from custom_filters import HasArg, IsAdmin
from statistics import Statistics
from models import User, Admin
from cfparser import parse_problems_count, check

import asyncio
import settings
import logging

bot = Bot(settings.API_TOKEN)
dp = Dispatcher(bot)
db = DBController(settings.DB_FILENAME)

logging.basicConfig(level=logging.INFO)


@dp.message_handler(commands=['start', 'help'])
async def hello(message):
    msg_text = "Hello, I am challenge statistics bot!\n \
        Which commands do I know?\n\n \
        /start or /help -> use these commands to get this help\n \
        /list -> use this command to get the participants list\n \
        /stat -> to get statistics\n\n \
        Bot created by @WinDuz\n \
        You can contribute to the project \
        <a href='https://github.com/ilyas-kalandar/challengestatistics'> \
        here</a>"
def boot_database(self, myid=""):
    if DB.dbcontroller is None:
        DB.dbcontroller = DBController(myid)
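A self-contained sketch of the lazy-singleton pattern this method implements; the DB wrapper class, its dbcontroller class attribute default, and the import path are assumptions, only boot_database itself mirrors the excerpt.

from dbcontroller import DBController  # import path assumed, as in the other excerpts


class DB:
    # Shared controller for the whole process, created on first use
    # (hypothetical wrapper; only boot_database mirrors the excerpt above).
    dbcontroller = None

    def boot_database(self, myid=""):
        if DB.dbcontroller is None:
            DB.dbcontroller = DBController(myid)


# Every DB instance ends up talking to the same controller:
first, second = DB(), DB()
first.boot_database("node-1")    # creates DB.dbcontroller
second.boot_database("node-2")   # no-op: the existing controller is kept
assert first.dbcontroller is second.dbcontroller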
from tqdm import tqdm
import requests
import argparse
import re
import json
import datetime
import pickle
import inspect
import pandas as pd
from io import StringIO

from dbcontroller import DBController
from utils import open_url, convert_name, find_missing_dates, parse_types

# No push methods in DBController, so do it manually for now
dbc = DBController()
db = dbc._db


def fangraphs(state, year):
    """ Scrape data from fangraphs.com """
    tid = 0  # Scrape all teams for now, add individual teams later if needed

    url = """http://www.fangraphs.com/leaders.aspx?pos=all&stats={0}\
          &lg=all&qual=0&type=8&season={1}\
          &month=0&season1={1}\
          &ind=0&team={2}&page=1_1000"""\
          .format(state, year, tid)\
          .replace(' ', '')