def create_schema(db_name=None):
    """Create the database and the schema for every model table.

    Args:
        db_name: name of the database to create; None selects the
            library's default (interpretation delegated to
            database.create_db / database.connect).
    """
    database.create_db(db_name)
    conn = database.connect(db_name)
    try:
        # One cursor/transaction scope covers all three schema creations.
        with conn as cur:
            Package.create_schema(cur)
            Object.create_schema(cur)
            Symbol.create_schema(cur)
    finally:
        # Fix: close the handle even when schema creation raises,
        # so a failed run does not leak the connection.
        conn.close()
def do_work(pkg, ctx):
    """Analyze one package and time the work.

    ``ctx`` is a one-element mutable list acting as a per-worker cache:
    the database connection is opened lazily on first use and reused on
    later calls. Returns a ``Stats`` record with the elapsed seconds.

    NOTE(review): reads the module-level ``args`` — presumably the parsed
    CLI namespace; confirm it is set before workers start.
    """
    started = datetime.datetime.now()
    # Lazily open (and memoize) this worker's connection.
    conn = ctx[0]
    if conn is None:
        conn = database.connect(args.db_name)
        ctx[0] = conn
    find_interposes(pkg, conn, args.verbose)
    elapsed = (datetime.datetime.now() - started).total_seconds()
    return Stats(elapsed)
from lib.database import create_tables, connect

# Bootstrap: open the default database connection, then create the tables.
conn = connect()
create_tables(conn)
import discord
from lib.common import parse_config
from lib import database
from bot.util import parse_message
from bot.commands import execute_command
from bot.trigger_timer import main_loop

# Request every gateway intent; the handlers below rely on message content.
intents = discord.Intents.all()
client = discord.Client(intents=intents)

config = parse_config('discord')
db_connection = database.connect()


@client.event
async def on_ready():
    """Announce readiness, then hand control to the periodic trigger loop."""
    print('------------------')
    print(f'bot ready {client.user.name}')
    print('------------------')
    await main_loop(client, db_connection)


@client.event
async def on_message(message):
    """Parse a chat message and dispatch it to the command executor."""
    parsed = parse_message(message.content)
    if parsed is None:
        # Not a recognized command — ignore.
        return
    await execute_command(client, message, parsed, db_connection)


client.run(config['token'])
def main():
    """CLI entry point: analyze Debian packages and record interposes.

    Parses arguments, ensures DB indices exist, loads the requested
    packages, processes them in parallel, optionally prints statistics,
    and re-raises any worker exceptions.
    """
    parser = argparse.ArgumentParser(
        description=
        "Analyze contents of Debian binary packages and store them to database."
    )
    parser.add_argument('--verbose', '-v', action='count',
                        help="Print diagnostic info.", default=0)
    parser.add_argument('--db-name', help="Database name.", default='syms')
    parser.add_argument('-j', dest='num_threads', help="Number of threads.",
                        type=int, default=2)
    parser.add_argument('--stats', dest='stats',
                        help="Print statistics before exit.",
                        default=False, action='store_true')
    parser.add_argument('--no-stats', dest='stats',
                        help="Do not print statistics before exit.",
                        action='store_false')
    parser.add_argument('--allow-errors', dest='allow_errors',
                        help="Process packages which had errors.",
                        default=False, action='store_true')
    parser.add_argument('--no-allow-errors', dest='allow_errors',
                        help="Do not process packages which had errors.",
                        action='store_false')
    parser.add_argument(
        'pkgs', metavar='PKGS', nargs='*',
        help="Optional list of packages to analyze (default is to analyze all)."
    )
    parser.set_defaults(stats=True)
    args = parser.parse_args()

    set_prog_name(os.path.basename(__file__))

    conn = database.connect(args.db_name)
    try:
        with conn as cur:
            Package.create_indices(cur)
            Object.create_indices(cur)
            Symbol.create_indices(cur)
        if not args.pkgs:
            with conn as cur:
                pkgs = Package.deserialize_all(cur)
        else:
            pkgs = []
            for pkg_name in args.pkgs:
                with conn as cur:
                    pkgs.append(Package.deserialize(cur, pkg_name))
    finally:
        # Fix: close the main-thread connection even if deserialization
        # raises; workers open their own connections.
        conn.close()

    if not args.allow_errors:
        pkgs = [p for p in pkgs if not p.has_errors]

    def do_work(pkg, ctx):
        """Worker: analyze one package, reusing a per-worker connection.

        ``ctx`` is a one-element list caching the lazily opened connection.
        Returns a ``Stats`` record with the elapsed seconds.
        """
        started = datetime.datetime.now()
        if ctx[0] is None:
            ctx[0] = database.connect(args.db_name)
        find_interposes(pkg, ctx[0], args.verbose)
        # Renamed from ``time`` to avoid shadowing the stdlib module name.
        elapsed = (datetime.datetime.now() - started).total_seconds()
        return Stats(elapsed)

    res_lists, exc_lists = parallel_map.map(do_work, pkgs, args.num_threads)

    if args.stats:
        print("Number of packages: %d" % len(pkgs))
        results = [r for lst in res_lists for r in lst]
        # Fix: guard against an empty package list — max()/mean() raise
        # on empty sequences.
        if results:
            # Wall time is bounded by the busiest worker's total.
            wall_time = max(sum(r.time for r in lst) for lst in res_lists)
            minutes, seconds = divmod(int(wall_time), 60)
            # Fix: zero-pad seconds ("2:05", not "2:5").
            print("Wall time: %d:%02d" % (minutes, seconds))
            times = [r.time for r in results]
            print("Average time to process a package: %g sec." % mean(times))
    # Propagate any exceptions captured in the worker threads.
    parallel_map.raise_errors(exc_lists)
def db():
    """Return the shared database connection, opening it lazily.

    The connection is cached on ``g`` so repeated calls within the same
    context reuse a single connection.

    NOTE(review): ``g`` looks like Flask's application-context proxy
    (supports ``in`` and attribute assignment) — confirm against imports.
    """
    if 'db' not in g:
        g.db = database.connect()
    return g.db