Example #1
File: cyvk.py Project: cydev/cyvk
def start(self):
    initialize_database(DB_FILE)
    s = gevent.spawn(self.run_forever)
    self.connect()
    self.dispatcher_loop()
    self.receiver_loop()
    self.probe_users()
    self.process_users()
    s.join()
Example #2
def main():
    # We create an argument parser
    arg_parser = arguments.create_argument_parser()

    # We parse the arguments
    args = arg_parser.parse_args(sys.argv[1:])
    arguments.process_arguments(args, arg_parser)

    # If the verbose mode is on, we display the arguments
    if args.verbose:
        arguments.print_arguments(args)

    # We open the database file where the proxies will be stored
    connection, cursor = database.initialize_database(args.database_file)

    try:
        # We generate the proxies
        for proxy in parser.generate_proxy(args):
            # And we store them in the database
            database.insert_in_database(cursor, proxy)
    except KeyboardInterrupt:
        if args.verbose:
            print('')
            print('[warn] received interruption signal')

    # We save the changes made to the database, and close the file
    connection.commit()
    connection.close()

    return 0
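
The example above assumes a small database helper module. A minimal sketch of what database.initialize_database and database.insert_in_database might look like with sqlite3 (the table layout, column names and the (ip, port, protocol) proxy tuple are assumptions, not taken from the project):

import sqlite3

def initialize_database(path):
    # Open (or create) the SQLite file and make sure a proxies table exists.
    connection = sqlite3.connect(path)
    cursor = connection.cursor()
    cursor.execute(
        'CREATE TABLE IF NOT EXISTS proxies (ip TEXT, port INTEGER, protocol TEXT)'
    )
    return connection, cursor

def insert_in_database(cursor, proxy):
    # proxy is assumed to be an (ip, port, protocol) tuple, matching the order
    # used by the text-file branch in a later example.
    cursor.execute('INSERT INTO proxies VALUES (?, ?, ?)', proxy)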
Example #3
def setUpClass(self):
    settings = TestingConfig.get_database_from_url(TestingConfig.DATABASE_URL)
    self.db = initialize_database(settings)
    self.cache = fakeredis.FakeStrictRedis()
    self.cache.set('bov-eod-scrapper:last_update', dt(2016, 4, 10))
    sql_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'res', 'setup_test_database.sql')
    drop_database(self.db)
    run_sql_file(sql_file, self.db)
Example #4
def main(target_dir: Path):
    logging.info("Initializing database.")
    initialize_database()
    logging.info(f"Collecting photos and sub-directories of \"{target_dir}\".")
    collections = scan(str(target_dir))

    session = get_session()

    for collection in collections:
        collection_record = stage_collection_in_database(collection, session)
        stage_images_in_database(collection.images, collection_record, session)
        logging.info(
            f"{collection.name} : {len(collection.images)} : {collection.path}"
        )

    logging.info("Adding new items to database.")
    session.flush()
    session.commit()
    logging.info("Complete.")
Example #5
def main():
    # We create an argument parser
    arg_parser = arguments.create_argument_parser()

    # We parse the arguments
    args = arg_parser.parse_args(sys.argv[1:])
    arguments.process_arguments(args, arg_parser)

    # If the verbose mode is on, we display the arguments
    if args.verbose:
        arguments.print_arguments(args)

    if args.database_file is not None and args.text_file is None:
        # We open the database file where the proxies will be stored
        connection, cursor = database.initialize_database(args.database_file)

        try:
            # We generate the proxies
            for proxy in parser.generate_proxy(args):
                # And we store them in the database
                database.insert_in_database(cursor, proxy)
        except KeyboardInterrupt:
            if args.verbose:
                print('')
                print('[warn] received interruption signal')

        # We save the changes made to the database, and close the file
        connection.commit()
        connection.close()

        return 0

    # Write to the text file (it takes priority when both outputs are given)
    elif args.text_file is not None:
        with open(args.text_file, 'w') as tf:
            for proxy in parser.generate_proxy(args):
                proxy_line = f'{proxy[2].lower()}://{proxy[0]}:{proxy[1]}\n'
                tf.write(proxy_line)
    elif args.database_file is None and args.text_file is None:
        return 'Please specify output file!'
Example #6
def menu():
    connection = database.initialize_database()

    while (user_option := input(MENU_PROMPT)) != '5':
        if user_option == '1':
            enter_seed_information(connection)
        elif user_option == '2':
            seeds = database.get_seed_info(connection)
            for seed in seeds:
                print(seed)
        elif user_option == '3':
            name = input('Enter Seed Name:')
            seeds = database.get_seed_info_by_name(connection, name)
            for seed in seeds:
                print(seed)
        elif user_option == '4':
            seed_type = input('Enter Seed Type:')
            seeds = database.get_seed_info_by_type(connection, seed_type)
            for seed in seeds:
                print(seed)
        else:
            print('Invalid Option, try again!!!')
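
The menu above relies on a MENU_PROMPT constant and helper functions defined elsewhere in the project. A hypothetical sketch of two of them, assuming database.initialize_database() returns a sqlite3 connection and a seeds(name, type) table exists (names and schema are assumptions):

MENU_PROMPT = (
    '\n-- Seed Catalogue --\n'
    '1) Add seed  2) List all  3) Find by name  4) Find by type  5) Quit\n'
    'Your choice: '
)

def enter_seed_information(connection):
    # Hypothetical helper: prompt for a seed and insert it into the seeds table.
    name = input('Enter Seed Name:')
    seed_type = input('Enter Seed Type:')
    with connection:  # commits on success, rolls back on error
        connection.execute(
            'INSERT INTO seeds (name, type) VALUES (?, ?)', (name, seed_type)
        )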
Example #7
def store_last_update(end_date):
    # 'filename' is assumed to be a module-level file handle opened elsewhere.
    last_update = dt.strftime(end_date, DATE_FORMAT)
    filename.write(last_update)
    filename.flush()
    filename.close()


def update_database():
    """
        Method called to update the database.
    """
    update = Update()
    last_update = load_last_update()
    from_date = last_update + td(days=1)
    end_date = dt.now()
    update.update_daily_data(from_date, end_date)
    store_last_update(end_date)
    logger.info('Database EOD has been updated.')


if __name__ == '__main__':
    config = ProductionConfig()
    logger.info('Start update database...')
    logger.info('Config type: {type}'.format(type=config.CONFIG_TYPE))
    logger.info('Database URL : {url}'.format(url=config.DATABASE_URL))
    settings = config.get_database_from_url(config.DATABASE_URL)
    initialize_database(settings)

    update_database()
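
The update flow above calls load_last_update(), which is not shown in the excerpt; store_last_update writes dt.strftime(end_date, DATE_FORMAT) to a module-level file handle. A minimal sketch of the matching loader (the file name and lack of error handling are assumptions):

def load_last_update():
    # Hypothetical counterpart: read the stored date string back and parse it.
    with open('last_update.txt') as f:
        return dt.strptime(f.read().strip(), DATE_FORMAT)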
Example #8
def create_app():
    app = Flask(__name__)

    @app.route("/")
    def index():
        return render_template("index.html")

    @app.route("/api/all")
    def request_all_countries():
        """ get information about all countries """
        update_data()

        year_start = int(request.args.get("start", 0) or 0)
        year_end = int(request.args.get("end", 0) or 0)
        per_capita = bool(request.args.get("percapita", None))

        data = database.get_all_countries_data(year_start, year_end, per_capita)

        data_by_code = {}
        for code, year, value in data:
            if code not in data_by_code:
                data_by_code[code] = []
            data_by_code[code].append((year, value))

        return jsonify(data_by_code)

    @app.route("/api/country/<country_code>")
    def request_one_country(country_code):
        """ get information about a single country """
        update_data()

        year_start = int(request.args.get("start", 0) or 0)
        year_end = int(request.args.get("end", 0) or 0)
        per_capita = bool(request.args.get("percapita", None))

        data = database.get_one_country_data(
            country_code, year_start, year_end, per_capita
        )

        data_by_code = {data[0][0]: []}
        for code, year, value in data:
            data_by_code[code].append((year, value))

        return jsonify(data_by_code)

    @app.route("/api/meta/all")
    def request_country_metadata():
        """ get metadata of all countries """
        update_data()

        metadata = database.get_countries_info()

        response_data = {
            code: {"name": name, "region": region, "income": income, "notes": notes}
            for name, code, region, income, notes in metadata
        }
        return jsonify(response_data)

    database.initialize_database()
    update_data(force=True)

    return app
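
A typical way to run this application factory during development (a usage sketch, not part of the source; it assumes the module is executed directly and that database and update_data are importable):

if __name__ == "__main__":
    app = create_app()
    app.run(debug=True)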
Example #9
        runmode = sys.argv[1]
        if runmode == "sc":  # set the number of VNFs to chain
            num_VNFs = int(sys.argv[2])
        elif runmode == "case":  # measure a specific service chain
            list_VNFs = sys.argv[2]
        else:
            print "%s { vnf | sc [# of VNFs] | case [list of VNFs(,)] }" % sys.argv[
                0]
            exit(0)
    else:
        print "%s { vnf | sc [# of VNFs] | case [list of VNFs(,)] }" % sys.argv[
            0]
        exit(0)

# initialize database
database.initialize_database()
print "Initialized the Probius database"

if vnf_mgmt.is_athene_env():  # Athene
    # load analysis configurations
    analysis = load_analysis_configurations("config/analysis_athene.conf")
    print "Loaded analysis configurations"

    # load VNF configurations
    config = vnf_mgmt.load_VNF_configurations("config/vnf_athene.conf")
    print "Loaded VNF configurations"

    # update VNF configurations
    config = vnf_mgmt.update_VNF_configurations(config)
    print "Updated VNF configurations"
Example #10
def load_database():
    initialize_database(app.config["DB_URL"])
Example #11
        users = db.get_users(cursor)

    messages = jobs.generate_update_message(updates)

    for user in users:
        if user[1] in messages.keys():
            try:
                context.bot.send_message(chat_id=user[0],
                                         text=messages[user[1]])
            except telegram.error.Unauthorized:
                with db.connection() as cursor:
                    db.delete_user(cursor, user[0])


if __name__ == "__main__":
    logging.basicConfig(
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
        level=logging.INFO,
    )

    db.initialize_database()

    updater = Updater(token=API)
    dispatcher = updater.dispatcher

    add_handlers()

    updater.start_polling()
    updater.job_queue.run_daily(update_job, time(hour=9, minute=30))
    updater.idle()
Example #12
            f"All update done. Result can be found in table {pistoncup.__table__.name}."
        )
        session.close()


if __name__ == '__main__':
    # Download, extract and save the data files to disk
    utils.mkdir(directory=constants.DATA_FOLDER)
    for file in constants.FILES:
        download_and_save_gzip_from_url(source=file,
                                        destination=constants.DATA_FOLDER)

    # initialize objects to talk to database
    engine = utils.get_db_engine()
    if not database.is_initialized(engine):
        database.initialize_database(engine)

    if not check_db_filled(engine):
        vehicle_table = database.Vehicle
        mater_table = database.Mater

        # Read csv files into lists
        mater: List[List[str]] = list()
        vehicles: List[List[str]] = list()
        for file in os.listdir(constants.DATA_FOLDER):
            file_vehicle_list = read_data_from_csv(
                source=os.path.join(constants.DATA_FOLDER, file))
            mater, vehicles = split_into_long_and_normal_lists(
                file_vehicle_list, mater, vehicles)

        # Load data lists into database tables
Example #13
import os
import errno
import hashlib
from Queue import Empty as QueueEmptyException
from multiprocessing import Process, Queue
from scidb_server_interface import D3_DATA_THRESHOLD
from cache_worker_functions import compute_tile, print_tile_params, compute_first_tile, list_all_tiles, get_tile_counts, TileMetadata
import database as db
import json
from time import sleep

DEBUG = True

cache_root_dir = '_scalar_cache_dir2'
uri = 'postgresql+psycopg2://testuser:password@localhost:5432/test'
db.initialize_database(uri)

queries = ["select * from cali100"] # list of dataset names
data_thresholds = [D3_DATA_THRESHOLD] # list of thresholds (ints)

QUEUE_MARKER = "END"

sleeptime = 20
maxpool = 2

# hashes a string
def hash_it(s):
  h = hashlib.md5()
  h.update(str(s))
  return h.hexdigest()
Example #14
def init():
    return initialize_database(app.config['dsn'])