def main():
    """Initialise the database, parse CLI options, and start the Tornado IOLoop."""
    initialize_db()
    tornado.options.parse_command_line()
    # xheaders=True: trust X-Real-IP / X-Forwarded-For headers from a fronting proxy.
    tornado.httpserver.HTTPServer(Application(), xheaders=True).listen(options.port)
    # Fix: corrected the "Listenning" typo in the startup message.
    print("App started. Listening on %d" % options.port)
    tornado.ioloop.IOLoop.instance().start()
def main(port=8899):
    """
    Initialise the database and start the Tornado web server.

    :param port: TCP port to listen on.  Defaults to 8899 — the value that
        was previously hard-coded — so existing callers are unaffected.
    """
    database.initialize_db()
    logging.basicConfig(level=logging.INFO)
    # (Removed dead commented-out debug/mapping code that was left inline.)
    tornado.httpserver.HTTPServer(Application()).listen(port)
    print("Web is running %d" % port)
    tornado.ioloop.IOLoop.instance().start()
def __init__(self, host='127.0.0.1', port=55333, cert_file="./dummy_certs/certificate.pem", key_file="./dummy_certs/key.pem"):
    """
    TLS server endpoint backed by a local SQLite nonce store.

    :param host: interface to bind to
    :param port: TCP port to listen on
    :param cert_file: path to the PEM certificate (resolved to absolute)
    :param key_file: path to the PEM private key (resolved to absolute)
    """
    self.host = host
    self.port = port
    self.cert_file = os.path.abspath(cert_file)
    self.key_file = os.path.abspath(key_file)
    # NOTE(review): 'nonce.db' is created relative to the current working
    # directory — confirm that is intended.
    self.db = sqlite3.connect('nonce.db')
    self.context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    # Fix: load the absolute paths computed above; the original passed the
    # raw (possibly relative) arguments, defeating the abspath() resolution.
    self.context.load_cert_chain(self.cert_file, self.key_file)
    # Fix: use real OpenSSL cipher-suite names.  The original list used
    # "ECDHE-*-CHACHA20-POLY1305-SHA256" and "ECDHE-*-AES-256-GCM-SHA384",
    # which are not valid OpenSSL names and were silently ignored by
    # set_ciphers(), so only the two AES128 suites were actually enabled.
    self.context.set_ciphers(
        "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:"
        "ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:"
        "ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384"
    )
    initialize_db(self.db)
# --- tail of a checkout/order-placement route handler; its `def` line is
# --- not visible in this chunk.  It empties the session cart and redirects.
session['cart'] = []
session.modified = True  # the cart is a mutable list, so flag the session dirty
flash('Your Order Was Successful')
return redirect(url_for('orders'))


def cart_to_string(cart_contents):
    """Render cart items as a single '<qty> x <name>, ' string."""
    strings = ''
    for item in cart_contents:
        strings += '{} x {}, '.format(item['quantity'], item['name'])
    return strings


@app.route('/orders')
@login_required
def orders():
    """Show the logged-in user's past orders."""
    order_list = db.get_orders_by_id(current_user.user_id)
    args = {'active': 'orders', 'orders': order_list}
    return render_template('orders.html', args=args)


@login_manager.user_loader
def load_user(user_id):
    """Flask-Login hook: resolve a session user id to a user object."""
    return db.get_user_by_id(user_id)


if __name__ == '__main__':
    db.initialize_db()
    # NOTE(review): 0.0.0.0 exposes the dev server on all interfaces —
    # make sure DEBUG is False outside development.
    app.run(host='0.0.0.0', debug=DEBUG)
"""Entry point: initialise the database and launch the Flask dev server."""
from server import app
import database

if __name__ == "__main__":
    database.initialize_db(app.app)
    # NOTE(review): debug=True on 0.0.0.0 is development-only; the reloader
    # is disabled so initialize_db() is not executed twice.
    app.run(host="0.0.0.0", debug=True, use_reloader=False)
import requests
import json
from utils_func import simulate_tag
from PIL import Image
import yaml
import pytest
import database
import time
import io
import pandas as pd
from sqlalchemy import create_engine

# Reset the database: drop everything and recreate the schema.
database.drop_all()
database.initialize_db()

# Endpoints of the locally running tagging service under test.
getNewImageUrl = 'http://127.0.0.1:5000/get_new_image/{}'
postTagUrl = 'http://127.0.0.1:5000/post_tagged_crop'

# BaseLoader parses everything as plain strings (safe for untrusted YAML).
cfg = yaml.load(open('config.yaml', 'r'), Loader=yaml.BaseLoader)
dburl = cfg['dburl']


@pytest.mark.parametrize(
    'userid',
    [1,2,10]
)
def test_get_new_image(userid, show=False):
    '''
    Verify that get_new_image answers with a 100x100 crop
    carrying a given id in the filename.
    '''
    r = requests.get(url=getNewImageUrl.format(userid))
    # NOTE(review): the rest of this test continues beyond this chunk.
"""Application bootstrap: build the FastAPI app, initialise the database,
and wire up CORS middleware and the endpoint modules."""
from fastapi import FastAPI
from database import initialize_db
from middleware import setup_CORS

## Initialise the FastAPI application object
app = FastAPI()

## Initialise the database and tables as needed
initialize_db()

## Set up & configure the middleware to permit CORS
setup_CORS(app)

## Imported for side effects: these modules register their routes on import
from endpoints import job_endpoints, users_endpoints
# Fix: Bcrypt is instantiated below but was never imported, which made this
# module raise NameError on import.
from flask_bcrypt import Bcrypt
from flask_jwt_extended import JWTManager
from flask_mail import Mail
from celery import Celery
from celery.schedules import crontab
from database import initialize_db
from api.routes import initialize_routes
from iq_trello import flask_app, api
from config import DevelopmentConfig, ProductionConfig, LOGGING_CONF

# Pick the config object based on the FLASK_ENV-driven "ENV" setting.
if flask_app.config["ENV"] == "production":
    flask_app.config.from_object(ProductionConfig)
else:
    flask_app.config.from_object(DevelopmentConfig)

# Initialize extensions
initialize_db(flask_app)
initialize_routes(api)
bcrypt = Bcrypt(flask_app)
jwt = JWTManager(flask_app)
mail = Mail(flask_app)


def make_celery(app):
    """Build a Celery app named after, and configured from, the Flask app."""
    celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL'])
    celery.conf.update(app.config)
    return celery


celery = make_celery(flask_app)
# Modules Celery must import so their @task definitions register.
celery.conf['CELERY_IMPORTS'] = ("tasks.celery_worker", )
def main(argv):
    """
    CLI entry point: parse an eye-tracking input file, run the selected
    fixation-detection algorithm (I-DT, I-VT or ML), plot the result, and
    write statistics/output files.

    NOTE(review): the `argv` parameter is ignored — the body reads
    sys.argv directly, and indexes sys.argv[2..4] without length checks.
    Indentation below is reconstructed from a collapsed source line.
    """
    statistics = StatisticsClass()
    if sys.argv[1] == '-h':
        print('How to run file: main.py -i <inputfile> <algorithm>')
    elif sys.argv[1] == '-i':
        coordX = []
        coordY = []
        # Parse the input file; also captures import/convert timing.
        parsedFile, statistics.ImportAndConvertFileStatistic = createObjectsFromFile(sys.argv[2])
        measurementFixations = []
        # '-d' in argv[4] means: also load the parsed data into the database.
        if (sys.argv[4] == '-d'):
            statistics.ImportDataToDatabase = initialize_db(parsedFile)
        print('Converting file time: %s' % statistics.ImportAndConvertFileStatistic)
        # Pick the data source: '-f' = file, '-d' = database, default = file.
        parsedMeasurements = []
        if sys.argv[4] == '-f':
            parsedMeasurements = parsedFile
        elif sys.argv[4] == '-d':
            parsedMeasurements, statistics.ImportAndConvertDatabaseStatistic = getFromDatabase()
        else:
            parsedMeasurements = parsedFile
        if sys.argv[3] == 'I-DT':
            print('Starting measurement using I-DT algorithm')
            # Plot the raw eye-tracker sample points (type 'SS' only);
            # the legend label is attached only to the first point.
            for e, measurement in enumerate(parsedMeasurements):
                if measurement.Type == 'SS':
                    plt.plot(measurement.CoordX, measurement.CoordY, 'ko', markersize=10, label='Eye-tracker points' if e == 0 else "")
            coordX, coordY, statistics.AlgorithmRunTimeStatistic, statistics.NumberOfFixationsCount, fixations, statistics.SaccadeCount, newPointList, summaryList = idt.calculateIdtAlgorithm(parsedMeasurements)
            measurementFixations.append(fixations)
            plt.plot(coordX, coordY, 'wo', markersize=5, markeredgecolor='r', label='Calculated fixations')
            print('Ending measurement using I-DT algorithm')
            plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3, ncol=2, mode="expand", borderaxespad=0.)
        elif sys.argv[3] == 'I-VT':
            print('Starting measurement using I-VT algorithm')
            for e, measurement in enumerate(parsedMeasurements):
                if measurement.Type == 'SS':
                    plt.plot(measurement.CoordX, measurement.CoordY, 'ko', markersize=10, label='Eye-tracker points' if e == 0 else "")
                    plt.plot(measurement.CoordX, measurement.CoordY,'ko', color='green', markersize=6)
            coordX, coordY, statistics.AlgorithmRunTimeStatistic, statistics.NumberOfFixationsCount, fixations, statistics.SaccadeCount, newPointList, summaryList = ivt.calculateIvtAlgorithm(parsedMeasurements)
            # NOTE(review): I-VT appends newPointList here while I-DT appends
            # fixations — confirm this asymmetry is intentional.
            measurementFixations.append(newPointList)
            #plt.plot(coordX, coordY, 'wo', markersize=5, markeredgecolor='r', label='Calculated fixations')
            print('Ending measurement using I-VT algorithm')
            plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3, ncol=2, mode="expand", borderaxespad=0.)
        elif sys.argv[3] == 'ML':
            print('Starting measurement using Machine Learning algorithm')
            for i, item in enumerate(parsedMeasurements):
                if item.Type == 'SS':
                    plt.plot(item.CoordX, item.CoordY, 'ko', markersize=10, label='Eye-tracker points' if i == 0 else "")
            # ML pipeline: I-VT pre-processing feeds the ML classifier.
            fixations = ivt.prepareDataIvt(parsedMeasurements)
            points = ml.calculateMlHelper(fixations)
            coordX, coordY, fixationsForPoint, timealgorithm, ite, fixations, saccades, newPointList, summaryList = ml.calculateML(points)
            plt.plot(coordX, coordY, 'wo', markersize=5, markeredgecolor='r', label='Calculated fixations')
            statistics.NumberOfFixationsCount = fixationsForPoint
            statistics.AlgorithmRunTimeStatistic = timealgorithm
            statistics.MLPrecision = ite
            statistics.SaccadeCount = len(saccades)
            measurementFixations.append(newPointList)
            print('Ending measurement using Machine Learning algorithm')
            plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3, ncol=2, mode="expand", borderaxespad=0.)
        else:
            print('INCORRECT ALGORITHM')
        # Summary output: console stats, saved plot, and result files.
        print('Number of fixations: %s, Algorithm runtime: %s s, Saccades %s' % (statistics.NumberOfFixationsCount, statistics.AlgorithmRunTimeStatistic, statistics.SaccadeCount))
        fig1 = plt.gcf()
        plt.show()
        plt.draw()
        fig1.savefig('result/' + sys.argv[2] + sys.argv[3] + '.png', dpi=100)
        createExitFile(sys.argv[2], statistics, sys.argv[3])
        createExitFixationFile(sys.argv[2], measurementFixations, sys.argv[3])
        # NOTE(review): summaryList is only bound inside the algorithm
        # branches — the INCORRECT ALGORITHM path raises NameError here.
        createSummaryFile(sys.argv[2], summaryList, sys.argv[3])
    elif sys.argv[1] == '-a':
        print('Available algorithms: "I-DT", "I-VT", "ML"')
    else:
        print('How to run file: main.py -i <inputfile> <algorithm>')
def __init__(self, host='127.0.0.1', port=55333):
    """
    Plain (non-TLS) server endpoint backed by a local SQLite nonce store.

    :param host: interface to bind to
    :param port: TCP port to listen on
    """
    self.host = host
    self.port = port
    # NOTE(review): 'nonce.db' is created relative to the current working
    # directory — confirm that is intended.
    self.db = sqlite3.connect('nonce.db')
    initialize_db(self.db)
def initialize_db():
    """
    Initialize tables and default entries in the database if they don't
    already exist.

    Thin module-level wrapper that delegates to ``database.initialize_db``.

    :return: None
    """
    database.initialize_db()
# --- tail of init_driver(); its `def` line is outside this chunk.  It
# --- finishes configuring Chrome and returns the driver instance.
chrome_options.add_argument('--headless')  # run without a visible browser window
chrome_options.add_experimental_option("prefs", prefs)
return webdriver.Chrome(executable_path=chrome_driver_path, options=chrome_options)


# Script body: log in to the portal and scrape per-month generation data.
with init_driver() as driver:
    login_url = "http://www.injectsolar.com/portal/#/login"
    errorlog_url = 'http://www.injectsolar.com/portal/#/inject-solar/errore-log'

    #login into the portal
    print("logging in..")
    login(driver, login_url)
    print("successfully logged in..")

    cursor, db = initialize_db()

    #fetch energy generated data
    gen_periods = [{
        'month': 'January',
        'year': '2020'
    }, {
        'month': 'February',
        'year': '2020'
    }]
    for gen_period in gen_periods:
        # Download the month's CSV/report, then commit its rows to the DB.
        _file_name = fetch_generation_data(driver, gen_period['month'], gen_period['year'], download_path)
        commit_generation_logs(cursor, db, _file_name)

    #fetch error logs
    # NOTE(review): the error-log section continues beyond this chunk.
def update_auctions(auctions):
    """Re-fetch the current price of every auction and persist it."""
    for auction in auctions:
        # auction[0] = row id, auction[1] = auction link/identifier
        new_price = fetch_auction_price(auction[1])
        print(f'New price for {auction[1]} is {new_price}.')
        update_auction_price(connection, auction[0], new_price)


try:
    # Environment-driven config: DB_NAME names the SQLite file.
    load_dotenv(verbose=True)
    print(f'Baza: {os.getenv("DB_NAME")}')
    connection = sqlite3.connect(os.getenv('DB_NAME'))
    if len(argv) > 1:
        if argv[1] == 'startup':
            # python main.py startup
            initialize_db(connection)
        elif argv[1] == 'add':
            # python main.py add <auction link>
            add_auction(connection, argv[2])
        elif argv[1] == 'update':
            auctions = list_auctions(connection)
            update_auctions(auctions)
        elif argv[1] == 'update-daemon':
            # Poll loop: refresh prices and print the table every 60 s.
            while True:
                auctions = list_auctions(connection)
                update_auctions(auctions)
                print(display_auction_table(list_auctions(connection)))
                sleep(60)
        else:
            # NOTE(review): this else body — and the try's except/finally —
            # continue beyond this chunk.
def before_request():
    """Request hook: ensure the database is initialised before handling."""
    initialize_db()
"""Flask application bootstrap: app object, extensions (mail, bcrypt, JWT,
REST API), MongoDB settings, and route registration."""
import os
from flask import Flask
from flask_bcrypt import Bcrypt
from flask_jwt_extended import JWTManager
from database import initialize_db
from flask_restful import Api
from resources.errors import errors
from flask_mail import Mail

app = Flask(__name__)
# JWT signing key comes from the environment (None if unset).
# NOTE(review): consider failing fast when JWT_SECRET_KEY is missing.
app.config["JWT_SECRET_KEY"] = os.environ.get("JWT_SECRET_KEY")
mail = Mail(app)

# import going here to avoid circular import error
from resources.routes import initialize_routes  # nopep8

api = Api(app, errors=errors)
bcrypt = Bcrypt(app)
jwt = JWTManager(app)
# MongoDB connection string from the DB environment variable.
app.config["MONGODB_SETTINGS"] = {'host': os.environ.get('DB')}
initialize_db(app)
initialize_routes(api)
# --- tail of a task-4 function; its `def` line is outside this chunk.  It
# --- assembles the last result column and returns both columns.
column_l = {'primary': column_l_prim, 'remainder': aux_2_k['remainder']}
print("Task 4 completed.")
return column_k, column_l


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    # NOTE(review): help text has a typo ("firs" -> "first"); left unchanged
    # here because it is a runtime string.
    parser.add_argument('-n', type=int, default=None, help='just firs n elements')
    parser.add_argument('-db', type=str, default='database/database.db', help='where to store the database')
    args = parser.parse_args()
    n = args.n
    db = args.db
    # Create the schema (if needed) and open a session on it.
    initialize_db(db_path=db)
    session = open_session(db_path=db)
    df = pd.read_excel('DATASET.xlsx', sheet_name='Task 3 - Input')
    dataset = df[['Sr', 'Title']]
    column_c = defaultdict(dict)
    d = dict()
    # Resume support: skip words already handled in a previous run.
    if os.path.exists('processed_words.txt'):
        with open('processed_words.txt') as fp:
            processed = set([x.strip() for x in fp])
    else:
        processed = set()