def setup_module(mod):
    utils.setup_db(mod)
def calculate_urk_taxes(row):
    year = row['year'].year  # TODO: check this
    if row['netto']:
        value = row['price_real']
    customs_clearing_value = get_cc_value(value, capacity, engine_type, year, cur_ukr)
    return customs_clearing_value


def calculate_total(row):
    return row['price_real'] + row['customs_clearing']


session = setup_db()
DATA = pd.read_sql(session.query(CarArticle).statement, session.bind)

# data pre-processing
DATA['value'] = DATA['value'].astype('float')
DATA['price_eur'] = DATA.apply(convert_currency, axis=1)
DATA['price_real'] = DATA.apply(apply_pln_taxes, axis=1)
DATA['customs_clearing'] = DATA.apply(calculate_urk_taxes, axis=1)
DATA['price_total'] = DATA.apply(calculate_total, axis=1)  # TODO: value + tax calculation

DB_META = session.query(MetaInfo).get(1)


def color_row(workbook, format_class, color=None):
    format = workbook.add_format()
from flask import Flask
from flask_cors import CORS
from flask_restful import Api
import os

from db import db
from ressources.UsersRessource import UsersRessource
from utils import setup_db

app = Flask(__name__)
CORS(app)

basedir = os.path.abspath(os.path.dirname(__file__))
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + \
    os.path.join(basedir, 'app.sqlite')

api = Api(app)
db.init_app(app)
db.app = app
setup_db()

api.add_resource(UsersRessource, '/users')

if __name__ == '__main__':
    app.run(debug=True)
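# Hedged sketch, not the project's actual ressources/UsersRessource.py: it only
# illustrates the flask_restful shape that api.add_resource(UsersRessource, '/users')
# above expects. The class name and the empty payload are placeholders; a real
# implementation would query the db models instead.
from flask_restful import Resource


class UsersRessourceSketch(Resource):
    def get(self):
        # GET /users -> (body, status); flask_restful serialises the dict to JSON
        return {'users': []}, 200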
def setup_app():
    cors = aiohttp_cors.setup(app.app, defaults={
        "*": aiohttp_cors.ResourceOptions(
            allow_credentials=True,
            expose_headers="*",
            allow_headers="*",
            allow_methods=["GET", "POST", "PUT"])
    })
    for route in list(app.app.router.routes()):
        cors.add(route)
    return app


if __name__ == '__main__':
    logging.info('Beginning app startup...')
    logging.info(f'Using API_PORT: {API_PORT}')

    logging.info('Setting up RESTapi...')
    app = setup_app()

    logging.info('Initializing Database...')
    db = loop.run_until_complete(setup_db())['db']

    logging.info('Starting NewsAPI Webscraping Task...')
    loop.create_task(setup_recurring_newsapi_scrape(db))

    logging.info('Starting Google News Webscraping Task...')
    loop.create_task(setup_recurring_gnews_scrape(db))

    app.run(port=API_PORT)
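# Hedged sketch (assumed shape, not the project's code) of a recurring scrape
# coroutine such as setup_recurring_newsapi_scrape above: loop.create_task()
# schedules it once, and it then re-runs on a fixed interval alongside the API.
import asyncio


async def recurring_scrape_sketch(db, interval_seconds=3600):
    while True:
        # a real task would fetch fresh articles here and persist them via `db`
        await asyncio.sleep(interval_seconds)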
def setup_module(mod):
    utils.setup_db(mod)
    mod.store = Store(db)
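# Hypothetical test, not part of the original module: pytest runs setup_module(mod)
# once before any test in this file, so the `store` it attaches to the module is
# visible to tests as an ordinary module-level name.
def test_setup_module_provides_store():
    assert store is not None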
                'pos': str(sentiment[0]['probability']['pos']),
                'neutral': str(sentiment[0]['probability']['neutral']),
                'neg': str(sentiment[0]['probability']['neg'])
            })
            csvfile.flush()

    async def __call__(self):
        logging.info('Acquiring articles...')
        async with self.db.bind.acquire() as conn:
            articles = await conn.all(self.table.query.distinct('url'))
        article_dump = [article.dump() for article in articles]
        self.corpus = [
            self.__post(article['description'])
            for article in article_dump
        ]
        self.__export()


if __name__ == '__main__':
    logging.info('Setup Database Connection...')
    loop.run_until_complete(setup_db())

    logging.info('Initialize Command...')
    parser = CorpusParser(db, NewsArchive)
    loop.run_until_complete(parser())
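# Hedged sketch (assumed, not the original code) of the writer setup implied by the
# csvfile.flush() above: a csv.DictWriter keyed on the sentiment fields visible in
# this fragment; the real export very likely writes additional columns.
import csv


def open_sentiment_writer(path='corpus.csv'):
    csvfile = open(path, 'w', newline='')
    writer = csv.DictWriter(csvfile, fieldnames=['pos', 'neutral', 'neg'])
    writer.writeheader()
    return csvfile, writer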
def setup_module(mod):
    utils.setup_db(mod)
    mod.seq = SequenceImpl(db)