def __init__(self, *args):
    """Initialize the task DB, loading task and result data configurations."""
    super(TaskDb, self).__init__(*args)
    # Two separate AppConfig() instantiations — presumably cheap or a
    # singleton; TODO confirm construction cost.
    self._taskdb_config = AppConfig().get_task_data_config()
    self._resultdb_config = AppConfig().get_result_data_config()
def CreateOptionChainTable(self, tableName):
    """Create the option-chain table from the configured SQL script.

    Reads the CREATE TABLE template, substitutes the configured table
    name for the TABLENAME placeholder, and executes the DDL.

    :param tableName: currently unused; the actual name comes from
        AppConfig().TableName() — kept for interface compatibility.
    """
    # BUG FIX: the original opened the script file and never closed it;
    # 'with' guarantees release even if read() raises.
    with open(AppConfig().ScriptCreateOptionChainTable(), "r") as fl:
        tblcontent = fl.read()
    tblname = AppConfig().TableName()
    tblcontent = tblcontent.replace("TABLENAME", tblname)
    conn = self.GetConnection()
    try:
        conn.execute(tblcontent)
    finally:
        # Close the connection even when the DDL statement fails.
        conn.close()
def __init__(self, *args):
    """Initialize the downloader: read paths from config and watch the
    save directory for changes."""
    super(ThunderDownloader, self).__init__(*args)
    # Fetch the download config once instead of constructing AppConfig
    # (and re-reading the config) twice.
    download_config = AppConfig().get_download_config()
    self._thunder_path = download_config.get("thunder_path", "")
    self._save_path = download_config.get("save_path", "")
    self._thunder_process = None
    self._thunder_task_process = None
    self._download_urls = []
    # Watch the save directory so finished downloads can be detected.
    self._file_watcher = QFileSystemWatcher(self)
    self._file_watcher.addPath(self._save_path)
    self._file_watcher.directoryChanged.connect(self._on_file_dir_changed)
async def main():
    """Poll every configured site's servers forever, one pass per second."""
    config = AppConfig("config.yml")
    sites = config.get_all_sites()
    site_configs = [config.get_site_config(site) for site in sites]
    notify = AsyncNotify(config.get_notify())
    # SiteRecord must be created once, before the loop; otherwise its
    # counters would be reset on every iteration.
    record = {
        site: SiteRecord(config.get_site_config(site), notify)
        for site in sites
    }
    while True:
        tasks = list()
        for site_config in site_configs:
            servers = site_config.servers
            log.info("Site: {} check servers {}".format(
                site_config.site, servers))
            if servers:
                t = AsyncCheck(site_config)
                # Register the record object as an observer of this check.
                await t.add_site_record(record[site_config.site])
                tasks.append(t.check_servers(servers, site_config.path))
            else:
                log.warning("{} not found servers".format(site_config.site))
        # BUG FIX: asyncio.wait raises ValueError on an empty task set.
        if tasks:
            await asyncio.wait(tasks)
        # BUG FIX: time.sleep() blocked the whole event loop; use the
        # non-blocking asyncio.sleep instead.
        await asyncio.sleep(1)
def make_alembic_config(cmd_opts: Namespace, base_path: str = BASE_PATH) -> Config:
    """Build a custom alembic Config from parsed command-line options.

    Config-file and script paths are made absolute relative to
    *base_path*, and the database URL is taken from the application
    configuration.

    :param cmd_opts: Default config
    :type cmd_opts: Namespace
    :param base_path: Path of config file
    :type base_path: str
    :return: Alembic configuration
    :rtype: Config
    """
    # Resolve the config-file path against base_path when it is relative.
    if not os.path.isabs(cmd_opts.config):
        cmd_opts.config = os.path.join(base_path, cmd_opts.config)

    config = Config(file_=cmd_opts.config, cmd_opts=cmd_opts)

    # Likewise make the migration-script location absolute.
    script_location = config.get_main_option("script_location")
    if not os.path.isabs(script_location):
        script_location = os.path.join(base_path, script_location)
        config.set_main_option("script_location", script_location)

    # Point alembic at the application's database.
    config.set_main_option("sqlalchemy.url", AppConfig().db_url)
    return config
def load_app_config():
    """Lazily create, refresh, and memoize the module-level AppConfig."""
    global app_config
    # Fast path: already loaded.
    if app_config is not None:
        return app_config
    # Import here to avoid a module-level import cycle / startup cost.
    from config import AppConfig
    app_config = AppConfig()
    app_config.refresh_configs()
    return app_config
def load_config():
    """Loads in the Pins4Days config.

    Returns:
        dict: The Pins4Days config contents.
    """
    # Both paths must be present in the environment; a missing key
    # raises KeyError deliberately.
    remote_path = os.environ[REMOTE_APP_CONFIG_PATH_KEY]
    local_path = os.environ[LOCAL_APP_CONFIG_PATH_KEY]
    config = AppConfig(remote_path, local_path)
    config.load_config()
    return config.contents
def __init__(self):
    """Wire the config to the GUI, populate the directory list, and
    hand control to the view's event loop."""
    self.config = AppConfig()
    self.view = PypubGUI('PyPub', self.config.colors)
    try:
        self.config.loadDirs()
    except ConfigError as err:
        # Surface the configuration problem in the UI instead of crashing.
        self.view.onError(err)
    else:
        # Only a successful load populates the directory list.
        for name, path in self.config.dirs.items():
            self.view.addDir(name, path)
    self.view.startGUI(self.onAction)
def SaveOptionChainData(self):
    """Persist prepared option-chain rows using the configured insert script."""
    data = self.PrepareData()
    # Read the insert script first so a read failure cannot leak a
    # database connection; 'with' guarantees the file handle is closed.
    with open(AppConfig().ScriptInsertOptionChain(), "r") as fl:
        tbl = fl.read()
    conn = self.GetConnection()
    try:
        if data:  # nothing to write for an empty result set
            print("Writing to database")
            conn.executemany(tbl, data)
            conn.commit()
    finally:
        # Always release the connection, even when there is no data or
        # executemany fails (the original could leak it).
        conn.close()
def app(request):
    """Test fixture: the nonogram Flask app with a pushed request context."""
    app = nonogram_app
    app.config.from_object(AppConfig(testing=True))
    app.config["TESTING"] = True
    # Push a request context so tests can use request-bound globals;
    # pop it again when the test finishes.
    ctx = app.test_request_context()
    ctx.push()
    request.addfinalizer(ctx.pop)
    return app
def run():
    """Scrape each configured RTC page, store fix/out entries, then
    compute statistics and emit a text report."""
    log.initial()
    if not check_arg():
        sys.exit(ERROR_ARGS)
    logging.info('check done.')
    # readConfig() populates AppConfig class attributes (RTCs, BASE_PATH,
    # MEMBERS) used below; bail out if the config file is missing.
    if not AppConfig(CONFIG_FILE).readConfig():
        sys.exit(ERROR_CONFIG_FILE_NOT_FOUND)
    #db.initial()
    db2 = db.DbHelper.getInstance()
    logging.info('-' * 150)
    for cfg in AppConfig.RTCs:
        # Load one RTC page per config entry.
        htmlloader = RtcSpider(os.path.join(AppConfig.BASE_PATH, cfg[config.KEY_URL]), AppConfig.MEMBERS)
        htmlloader.load()
        logging.info('url:{0}'.format(cfg[config.KEY_URL]))
        # Record every "fix" entry found for each configured element id.
        for id in cfg[config.KEY_FIX]:
            logging.info('fix id:{0}'.format(id))
            for fix in htmlloader.extract_fix(id):
                logging.info(fix)
                db2.put_fix(cfg[config.KEY_URL], fix)
        logging.info('=' * 150)
        #out
        # Likewise for the "out" entries.
        for id in cfg[config.KEY_OUT]:
            logging.info('out id:{0}'.format(id))
            for out in htmlloader.extract_out(id):
                logging.info(out)
                db2.put_out(cfg[config.KEY_URL], out)
    # Aggregate, report, then persist everything.
    db2.calc_all()
    db2.calc_new()
    db2.calc_last()
    rpt = report.ReportText()
    rpt.output_report()
    db2.save_all()
def main():
    """Copy new, valid-resolution Windows Spotlight images to the target
    location, sorted by image type."""
    app_config = AppConfig()
    app_config.setup_dirs(app_config.template_dir, app_config.data_dir,
                          app_config.log_dir)

    source_loc = os.path.expanduser(app_config.cfg.get("ws_location"))
    print("Windows Spotlight Source Location:", source_loc)
    target_loc = os.path.expanduser(app_config.cfg.get("target_location"))
    print("Target Location:", target_loc)

    source_files = helper.get_files_list(source_loc)
    print("Total {} files found in source location.".format(len(source_files)))

    # Acceptable (width, height) pairs, one per configured device profile.
    device_properties = app_config.cfg.get("device_properties")
    valid_dims = [(props.get("image_width"), props.get("image_height"))
                  for props in device_properties.values()]
    print("List of valid dimensions:", valid_dims)

    # Pairs of (file path, image type) matching a valid dimension.
    valid_source_set = helper.get_valid_images(source_files, valid_dims)
    print("Valid source files:", len(valid_source_set))

    valid_source_files = [img_file for img_file, _ in valid_source_set]
    target_files = [
        os.path.join(
            target_loc, img_type,
            helper.add_file_extension(os.path.basename(img_file), ext='.jpg'))
        for img_file, img_type in valid_source_set
    ]

    # Keep only files not already present at the target.
    valid_file_set = helper.get_valid_target_files(
        valid_source_files, target_files)
    print("Files to extract:", len(valid_file_set))

    if valid_file_set:
        helper.transfer_files(valid_file_set, mode='copy')
    else:
        print("No new files to transfer!")
def main():
    """Main function: stream keyboard pixel colours to the SDK server
    until interrupted with Ctrl-C."""
    signal.signal(signal.SIGINT, quit_program)
    logging.info('Initialising connection to SDK server')
    config = AppConfig()
    with RazerApp(config.to_dict()) as app:
        logging.info('Connected to the the SDK server')
        logging.info('Beginning screen capture (Press Ctrl-C to quit)')
        while True:
            try:
                pixels = ImageProcessor.get_keyboard_pixels()
            except OSError as ose:
                # FIX: every other call in this function uses the
                # `logging` module; `logger` was not visibly defined and
                # would raise NameError on the first failed screenshot.
                logging.error(f'Error grabbing screenshot: {ose.strerror}')
                continue
            app.set_colour(pixels)
def create_app():
    """Flask application factory: configures the app, pushes an app
    context, and registers error/DB-session lifecycle hooks."""
    app = Flask(__name__)
    config = AppConfig()
    app.config.from_object(config)
    app.logger.setLevel(logging.INFO)
    app.app_context().push()

    @app.errorhandler(Exception)
    def exception_handler(e):
        # Any unhandled exception is logged and returned as JSON with a
        # generic application error code.
        traceback.print_exc()
        current_app.logger.error(str(e))
        # error_code.
        g.code = 1003
        http_code = 500
        error_resp = {'error_code': g.code, 'msg': str(e)}
        return jsonify(error_resp), http_code

    @app.before_request
    def before_request():
        # init db session.
        # g.code == 0 marks success; the error handler overwrites it.
        g.db = get_db_session()
        g.code = 0

    @app.after_request
    def after_request(response):
        # Prefer the application-level g.code over the HTTP status when set.
        code = response.status_code
        if hasattr(g, 'code'):
            code = g.code
        # close db session.
        # NOTE(review): assumes before_request ran and set g.db — if it
        # did not, this raises AttributeError; confirm request lifecycle.
        if g.db is not None:
            if code != 0 and code != 200:
                # rollback if failed.
                g.db.rollback()
            else:
                g.db.commit()
            g.db.close()
        current_app.logger.info("[Request Log]" + request.path + ' [data] ' + str(request.data))
        return response

    return app
def __init__(self):
    """Load the app config, build the ApiService used to fetch food
    truck data, and prepare the console output table."""
    self.display_message(
        "===============================| Foodiezz |===============================\n"
    )
    self.display_message("Hello! Welcome to Foodiezz!")

    # Pull everything we need out of the application configuration.
    config = AppConfig()
    self.socrata_dataset_id = config.socrata_dataset_id
    self.page_limit = config.page_limit
    self.api_service = ApiService(config.socrata_domain, config.app_token)

    self.foodtruck_dataset = {}
    self.total_foodtrucks_open = 0

    # PrettyTable renders the final output in an organized, structured
    # way on the console; 'Name' and 'Address' form the header row.
    self.foodtruck_table = PrettyTable(field_names=['Name', 'Address'])
    self.foodtruck_table.min_width = 60
def __init__(self, options):
    """Build the main window: central proxy view, status bar, File menu,
    and a toolbar with clear/start-stop/settings actions plus a filter box."""
    QtGui.QMainWindow.__init__(self)
    self.appConfig = AppConfig()
    self.proxy_config = self.appConfig.getProxyConfig()
    self.ui = MainGui(self.get_server(), options)
    self.setCentralWidget(self.ui)
    self.status_bar = self.statusBar()
    self.status_bar.showMessage('Server started on port ' + str(self.proxy_config.port))
    menubar = self.menuBar()
    file_menu = menubar.addMenu('&File')
    file_menu.addAction(QtGui.QAction("Settings", self))
    toolbar = self.addToolBar('toolbar')
    # Icon reflects current server state; clicking toggles start/stop.
    self.server_control_action = QtGui.QAction(
        self.get_server_status_icon(), "Server start/stop", self)
    self.server_control_action.triggered.connect(self.toggle_server)
    clear_action = QtGui.QAction(QtGui.QIcon('assets/clear-icon.png'), "Clear", self)
    clear_action.triggered.connect(self.ui.clear_view)
    settings_action = QtGui.QAction(QtGui.QIcon('assets/gear-icon.png'), "Settings", self)
    settings_action.triggered.connect(self.show_settings)
    # Live filter: each keystroke re-filters the request view.
    search_field = QtGui.QLineEdit()
    search_field.textChanged.connect(self.ui.search_changed)
    toolbar.addAction(clear_action)
    toolbar.addAction(self.server_control_action)
    toolbar.addAction(settings_action)
    toolbar.addSeparator()
    toolbar.addWidget(QtGui.QLabel("Filter:"))
    toolbar.addWidget(search_field)
    toolbar.addSeparator()
    toolbar.setMovable(False)
    self.setStyleSheet(appStyle)
    self.show()
def main():
    """Compare image pairs listed in a CSV via SSIM and append one
    report row per pair (score and elapsed milliseconds)."""
    cfg = AppConfig()
    LOGGER.info('Input csv path:{}'.format(cfg.input))
    LOGGER.info('Output csv path:{}'.format(cfg.output))
    LOGGER.info('Image whitelist:{}'.format(cfg.image_whitelist))

    csv_input = CsvConfig(cfg)
    csv_input.validate()
    # Write the header row, then append one row per comparison.
    csv_input.report('w', 'image1', 'image2', 'similarity', 'elapsed')

    for row in csv_input.contents:
        start_ms = int(round(time.time() * 1000))
        left = Image(row[0])
        right = Image(row[1])
        (score, diff) = compare_ssim(left.grayscale, right.grayscale, full=True)
        similarity = round(score.item(), 3)
        # NOTE(review): pairs scoring above 0.99 are reported as 0 —
        # presumably "identical, nothing to report"; confirm intent.
        if similarity > 0.99:
            similarity = 0
        elapsed_ms = int(round(time.time() * 1000)) - start_ms
        csv_input.report('a', left.path, right.path, similarity, elapsed_ms)
def runMigrations():
    """Run database migrations for the Flask app via flask_script's
    Manager, wiring flask_migrate's upgrade as the 'db' command."""
    from alembic.config import Config
    from alembic import command
    # NOTE(review): alembic_cfg (and the `command` import) are never
    # used below — Config() only parses the ini file here; confirm
    # whether a direct alembic invocation was intended.
    alembic_cfg = Config("migrations/alembic.ini")
    from config import Config as AppConfig
    appConfig = AppConfig()
    dbURI = appConfig.SQLALCHEMY_DATABASE_URI
    print(dbURI)
    from app import create_app, db
    app = create_app()
    from flask_sqlalchemy import SQLAlchemy
    from flask_migrate import Migrate, upgrade
    from flask_script import Manager
    migrate = Migrate(app, db)
    manager = Manager(app)
    # Registers `upgrade` under the 'db' command; manager.run() parses
    # sys.argv to dispatch it.
    manager.add_command('db', upgrade)
    manager.run()
def __init__(self, config_path=None):
    """Build the barcode-scanner main window: database, session, serial
    (COM) reader thread, sound, UI widgets, and signal wiring.

    :param config_path: accepted for interface compatibility; not used
        directly here — AppConfig(parent=self) is constructed below.
    """
    super().__init__()
    self.db = BarcodeDatabase()
    self.session = Session()
    # The COM-port reader runs on its own QThread; newCodeRead signals
    # cross back to this (GUI) thread.
    self.comManager = ComPortManager()
    self.comThread = QThread()
    self.comManager.newCodeRead.connect(self.onNewCode)
    self.comManager.moveToThread(self.comThread)
    self.comThread.started.connect(self.comManager.ComReader)
    self.comThread.start()
    self.setupSound()
    self.setupUi(self)
    # Temp button actions assignment
    self.exitButton.clicked.connect(self.close)
    self.clearButton.clicked.connect(self.clearSessionData)
    self.openDbButton.clicked.connect(self.loadNewDatabase)
    self.saveButton.clicked.connect(self.onSave)
    # Menu actions.
    self.menuCOM.aboutToShow.connect(self.loadComPortMenu)
    self.loadDb.triggered.connect(self.loadNewDatabase)
    self.openDb.triggered.connect(self.onEditDbFile)
    self.reloadDb.triggered.connect(self.onReloadDb)
    self.save.triggered.connect(self.onSave)
    self.clear.triggered.connect(self.clearSessionData)
    self.BarcodeHistory.itemClicked.connect(self.onItemClicked)
    self.BarcodeHistory.currentItemChanged.connect(self.onItemClicked)
    self.app_config = AppConfig(parent=self)
    # Restored session items are replayed through onNewCode; live scans
    # also feed the session.
    self.session.sessionItemRestore.connect(self.onNewCode)
    self.comManager.newCodeRead.connect(self.session.new_item)
    self.session.init_session()
import os

from flask import Flask, render_template
from flask_cachebuster import CacheBuster

import giphy
from config import AppConfig

# Bust browser caches for static assets by hashing their contents.
_CACHE_BUSTER_CONFIG = {
    'extensions': ['.js', '.css', '.png'],
    'hash_size': 5,
}

app = Flask(__name__)
CacheBuster(config=_CACHE_BUSTER_CONFIG).init_app(app)
config = AppConfig(os.environ)


@app.route('/')
def index():
    """Render the landing page with a random GIF from Giphy."""
    url = giphy.get_random(config.GIPHY_API_KEY, config.GIPHY_TAG,
                           config.GIPHY_RATING)
    return render_template('index.html', giphy_url=url)


if __name__ == '__main__':
    app.run(host=config.HOST, port=config.PORT)
def GetConnection(self):
    """Open and return a sqlite3 connection to the configured database."""
    return sqlite3.connect(AppConfig().ConnectionString())
def setUp(self):
    """Load the 'stemmer' app configuration and build its import factory."""
    self.config = AppConfig('stemmer')
    # shared=False presumably gives each test an unshared instance
    # graph — TODO confirm against ImportConfigFactory docs.
    self.fac = ImportConfigFactory(self.config, shared=False)
def __init__(self):
    """Load the application and database configuration objects."""
    self.app_config = AppConfig()
    self.db_config = DBConfig()
import time import asyncio from typing import List, Tuple, Dict, Set import aiohttp from action import ActionFactory from utils import SimpleLog, HostRecord from config import AppConfig, SiteConfig log = SimpleLog(__name__).log conf = AppConfig("config.yml") def get_time() -> str: return time.strftime("%Y-%m-%d %H:%M:%S") class AsyncCheck(object): @staticmethod async def _get_status(hostname: str, host: str, path: str, timeout: int) -> Tuple[int, str]: url = "http://{}{}".format(host, path) headers = dict(Host=hostname) try: async with aiohttp.ClientSession( headers=headers, timeout=aiohttp.ClientTimeout(total=timeout)) as session: async with session.get(url) as resp: return resp.status, host except Exception as e:
@app.after_request def after_request(response): code = response.status_code if hasattr(g, 'code'): code = g.code # close db session. if g.db is not None: if code != 0 and code != 200: # rollback if failed. g.db.rollback() else: g.db.commit() g.db.close() current_app.logger.info("[Request Log]" + request.path + ' [data] ' + str(request.data)) return response return app app = create_app() from feeds_route import * if __name__ == '__main__': config = AppConfig() app.run(host='0.0.0.0', port=config.HTTP_PORT)
from flask import Flask, request, Response from jsonschema import validate, exceptions from config import AppConfig app = Flask(__name__) def _load_schema(): with open('resources/schema.json') as s: return json.load(s) SCHEMA = _load_schema() app_config = AppConfig() @app.route('/admit', methods=['POST']) def admit_prod(): data = request.get_json() try: validate(instance=data, schema=SCHEMA) url = app_config.config['model']['serving']['url'] version = app_config.config['model']['serving']['version'] url = url + '/predict' if version == 'latest' else url + '/' + version + '/predict' response = requests.post(url=url, json=data) return response.json() except exceptions.ValidationError:
from botbuilder.core import ( TurnContext, BotFrameworkAdapterSettings, BotFrameworkAdapter, MemoryStorage, UserState, ConversationState ) from botbuilder.schema import Activity, ActivityTypes from config import AppConfig from translation import TranslatorMiddleware, TranslatorM from bots import TeamsQABot CONFIG = AppConfig() SETTINGS = BotFrameworkAdapterSettings(CONFIG.APP_ID, CONFIG.APP_PASSWORD) ADAPTER = BotFrameworkAdapter(SETTINGS) # Catch-all for errors. async def on_error(context: TurnContext, error: Exception): # This check writes out errors to console log .vs. app insights. # NOTE: In production environment, you should consider logging this to Azure # application insights. print(f"\n [on_turn_error] unhandled error: {error}", file=sys.stderr) # Send a message to the user await context.send_activity("The bot encountered an error or bug.") await context.send_activity("To continue to run this bot, please fix the bot source code.") # Send a trace activity if we're talking to the Bot Framework Emulator if context.activity.channel_id == 'emulator':
def setUp(self) -> None:
    """Pin the crawl limits via environment variables, then build the
    AppConfig under test (which reads them)."""
    limits = {
        "MAX_CONSIDERED_SUBREDDITS": '3',
        "MAX_SUBMISSIONS_PER_SUBREDDIT": '2',
        "MAX_COMMENTS_PER_SUBMISSION": '2',
    }
    os.environ.update(limits)
    self.app_config = AppConfig()
from flask import Flask
from flask_bcrypt import Bcrypt
from flask_jwt import JWT
from flask_restful import Api
from config import AppConfig
from db import DB
from security import authenticate, identity
from routes import Routes

app = Flask(__name__)
bcrypt = Bcrypt(app)
api = Api(app)
config = AppConfig(app)

# migrations
@app.before_first_request
def create_table():
    """Create all DB tables once, before the first request is served."""
    try:
        DB.create_all()
    except ValueError:
        # NOTE(review): create_all() is unlikely to raise ValueError,
        # and this prints the exception *class*, not the caught
        # instance — confirm the intended exception type and message.
        print(ValueError)

# /auth
# JWT wires the /auth endpoint to the project's authenticate/identity handlers.
jwt = JWT(app=app, authentication_handler=authenticate, identity_handler=identity)
import sys from prometheus_client import Gauge from prometheus_client import start_http_server from config import AppConfig from jira_test import test_jira_load_ticket_seconds, test_wiki_login_seconds if __name__ == '__main__': config_file = 'atlassian-exporter.ini' if len(sys.argv) > 1: config_file = sys.argv[1] app_config = AppConfig(filename=config_file) # Start up the server to expose the metrics. print('Starting http server on port: ' + str(app_config.HTTP_PORT)) start_http_server(app_config.HTTP_PORT) # Generate some requests. g1 = Gauge( app_config.EXPORTER_METRIC_JIRA_PAGE1, 'Num of seconds to load JIRA login page with Selenium webdriver') g2 = Gauge( app_config.EXPORTER_METRIC_JIRA_PAGE2, 'Num of seconds to login to JIRA and load Issue page with Selenium webdriver' ) g3 = Gauge( app_config.EXPORTER_METRIC_JIRA_TOTAL,