def handle(self):
    """Run the service loop.

    Reads DB connection settings from the [DB] section of config.ini,
    starts the DB-handler thread plus the request/receive listener
    threads, and blocks until both listeners finish; the DB handler is
    then signalled to stop via its is_run flag.
    """
    self.is_run = True

    req_queue = Queue()
    rcv_queue = Queue()

    # Connection settings come from the local ini file.
    config = configparser.ConfigParser()
    config.read("config.ini")
    db_host = config.get("DB", "HOST")
    db_port = config.getint("DB", "PORT")  # getint() instead of int(config.get(...))
    db_user = config.get("DB", "USER")
    db_pw = config.get("DB", "PASSWORD")
    db_name = config.get("DB", "DB_NAME")
    db_charset = config.get("DB", "CHAR_SET")

    # DB handler consumes requests from req_queue and pushes results to rcv_queue.
    db_handler_thd = DbHandler(req_queue, rcv_queue, db_host, db_port,
                               db_user, db_pw, db_name, db_charset)
    db_handler_thd.start()

    req_thd = threading.Thread(target=self._request_listener, args=(req_queue,))
    req_thd.start()
    rcv_thd = threading.Thread(target=self._receive_listener, args=(rcv_queue,))
    rcv_thd.start()

    # Block until both listeners exit, then tell the DB handler to stop.
    req_thd.join()
    rcv_thd.join()
    db_handler_thd.is_run = False
def __init__(self):
    """Initialise per-session state and open a database handler."""
    self.current_user = None      # no user logged in yet
    self.current_stock = None     # stock the user is currently watching
    self.stock_dict = {}          # maps stock_id -> stock_ticker
    self.current_df = None        # dataframe behind the current stock view; swapped on stock change
    self.stock_change = True      # set when the user switches stock
    self.time_change = False      # set when the user switches timeframe
    self.timeperiod = '1M'        # timeframe driving the graph
    self.dbh = DbHandler()        # database access layer
def setUp(self):
    """Prepare a DB connection, a Flask test client, and a known user id."""
    # Connection to the test database.
    self.db = DbHandler()
    # Test client with exception propagation so failures surface in tests.
    self.app = app.test_client()
    self.app.testing = True
    # Default pre-configured user.
    self.user_id = "aaa10022-38b0-4a1a-95af-776f35aa2b8f"
def __init__(self):
    """Create the HTTP session, pull the initial headers from instagram,
    and attach a database handler.

    :return: None
    """
    print('starting session and getting headers ... ', end='')
    self.session = requests.Session()
    self.get_initial_headers()  # must run after the session exists
    print(colored('DONE', 'green'))
    self.db_handler = DbHandler()
async def test_containing_db(loop):
    """Populate the database with four years of seasonal anime data.

    Pulls every season from 2015-2018 off the Jikan API and inserts one
    record per title plus tags for the title slug, the year and the season.
    (Removed the unused local `year_2000`.)
    """

    async def write_data_to_db(record, recid, tags, db=None):
        # Insert the record, then attach each tag; duplicate tags and
        # duplicate record/tag links are expected and silently ignored.
        await db.insert_to_table('records', record)
        for tag in tags:
            try:
                await db.insert_to_table('tags', {'tagname': tag},
                                         ignore_conflict=['tagname', ])
            except UniqueViolationError:
                pass
            try:
                await db.insert_to_table('records_tags', {'tagname': tag,
                                                          'recordid': recid})
            except UniqueViolationError:
                pass

    aio_jikan = AioJikan(loop=loop)
    cfg = Config()
    pg_pool = await asyncpg.create_pool(cfg.DB_ADDRESS)
    db = DbHandler(pg_pool=pg_pool, cfg=cfg)
    seasons = ['winter', 'spring', 'summer', 'fall']
    for year in range(2015, 2019):
        for season in seasons:
            print(f'[+] reading {season} in {year}')
            season_year = await aio_jikan.season(year=year, season=season)
            for item in season_year['anime']:
                title = item['title']
                # Slugify the title: join its word-character runs with '_'.
                title_tag = ('_'.join(re.findall(r'\W?(\w+)\W?', title))).lower()
                recid = recid_gen()
                record = {
                    'recordid': recid,
                    'username': '******',
                    'record_headline': title,
                    'record_text': f'{title} ({season} {year})'
                }
                tags = [title_tag, str(year), season]
                await write_data_to_db(record, recid, tags, db=db)
    await aio_jikan.close()
    await pg_pool.close()
def main():
    """Wire up SSH clients and path watchers from the config, then install
    signal handlers for a clean shutdown."""
    conf = read_config()
    path_watchers = []
    global db
    db = DbHandler()
    for entry in conf['sync']:
        # One SSH client per sync entry; its watcher mirrors the local dir.
        client = SshClient(entry['server'], entry['dirs']['to'],
                           entry['dirs']['from'], entry['key_file'])
        ssh_clients.append(client)
        path_watchers.append(PathWatcher(entry['dirs']['from'], client, db))
    signal.signal(signal.SIGINT, clean_up)
    signal.signal(signal.SIGTERM, clean_up)
def __init__(self):
    """Load config, parse CLI arguments, set up logging, authenticate with
    reddit, and initialise download bookkeeping state."""
    self.db = DbHandler()
    self.config = configurator.get_config()
    self.args = self.parse_arguments()
    self.initialize_logger()

    # Authenticate against reddit using the stored app credentials.
    _id = self.get_id()
    self.r = praw.Reddit(
        user_agent="PrawWallpaperDownloader 1.0.0 by /u/Pusillus",
        client_id=_id["id"],
        client_secret=_id["secret"])

    # Counters for the end-of-run summary.
    self.n_posts = 0
    self.albums = 0
    self.succeeded = 0
    self.failed = 0
    self.skipped = 0
    self.deleted = 0

    self.notify = False
    self.posts = []
    self.que = queue.Queue()
    self.downloaded_images = []
def __init__(self):
    """Configure and run the Telegram bot.

    Reads credentials from bot_config.ini, wires every handler onto the
    dispatcher, then blocks in start_polling()/idle() until the bot is
    stopped; favourites are saved and the transit thread stopped on exit.
    """
    # Credentials and ids come from the local ini file.
    config = configparser.ConfigParser()
    config.read('bot_config.ini')
    bot_token = config.get("TELEGRAM", "token")
    titsa_idApp = config.get("TITSA", "idApp")
    self.adminId = config.get("ADMIN", "chatId")

    self.apiHandler = ApiHandler(titsa_idApp)
    self.dbHandler = DbHandler()

    # Background thread refreshing the GTFS feed every 7 days (in seconds).
    self.transportThread = OpenTransitThread(
        "http://www.titsa.com/Google_transit.zip", 7 * 24 * 60 * 60)
    self.transportThread.start()

    updater = Updater(token=bot_token, use_context=True)
    logging.basicConfig(
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        level=logging.INFO)

    # Persistent reply keyboard shown to every user (labels are user-facing
    # Spanish text and must stay as-is).
    b1 = telegram.KeyboardButton("⭐ Consultar favorito ⭐")
    b2 = telegram.KeyboardButton("✖️ Borrar favorito ✖️")
    b3 = telegram.KeyboardButton("🚊 Tranvia de Tenerife 🚊")
    b4 = telegram.KeyboardButton("📍 Ver paradas cercanas 📍",
                                 request_location=True)
    self.keyboard = telegram.ReplyKeyboardMarkup([[b3, b4], [b1, b2]],
                                                 resize_keyboard=True)

    # h1-h4 match the keyboard actions; they also serve as fallbacks for the
    # favourite-related conversations below.
    h1 = MessageHandler(Filters.regex(r"^.+Consultar favorito.+$"),
                        self.favKeyBoard)
    h2 = MessageHandler(Filters.regex(u"^\U0001F68F.+(\d{4})"),
                        self.replyToFav)
    h3 = MessageHandler(Filters.regex(r"^.+Borrar favorito.+$"),
                        self.favKeyBoard)
    h4 = MessageHandler(Filters.regex(u"^\u2716.+(\d{4})"), self.eraseFav)

    updater.dispatcher.add_handler(CommandHandler("start", self.start))
    # A bare 4-digit message is treated as a stop code.
    updater.dispatcher.add_handler(
        MessageHandler(Filters.regex(r"^\d{4}$"), self.responder_a_codigo))
    updater.dispatcher.add_handler(h1)
    updater.dispatcher.add_handler(h2)
    updater.dispatcher.add_handler(h3)
    updater.dispatcher.add_handler(h4)

    # Tram-stop listing conversation.
    updater.dispatcher.add_handler(
        ConversationHandler(
            entry_points=[
                MessageHandler(Filters.regex(r"^.+Tranvia de Tenerife.+$"),
                               self.listStops),
            ],
            states={
                TitsaBot.TRANVIA: [MessageHandler(Filters.all, self.queryTram)]
            },
            fallbacks=[]))
    updater.dispatcher.add_handler(
        MessageHandler(Filters.location, self.nearStops))

    # Admin broadcast conversation.
    updater.dispatcher.add_handler(
        ConversationHandler(
            entry_points=[
                CommandHandler("broadcast", self.newBroadcast),
            ],
            states={
                TitsaBot.BROADCAST_TEXT:
                [MessageHandler(Filters.all, self.broadcast)]
            },
            fallbacks=[]))

    # Add-favourite conversation: entered via /addFav or by tapping a
    # 4-digit inline button; asks for a default or custom name.
    updater.dispatcher.add_handler(
        ConversationHandler(entry_points=[
            CommandHandler("addFav",
                           self.addFavCommand,
                           pass_args=True,
                           pass_user_data=True),
            CallbackQueryHandler(self.addFavQuery,
                                 pattern=r"^\d{4}$",
                                 pass_user_data=True)
        ],
                            states={
                                TitsaBot.CUSTOM_OR_DEFAULT: [
                                    CallbackQueryHandler(
                                        self.setFavNameOption,
                                        pass_user_data=True)
                                ],
                                TitsaBot.INSERT_CUSTOM: [
                                    MessageHandler(Filters.text,
                                                   self.customName,
                                                   pass_user_data=True)
                                ]
                            },
                            fallbacks=[h1, h2, h3, h4]))

    # "Repeat station" conversation driven by inline "Repetir NNNN" buttons.
    updater.dispatcher.add_handler(
        ConversationHandler(entry_points=[
            CallbackQueryHandler(self.reloadStationQuery,
                                 pattern=r"^Repetir \d{4}$",
                                 pass_user_data=True)
        ],
                            states={
                                TitsaBot.CUSTOM_OR_DEFAULT: [
                                    CallbackQueryHandler(
                                        self.reloadStationQuery,
                                        pass_user_data=True)
                                ]
                            },
                            fallbacks=[h1, h2, h3, h4]))

    # Inline "Locate NNNN" buttons send the stop position.
    updater.dispatcher.add_handler(
        CallbackQueryHandler(self.sendStopAndLocation,
                             pattern=r"^Locate \d{4}$",
                             pass_user_data=True))

    # Blocks here until the bot is interrupted, then persists state.
    updater.start_polling()
    updater.idle()
    self.dbHandler.save()
    self.transportThread.stop()
def __init__(self):
    """Initialise the widget, open a DB handler, and build the UI."""
    QWidget.__init__(self)
    self.db = DbHandler()  # database access layer
    self.initUI()
def __init__(self):
    """Initialise the base widget, open a DB handler, and build the UI."""
    super().__init__()
    self.db = DbHandler()  # database access layer
    self.initUI()
import time from flask import Flask from flask import request from db_handler import DbHandler from flask_cors import CORS app = Flask(__name__) db_handler = DbHandler() CORS(app) @app.route("/") def hello_world(): return "<p>Hello, World!</p>" @app.route("/subscribe", methods=['POST']) def subscribe(): global db_handler print(request.json) email = request.json['email'] application = request.json['application'] print(email) print(application) before = time.time() db_handler.subscribe(email, application) after = time.time() return f"<p>{(after-before)*1000}ms</p>" @app.route("/unsubscribe", methods=['POST'])
response_parsed = xmltodict.parse(response.text) print(str(response_parsed['resultset']['summary'])) reponse_list[1] = response_parsed['resultset']['summary']['NumOfErrors'] reponse_list[2] = response_parsed['resultset']['summary']['NumOfLikelyProblems'] reponse_list[3] = response_parsed['resultset']['summary']['NumOfPotentialProblems'] except: logging.info("Could not process URI : " + uri); return reponse_list ## PROGRAM EXECUTION STARTS HERE __DB_FILE__ = "world_universities.db" db_handler = DbHandler(__DB_FILE__) db_handler.createACheckerSchema() start_time = millis() logging.info("\nPROGRAM STARTED AT : " + str(start_time) + " ms\n") universities = db_handler.getURLSNotAnalyzedByAcheckerWithLock(limit = 7358) # processUniversityURL(universities[0]) # sys.exit() if len(universities) > 0: try: pool = mp.Pool(processes=4) results = pool.map(processUniversityURL, universities) except: raise
def __init__(self):
    """Attach a database handler to this instance."""
    self.db = DbHandler()
from flask import Flask, request, Response import json from db_handler import DbHandler from auth_handler import AuthHandler from qc.qc_handler import QcHandler from util import log app = Flask(__name__) # TODO move to config file (along with configs in DbHandler and AuthHandler mayhaps) app.config['MAIN_RECORDINGS_PATH'] = '/data/eyra/recordings' dbHandler = DbHandler(app) authHandler = AuthHandler( app) # sets up /auth/login @app.route and @login_required() qcHandler = QcHandler(app, dbHandler) # SUBMISSION ROUTES @app.route('/submit/general/<method>', methods=['POST']) @authHandler.login_required() def submit_general(method): """ supports everything in the client-server API right now, /submit/general/{device,instuctor} requires sender to be authenticated with JWT, see auth_handler.py
from discord import message from discord.ext import commands from discord.ext.commands.errors import MissingRequiredArgument from dotenv import load_dotenv from db_handler import DbHandler load_dotenv() TOKEN = os.getenv('DISCORD_TOKEN') HOST = os.getenv('HOST') USER = os.getenv('USER') PASSWORD = os.getenv('PASSWORD') DATABASE = os.getenv('DATABASE') bot = commands.Bot(command_prefix='!') db = DbHandler(HOST, USER, PASSWORD, DATABASE) @bot.event async def on_ready(): print(f"{bot.user.name} has connected to Discord!") @bot.command( name="add", help= "Προσθέτω εργασία. Το date να είναι της μόρφης 2021-03-25, ενώ η ώρα 21:00:00." ) async def add_command(ctx, assignment_name, class_name, turn_in_date, turn_in_time): author = ctx.message.author
def setUp(self):
    """Connect to the test database and fix a known user id."""
    # Connection to the test database.
    self.db = DbHandler()
    # Default pre-configured user.
    self.user_id = "aaa10022-38b0-4a1a-95af-776f35aa2b8f"
from crawler import Spider
from db_handler import DbHandler
import scrapy
from scrapy.crawler import CrawlerProcess

db = DbHandler()
process = CrawlerProcess(
    {'USER_AGENT': 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)'})

# Build the start-URL list from the DB: each row may hold several
# ';'-separated hosts, and rows can be NULL.
temp_url_list = db.getUrls()
domain_list = db.getDomains()
url_list = []
for item in temp_url_list:
    if item is not None:  # was `item != None` — identity check is the idiom
        for url in item.split(";"):
            url_list.append("https://" + url)
print(domain_list)
print(url_list)


class MySpider(Spider):
    """Spider restricted to the domains/URLs pulled from the database."""
    allowed_domains = domain_list
    start_urls = url_list


process.crawl(MySpider)