def __init__(self):
    """Open the sqlite account store and declare the ``accounts`` table."""
    self.log = Logger()
    self.db = DAL('sqlite://ccaccounts.db')
    account_columns = [
        Field('name'),
        Field('account_number', 'integer'),
        Field('balance', 'integer'),
        Field('balance_limit', 'integer'),
    ]
    self.db.define_table('accounts', *account_columns)
def __init__(self):
    """Select a connector implementation from the configured CONNECTOR_TYPE.

    When the type is not 'DB', no connector is available: ``self.conn`` is
    left as None and 'NCS' is logged ('NCS' presumably means "no connector
    selected" -- confirm against the Logger message catalogue).
    """
    self.conn_type = c.CONNECTOR_TYPE
    self.log = Logger()
    # Fix: always define self.conn so callers can test `conn is None`
    # instead of hitting an AttributeError on unsupported types.
    self.conn = None
    if self.conn_type == 'DB':
        self.conn = DBConnector()
    else:
        self.log.log_message('NCS')
def __init__(self):
    """Wire up logging, the database client and a Firefox driver.

    The driver and the explicit wait are both configured with a 60 second
    timeout; ``self.start_url`` records the page the crawl begins from.
    """
    self.log = Logger().custom_logger()
    self.db_client = DataBaseClient()
    firefox_opts = Options()
    # Keep geckodriver quiet: only fatal messages.
    firefox_opts.log.level = "fatal"
    driver = webdriver.Firefox(executable_path=path_to_driver,
                               options=firefox_opts)
    driver.implicitly_wait(60)
    self.driver = driver
    self.wait = WebDriverWait(driver, 60)
    self.start_url = start_url
def __init__(self, args, logFile=sys.stderr):
    """Copy CLI-style options from *args* onto attributes and build the logger.

    args    -- mapping of option names ('all', 'bugs', ...) to values
    logFile -- file-like object the Logger writes to (default: stderr)
    """
    # Each (attribute, key) pair mirrors one command line argument.
    option_map = (
        ('argAll', 'all'),
        ('argBugs', 'bugs'),
        ('argFunction', 'function'),
        ('argNoColor', 'no_color'),
        ('argRemotes', 'remotes'),
        ('argShowStash', 'show_stash'),
        ('argSubmodules', 'submodules'),
        ('argVerbose', 'verbose'),
    )
    for attr, key in option_map:
        setattr(self, attr, args[key])
    # File-like object to log to; colourised unless --no-color was given.
    self.logger = Logger(logFile, not self.argNoColor)
def __init__(self, name="", account_number=0, balance=0, balance_limit=0, amount=0):
    """Build an account object in one of two modes.

    amount > 0           -- "transaction" mode: only name and amount are kept.
    otherwise, if the name/account_number/balance_limit parameters have the
    expected types, a full account record is populated; anything else logs
    'ICPT' (presumably "incorrect parameter types" -- confirm with Logger docs)
    and leaves the record attributes unset.
    """
    self.connector = Connector()
    self.log = Logger()
    if amount > 0:
        self.name = name
        self.amount = amount
    # Fix: use isinstance() instead of `type(x) == T` comparisons (PEP 8);
    # note isinstance also accepts bool for the int checks.
    elif isinstance(name, str) and isinstance(account_number, int) \
            and isinstance(balance_limit, int):
        self.name = name
        self.account_number = account_number
        self.balance = balance
        self.balance_limit = balance_limit
        self.amount = amount
    else:
        self.log.log_message('ICPT')
def downloadData(baseURL, baseOutputFilename, opts=sys.argv):
    """Fetch results CSVs for every configured league/season and save locally.

    baseURL            -- URL template formatted with (season, league)
    baseOutputFilename -- output name template formatted with (league, season)
    opts               -- command-line style options (default: sys.argv)
    """
    log = Logger()
    getFootyOptions(log, opts)
    algoCfg, mailCfg = getFootyConfig()
    for league in algoCfg['rangeMap'].keys():
        for season in algoCfg['seasons']:
            resultsURL = baseURL.format(season, league)
            log.info('Downloading...' + resultsURL)
            with readCSVFileAsDict(resultsURL) as resultsReader:
                outputFilename = baseOutputFilename.format(league, season)
                log.info('Output to...' + outputFilename)
                rows = iter(resultsReader)
                headers = next(rows)
                # The first header field arrives mangled; force it to 'Div'.
                headers[0] = 'Div'
                with newCSVFile(outputFilename, headers) as outputWriter:
                    for row in rows:
                        outputWriter.writerow(row)
if not cfg.randomize: # set fixed seed random.seed(cfg.seed) np.random.seed(cfg.seed) torch.manual_seed(cfg.seed) torch.cuda.manual_seed(cfg.seed) log_path = os.path.join(cfg.log_dir, cfg.exp_name) mkdir_if_missing(log_path) snap_path = os.path.join(cfg.snap_dir, cfg.exp_name) mkdir_if_missing(snap_path) summary_writer = None if not cfg.no_log: log_name = cfg.exp_name + "_log_" + \ strftime("%Y-%m-%d_%H-%M-%S", gmtime()) + '.txt' sys.stdout = Logger(os.path.join(log_path, log_name)) summary_writer = SummaryWriter(log_dir=log_path) print("Input Args: ") pprint.pprint(cfg) train_loader, test_loader, num_classes, img_size = get_data_loader( data_name=cfg.data_name, data_dir=cfg.data_dir, batch_size=cfg.batch_size, test_batch_size=cfg.eval_batch_size, num_workers=4) model = create_model(name=cfg.model_name, num_classes=num_classes) # optimizer = torch.optim.Adam(model.parameters(), lr=cfg.lr, # weight_decay=0.0005, amsgrad=False) optimizer = torch.optim.SGD(model.parameters(),
# NOTE(review): this statement belongs to an except-handler whose `def`/`try`
# lies outside this chunk; `name` and `ex` are bound there.
logger.write("Cant load music {} because: {}".format(name, ex), logger.ERROR)


def text(text, color):
    # Render *text* in the given colour using the default 30pt font.
    font = pygame.font.Font(None, 30)
    string_rendered = font.render(text, 1, color)
    return string_rendered


def terminate():
    # Shut pygame down and exit the process.
    pygame.quit()
    sys.exit()


def load_background(photo):
    # The photo must live in the "sprites" folder; scaled to the window size.
    return pygame.transform.scale(load_image(photo), (width, height))


# Module-level game setup: 608x608 window titled "Tanks 2D".
pygame.init()
size = width, height = 608, 608
screen_rect = pygame.Rect(0, 0, width, height)
screen = pygame.display.set_mode(size)
pygame.display.set_caption("Tanks 2D")
logger = Logger()  # event logging to a text file
pygame.display.set_icon(load_image("icon.png"))
def footyBackTest(resultsURLTmpl, opts=sys.argv):
    """Back-test the betting model against historical results.

    For each league in the range map (command-line override wins), load the
    per-mark summary odds, replay every configured season, pick the best
    available home odds among the bookmakers present in the row, and write a
    BackTest CSV with per-bet P&L plus running stake/winnings/yield totals.
    """
    (algoCfg, mailCfg) = getFootyConfig()
    rangeMap = algoCfg['rangeMap']
    seasons = algoCfg['seasons']
    log = Logger()
    (sm, rm) = getFootyOptions(log, opts)
    # A range map supplied on the command line overrides the configured one.
    rangeMap = rm if rm else rangeMap
    for league in rangeMap.keys():
        # summaryData[mark] -> {'H'/'D'/'A': (percentage, odds)} from the
        # previously generated Summary CSV for this league.
        summaryData = {}
        with readCSVFileAsDict('{}/{}/Summary.{}.csv'.format(
                analysisDir, league, model.__class__.__name__)) as summaryReader:
            for row in summaryReader:
                mark = int(row['Mark'])
                summaryData[mark] = {
                    'H': (float(row['%H']), float(row['HO'])),
                    'D': (float(row['%D']), float(row['DO'])),
                    'A': (float(row['%A']), float(row['AO']))
                }
        with newCSVFile(
                '{}/{}/BackTest.{}.csv'.format(analysisDir, league,
                                               model.__class__.__name__),
                ['Date', 'HomeTeam', 'AwayTeam', 'Mark', 'Result', 'MyBet',
                 'MyOdds', 'Bookie', 'BookieOdds', 'Winnings', 'PnL',
                 'T_Stk', 'T_W', 'Yield']) as backTestWriter:
            # Running totals: ts = total stake, tw = total winnings, y = yield.
            ts = tw = y = 0
            for season in seasons:
                resultsURL = resultsURLTmpl.format(season, league)
                log.debug('Processing...{}'.format(resultsURL))
                with readCSVFileAsDict(resultsURL) as resultsReader:
                    # Assemble results as list so that we can reset the iterator
                    res = list(resultsReader)
                    data = model.processMatches(res)
                    # Resetting the iterator here
                    for row in iter(res):
                        date, ht, at, mark, hForm, aForm = model.markMatch(
                            data, row['Date'], row['HomeTeam'],
                            row['AwayTeam'])
                        if mark is None:
                            continue
                        if mark in rangeMap[league]:
                            # Scan each bookmaker's columns; keep the best
                            # HOME odds seen (bestD/bestA are read only by the
                            # disabled `if False` branches below).
                            bestH = 0
                            bestD = 0
                            bestA = 0
                            bookie = ''
                            try:
                                b365H = float(row['B365H'])
                                b365D = float(row['B365D'])
                                b365A = float(row['B365A'])
                                if b365H > bestH:
                                    bestH = b365H
                                    bookie = 'B365'
                            except BaseException:
                                log.error('No B365 data - skipping : {} {} {}'.format(date, ht, at))
                            try:
                                bwH = float(row['BWH'])
                                bwD = float(row['BWD'])
                                bwA = float(row['BWA'])
                                if bwH > bestH:
                                    bestH = bwH
                                    bookie = 'BW'
                            except BaseException:
                                log.error('No BW data - skipping : {} {} {}'.format(date, ht, at))
                            try:
                                iwH = float(row['IWH'])
                                iwD = float(row['IWD'])
                                iwA = float(row['IWA'])
                                if iwH > bestH:
                                    bestH = iwH
                                    bookie = 'IW'
                            except BaseException:
                                log.error('No IW data - skipping : {} {} {}'.format(date, ht, at))
                            try:
                                lbH = float(row['LBH'])
                                lbD = float(row['LBD'])
                                lbA = float(row['LBA'])
                                if lbH > bestH:
                                    bestH = lbH
                                    bookie = 'LB'
                            except BaseException:
                                log.error('No LB data - skipping : {} {} {}'.format(date, ht, at))
                            try:
                                whH = float(row['WHH'])
                                whD = float(row['WHD'])
                                whA = float(row['WHA'])
                                if whH > bestH:
                                    bestH = whH
                                    bookie = 'WH'
                            except BaseException:
                                log.error('No WH data - skipping : {} {} {}'.format(date, ht, at))
                            try:
                                vcH = float(row['VCH'])
                                vcD = float(row['VCD'])
                                vcA = float(row['VCA'])
                                if vcH > bestH:
                                    bestH = vcH
                                    bookie = 'VC'
                            except BaseException:
                                log.error('No VC data - skipping : {} {} {}'.format(date, ht, at))
                            hSD = summaryData[mark]['H']
                            aSD = summaryData[mark]['A']
                            dSD = summaryData[mark]['D']
                            myBet = ''
                            myOdds = 0.0
                            myPercent = 0.0
                            bookieOdds = 0.0
                            winnings = 0.0
                            pnl = 0.0
                            # Bet home when the best market odds beat the
                            # model's historical home odds for this mark.
                            if bestH > hSD[1]:  # and bestH < (hSD[1] * 2):
                                myBet = 'H'
                                myOdds = hSD[1]
                                #myOdds = (1.97*mark+45.42)*0.9
                                myPercent = hSD[0]
                                bookieOdds = bestH
                                winnings = bookieOdds
                                pnl = winnings - 1
                            # Draw/away strategies deliberately disabled
                            # (`if False and ...`) but kept for experiments.
                            if False and myPercent < dSD[0] and bestD > dSD[1]:
                                #if myPercent < dSD[0] and b365D > dSD[1]:
                                myBet = 'D'
                                myOdds = dSD[1]
                                myPercent = dSD[0]
                                bookieOdds = bestD
                                winnings = bookieOdds
                                pnl = winnings - 1
                            if False and myPercent < aSD[0] and bestA > aSD[1]:
                                #if myPercent < aSD[0] and b365A > aSD[1]:
                                myBet = 'A'
                                myOdds = aSD[1]
                                myPercent = aSD[0]
                                bookieOdds = bestA
                                winnings = bookieOdds
                                pnl = winnings - 1
                            matchResult = row['FTR']
                            if myBet != '':
                                # A losing bet forfeits the 1-unit stake.
                                if matchResult != myBet:
                                    winnings = 0.0
                                    pnl = -1.0
                                ts += 1
                                tw += winnings
                                y = (tw - ts) / ts
                                backTestWriter.writerow(
                                    (date, ht, at, mark, matchResult, myBet,
                                     myOdds, bookie, bookieOdds, winnings,
                                     pnl, ts, tw, y))
        # Per-league summary of the back-test run.
        log.info(
            '{:<5s} - Staked: GBP{:>6.2f} Won: GBP{:>6.2f} Yield: {:>6.2f}%'.
            format(league, ts, tw, y * 100))
#coding:utf-8 from fcoin3 import Fcoin from Logging import Logger import time import json import sys import traceback import math import config fcoin = Fcoin() fcoin.auth(config.key, config.secret) # 授权 # 写日志 log = Logger('all.log', level='debug') # 例子 # log.logger.debug('debug') # log.logger.info('info') # log.logger.warning('警告') # log.logger.error('报错') # log.logger.critical('严重') #平价买卖 def get_ticket1(): r = fcoin.get_market_ticker(config.symbol['name']) num = (r['data']['ticker'][2] + r['data']['ticker'][4]) / 2.0 return pricedecimal(num) #精度控制
def analyseFixtures(resultsURLTmpl, fixturesURL, opts=sys.argv):
    """Mark upcoming fixtures against model history and report the bets.

    Reads the fixtures CSV, marks each match via the model, attaches the
    per-mark summary stats for the fixture's league, writes a Betting CSV,
    prints a formatted table to the log, and optionally emails an HTML table
    of the same data.
    """
    log = Logger()
    (sendMail, rangeMap) = getFootyOptions(log, opts)
    (algoCfg, mailCfg) = getFootyConfig()
    # NOTE(review): the rangeMap returned by getFootyOptions is immediately
    # overwritten by the configured one -- confirm this is intended.
    rangeMap = algoCfg['rangeMap']
    season = algoCfg['season']
    teamErrorMap = algoCfg['teamErrorMap']
    mailText = '<table border=1><tr><th>Lge</th><th>Date</th><th>HomeTeam</th><th>AwayTeam</th><th>Mark</th><th>H#</th><th>H%</th><th>H Odds</th><th>HomeTeamForm</th><th>AwayTeamForm</th></tr>'
    s = '{:<4s} {:<8s} {:<16s} {:<16s} {:<4s} {:s} {:<37s} {:<37s}'.format(
        'Lge', 'Date', 'HomeTeam', 'AwayTeam', 'Mark',
        fST(('H#', 'H%', 'HO')), 'HomeTeamForm', 'AwayTeamForm')
    termText = '\n' + hl(s) + '\n'
    with newCSVFile(
            '{}/Betting.{}.csv'.format(analysisDir, model.__class__.__name__),
            ['Lge', 'Date', 'HomeTeam', 'AwayTeam', 'Mark', 'H#', 'H%',
             'HOdds', 'HomeTeamForm', 'AwayTeamForm']) as bettingWriter:
        league = ''
        data = {}
        # NOTE(review): summaryData is never cleared between leagues, so marks
        # from a previous league persist until overwritten -- confirm intended.
        summaryData = {}
        with readCSVFileAsDict(fixturesURL) as fixturesReader:
            for fix in fixturesReader:
                log.debug(fix)
                # The Div column name arrives bytes-mangled with either a
                # double or single quote; probe for which form is present.
                ind = 'b\"Div'
                try:
                    fix['b\"Div']
                except:
                    ind = 'b\'Div'
                if fix[ind] not in rangeMap:
                    continue
                if league != fix[ind]:
                    # New league encountered: (re)load its results and
                    # per-mark summary statistics.
                    league = fix[ind]
                    resultsURL = resultsURLTmpl.format(season, league)
                    log.info(resultsURL)
                    with readCSVFileAsDict(resultsURL) as resultsReader:
                        data = model.processMatches(resultsReader)
                    with readCSVFileAsDict('{}/{}/Summary.{}.csv'.format(analysisDir, league, model.__class__.__name__)) as summaryReader:
                        for summ in summaryReader:
                            mark = int(summ['Mark'])
                            f = int(summ['Frequency'])
                            hP = float(summ['%H'])
                            dP = float(summ['%D'])
                            aP = float(summ['%A'])
                            # Per result: (expected count, percentage, odds).
                            summaryData[mark] = {
                                'H': (int(f * (hP / 100)), float(summ['%H']),
                                      float(summ['HO'])),
                                'D': (int(f * (dP / 100)), float(summ['%D']),
                                      float(summ['DO'])),
                                'A': (int(f * (aP / 100)), float(summ['%A']),
                                      float(summ['AO']))
                            }
                # Normalise team names that differ between feeds.
                ht = fix['HomeTeam']
                if ht in teamErrorMap:
                    ht = teamErrorMap[ht]
                at = fix['AwayTeam']
                if at in teamErrorMap:
                    at = teamErrorMap[at]
                date, ht, at, mark, hForm, aForm = model.markMatch(
                    data, fix['Date'], ht, at)
                # Only marks in [-15, 15] have summary rows.
                if mark is None or mark not in range(-15, 16):
                    continue
                hSD = summaryData[mark]['H']
                aSD = summaryData[mark]['A']
                dSD = summaryData[mark]['D']
                s = '{:<4s} {:<8s} {:<16s} {:<16s} {:4d} {:s} ({:s}) ({:s})'.format(league, date, ht, at, mark, fSD(hSD), hForm, aForm)
                mail_s = '<tr><td>{:s}</td><td>{:s}</td><td>{:s}</td><td>{:s}</td><td align="right">{:>4d}</td><td align="right">{:>4d}</td><td align="right">{:>6.2f}%</td><td align="right">{:>5.2f}</td><td align="right">{:s}</td><td align="right">{:s}</td></tr>'.format(
                    league, date, ht, at, mark, hSD[0], hSD[1], hSD[2],
                    hForm, aForm)
                # Marks inside the league's betting range are highlighted.
                if mark in rangeMap[league]:
                    termText += hl(s) + '\n'
                    mailText += mail_hl(mail_s)
                else:
                    termText += s + '\n'
                    mailText += mail_s
                bettingWriter.writerow((league, date, ht, at, mark, hSD[0],
                                        hSD[1], hSD[2], hForm, aForm))
    log.info(termText)
    mailText += '</table>'
    mailText = 'MIME-Version: 1.0\nContent-type: text/html\nSubject: Footy Bets\n\n{}'.format(
        mailText)
    if sendMail:
        # Send the HTML table over SMTP with STARTTLS.
        fromAddr = mailCfg['fromAddr']
        toAddrs = mailCfg['toAddrs']
        server = smtplib.SMTP(mailCfg['svr'], int(mailCfg['port']))
        server.ehlo()
        server.starttls()
        server.ehlo()
        server.login(fromAddr, mailCfg['pwd'])
        server.sendmail(fromAddr, toAddrs, mailText)
        server.quit()
        log.info('email sent to: {!s}'.format(toAddrs))
def makeFootyHistory(resultsURLTmpl, opts=sys.argv):
    """Build per-league History, Summary and Stats CSVs from past results.

    For every league in the configured range map: mark each historical match,
    tally H/D/A outcomes per mark, write per-mark percentages and implied
    odds, and fit a linear regression of outcome percentage vs mark.
    """
    log = Logger()
    getFootyOptions(log, opts)
    (algoCfg, mailCfg) = getFootyConfig()
    rangeMap = algoCfg['rangeMap']
    seasons = algoCfg['seasons']
    '''
    Looks like if you go back too far with the historical data it starts to
    mess up the results, I suspect this is because the league composition
    has changed enough to mean that the newer and older season data don't
    play well together...
    '''
    log.info(__name__ + ' : ' + model.__class__.__name__)
    for league in rangeMap.keys():
        log.info('League : {}...'.format(league))
        os.makedirs('{}/{}'.format(analysisDir, league), exist_ok=True)
        # summaryData[mark] -> {'H'/'D'/'A': count of each outcome}.
        summaryData = {}
        with newCSVFile('{}/{}/History.{}.csv'.format(analysisDir, league,
                                                      model.__class__.__name__),
                        ['Date', 'HomeTeam', 'AwayTeam', 'Mark', 'Result']) \
                as historyWriter:
            for season in seasons:
                resultsURL = resultsURLTmpl.format(season, league)
                log.debug('Processing...{}'.format(resultsURL))
                try:
                    with readCSVFileAsDict(resultsURL) as resultsReader:
                        # Assembling as list so that the iterator can be reset
                        res = list(resultsReader)
                        data = model.processMatches(res)
                        # Resetting iterator here...
                        for row in iter(res):
                            try:
                                date, ht, at, mark, hForm, aForm = \
                                    model.markMatch(data, row['Date'],
                                                    row['HomeTeam'],
                                                    row['AwayTeam'])
                            except KeyError:
                                # Malformed row: skip it.
                                continue
                            if mark is None or row['FTR'] == '':
                                continue
                            mark = int(mark)
                            matchResult = row['FTR'].strip()
                            historyWriter.writerow(
                                [date, ht, at, mark, matchResult])
                            if mark not in summaryData:
                                summaryData[mark] = {'A': 0, 'D': 0, 'H': 0}
                            summaryData[mark][matchResult] += 1
                except BaseException:
                    # Best-effort: a bad season's data shouldn't abort the
                    # whole league.
                    log.error(sys.exc_info()[0:1])
                    continue
        log.info('Writing summary data...')
        with newCSVFile('{}/{}/Summary.{}.csv'.format(analysisDir, league,
                                                      model.__class__.__name__),
                        ['Mark', 'Frequency', '%H', 'HO', '%D', 'DO',
                         '%A', 'AO']) \
                as summaryWriter:
            # x = marks; hY/dY/aY = home/draw/away outcome percentages.
            x = []
            hY = []
            dY = []
            aY = []
            hist = {}
            for mark in summaryData:
                # Only marks in [-15, 15] are summarised.
                if mark > 15 or mark < -15:
                    continue
                awayF = summaryData[mark]['A']
                drawF = summaryData[mark]['D']
                homeF = summaryData[mark]['H']
                totalF = awayF + drawF + homeF
                awayP = awayF / totalF * 100
                drawP = drawF / totalF * 100
                homeP = homeF / totalF * 100
                x.append(mark)
                hY.append(homeP)
                dY.append(drawP)
                aY.append(awayP)
                # Implied decimal odds = 100 / percentage (0% kept as 0).
                awayO = awayP if awayP == 0 else 100 / awayP
                drawO = drawP if drawP == 0 else 100 / drawP
                homeO = homeP if homeP == 0 else 100 / homeP
                hist[mark] = (homeF, homeP)
                summaryWriter.writerow([
                    mark, totalF, '{:>4.2f}'.format(homeP),
                    '{:>4.2f}'.format(homeO), '{:>4.2f}'.format(drawP),
                    '{:>4.2f}'.format(drawO), '{:>4.2f}'.format(awayP),
                    '{:>4.2f}'.format(awayO)
                ])
            # Log marks ordered by home-win frequency, highest first.
            s = ''
            for h in sorted(hist.items(), key=lambda x: x[1][0], reverse=True):
                s += '{:d} ({:d} {:>5.2f}) '.format(h[0], h[1][0], h[1][1])
            log.info(s)
        with newCSVFile('{}/{}/Stats.{}.csv'.format(analysisDir, league,
                                                    model.__class__.__name__),
                        ['Result', 'Slope', 'Intercept', 'P', 'R', 'R^2',
                         'Err']) \
                as statsWriter:
            # Linear fit of outcome percentage against mark, per result type.
            slope, intercept, r, p, stderr = stats.linregress(x, hY)
            r2 = r**2
            log.info(
                'Home: {:>4.2f} {:>4.2f} {:>4.2} {:>4.2f} {:>4.2f} {:>4.2}'.format(slope, intercept, p, r, r2, stderr))
            statsWriter.writerow([
                'H', '{:>4.2f}'.format(slope), '{:>4.2f}'.format(intercept),
                '{:>4.2f}'.format(p), '{:>4.2f}'.format(r),
                '{:>4.2f}'.format(r2), '{:>4.2f}'.format(stderr)
            ])
            slope, intercept, r, p, stderr = stats.linregress(x, dY)
            r2 = r**2
            log.info(
                'Draw: {:>4.2f} {:>4.2f} {:>4.2} {:>4.2f} {:>4.2f} {:>4.2}'.format(slope, intercept, p, r, r2, stderr))
            statsWriter.writerow([
                'D', '{:>4.2f}'.format(slope), '{:>4.2f}'.format(intercept),
                '{:>4.2f}'.format(p), '{:>4.2f}'.format(r),
                '{:>4.2f}'.format(r2), '{:>4.2f}'.format(stderr)
            ])
            slope, intercept, r, p, stderr = stats.linregress(x, aY)
            r2 = r**2
            log.info(
                'Away: {:>4.2f} {:>4.2f} {:>4.2} {:>4.2f} {:>4.2f} {:>4.2}'.format(slope, intercept, p, r, r2, stderr))
            statsWriter.writerow([
                'A', '{:>4.2f}'.format(slope), '{:>4.2f}'.format(intercept),
                '{:>4.2f}'.format(p), '{:>4.2f}'.format(r),
                '{:>4.2f}'.format(r2), '{:>4.2f}'.format(stderr)
            ])
def run(self, initial_point):
    # Box's complex method (constrained simplex optimisation).
    # NOTE(review): this is Python 2 code (`print` statement); the structure
    # below is reconstructed from a collapsed source line -- in particular the
    # placement of R inside the element-generation loop is the most plausible
    # reading (one uniform draw per dimension); confirm against the original.
    additional_data = {}
    n = initial_point.get_number_of_dimensions()
    # Reject a start point that violates any explicit (box) constraint.
    for i in range(n):
        #if(initial_point.getElement(0, i) < self.lower_bounds[i] or initial_point.getElement(0, i) > self.upper_bounds[i]):
        if not self.explicit_constraints[i].is_satisfied(
                initial_point.get_value_at_dimension(i)):
            print "The given initial_point is not within the explicit constraints."
            return
    centroid = initial_point.copy()
    accepted_points = []
    accepted_points.append(initial_point)
    # Generate 2n additional random points inside the explicit bounds,
    # pulling each halfway toward the centroid until the implicit
    # constraints are satisfied.
    for t in range(2 * n):
        elements = []
        for i in range(n):
            elements.append(0)
            R = random.uniform(0, 1)
            elements[i] = self.explicit_constraints[i].get_lower_bound(
            ) + R * (self.explicit_constraints[i].get_upper_bound() -
                     self.explicit_constraints[i].get_lower_bound())
        new_point = Point(elements)
        for j in range(len(self.implicit_constraints)):
            while (not self.implicit_constraints[j].is_satisfied(new_point)):
                new_point = (new_point + centroid).multiply_by_scalar(0.5)
        accepted_points.append(new_point)
        #calculate new centroid (with new accepted initial_point)
        sum_elements = []
        for i in range(n):
            sum_elements.append(0)
        # NOTE(review): `sum` shadows the builtin here (kept as-is).
        sum = Point(sum_elements)
        for i in range(len(accepted_points)):
            sum = sum + accepted_points[i]
        #centroid = sum/(simplex.length - 2);
        centroid = sum.multiply_by_scalar(1.0 / len(accepted_points))
    keepGoing = True
    iteration_number = 1
    logger = Logger(self.function)
    logger.set_implicit_constraints(self.implicit_constraints)
    while (keepGoing):
        MIN = float('-inf')
        # NOTE(review): `max`, value_at_xh and value_at_xh2 are assigned but
        # never read below (kept as-is).
        max = MIN
        value_at_xh = MIN
        value_at_xh2 = MIN
        # xh = worst (highest-valued) point; xh2 = runner-up.
        xh_index = 0
        xh2_index = 0
        for i in range(len(accepted_points)):
            if (self.function.value_at(accepted_points[i]) >
                    self.function.value_at(accepted_points[xh_index])):
                xh2_index = xh_index
                xh_index = i
        #calculate centroid without xh
        sum_elements = []
        for i in range(n):
            sum_elements.append(0)
        sum = Point(sum_elements)
        #for (int i = 0; i < accepted_points.size(); i++) {
        for i in range(len(accepted_points)):
            if (i == xh_index):
                pass
            else:
                sum = sum + accepted_points[i]
        #centroid = Matrix.scalarMultiply(sum, (1.0/(len(accepted_points) - 1)))
        centroid = sum.multiply_by_scalar(
            (1.0 / (len(accepted_points) - 1)))
        # Reflect the worst point through the centroid, then clamp it back
        # inside the explicit bounds.
        xr = self.reflect(centroid, accepted_points[xh_index], self.alpha)
        for i in range(n):
            #if(xr.getElement(0,i) < self.lower_bounds[i]):
            lower_bound = self.explicit_constraints[i].get_lower_bound()
            upper_bound = self.explicit_constraints[i].get_upper_bound()
            if (xr.get_value_at_dimension(i) < lower_bound):
                xr.set_value_at_dimension(i, lower_bound)
            elif (xr.get_value_at_dimension(i) > upper_bound):
                xr.set_value_at_dimension(i, upper_bound)
        # Pull toward the centroid until the implicit constraints hold.
        for i in range(len(self.implicit_constraints)):
            while (not self.implicit_constraints[i].is_satisfied(xr)):
                xr = (xr + centroid).multiply_by_scalar(0.5)
        # If still worse than the runner-up, move halfway to the centroid.
        if (self.function.value_at(xr) > self.function.value_at(
                accepted_points[xh2_index])):
            xr = (xr + centroid).multiply_by_scalar(0.5)
        accepted_points[xh_index] = xr
        # Stop when every point's value is within epsilon of the centroid's.
        keepGoing = False
        for i in range(len(accepted_points)):
            if (abs(
                    self.function.value_at(accepted_points[i]) -
                    self.function.value_at(centroid)) > self.epsilon):
                keepGoing = True
        #TODO check if this is the correct place to log the additional_data points
        xh_description = "xh - The initial_point in which the function value is highest"
        xr_description = "xr - Reflected initial_point"
        xc_description = "xc - Centroid"
        xh_tuple = (accepted_points[xh_index], xh_description)
        xr_tuple = (xr, xr_description)
        xc_tuple = (centroid, xc_description)
        additional_data["xh"] = xh_tuple
        additional_data["xr"] = xr_tuple
        additional_data["xc"] = xc_tuple
        currentIteration = Iteration(iteration_number,
                                     self.function.value_at(centroid),
                                     centroid, additional_data,
                                     self.function.get_number_of_calls())
        logger.add_iteration(currentIteration)
        iteration_number = iteration_number + 1
    return centroid, logger
'''
Created on May 10, 2013

@author: Saulius Alisauskas
'''
import socket
import struct
import json
import Event
import sys
from random import randint
from Logging import Logger
from Logging import Level

# Module-level registry of every RUDP socket created via createSocket().
rudpSockets = []
logger = Logger("RUDP", Level.TRACE)


def createSocket(addr, port):
    # Create a UDP socket bound to (addr, port), wrap it in a RudpSocket,
    # register it, and hook its fd into the Event loop for reads.
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind((addr, port))
    rudpSocket = RudpSocket(sock)
    rudpSocket.addr_src = sock.getsockname()
    rudpSockets.append(rudpSocket)
    Event.eventFd(sock.fileno(), handleDataAvailable, rudpSocket,
                  "DataReceived")
    return rudpSocket


def closeSocket(rudpSocket):
    ''' Called by user application '''
    # NOTE(review): the body of closeSocket continues beyond this chunk.
# -*- coding: utf-8 -*- import socket import struct import json import Event import sys from random import randint from Logging import Logger from Logging import Level # 基本功能包 lftpSockets = [] logger = Logger("LFTP", Level.TRACE) def createSocket(addr, port): sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.bind((addr, port)) lftpSocket = LftpSocket(sock) lftpSocket.addr_src = sock.getsockname() lftpSockets.append(lftpSocket) Event.eventFd(sock.fileno(), handleDataAvailable, lftpSocket, "DataReceived") return lftpSocket def closeSocket(lftpSocket): ''' Called by user application ''' for rs in lftpSockets: if rs == lftpSocket: logger.log(Level.INFO, "Closing socket:" + str(rs))
@author: saulius
'''
import time
import select
import sys

# NOTE(review): the two lines above are the tail of a module docstring whose
# opening quotes lie before this chunk.
TYPE_TIMEOUT = 1
TYPE_CLOSED = 2
EVENT_TYPE_FD = 1
EVENT_TYPE_TIME = 2

from Logging import Logger
from Logging import Level

# Registered events (both fd and timer kinds) pending dispatch.
events = []
logger = Logger("EVENT", Level.INFO)


def getCurrentMills():
    # Current wall-clock time in whole milliseconds.
    return int(round(time.time() * 1000))


def eventTimeout(timeMs, callback, argument, strId):
    # Register a timer event that fires timeMs milliseconds from now.
    event = EventData(EVENT_TYPE_TIME, callback, argument)
    event.time = getCurrentMills() + timeMs
    event.id = strId
    logger.log(Level.DEBUG, "Registering timeout " + str(event))
    events.append(event)


def eventTimeoutDelete(callback, argument):
    # Remove timer events matching (callback, argument).
    for event in events:
        if event.type == EVENT_TYPE_TIME and event.callback == callback and event.argument == argument:
            logger.log(Level.DEBUG, "Deleting " + str(event))
            # NOTE(review): the handler body continues beyond this chunk.
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine, Table, Column, Integer, String, MetaData, ForeignKey
from sqlalchemy.orm import sessionmaker
from sqlalchemy_utils import database_exists, create_database
from sqlalchemy.dialects.mysql import TEXT
from settings import host_db, name_db, username_db, password_db
from Logging import Logger

Base = declarative_base()
log = Logger().custom_logger()


#?charset=utf8mb4
class DataBaseClient:
    # Owns the MySQL engine and a session; creates the database and all
    # declarative tables on first use.
    def __init__(self):
        self.engine = create_engine(
            f'mysql+pymysql://{username_db}:{password_db}@{host_db}/{name_db}')
        if not database_exists(self.engine.url):
            create_database(self.engine.url)
            # NOTE(review): reconstructed indentation places this log inside
            # the creation branch -- confirm; `successfully` is a custom
            # level on the project Logger.
            log.successfully('Database successfully created')
        Base.metadata.create_all(self.engine)
        Session = sessionmaker(bind=self.engine)
        self.session = Session()


class ModelRecord(Base):
    # One scraped record; all columns stored as strings.
    __tablename__ = 'Records'
    number_record = Column(String(100), primary_key=True, unique=True, index=True)
    # NOTE(review): "categoty" is a typo for "category", kept because renaming
    # the column would break existing queries and the live table schema.
    record_categoty = Column(String(100))
    title = Column(String(100))
    price = Column(String(50))
    # NOTE(review): the class may continue beyond this chunk.