def main(): """ Imports an XLS file of Victor exported results. The Experiment ID is determined by the time-stamp of the first measurement. """ options = MakeOpts().parse_args() if options.debug: db = CreateDummyDB() else: db = MySQLDatabase(host=options.host, user='******', port=3306, passwd='a1a1a1', db='tecan') if not os.path.exists(options.xls_file): print "File not found: " + options.xls_file sys.exit(-1) print "Importing from file: " + options.xls_file fp = open(options.xls_file, 'r') exp_id = VictorParser.ImportFileToDB(fp, db, options.exp_id) if options.debug: print "Done, go check out the results at %s" % db.filename else: print "Done, go check out the results at http://eladpc1/RoboSite/Exp/%s/0" % exp_id
def deploy_to_database(self):
    """Persist collected products and updates to MySQL.

    Inserts every entry of ``self.products`` and applies every entry of
    ``self.updates``. The connection is closed even when a query raises
    (the original leaked it on error).
    """
    db = MySQLDatabase()
    try:
        for product in self.products:
            db._insert_query(product)
        for update in self.updates:
            db._update_query(update)
    finally:
        db.close()
def __init__(self):
    """Read DB connection settings from the JSON config and open a session."""
    # Pull each setting once; order of the rj calls matches the original.
    host = rj.get_db_host()
    user = rj.get_db_user()
    pwd = rj.get_db_pwd()
    port = rj.get_db_port()
    name = rj.get_db_name()

    self.dbhost, self.dbuser, self.dbpwd = host, user, pwd
    self.dbport, self.dbname = port, name

    # Connection and session live as long as this object does.
    self.db = MySQLDatabase(user, pwd, name, host, port)
    self.Session = self.db.mysql_session()
def __init__(self, set_number, condition='new'):
    """Fetch BrickLink price-guide data for *set_number* on construction.

    condition: stored as-is ('new' by default); semantics are handled by
    the price-parsing step.
    """
    self.set_number = set_number
    self.condition = condition
    self._url = ('http://www.bricklink.com/catalogPG.asp?'
                 'S={}-1&ColorID=0'.format(set_number))
    # Browser-like User-Agent header for the price-guide request.
    self._headers = {
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36'
    }
    self.scan_date = datetime.now()
    self.toCurrency = 'CHF'
    # Result containers, filled by _parse_prices().
    self.prices, self.exchange_rates, self.result = {}, {}, {}
    self.db = MySQLDatabase()
    self.q = Queries()
    self._parse_prices()
def main():
    """Verify stored file checksums against the database and alert on mismatch.

    For every file in the selected storage list, compute its checksums and
    compare them to the values recorded in each configured DB table; send an
    e-mail alert when they disagree or were never recorded.
    """
    # Connection / notification settings all come from the JSON config (rj).
    dbuser = rj.get_db_user(); dbpwd = rj.get_db_pwd()
    dbhost = rj.get_db_host(); dbname = rj.get_db_name()
    dbport = rj.get_db_port(); dbtables = rj.get_db_tables()
    sender = rj.get_sender(); smtphost = rj.get_smtp_host()
    recipient = rj.get_recipient()
    db = MySQLDatabase(dbuser, dbpwd, dbname, dbhost, dbport)
    Session = db.mysql_session()
    files_list = StorageFilesList().create_selected_list()
    for file_path in files_list:
        # NOTE(review): two separate md5Checksum objects are built here, and
        # get_checksum_gz() is called on a fresh instance that never ran
        # calculate_checksum() — confirm this is intentional and not a bug.
        cksgz_stg = md5Checksum(file_path).calculate_checksum()
        cks_stg = md5Checksum(file_path).get_checksum_gz()
        head, fname_gz = os.path.split(file_path)
        for tbl in dbtables:
            # presumably db_element is (name, checksum, gz_checksum) — verify
            # against db_query's row layout.
            db_element = db_query(tbl, Session, fname_gz)
            if db_element is not None:
                cks_db = db_element[1]
                cksgz_db = db_element[2]
                if cks_db is not None and cksgz_db is not None:
                    if cksgz_stg == cksgz_db and cks_stg == cks_db:
                        try:
                            # Removal is deliberately disabled; only the path
                            # is printed for now.
                            print(file_path)
                            #os.remove(file_path)
                        except Exception as e:
                            msg = "File removal exception --"
                            log.error("{0}{1}".format(msg,e))
                    else:
                        message = 'Severe alert - storage and DB file checksums DO NOT MATCH'
                        SendEmail(message,recipient,smtphost,sender).send_email()
                else:
                    message = 'Severe alert - checksum not calculated'
                    SendEmail(message,recipient,smtphost,sender).send_email()
            else:
                # File has no row in this table: nothing to compare.
                pass
    db.close_session()
def _calc_tmp_provider_tbl(self):
    """Rebuild tmp_deals with a recent price-change column for each deal.

    Joins the latest scan per deal against recent scans, computes the price
    delta versus the most common recent price, stages the rows into
    tmp_deals_tmp, then swaps them into tmp_deals.
    """
    # Recent scans window: strictly less than 5 days old, despite the
    # "_l7d" (last 7 days) naming below — TODO confirm the intended window.
    query = """
        SELECT * FROM `tbl_provider_scans`
        WHERE DATEDIFF(NOW(), scan_date) < 5
        ORDER BY `scan_date` ASC
    """
    tmp_deals_l7d = self._select_query(query)
    deals = []
    for tmp_deal in self._select_query(
            "SELECT * FROM `tbl_provider_scans` JOIN tmp_latest_scan_ids USING(scan_id)"
    ):
        deal = dict(tmp_deal)
        # All prices seen for the same provider + set inside the window.
        price_range = [
            _['price'] for _ in tmp_deals_l7d
            if _['provider'] == deal['provider']
            and _['set_number'] == deal['set_number']
        ]
        # Mode (most frequent) of the recent prices.
        # NOTE(review): max() raises ValueError when price_range is empty —
        # confirm every latest scan always falls inside the window above.
        max_count_price = max(set(price_range), key=price_range.count)
        price_change_l7d = int(tmp_deal['price'] - max_count_price)
        # print('{} has a price change of {} CHF.'.format(tmp_deal['set_number'], price_change_l7d))
        deal.update({'price_change_l7d': price_change_l7d})
        deals.append(deal)
    # Separate handle used for inserts; selects/deletes go through self.
    db = MySQLDatabase()
    self._execute_query("DELETE FROM tmp_deals_tmp")
    for deal in deals:
        payload = {
            'table_name': 'tmp_deals_tmp',
            'data': {
                'set_number': deal['set_number'],
                'title': deal['title'],
                'url': deal['url'],
                'price': deal['price'],
                'currency': deal['currency'],
                'provider': deal['provider'],
                'availability': deal['availability'],
                'scan_date': deal['scan_date'],
                'scan_id': deal['scan_id'],
                'price_change_l7d': deal['price_change_l7d']
            }
        }
        db._insert_query(payload)
    # Swap staged rows into the live table.
    self._execute_query("DELETE FROM tmp_deals")
    self._execute_query(
        "INSERT INTO tmp_deals SELECT * FROM tmp_deals_tmp")
def main(): options = MakeOpts().parse_args() if options.debug: db = CreateDummyDB() else: db = MySQLDatabase(host=options.host, user='******', port=3306, passwd='a1a1a1', db='tecan') if options.xml_dir: if not os.path.exists(options.xml_dir): print "Directory not found: " + options.xml_dir sys.exit(-1) xml_fname = GetLatestFile(options.xml_dir) else: xml_fname = options.xml_filename if not os.path.exists(xml_fname): print "File not found: " + xml_fname sys.exit(-1) print "Importing from file: " + xml_fname header_dom, script_dom, plate_values = tecan.ParseReaderFile(xml_fname) exp_id = GetExperimentID(options, db, header_dom, script_dom) print "Experiment ID: " + exp_id plate_id = options.iteration % options.num_plates print "Plate ID: %d" % plate_id MES = {plate_id: plate_values} tecan.WriteToDatabase(MES, db, exp_id) db.Commit() print "Done!" sys.exit(0)
def deploy_to_database(self):
    """Insert every collected product into MySQL, logging each one.

    The connection is closed even when an insert raises (the original
    leaked it on error).
    """
    db = MySQLDatabase()
    try:
        for product in self.products:
            logging.debug('[DEPLOY] {}'.format(product))
            db._insert_query(product)
    finally:
        db.close()
from read_json import ReadJson
from multiprocessing import Pool
from utilities import LoggingClass
from database import MySQLDatabase

# Module-level logger, config reader, and one shared DB session — the
# session is used by the worker function below.
log = LoggingClass('', True).get_logger()
rj = ReadJson()
dbuser = rj.get_db_user()
dbpwd = rj.get_db_pwd()
dbname = rj.get_db_name()
dbhost = rj.get_db_host()
dbport = rj.get_db_port()
thr_nr = rj.get_threads_number()
db = MySQLDatabase(dbuser, dbpwd, dbname, dbhost, dbport)
Session = db.mysql_session()


def _query_filename(filename):
    """Return *filename* when it has no row in the DataFile table, else None.

    NOTE(review): the ``finally`` closes the shared module-level session on
    EVERY call — a later call in the same process would reuse a closed
    session. Confirm each Pool worker really makes only one call.
    """
    try:
        rows = Queries(Session, DataFile, filename).match_filename()
        if not rows:
            return filename
    except Exception as e:
        msg = "Query on database excep - _query_filename --"
        log.error("{0}{1}".format(msg, e))
    finally:
        db.close_session()
def save_to_db(self, df):
    """Write *df* to MySQL through a freshly created database handle."""
    database = MySQLDatabase()
    database.write(df)
from database import MySQLDatabase logging.basicConfig( filename='logs/{}_eol_sets.log'.format(datetime.now().strftime('%Y%m%d')), filemode='a', format='%(asctime)s:%(levelname)s:%(funcName)s:%(message)s', level=logging.INFO) def _get_set_numbers_from_string(string): set_numbers = re.findall(r'([0-9]{7}|[0-9]{4,5})', string) set_numbers = list(dict.fromkeys(set_numbers)) return set_numbers db = MySQLDatabase() ps = ProductScanner() set_number = 0 eol_url = 'https://www.stonewars.de/news/lego-end-of-life-eol-2020/' soup = ps._get_soup(eol_url, headers={}) eol_sets_raw = [ _get_set_numbers_from_string(_.text) for _ in soup.find_all('td', {'class': 'column-2'}) ] eol_sets = [item for sublist in eol_sets_raw for item in sublist] for set_number in eol_sets: logging.info(
def connect():
    # Return a MySQLDatabase handle for the 'tecan' DB on host hldbv02.
    # NOTE(review): credentials are hard-coded in source, and the stray
    # triple-quote below suggests this code may sit inside a commented-out
    # (string-literal) region — confirm against the full file.
    return MySQLDatabase(host='hldbv02', user='******', passwd='a1a1a1', db='tecan')
"""
def __init__(self):
    # Open one database handle up front; reused by the instance's methods.
    self.db = MySQLDatabase()
def read_from_db(self):
    """Load the 'bookie' table into a frame and return it.

    Side effect: rebinds the module-level ``db`` to a fresh handle (kept
    from the original — other code may rely on it).
    """
    global db
    db = MySQLDatabase()
    frame = db.get('bookie')
    return frame