def process_patchsets():
    """Dispatch recently fetched patchsets (state "f") to every plugin.

    Loads every plugin module found in the "plugins" directory, hands each
    patchset younger than two days (plus its list of changed files) to each
    plugin's Handle(), then marks the patchset processed (state "p").
    """
    cursor = utils.get_cursor()

    # Load plugins: every non-hidden *.py file under plugins/.
    plugins = []
    for ent in os.listdir('plugins'):
        if ent[0] != '.' and ent.endswith('.py'):
            plugin_info = imp.find_module(ent[:-3], ['plugins'])
            plugins.append(imp.load_module(ent[:-3], *plugin_info))

    cursor.execute('select * from patchsets where state="f";')
    subcursor = utils.get_cursor()
    for row in cursor:
        age = datetime.datetime.now() - row['timestamp']
        if age.days < 2:
            files = []
            # Parameterized query: id/number originate from external review
            # submissions, so never interpolate them into the SQL text.
            subcursor.execute('select * from patchset_files where id=%s '
                              'and number=%s;',
                              (row['id'], row['number']))
            for subrow in subcursor:
                files.append(subrow['filename'])

            for plugin in plugins:
                plugin.Handle(row, files)

            subcursor.execute('update patchsets set state="p" '
                              'where id=%s and number=%s;',
                              (row['id'], row['number']))
            subcursor.execute('commit;')
def index(serverpin, in_):
    """Clock a server in or out.

    serverpin -- the employee's PIN, arriving from the client as a string;
                 coerced to int below so it is safe to interpolate into SQL.
    in_       -- the string 'true' to clock in, anything else to clock out.
    Returns a JSON-encoded human-readable status message.
    """
    wantsin = (in_ == 'true')
    wantsout = not wantsin
    cursor = utils.get_cursor()
    isin = _server_is_in(serverpin)
    isout = not isin
    # Security: serverpin is client-supplied and was previously interpolated
    # raw into SQL below; int() rejects anything that is not a plain number.
    serverpin = int(serverpin)
    if wantsin and isin:
        resp = 'already clocked in'
    elif wantsin and isout:
        tip_share = server_tip_share(serverpin)
        sqlin = ('INSERT INTO hours VALUES(null, %(serverpin)s, NOW(), 0, '
                 '%(tip_share)s, null)' % locals())
        utils.execute(sqlin, cursor)
        resp = 'Clocked in at ' + utils.now()
    elif wantsout and isin:
        sqlout = ('UPDATE hours SET outtime = NOW() WHERE person_id = '
                  '%(serverpin)s AND outtime = 0' % locals())
        res = utils.execute(sqlout, cursor)
        resp = 'Clocked out at ' + utils.now()
    elif wantsout and isout:
        resp = 'already clocked out'
    else:
        resp = 'programming error'
    cursor.close()
    return json.dumps(resp)
def index(req, table, shouldPrint, serverpin, close=True):
    # Close a table's tab (optionally only printing it) and return JSON null.
    #
    # req         -- the web request (used only for logging the remote host)
    # table       -- client-supplied table id
    # shouldPrint -- HTTP string flag; 'true' means print the receipt slip
    # serverpin   -- id of the server closing the tab
    # close       -- when falsy, the receipt is produced without closing
    my_logger.info(req.get_remote_host()+': server %s closed tab %s'%(serverpin, table))
    cursor = utils.get_cursor()
    # The HTTP layer delivers booleans as the strings 'true'/'false'.
    shouldPrint = (shouldPrint == 'true')
    receipt_text, gift_certs = texttab.get_tab_text(table, serverpin, cursor)
    if close:
        # Parameterized: table/serverpin come from the client.
        cursor.execute('''
            UPDATE order_group
            SET is_open = FALSE, closedby = %s, updated = now()
            WHERE is_open = TRUE AND table_id = %s
            ''', args=[serverpin, table])
    cursor.close()
    if shouldPrint:
        utils.print_slip(receipt_text)
    for cert in gift_certs:
        # Gift certificates print whenever the receipt does, and gift-type
        # certificates print unconditionally.
        if shouldPrint or cert.is_gift():
            cert.print_out()
    return json.dumps(None)
def index(req, table, shouldPrint, serverpin, close=True):
    """Close a table's tab, then optionally print the receipt via enscript.

    req         -- the web request (used only for logging)
    table       -- client-supplied table id
    shouldPrint -- HTTP string flag; 'true' prints the receipt
    serverpin   -- id of the server closing the tab
    close       -- when falsy, only the receipt is produced
    Returns JSON null.
    """
    my_logger.info(req.get_remote_host()+': server %s closed tab %s'%(serverpin, table))
    cursor = utils.get_cursor()
    shouldPrint = (shouldPrint == 'true')
    receipt_text, gift_certs = texttab.get_tab_text(table, serverpin, cursor)
    if close:
        # Bug fix: the previous version interpolated table/serverpin into the
        # SQL with % locals() (and quoted table_id by hand), which allowed
        # SQL injection from client input.  Bind parameters instead.
        cursor.execute('''
            UPDATE order_group
            SET is_open = FALSE, closedby = %s, updated = now()
            WHERE is_open = TRUE AND table_id = %s
            ''', (serverpin, table))
    cursor.close()
    if shouldPrint:
        # enscript reads the receipt from a temp file; ensure the file is
        # closed and removed even if writing or printing fails.
        recfile = tempfile.NamedTemporaryFile(delete=False)
        filename = recfile.name
        try:
            recfile.write(receipt_text.encode('latin1', 'replace'))
        finally:
            recfile.close()
        try:
            subprocess.call(['enscript', '--font=Courier-Bold@11/16', '-B',
                             '-MEnv10', filename])
        finally:
            os.remove(filename)
    for cert in gift_certs:
        cert.print_out()
    return json.dumps(None)
def Handle(change, files): is_migration = False for filename in files: if filename.find('nova/db/sqlalchemy/migrate_repo/versions') != -1: is_migration = True if is_migration: print 'Sending email' utils.send_email('[CI] Patchset %s #%s' %(change['id'], change['number']), '*****@*****.**', NEW_PATCH_EMAIL % {'change_id': change['id'], 'number': change['number'], 'subject': change['subject'], 'name': change['owner_name'], 'url': change['url'], 'is_migration': is_migration, 'files_list': '\n '.join(files)}) cursor = utils.get_cursor() for dataset in ['nova_trivial_500', 'nova_trivial_6000', 'nova_user_001']: for constraint in ['mysql', 'percona']: w = workunit.WorkUnit(change['id'], change['number'], 'sqlalchemy_migration_%s' % dataset, 0, constraint) w.enqueue(cursor)
def update(req, edits, newrows):
    """Apply grid edits and inserts to the sku table, then regenerate the
    wine list documents.

    edits   -- JSON object: {database row id: {field: new value}}
    newrows -- JSON object: {client row id: {field: value}} rows to insert
    Returns a JSON object mapping each client row id to its new database id.
    """
    edits = json.loads(edits)
    newrows = json.loads(newrows)
    insert_ids = {}
    cursor = utils.get_cursor()
    for rowid, fields_and_vals in edits.items():
        setlist = ','.join('%s = %s'%(f, sql_representation(v))
                           for f, v in fields_and_vals.items()
                           if f != 'estimated_units_remaining')
        # int() guards against SQL injection via the client-supplied row id.
        sql = "update sku set " + setlist + " where id = %d\n" % int(rowid)
        utils.execute(sql, cursor)
    for rowid, fields_and_vals in newrows.items():
        # Strip grid-widget bookkeeping keys that are not real columns.
        # ('in' replaces the Python-2-only dict.has_key.)
        for bad_field in ('uid', 'undefined', 'estimated_units_remaining',
                          'boundindex', 'visibleindex', 'uniqueid'):
            if bad_field in fields_and_vals:
                fields_and_vals.pop(bad_field)
        fields = fields_and_vals.keys()
        values = fields_and_vals.values()
        field_list = ','.join(fields)
        value_list = ','.join(sql_representation(v) for v in values)
        sql = "insert into sku ("+field_list+") VALUES ("+value_list+")"
        utils.execute(sql, cursor)
        insert_ids[rowid] = utils.select("select LAST_INSERT_ID()",
                                         cursor, False)[0][0]
    cursor.close()
    wineprint.gen_fodt_and_pdf()
    return json.dumps(insert_ids)
def call_procedure_in_db(request, proc_name, args=None):
    """Call the stored procedure *proc_name* with *args*.

    :param request: used to obtain the database cursor
    :param proc_name: short procedure name; expanded via get_full_name()
    :param args: sequence of arguments for the procedure, or None
    :return: True on success, False (after logging) on a database error
    """
    cursor = get_cursor(request)
    try:
        cursor.callproc(get_full_name(proc_name), args)
        return True
    except cx_Oracle.DatabaseError as e:
        # Bug fix: the previous format string '{}{: {}' was malformed and
        # raised ValueError while handling the database error, masking it.
        logging.error('Error at call db procedure {}: {}'.format(proc_name,
                                                                 str(e)))
        return False
def upload_file():
    # Accept a community file upload: save it under UPLOAD_DIR, record it in
    # the `files` table, email the site owner, and re-render the upload page.
    post = request.form
    newfile = request.files.get("newfile")
    try:
        newname = newfile.filename
    except AttributeError:
        # Will happen if newfile is None
        abort(400, "No file specified")
    # Flatten path separators so the upload cannot escape UPLOAD_DIR.
    target_file = os.path.join(UPLOAD_DIR, newname.replace(os.sep, "_"))
    with open(target_file, "wb") as file_obj:
        shutil.copyfileobj(newfile.stream, file_obj)
    newfile.stream.close()
    file_size = os.stat(target_file)[stat.ST_SIZE]
    # Human-readable size with the internal space removed (e.g. "1.2MB").
    fsize = utils.human_fmt(file_size).replace(" ","")
    # Don't use the CDN; use the generic download URL that will redirect.
    fldr = {"c": "cb", "d": "dabo"}.get(post["section"], "")
    cfile = os.path.join(DLBASE, fldr, newname)
    # Parameterized insert: all values are client-supplied.
    sql = """INSERT INTO files (ctype, ctitle, mdesc, cfile, ccosttype,
            ncost, csize, cauthor, cauthoremail, dlastupd, lpublish)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"""
    args = (post.get("section"), post.get("title"), post.get("description"),
            cfile, post.get("file_license"), post.get("cost"), fsize,
            post.get("author"), post.get("author_email"),
            datetime.date.today(), False)
    crs = utils.get_cursor()
    crs.execute(sql, args)
    # Notification email body, then the full RFC-822 style message.
    body = """Originating IP = %s
Section = %s
Title = %s
File = %s
License = %s
Cost = %s
Size = %s
Author = %s
Email = %s

Description:

%s
""" % (request.remote_addr, post.get("section"), post.get("title"), newname,
       post.get("file_license"), post.get("cost"), fsize, post.get("author"),
       post.get("author_email"), post.get("description"))
    msg = """From: File Uploads <*****@*****.**>
X-Mailer: flask script
To: Ed Leafe <*****@*****.**>
Subject: New Uploaded File
Date: %s

%s
""" % (time.strftime("%c"), body)
    smtp = smtplib.SMTP("mail.leafe.com")
    smtp.sendmail("*****@*****.**", "*****@*****.**", msg)
    g.message = "Your file has been uploaded."
    return render("upload.html")
def perform_git_fetches(): fetches_performed = False cursor = utils.get_cursor() subcursor = utils.get_cursor() cursor.execute('select * from patchsets where state="0" limit 25;') for row in cursor: fetches_performed = True repo_path = os.path.join('/srv/git', row['project']) if not os.path.exists(repo_path): utils.clone_git(row['project']) if not os.path.exists(repo_path): subcursor.execute('update patchsets set state="m" ' 'where id="%s" and number=%d;' %(row['id'], row['number'])) subcursor.execute('commit;') continue repo = git.Repo(repo_path) assert repo.bare == False repo.git.checkout('master') repo.git.pull() files = {} print '%s %s' %(datetime.datetime.now(), row['refurl']) repo.git.fetch('https://review.openstack.org/%s' %row['project'], row['refurl']) for line in repo.git.format_patch('-1', '--stdout', 'FETCH_HEAD').split('\n'): m = DIFF_FILENAME_RE.match(line) if m: files[m.group(1)] = True print '%s %d files changed' %(datetime.datetime.now(), len(files)) for filename in files: subcursor.execute('insert ignore into patchset_files ' '(id, number, filename) values ("%s", %d, "%s");' %(row['id'], row['number'], filename)) subcursor.execute('update patchsets set state="f" ' 'where id="%s" and number=%d;' %(row['id'], row['number'])) subcursor.execute('commit;') return fetches_performed
def call_procedure_in_db(request, proc_name, args=None):
    """Call the stored procedure *proc_name* with *args*.

    :param request: used to obtain the database cursor
    :param proc_name: short procedure name; expanded via get_full_name()
    :param args: sequence of arguments for the procedure, or None
    :return: True on success, False (after logging) on a database error
    """
    cursor = get_cursor(request)
    try:
        cursor.callproc(get_full_name(proc_name), args)
        return True
    except cx_Oracle.DatabaseError as e:
        # Bug fix: '{}{: {}' was a malformed format spec and raised
        # ValueError inside the except handler, hiding the real error.
        logging.error('Error at call db procedure {}: {}'.format(
            proc_name, str(e)))
        return False
def wait_until_done(): while True: cursor = utils.get_cursor() cursor.execute('select count(*) from patchsets where state = "0";') row = cursor.fetchone() print "%s %d rows" % (datetime.datetime.now(), row["count(*)"]) if row["count(*)"] == 0: return time.sleep(30)
def GET_list():
    # Render the frameset list page: all framesets plus top-level albums.
    # NOTE(review): crs is never used below; entities.*.list() appear to
    # manage their own data access -- confirm get_cursor() has no required
    # connection side effect before removing it.
    crs = utils.get_cursor()
    framesets = entities.Frameset.list()
    g.framesets = sorted([fs.to_dict() for fs in framesets],
                         key=lambda x: x["name"].upper())
    albums = entities.Album.list()
    # Only top-level albums (no parent) are listed.
    g.albums = sorted([ab.to_dict() for ab in albums if not ab.parent_id],
                      key=lambda x: x["name"].upper())
    g.request_string = str(request.headers)
    return render_template("frameset_list.html")
def weekly_pay(printmode=0, incursor = None):
    # Build last week's pay-stub summary.
    #
    # printmode == 1: return only the finalized PAY_STUB rows (for printing).
    # otherwise:      return PAY_STUB_TEMP values joined against PAY_STUB,
    #                 rendering "new / old" wherever the two disagree.
    # incursor: optional existing cursor; a fresh one is fetched if omitted.
    if incursor is None:
        incursor = utils.get_cursor()
    for table_name in ('PAY_STUB', 'PAY_STUB_TEMP'):
        # One temporary table per source table, restricted to last week and
        # carrying derived tax / net-wage columns.
        utils.execute('''
            create temporary table v_%(table_name)s as
            select week_of, last_name, first_name, hours_worked, pay_rate,
                fed_withholding + nys_withholding + medicare_tax +
                    social_security_tax as weekly_tax,
                round(weekly_pay - fed_withholding - nys_withholding
                    - medicare_tax - social_security_tax) as net_wage,
                tips, total_hourly_pay
            from %(table_name)s
            where yearweek(week_of) = yearweek(now() - interval '1' week)
            order by last_name, first_name''' % locals(),
            incursor=incursor,
        )
        if printmode == 1:
            # Print mode needs only the finalized PAY_STUB table.
            break
    if printmode == 1:
        return utils.select('''select * from v_PAY_STUB''', incursor=incursor)
    else:
        # Side-by-side comparison: each column shows "temp / final" when the
        # two tables differ (or the final row is missing), else the value.
        return utils.select('''
            select pst.week_of, pst.last_name, pst.first_name,
            IF(pst.hours_worked = ps.hours_worked or ps.hours_worked is null,
               pst.hours_worked,
               concat(pst.hours_worked, ' / ', ps.hours_worked)) hours_worked,
            IF(pst.pay_rate = ps.pay_rate or ps.pay_rate is null,
               pst.pay_rate,
               concat(pst.pay_rate, ' / ', ps.pay_rate)) pay_rate,
            IF(pst.weekly_tax = ps.weekly_tax or ps.weekly_tax is null,
               pst.weekly_tax,
               concat(pst.weekly_tax, ' / ', ps.weekly_tax)) weekly_tax,
            IF(pst.net_wage = ps.net_wage or ps.net_wage is null,
               pst.net_wage,
               concat(pst.net_wage, ' / ', ps.net_wage)) net_wage,
            IF(pst.tips = ps.tips or ps.tips is null,
               pst.tips,
               concat(pst.tips, ' / ', ps.tips)) tips,
            IF(pst.total_hourly_pay = ps.total_hourly_pay
                   or ps.total_hourly_pay is null,
               pst.total_hourly_pay,
               concat(pst.total_hourly_pay, ' / ',
                      ps.total_hourly_pay)) total_hourly_pay
            from v_PAY_STUB_TEMP pst
            LEFT OUTER JOIN v_PAY_STUB ps
                on pst.week_of = ps.week_of
                and pst.first_name = ps.first_name
                and pst.last_name = ps.last_name
            order by last_name, first_name
            ''',
            incursor = incursor,
            label= False
        )
def get_bioactives_for_target(self, tid):
    """Get activities for the given target.

    tid -- numeric target id.  Passed as a bind parameter; it previously
           went through str.format, which allowed SQL injection.  The
           accepted-type list is still spliced in via utils.postgres_array
           because it is defined server-side in ACCEPTED_TYPES.
    Returns a list of dicts, one per qualifying activity row.
    """
    print("""Target ID:""", tid)
    self.cursor = utils.get_cursor()
    self.cursor.execute("""SELECT activities.activity_id,
        activities.pchembl_value, activities.assay_id, activities.molregno,
        activities.standard_relation, activities.standard_value,
        activities.standard_units, activities.standard_flag,
        activities.standard_type, activities.activity_comment,
        assays.confidence_score, assays.assay_type,
        target_dictionary.target_type as tgt_type,
        target_dictionary.pref_name as tgt_pref_name,
        target_dictionary.chembl_id as tgt_chembl_id,
        target_dictionary.organism,
        molecule_dictionary.pref_name as cmpd_pref_name,
        molecule_dictionary.molecule_type,
        molecule_dictionary.structure_type,
        molecule_dictionary.inorganic_flag,
        compound_structures.canonical_smiles,
        compound_structures.standard_inchi_key,
        molecule_dictionary.chembl_id AS cmpd_chembl_id,
        assays.tid
        FROM activities, assays, target_dictionary, target_components,
            molecule_dictionary, compound_properties, compound_structures
        WHERE pchembl_value IS NOT NULL
        AND activities.data_validity_comment IS NULL
        AND activities.assay_id = assays.assay_id
        AND assays.tid = target_dictionary.tid
        AND target_dictionary.tid = target_components.tid
        AND molecule_dictionary.molregno = activities.molregno
        AND molecule_dictionary.molregno = compound_properties.molregno
        AND molecule_dictionary.molregno = compound_structures.molregno
        AND activities.standard_relation = '='
        AND assays.confidence_score IN ('7', '9')
        AND lower(activities.standard_type) IN ({})
        AND assays.tid = %s""".format(
            utils.postgres_array(ACCEPTED_TYPES.keys())), (tid,))
    compounds = utils.dictfetchall(self.cursor)
    print("Raw data:", len(compounds), "activities")
    return compounds
def process_all():
    """Keep requeueing "missing" patchsets until a pass converts none.

    Each cycle waits for the current queue to drain, then flips up to 100
    state "m" rows (excluding stackforge reddwarf projects) back to "0"
    so they are retried.  Returns once nothing was converted.
    """
    while True:
        wait_until_done()
        cursor = utils.get_cursor()
        cursor.execute(
            'update patchsets set state="0" where state="m" and '
            'project not like "stackforge/%reddwarf%" limit 100;'
        )
        converted = cursor.rowcount
        try:
            if converted == 0:
                return
        finally:
            # Commit unconditionally, including on the final (empty) pass.
            cursor.execute("commit;")
def get_session_id(req):
    """Create a fresh client_session row and return its id as JSON.

    The client uses this id to build unique order_item command ids for
    database insertion.
    """
    remote = req.get_remote_host()
    my_logger.info(remote + ': get_session_id called')
    cursor = utils.get_cursor()
    cursor.execute("insert into client_session values (null, null);");
    # The auto-increment id of the row just inserted.
    session_id = cursor.connection.insert_id()
    my_logger.info(remote + ': generated session id: %s' % session_id)
    cursor.close()
    return json.dumps(session_id)
def show(frameset_id):
    # Render the frameset detail page.  With no id, present a blank form
    # for creating a new frameset; otherwise load the frameset row and the
    # frames linked to it.
    if frameset_id is None:
        g.frameset = {"name": "", "pkid": "", "orientation": ""}
    else:
        crs = utils.get_cursor()
        crs.execute("select * from frameset where pkid = %s", (frameset_id, ))
        # NOTE(review): fetchall()[0] raises IndexError for an unknown pkid
        # -- confirm callers guarantee the id exists.
        g.frameset = crs.fetchall()[0]
        sql = """select frame.pkid, frame.name, frame.description
                from frame join frameset_frame
                    on frame.pkid = frameset_frame.frame_id
                where frameset_frame.frameset_id = %s;"""
        crs.execute(sql, (frameset_id, ))
        # NOTE(review): g.frames is only set on this branch; presumably the
        # template tolerates its absence for a new frameset -- verify.
        g.frames = crs.fetchall()
    return render_template("frameset_detail.html")
def update_frameset_album(album_id, image_ids=None):
    """Updates the 'images' key for all framesets that are linked to the
    album.

    album_id  -- pkid of the album whose framesets should be refreshed
    image_ids -- optional pre-fetched list of image pkids; looked up from
                 album_image when omitted
    """
    crs = utils.get_cursor()
    if image_ids is None:
        sql = "select image_id from album_image where album_id = %s;"
        # Fix: the parameter was passed bare; the driver expects a sequence,
        # consistent with every other execute() call here.
        crs.execute(sql, (album_id, ))
        image_ids = [rec["image_id"] for rec in crs.fetchall()]
    sql = "select pkid from frameset where album_id = %s;"
    crs.execute(sql, (album_id, ))
    frameset_ids = [rec["pkid"] for rec in crs.fetchall()]
    if frameset_ids:
        image_names = []
        if image_ids:
            # Guard the empty case: an empty id list would expand to the
            # illegal SQL "in ()".
            sql = "select name from image where pkid in %s;"
            crs.execute(sql, (image_ids, ))
            image_names = [rec["name"] for rec in crs.fetchall()]
        for frameset_id in frameset_ids:
            utils.write_key(frameset_id, "images", image_names)
def add_item(item_id=None, table_id=None, item_name=None, price=None,
             fraction=None, menu_item_id=None, taxable=True,
             is_delivered=False, is_comped=False, is_held=False,
             parent_item=None, incursor=None, **unused):
    # Attach an order item to the table's open order group, creating the
    # group on the fly when the table has none.  Extra keyword arguments
    # from the caller are accepted and ignored via **unused.
    assert item_id != None, 'item id must not be null' + ' ' + str(locals())
    assert table_id != 'null' and table_id is not None, (
        "table_id cannot be null in call to function 'action/add_item'"
        + str(locals())
    )
    assert len(table_id) <= 64, "table_id must be 64 or fewer chars" + str(locals())
    assert item_name is not None, 'item _name required' + str(locals())
    if price is None:
        price = 0
    #Oassert price is not None, 'price required' + str(locals())
    cursor = utils.get_cursor()
    open_order_group = None
    # Two passes: look for an open group; if absent, insert one so the
    # second pass picks it up.
    for time in (1,2):
        cursor.execute('''
            SELECT id FROM order_group
            WHERE is_open = TRUE AND table_id = %s''',
            table_id
        )
        open_order_group = cursor.fetchone()
        if open_order_group:
            break
        else:
            cursor.execute('''INSERT INTO order_group VALUES
                (null, %s, TRUE, null, null, null, null)''', table_id)
    # fetchone() returned a row; column 0 is the order group id.
    open_order_group = open_order_group[0];
    cursor.execute('''
        INSERT INTO order_item (
            id, order_group_id, item_name, price, fraction, menu_item_id,
            taxable, is_delivered, is_comped, is_held, parent_item
        ) VALUES (
            %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)''',
        (item_id, open_order_group, item_name, price, fraction,
         menu_item_id, taxable, is_delivered, is_comped, is_held,
         parent_item)
    )
    cursor.close()
def delete(pkid=None):
    """Delete a frameset and its frame links, then redirect to the list.

    pkid may be given directly or arrive via the posted form; a 404 is
    raised when the frameset does not exist.
    """
    if pkid is None:
        # Form
        pkid = request.form["pkid"]
    crs = utils.get_cursor()
    # Get the file name
    res = crs.execute("select name from frameset where pkid = %s", (pkid, ))
    if not res:
        abort(404)
    fname = crs.fetchone()["name"]
    for stmt in ("delete from frameset where pkid = %s",
                 "delete from frameset_frame where frameset_id = %s"):
        crs.execute(stmt, (pkid, ))
    utils.commit()
    return redirect(url_for("list_framesets"))
def call_function_in_db(request, func_name, return_type=cx_Oracle.NUMBER,
                        args=None, cast_func=None):
    """
    Call db function
    :param request: need to extract connection date
    :param func_name: name of function to call
    :param return_type: cx_Oracle type of result
    :param args: arguments to call function
    :param cast_func: optional function to cast the result. Specify it to
        the desired return type or you will get string
    :return:
    """
    cursor = get_cursor(request)
    try:
        raw = cursor.callfunc(get_full_name(func_name), return_type, args)
        return raw if cast_func is None else cast_func(raw)
    except cx_Oracle.DatabaseError as e:
        logging.error('Error at call db function {}: {}'.format(func_name,
                                                                str(e)))
        return None
def status(pkid):
    """Return a frame's settings plus its album's image names as JSON.

    Returns an empty string when the frame id is unknown.
    """
    crs = utils.get_cursor()
    crs.execute("""
            select name, description, interval_time, interval_units,
                album_id, brightness, contrast, saturation
            from frame
            where pkid = %s;
            """, (pkid, ))
    recs = crs.fetchall()
    if not recs:
        return ""
    rec = recs[0]
    # Images reach the frame through its album.
    crs.execute("""
            select image.name
            from image
                join album_image on image.pkid = album_image.image_id
                join album on album_image.album_id = album.pkid
                join frame on frame.album_id = album.pkid
            where frame.pkid = %s;
            """, (pkid, ))
    image_names = [r["name"] for r in crs.fetchall()]
    payload = {
        "name": rec["name"],
        "description": rec["description"],
        "interval_time": rec["interval_time"],
        "interval_units": rec["interval_units"],
        "brightness": rec["brightness"],
        "contrast": rec["contrast"],
        "saturation": rec["saturation"],
        "images": image_names,
    }
    # DecimalEncoder handles the DECIMAL columns (brightness etc.).
    return json.dumps(payload, cls=DecimalEncoder)
def update():
    """Create or update a frameset from the posted form, then redirect."""
    rf = request.form
    if "delete" in rf:
        # Deletions are posted through the same form.
        return delete()
    pkid = rf["pkid"]
    name = rf["name"]
    orientation = rf["orientation"]
    crs = utils.get_cursor()
    if pkid:
        sql = """update frameset
                set name = %s, orientation = %s
                where pkid = %s; """
        vals = (name, orientation, pkid)
    else:
        # New frameset: mint the primary key here.
        pkid = utils.gen_uuid()
        sql = """insert into frameset (pkid, name, orientation)
                values (%s, %s, %s); """
        vals = (pkid, name, orientation)
    crs.execute(sql, vals)
    utils.commit()
    return redirect(url_for("list_framesets"))
def _run_query(term=None):
    # Render the download list, optionally narrowed by *term*, an extra SQL
    # condition fragment appended to the WHERE clause.
    term = term or ""
    crs = utils.get_cursor()
    # NOTE(review): term is interpolated straight into the SQL -- callers
    # must never pass unescaped user input here.
    sql = """select * from files
            where lpublish = 1 %s
            order by ctype ASC, dlastupd DESC;""" % term
    crs.execute(sql)
    recs = crs.fetchall()
    # Bucket the rows by content type for the template.
    g.vfp = [d for d in recs if d["ctype"] == "v"]
    g.dabo = [d for d in recs if d["ctype"] == "b"]
    g.python = [d for d in recs if d["ctype"] == "p"]
    g.osx = [d for d in recs if d["ctype"] == "x"]
    g.cb = [d for d in recs if d["ctype"] == "c"]
    g.fox2x = [d for d in recs if d["ctype"] == "f"]
    g.other = [d for d in recs if d["ctype"] == "o"]
    # NOTE(review): 'search_term' is not defined in this function; unless it
    # is a module-level global this raises NameError -- it looks like the
    # highlighter was meant to receive the search text.  Verify.
    hl_func = partial(_hilite_match, search_term)
    func_dict = {"hilite": hl_func, "cost_calc": _cost_type, "any": any,
                 "update_link": _update_link}
    return render_template("download_list.html", **func_dict)
def make_estub(first_name, last_name, baserate, rate_variance, basehours,
               hour_variance):
    # Generate a synthetic example pay stub: build a temporary E_STUB table
    # shaped like PAY_STUB, fill it with randomized hours/rates for the last
    # seven Sundays, then print stubs for the last twelve weeks.
    incursor = utils.get_cursor()
    table_name = 'E_STUB'
    utils.execute('''
        create temporary table E_STUB like PAY_STUB;
        ''', incursor=incursor);
    for sunday in last_sundays(7):
        # Randomize around the base values; variance scales the swing.
        hours = basehours + randint(-basehours, basehours)*hour_variance
        rate = baserate + randint(-baserate, baserate)*rate_variance
        wages = rate*hours
        row = {
            'week_of' : sunday,
            'person_id' : 0,          # synthetic person
            'last_name': last_name,
            'first_name': first_name,
            'hours_worked' : hours,
            'pay_rate': rate,
            'allowances': 1,
            'nominal_scale': 0,
            'married': 0,
            'weekly_pay': 0,
            'gross_wages': wages,
            'tips': 0,
            'total_hourly_pay': rate
        }
        # Fills in the withholding/tax columns from the wage fields.
        tax.add_witholding_fields(row)
        columns = ', '.join(row.keys())
        values = ', '.join(("'%s'" % value for value in row.values()))
        # Values are generated internally above, not taken from user input.
        sqltext = 'INSERT into %s (%s) VALUES (%s);'%(table_name, columns,
                                                      values)
        #my_logger.debug('pay stub: ' + sqltext)
        utils.execute(sqltext, incursor=incursor)
    for sunday in last_sundays(12):
        print_stubs(0, sunday, table_name, incursor=incursor)
def call_function_in_db(request, func_name, return_type=cx_Oracle.NUMBER,
                        args=None, cast_func=None):
    """
    Call db function
    :param request: need to extract connection date
    :param func_name: name of function to call
    :param return_type: cx_Oracle type of result
    :param args: arguments to call function
    :param cast_func: optional function to cast the result. Specify it to
        the desired return type or you will get string
    :return:
    """
    cursor = get_cursor(request)
    full_name = get_full_name(func_name)
    try:
        outcome = cursor.callfunc(full_name, return_type, args)
        if cast_func is not None:
            outcome = cast_func(outcome)
        return outcome
    except cx_Oracle.DatabaseError as e:
        logging.error('Error at call db function {}: {}'.format(
            func_name, str(e)))
        return None
def get(req, filtered='yes'):
    """Return the wine list inventory as JSON.

    filtered == 'yes' (the default) omits rows whose bin is '0'.
    """
    cursor = utils.get_cursor()
    if filtered == 'yes':
        sql = "select * from winelist_inv where bin != '0'"
    else:
        sql = '''select * from winelist_inv'''
    recs = utils.select(sql, cursor)
    cursor.close()

    class MyEncoder(json.JSONEncoder):
        # Serialize the column types json cannot handle natively.
        def default(self, obj):
            if isinstance(obj, datetime.date):
                return obj.isoformat()
            if isinstance(obj, decimal.Decimal):
                #Decimal type has no json encoding
                return str(obj)
            return json.JSONEncoder.default(self, obj)

    return json.dumps(recs, cls=MyEncoder)
def get_data(currmsg=0, verbose=False):
    """Yield Elasticsearch bulk-index actions for archived messages.

    Walks the archive table in BATCH_SIZE pages, starting after *currmsg*,
    skipping messages that belong to retired lists.
    """
    crs = utils.get_cursor()
    # Single-character codes of old lists we don't need.
    skip_lists = frozenset("avsgjstm")
    while True:
        if verbose:
            print("CURR", currmsg)
        crs.execute("SELECT * FROM archive WHERE imsg > %s ORDER BY imsg "
                    "LIMIT %s", (currmsg, BATCH_SIZE))
        recs = crs.fetchall()
        if not recs:
            break
        # Set the current message to the highest imsg in the set.
        currmsg = recs[-1]["imsg"]
        for rec in recs:
            # Bug fix: the old substring test (clist in "avsgjstm") was also
            # True for an empty clist, silently dropping those records.
            if rec["clist"] in skip_lists:
                continue
            doc = {FIELD_MAP[fld]: val for fld, val in rec.items()}
            doc["fulltext_subject"] = doc["subject"]
            doc["id"] = utils.gen_key(doc)
            yield {"_index": "email", "_type": "mail", "_op_type": "index",
                   "_id": doc["id"], "_source": doc}
def update():
    """Create, update, or delete an album from the posted form."""
    rf = request.form
    rfc = dict(rf)
    if "delete" in rfc:
        # The same form posts deletions.
        pkid = rfc["pkid"]
        entities.Album.delete(pkid)
        return redirect(url_for("list_albums"))
    pkid = rf["pkid"]
    name = rf["name"]
    orientation = rf["orientation"]
    crs = utils.get_cursor()
    if pkid:
        sql = """update album
                set name = %s, orientation = %s
                where pkid = %s; """
        vals = (name, orientation, pkid)
    else:
        # New Album: mint the primary key here.
        pkid = utils.gen_uuid()
        sql = """insert into album (pkid, name, orientation)
                values (%s, %s, %s); """
        vals = (pkid, name, orientation)
    crs.execute(sql, vals)
    utils.commit()
    return redirect(url_for("list_albums"))
def update_winelist(req, edits, newrows):
    """Apply grid edits and inserts to the winelist table, then regenerate
    the wine list documents.

    edits   -- JSON object: {database row id: {field: new value}}
    newrows -- JSON object: {client row id: {field: value}} rows to insert
    """
    edits = json.loads(edits)
    newrows = json.loads(newrows)
    cursor = utils.get_cursor()
    for rowid, fields_and_vals in edits.items():
        setlist = ','.join('%s = %s'%(f, sql_representation(v))
                           for f, v in fields_and_vals.items()
                           if f != 'estimated_units_remaining')
        # int() guards against SQL injection via the client-supplied row id.
        sql = "update winelist set " + setlist + " where id = %d\n" % int(rowid)
        utils.execute(sql, cursor)
    for fields_and_vals in newrows.values():
        # Strip grid-widget bookkeeping keys that are not real columns.
        # ('in' replaces the Python-2-only dict.has_key.)
        for bad_field in ('uid', 'estimated_units_remaining'):
            if bad_field in fields_and_vals:
                fields_and_vals.pop(bad_field)
        fields = fields_and_vals.keys()
        values = fields_and_vals.values()
        field_list = ','.join(fields)
        value_list = ','.join(sql_representation(v) for v in values)
        sql = "insert into winelist ("+field_list+") VALUES ("+value_list+")"
        utils.execute(sql, cursor)
    cursor.close()
    wineprint.gen_fodt_and_pdf()
def delete(pkid):
    """Remove the frame row, commit, and re-render the frame list."""
    cursor = utils.get_cursor()
    cursor.execute("delete from frame where pkid = %s", (pkid, ))
    utils.commit()
    return GET_list()
from elasticsearch.helpers import bulk
from msg_archive import es_client, get_data
import utils

# One-shot sync script: bulk-index every archive message newer than the
# high-water mark stored in the .highmessage file.
crs = utils.get_cursor()
sql = "select max(imsg) as highmsg from webdata.archive"
crs.execute(sql)
rec = crs.fetchone()
curr = rec["highmsg"]
with open(".highmessage") as ff:
    last = int(ff.read().strip())
if curr > last:
    success, failures = bulk(es_client, get_data(currmsg=last, verbose=True))
# NOTE(review): the per-message indexing loop below was replaced by bulk(),
# but its final two lines also advanced .highmessage -- as written the
# high-water mark is never updated, so messages are re-indexed on every
# run.  Confirm whether the last two commented lines should be restored.
# num = 0
# data_gen = msg_archive.get_data(currmsg=last)
# for data in data_gen:
#     num += 1
#     vals = data["_source"]
#     res = msg_archive.es_client.index(index="email", doc_type="mail",
#         id=vals["id"], body=vals)
#     if num % 100 == 0:
#         print("Imported msg#", num)
# with open(".highmessage", "w") as ff:
#     ff.write("%s" % curr)
def new_sales_by_server(label=False, lag_days=1):
    # Per-server sales report for the day *lag_days* days ago, merged with
    # that day's recorded receipts.  Four temporary tables are built and
    # the final result is the union of "sales joined to receipts" and
    # "receipts joined to sales" -- MySQL has no FULL OUTER JOIN, so the
    # union of the two LEFT OUTER JOINs simulates one.
    tax_rate = texttab.TAXRATE
    cursor = utils.get_cursor()
    # 1) day_sales: per-server totals for the target day.  Values come from
    #    module constants and the lag_days argument, not from user input.
    cursor.execute('''
        create temporary table day_sales as
        SELECT sum(oi.price) sales,
            sum(ti.price) taxable_sales,
            sum(oi.price) + COALESCE(round(sum(ti.price) * %(tax_rate)s, 2),0)
                receipts,
            count(distinct og.id) tabs_closed,
            convert(date(now() - INTERVAL '%(lag_days)s' DAY), CHAR(10)) as dat,
            og.closedby as person_id
        FROM (order_item oi left outer join taxable_item ti on ti.id = oi.id),
            order_group og, person p
        WHERE oi.order_group_id = og.id
            AND oi.is_cancelled = False
            AND oi.is_comped = False
            AND og.closedby = p.id
            AND date(og.updated - interval '6' HOUR) =
                date(now() - INTERVAL '%(lag_days)s' DAY)
        GROUP BY p.id''' %locals()
    )
    # 2) day_receipts: that day's rows from server_receipts.
    cursor.execute('''
        create temporary table day_receipts as
        SELECT * from server_receipts
        where dat = date(now() - INTERVAL '%(lag_days)s' DAY)
        ''' % locals()
    )
    # 3) all_sales: sales rows annotated with person/receipt details.
    cursor.execute('''
        create temporary table all_sales as
        select sales.*, concat(p.last_name, ', ', p.first_name) server,
            p.ccid, receipts.cctotal, receipts.cctips, receipts.cash_drop,
            receipts.starting_cash, receipts.cash_left_in_bank,
            receipts.id as receipts_id
        from day_sales sales
            join person p on sales.person_id = p.id
            left outer join day_receipts receipts
                on sales.person_id = receipts.person_id
        ''')
    # 4) all_receipts: the mirror image, so servers with receipts but no
    #    sales rows still appear.
    cursor.execute('''
        create temporary table all_receipts as
        select sales.*, concat(p.last_name, ', ', p.first_name) server,
            p.ccid, receipts.cctotal, receipts.cctips, receipts.cash_drop,
            receipts.starting_cash, receipts.cash_left_in_bank,
            receipts.id as receipts_id
        from day_receipts receipts
            join person p on receipts.person_id = p.id
            left outer join day_sales sales
                on sales.person_id = receipts.person_id
        ''')
    return utils.select('''
        select * from all_sales
        union /* full outer join simulator */
        select * from all_receipts
        ''',
        incursor=cursor,
        label=label
    )
def index(req, the_tip, lag_days):
    """Distribute a night's tip pool across the staff who worked it.

    the_tip  -- total tip amount for the night (client-supplied)
    lag_days -- how many days back the night is (client-supplied)
    Builds a chain of temporary tables that weight each worked hour by the
    items served while it was open, splits the pool 62/38 between front- and
    back-of-house, then writes the result into hours.tip_pay.
    Returns JSON 0.
    """
    # Security: both values arrive from the request and are interpolated
    # into SQL below; coerce to numbers so injection is impossible.
    the_tip = float(the_tip)
    lag_days = int(lag_days)
    cursor = utils.get_cursor()
    # Items served on the target night (main-room/couch tables only count
    # after 16:00).
    last_night_items_query = (
        '''
        create temporary table last_night_items as
        select si.*
        from served_item si, order_group
        where si.order_group_id = order_group.id
            and (order_group.table_id not rlike '^M|Couch'
                 or time(si.created) > '16:00:00')
            and DATE(si.created - INTERVAL '4' HOUR) =
                DATE(NOW()) - INTERVAL '%(lag_days)s' DAY
        '''
    )
    utils.execute(last_night_items_query %locals(), cursor)
    # Cross items with the shifts that were open when they were created;
    # tip_share >= .8 marks front-of-house.
    utils.execute(
        '''
        create temporary table person_hours_items as
        select si.id as si_id, si.price, h.tip_share, h.id as h_id,
            h.person_id, IF(h.tip_share >=.8, 'FOH', 'BOH') pool_group
        from last_night_items si, hours h
        where si.created between h.intime and ifnull(h.outtime, now())
        '''%locals(),
        cursor
    )
    # Split each item's price 62% FOH / 38% BOH, normalized by the summed
    # tip shares present for that item.
    utils.execute(
        '''
        create temporary table item_split as
        select si_id, pool_group,
            price * IF(pool_group = "FOH", .62, .38) / sum(tip_share)
                split_price
        from person_hours_items
        group by si_id, pool_group;
        ''',
        cursor
    )
    # Per-shift tip: the shift's weighted share of the pool.
    utils.execute(
        '''
        create temporary table tip_pay as
        select p.last_name, phi.h_id,
            sum(split_price)/(select sum(price) from last_night_items)
                * %(the_tip)s * tip_share as tip
        from item_split spl, person_hours_items phi, person p
        where spl.si_id = phi.si_id
            and p.id = phi.person_id
            and phi.pool_group = spl.pool_group
        group by phi.h_id;
        '''%locals(),
        cursor
    )
    # Record the tips on the hours rows and flag them unpaid.
    utils.execute('''
        update hours h
        inner join tip_pay tp on h.id = tp.h_id
        set h.tip_pay = tp.tip, h.paid = false;
        ''',
        cursor
    )
    return json.dumps(0)
with open(os.path.join('/var/www/ci', key[0], str(key[1]), 'index.html'), 'w') as idx: idx.write('<table><tr>%s</tr></table>' % out) f.write('<tr%(color)s>%(out)s</tr>\n' %{'color': row_colors[row_count % 2], 'out': out}) row_count += 1 f.write('</table></body></html>') if __name__ == '__main__': print '...' cursor = utils.get_cursor() subcursor = utils.get_cursor() # Write out individual work logs cursor.execute('select * from work_queue where done is not null ' 'and dumped is null;') for row in cursor: work = workunit.WorkUnit(row['id'], row['number'], row['workname'], row['attempt'], row['constraints']) work.worker = row['worker'] work.persist_to_disk(subcursor) work.mark_dumped(subcursor) # Write out an index file write_index('select * from work_queue order by heartbeat desc limit 100;', '/var/www/ci/index.html')
def __init__(self):
    # Database cursor reused by this object's query methods.
    self.cursor = utils.get_cursor()
    # False marks "not loaded yet"; presumably replaced with target data by
    # a later fetch -- confirm against the class's other methods.
    self.targets = False
def index(req, doprint=0):
    # Payroll review page: repopulate the temporary pay stubs, then render
    # weekly pay, new/past payroll totals, and detail hours as HTML.
    # doprint=1 additionally sends the pay slips to the printer.
    if doprint:
        print_pay()
        print_message="<p> PRINTED.<br>"
    else:
        print_message = ""
    cursor = utils.get_cursor()
    # Rebuilds PAY_STUB_TEMP; returns a message only when something is wrong.
    populate_response = populate_pay_stub.populate_pay_stub(temp = True,
                                                            incursor = cursor)
    weekly = queries.weekly_pay(incursor=cursor)
    # Summary payroll per yearweek; %(table_name)s selects temp vs final.
    payroll_sql = '''
        SELECT week_of,
            round(sum(hours_worked)) as hours_worked,
            round(avg(hours_worked)) as avg_hours_worked,
            count(person_id) as num_employees,
            round(sum(weekly_pay - nys_withholding - fed_withholding
                - social_security_tax - medicare_tax)) as payroll
        from %(table_name)s
        where yearweek(week_of) > yearweek(now() - interval '5' week)
            and last_name not in ('Kobrin', 'Labossier', 'Kanarova')
        group by yearweek(week_of)
        order by yearweek(week_of) desc
        '''
    new_payroll = utils.select(
        payroll_sql%{ 'table_name' : 'PAY_STUB_TEMP'},
        incursor=cursor,
        label=False
    )
    past_payroll = utils.select(
        payroll_sql%{ 'table_name' : 'PAY_STUB'},
        incursor=cursor,
        label=False
    )
    # NOTE(review): incursor=None makes utils.select obtain its own cursor
    # here while the other queries share `cursor` -- confirm intentional.
    detail = utils.select('''
        SELECT concat(yearweek(intime),' ',dayname(intime),' ',date),
            last_name, first_name, time_in, time_out, hours_worked
        from hours_worked
        where yearweek(intime) > yearweek(now() - interval '5' week)
        order by yearweek(intime) desc, last_name, date(intime)''',
        incursor=None,
        label=False
    )
    html = (
        '''
        <html>
        <body>
        '''
    )
    if populate_response:
        # Population failed: show the error instead of the print form.
        html += '<h1>' + populate_response + '</h1>'
    else:
        html +='''
            <form action="time.py?doprint=1" method="POST">
            <input type="submit" value="print pay slips">
            </form>
            ''' + print_message
    html += (
        utils.tohtml(
            'Hours worked per week by person',
            ('week of', 'last name', 'first_name', 'hours_worked', 'rate',
             'tax', 'net weekly wage', 'tips', 'total hourly'),
            weekly,
            breakonfirst = True
        )
        + utils.tohtml(
            'New Payroll',
            ('yearweek', 'hours_worked', 'avg_hrs', '# employees', 'payroll'),
            new_payroll,
            breakonfirst = True
        )
        + utils.tohtml(
            'Past Payroll',
            ('yearweek', 'hours_worked', 'avg_hrs', '# employees', 'payroll'),
            past_payroll,
            breakonfirst = True
        )
        + utils.tohtml(
            "detail hours",
            ('date', 'last_name', 'first_name', 'time_in', 'time_out',
             'hours_worked'),
            detail
        )
        + '''</body></html>'''
    )
    return html
def persist_to_disk(self, cursor):
    """Render this work item's logs to disk as an annotated HTML report.

    Skips items already dumped (work_queue.dumped == 'y').  Otherwise it
    writes three files under self.disk_path(): the worker name, a
    'log.html' report of the work_logs rows with upgrade spans bolded and
    timed, and a 'data' JSON summary.  Finally the computed outcome
    ('Passed' / 'Warning' / 'Failed') is written back to work_queue.

    cursor -- a DB cursor used for the work_queue/work_logs reads; a
              second cursor is opened for nested migration lookups and the
              final update.

    NOTE(review): all SQL here is built with %-interpolation of self
    attributes rather than parameterized queries -- safe only if those
    values are trusted; consider switching to DB-API parameters.
    """
    cursor.execute('select * from work_queue where id="%s" and number=%s '
                   'and workname="%s" and constraints="%s" and '
                   'attempt=%s;'
                   %(self.ident, self.number, self.workname,
                     self.constraints, self.attempt))
    row = cursor.fetchone()
    # Already persisted on a previous run -- nothing to do.
    if row['dumped'] == 'y':
        return
    # Separate cursor: the main cursor is iterated below while this one
    # issues nested per-migration lookups and the final update.
    subcursor = utils.get_cursor()
    path = self.disk_path()
    datapath = os.path.join(path, 'data')
    workerpath = os.path.join(path, 'worker')
    # Outcome only ever escalates from Passed -> Warning -> Failed as
    # problems are spotted in the log stream.
    outcome = 'Passed'
    print path
    if not os.path.exists(path):
        os.makedirs(path)
    with open(workerpath, 'w') as f:
        f.write(self.worker)
    with open(os.path.join(path, 'log.html'), 'w') as f:
        buffered = []            # HTML log lines, written after the index
        upgrades = []            # upgrade names in the order they began
        upgrade_times = {}       # upgrade name -> elapsed timedelta
        in_upgrade = False       # True while between BEGIN and END markers
        migration_start = None   # timestamp of the migration in progress
        final_version = None     # schema version reported at end of run
        cursor.execute('select * from work_logs where id="%s" and '
                       'number=%s and workname="%s" and '
                       'worker="%s" and constraints="%s" and attempt=%s '
                       'order by timestamp asc;'
                       %(self.ident, self.number, self.workname,
                         self.worker, self.constraints, self.attempt))
        linecount = 0
        f.write(LOG_HEADER %{'id': self.ident, 'number': self.number})
        data = {}  # JSON summary written to the 'data' file at the end
        for logrow in cursor:
            # Scan each raw log line against the marker regexes; several
            # may match independently, so these are not elif'd.
            m = FINAL_VERSION_RE.match(logrow['log'])
            if m:
                final_version = int(m.group(1))
            m = UPGRADE_BEGIN_RE.match(logrow['log'])
            if m:
                upgrade_name = m.group(1)
                upgrades.append(upgrade_name)
                upgrade_start = logrow['timestamp']
                in_upgrade = True
                # Anchor so the index list at the top can link here.
                buffered.append('<a name="%s"></a>' % upgrade_name)
            m = MIGRATION_CLASH_RE.match(logrow['log'])
            if m:
                data['color'] = 'bgcolor="#FA5858"'
                data['result'] = 'Failed: migration number clash'
                print ' Failed'
                outcome = 'Failed'
            m = GIT_CHECKOUT_FAILED_RE.match(logrow['log'])
            if m:
                data['color'] = 'bgcolor="#F4FA58"'
                data['result'] = 'Warning: merge failure'
                print ' Warning'
                outcome = 'Warning'
            # Per-line anchor plus a self-link for easy citing.
            line = ('<a name="%(linenum)s"></a>'
                    '<a href="#%(linenum)s">#</a> '
                    % {'linenum': linecount})
            if in_upgrade:
                line += '<b>'
            # Shorten noisy paths and escape the log text for HTML.
            cleaned = logrow['log'].rstrip()
            cleaned = cleaned.replace('/srv/openstack-ci-tools', '...')
            cleaned = GIT_CHECKOUT_RE.sub('...git...', cleaned)
            cleaned = VENV_PATH_RE.sub('...venv...', cleaned)
            cleaned = cgi.escape(cleaned)
            # Migration end: annotate the line with its elapsed time.
            m = MIGRATION_END_RE.match(cleaned)
            if m and migration_start:
                elapsed = logrow['timestamp'] - migration_start
                cleaned += (' <font color="red">[%s]</font>'
                            % utils.timedelta_as_str(elapsed))
                migration_start = None
            # Migration start: remember when, and annotate with the
            # migration's name if we know it.
            m = MIGRATION_START_RE.match(cleaned)
            if m:
                migration_start = logrow['timestamp']
                subcursor.execute('select * from patchset_migrations '
                                  'where id="%s" and number=%s and '
                                  'migration=%s;'
                                  %(self.ident, self.number, m.group(2)))
                subrow = subcursor.fetchone()
                if subrow:
                    cleaned += (' <font color="red">[%s]</font>'
                                % subrow['name'])
            line += ('%(timestamp)s %(line)s'
                     % {'timestamp': logrow['timestamp'],
                        'line': cleaned})
            if in_upgrade:
                line += '</b>'
            line += '\n'
            buffered.append(line)
            linecount += 1
            m = UPGRADE_END_RE.match(logrow['log'])
            if m:
                in_upgrade = False
                elapsed = logrow['timestamp'] - upgrade_start
                elapsed_str = utils.timedelta_as_str(elapsed)
                buffered.append('  <font color="red"><b>'
                                '[%s total]</b></font>\n' % elapsed_str)
                upgrade_times[upgrade_name] = elapsed
        # Build the clickable upgrade index and the JSON summary, and
        # apply the slow-patchset thresholds (>120s fail, >30s warn).
        display_upgrades = []
        data.update({'order': upgrades,
                     'details' : {},
                     'details_seconds': {},
                     'final_schema_version': final_version})
        for upgrade in upgrades:
            time_str = utils.timedelta_as_str(upgrade_times[upgrade])
            display_upgrades.append('<li><a href="#%(name)s">'
                                    'Upgrade to %(name)s -- '
                                    '%(elapsed)s</a>'
                                    % {'name': upgrade,
                                       'elapsed': time_str})
            data['details'][upgrade] = time_str
            data['details_seconds'][upgrade] = \
                upgrade_times[upgrade].seconds
            data['color'] = ''
            print ' %s (%s)' %(upgrade, upgrade_times[upgrade].seconds)
            if upgrade == 'patchset':
                if upgrade_times[upgrade].seconds > 120:
                    data['color'] = 'bgcolor="#FA5858"'
                    data['result'] = 'Failed: patchset too slow'
                    print ' Failed'
                    outcome = 'Failed'
                elif upgrade_times[upgrade].seconds > 30:
                    data['color'] = 'bgcolor="#FA8258"'
                    data['result'] = 'Warning: patchset slow'
                    print ' Warning'
                    outcome = 'Warning'
        # Cross-check the reported final schema version against the highest
        # known migration for this patchset.
        if final_version:
            subcursor.execute('select max(migration) from '
                              'patchset_migrations where id="%s" '
                              'and number=%s;'
                              %(self.ident, self.number))
            subrow = subcursor.fetchone()
            data['expected_final_schema_version'] = \
                subrow['max(migration)']
            if final_version != subrow['max(migration)']:
                data['color'] = 'bgcolor="#FA5858"'
                data['result'] = 'Failed: incorrect final version'
                print ' Failed'
                outcome = 'Failed'
        f.write('<ul>%s</ul>' % ('\n'.join(display_upgrades)))
        f.write('<pre><code>\n')
        f.write(''.join(buffered))
        f.write('</code></pre></body></html>')
    with open(datapath, 'w') as d:
        d.write(json.dumps(data))
    subcursor.execute('update work_queue set outcome="%s" '
                      'where id="%s" and number=%s '
                      'and workname="%s" and constraints="%s" and '
                      'attempt=%s;'
                      %(outcome, self.ident, self.number,
                        self.workname, self.constraints, self.attempt))
    subcursor.execute('commit;')
def fetch_log_day(dt): global rechecks new = 0 cursor = utils.get_cursor() for host in ['dfw', 'ord', 'syd']: try: print ('%s Fetching http://gerrit-stream-logger-%s.stillhq.com/' 'output/%s/%s/%s' %(datetime.datetime.now(), host, dt.year, dt.month, dt.day)) remote = urllib.urlopen('http://gerrit-stream-logger-%s.' 'stillhq.com/output/%s/%s/%s' %(host, dt.year, dt.month, dt.day)) for line in remote.readlines(): packet = json.loads(line) if packet.get('type') == 'patchset-created': ts = packet['patchSet']['createdOn'] ts = datetime.datetime.fromtimestamp(ts) cursor.execute('insert ignore into patchsets ' '(id, project, number, refurl, state, ' 'subject, owner_name, url, timestamp) ' 'values (%s, %s, %s, %s, 0, %s, %s, %s, ' '%s);', (packet['change']['id'], packet['change']['project'], packet['patchSet']['number'], packet['patchSet']['ref'], utils.Normalize( packet['change']['subject']), utils.Normalize( packet['change']['owner']['name']), packet['change']['url'], ts)) new += cursor.rowcount cursor.execute('update patchsets set timestamp=%s where ' 'id=%s and number=%s;', (ts, packet['change']['id'], packet['patchSet']['number'])) cursor.execute('commit;') elif packet.get('type') == 'comment-added': if (packet.get('comment').startswith('recheck') or packet.get('comment').startswith('reverify')): # Confusingly, this is the timestamp for the comment ts = packet['patchSet']['createdOn'] ts = datetime.datetime.fromtimestamp(ts) key = (packet['change']['id'], packet['patchSet']['number']) rechecks.setdefault(key, []) if not ts in rechecks[key]: rechecks[key].append(ts) except Exception, e: print '%s Error: %s' %(datetime.datetime.now(), e) try: process_patchsets() perform_git_fetches() process_patchsets() except Exception, e: print '%s Error %s' %(datetime.datetime.now(), e)
def get_tab_text(table, serverpin = None, cursor = None, ogid = None,
                 closed_time = None, admin_view=False, reopen_time = None):
    """Build the printable receipt text and gift certificates for a table.

    table       -- table id whose open (or closed) order group to print.
    serverpin   -- id of the server printing the tab; 'NULL'/None means
                   staff view (cancelled items included by the query).
    cursor      -- DB cursor; a fresh one is opened when None.
    ogid        -- unused in this function (accepted for interface
                   compatibility -- TODO confirm callers still pass it).
    closed_time -- when set, select the order group closed at that time
                   instead of the open one, and show it on the header.
    admin_view  -- ungrouped per-item view with created/updated timing
                   markup appended after each line.
    reopen_time -- items created/updated after this time are flagged
                   (creat_after/updat_after) for admin highlighting.

    Returns (tab_text, gift_certs); when no items exist, returns an error
    message string and an empty list instead.

    NOTE(review): the SQL is assembled via %-interpolation of locals() --
    parameterize if any of these values can come from untrusted input.
    """
    if cursor is None:
        cursor = utils.get_cursor()
    # %% survives the locals() interpolation as a literal % for MySQL's
    # time_format / LIKE patterns.
    items_query = '''
        SELECT count(*) cnt, og.table_id, oi.id, oi.item_name name,
        sum(oi.price) price, oi.is_comped, oi.taxable, oi.is_cancelled,
        time_format(timediff(oi.created, og.created), '+%%H:%%i') creat_time,
        time_format(timediff(oi.updated, oi.created), '+%%H:%%i') updat_time,
        oi.created > '%(reopen_time)s' as creat_after,
        oi.updated > '%(reopen_time)s' as updat_after
        FROM order_group og JOIN order_item oi ON og.id = oi.order_group_id
        WHERE (og.is_open = TRUE and "%(closed_time)s" = 'None'
               or og.updated = "%(closed_time)s")
        and og.table_id = "%(table)s"
        and (oi.is_cancelled = FALSE or '%(serverpin)s' = 'NULL' or %(admin_view)s)
        group by oi.item_name, oi.is_comped, oi.is_cancelled, oi.price,
        IF(item_name like 'gift%%', oi.id, 1)
    '''
    if admin_view:
        items_query += ', oi.id\n' # don't group for admin_view
    items_query += '''order by oi.created'''
    items_query = items_query % locals()
    items = utils.select(items_query, cursor)
    if serverpin and serverpin != 'NULL':
        servername = utils.select(
            "select coalesce(nickname, first_name) name from person where id = %(serverpin)s" % locals(),
            cursor)[0]['name']
    else:
        servername = 'staff'
    if not items:
        return "no tab opened for table %s" %table, []
    # Totals exclude cancelled and comped items; only taxable items feed tax.
    foodtotal = sum(item['price'] for item in items
                    if not item['is_cancelled'] and not item['is_comped'])
    taxable_total = sum(item['price'] for item in items
                        if not item['is_cancelled'] and not item['is_comped']
                        and item['taxable'])
    tax = round(taxable_total*TAXRATE, 2)
    total = foodtotal + tax
    divider = '-'*(NUMWIDTH + TEXTWIDTH) + "\n"
    # Restaurant header: two venues share this codebase.
    if utils.is_salumi():
        tabtext = "SALUMI".center(NUMWIDTH + TEXTWIDTH) + '\n'
        tabtext += "5600 Merrick Rd Massapequa".center(NUMWIDTH + TEXTWIDTH) + '\n'
        tabtext += "516-620-0057".center(NUMWIDTH + TEXTWIDTH) + '\n\n'
    else:
        tabtext = "PLANCHA".center(NUMWIDTH + TEXTWIDTH) + '\n'
        tabtext += "931 Franklin Avenue".center(NUMWIDTH + TEXTWIDTH) + '\n'
        tabtext += "Garden City, NY".center(NUMWIDTH + TEXTWIDTH) + '\n'
        tabtext += "516-246-9459".center(NUMWIDTH + TEXTWIDTH) + '\n\n'
    now = utils.now()
    tabtext += ' Table:%s %s \n\n' % (table, closed_time or now)
    tabtext += 'FOOD & DRINK' + "\n"
    tabtext += divider
    # A synthetic 'coupon' cert is always first in the returned list.
    gift_certs = [GiftCert({'name':'coupon'})]
    gratuity = 0
    gratuity_rate = 0
    for item in items:
        # Gratuity pseudo-item: its 'price' is the percentage rate; it is
        # folded into the total and never printed as a line item.
        if is_gratuity(item):
            gratuity_rate = item['price']
            gratuity = round((taxable_total) * gratuity_rate/100.0, 2)
            total = total + gratuity
            continue
        if is_gift(item) or is_coupon(item):
            gift_certs.append(GiftCert(item))
        # NOTE(review): zero-price lines are suppressed except in the admin
        # view -- confirm this skip is intended for all items, not only the
        # gift/coupon branch above.
        if item['price'] == 0 and not admin_view:
            continue
        if item['is_cancelled']:
            price = 'cancelled'
        elif item['is_comped']:
            price = 'comped'
        else:
            price = '%.2f'%item['price']
        tabtext += format_item(item['name'], item['cnt']).ljust(TEXTWIDTH) + price.rjust(NUMWIDTH) + "\n"
        if admin_view:
            # Append created/updated offsets, highlighting in red anything
            # that happened after reopen_time.
            tabtext += '<a style="font-size:12; color: green">'
            if item['creat_after'] :
                tabtext += '<a style="color: red">'
            tabtext += str(item['creat_time']).replace('+00:', '+').replace('+0', '+')
            if item['creat_after'] :
                tabtext += '</a>'
            if item['updat_after'] :
                tabtext += '<a style="color: red">'
            tabtext += ' ' + str(item['updat_time'] or '').replace('+00:', '+').replace('+0', '+')
            if item['updat_after'] :
                tabtext += '</a>'
            tabtext += '</a><br/>'
    # Format all money amounts right-justified to the receipt column width.
    foodtotal, tax, gratuity, total = (
        ('%.2f'%x).rjust(NUMWIDTH)
        for x in (foodtotal, tax, gratuity, total) )
    tabtext += '\n' + \
        'SUBTOTAL'.ljust(TEXTWIDTH) + foodtotal + '\n'
    tabtext += 'TAX'.ljust(TEXTWIDTH) + tax + '\n'
    if gratuity_rate != 0:
        # NOTE(review): 'GRATIUITY' is misspelled on printed receipts; left
        # as-is here since it is runtime output -- fix deliberately.
        tabtext += ('GRATIUITY %s%%'%gratuity_rate).ljust(TEXTWIDTH) + gratuity + '\n'
    tabtext += divider
    tabtext += 'TOTAL'.ljust(TEXTWIDTH) + total + '\n'
    tabtext += '''
    Thank You. - %s
    %s
    ''' % (servername, MSG)
    return tabtext, gift_certs
#!/usr/bin/python import datetime import os import select import socket import subprocess import time import utils cursor = utils.get_cursor() cmd = ('./plugins/test_sqlalchemy_migrations.sh refs_changes_51_29251_11 ' '/srv/git-checkouts/testing nova nova nova_user_001 2>&1') for i in range(0, 5): print 'Executing script: %s' % cmd names = {} lines = {} syslog = os.open('/var/log/syslog', os.O_RDONLY) os.lseek(syslog, 0, os.SEEK_END) names[syslog] = '[syslog] ' lines[syslog] = '' slow = os.open('/var/log/mysql/slow-queries.log', os.O_RDONLY) os.lseek(slow, 0, os.SEEK_END) names[slow] = '[mysql slow queries] ' lines[slow] = '' mysql = os.open('/var/log/mysql/error.log', os.O_RDONLY)