def assertAlmostEqualValue(test, expected, digits=None, places=None, msg=None, delta=None):
    """
    Snagged from unittest/case.py, then modified (Aug2014)

    Assert test is close to expected.  Exactly one of digits, places or
    delta may be given; with none, defaults to places=15.  Raises
    AssertionError on mismatch, TypeError if more than one tolerance given.
    """
    if expected == None:  # None has no expectations
        return
    if test == expected:  # shortcut
        return

    if not Math.is_number(expected):
        # SOME SPECIAL CASES, EXPECTING EMPTY CONTAINERS IS THE SAME AS EXPECTING NULL
        if isinstance(expected, list) and len(expected)==0 and test == None:
            return
        if isinstance(expected, Mapping) and not expected.keys() and test == None:
            return
        if test != expected:
            raise AssertionError(expand_template("{{test}} != {{expected}}", locals()))
        return

    # ENFORCE AT MOST ONE TOLERANCE PARAMETER
    num_param = 0
    if digits != None:
        num_param += 1
    if places != None:
        num_param += 1
    if delta != None:
        num_param += 1
    if num_param>1:
        raise TypeError("specify only one of digits, places or delta")

    if digits is not None:
        # digits = NUMBER OF DECIMAL PLACES (ABSOLUTE TOLERANCE ~10**digits)
        with suppress_exception:
            # log10 RAISES ON ZERO DIFFERENCE; suppress_exception FALLS THROUGH TO FAILURE MESSAGE
            diff = Math.log10(abs(test-expected))
            if diff < digits:
                return

        standardMsg = expand_template("{{test}} != {{expected}} within {{digits}} decimal places", locals())
    elif delta is not None:
        # delta = ABSOLUTE TOLERANCE
        if abs(test - expected) <= delta:
            return

        standardMsg = expand_template("{{test}} != {{expected}} within {{delta}} delta", locals())
    else:
        # places = NUMBER OF SIGNIFICANT DIGITS (RELATIVE TOLERANCE)
        if places is None:
            places = 15

        with suppress_exception:
            diff = Math.log10(abs(test-expected))
            if diff < Math.ceiling(Math.log10(abs(test)))-places:
                return

        standardMsg = expand_template("{{test|json}} != {{expected|json}} within {{places}} places", locals())

    raise AssertionError(coalesce(msg, "") + ": (" + standardMsg + ")")
def _aggop(self, query):
    """
    SINGLE ROW RETURNED WITH AGGREGATES

    Build (sql, post_processor) for a query with no edges: every selected
    column must carry an aggregate, so exactly one row comes back.
    """
    if isinstance(query.select, list):
        # RETURN SINGLE OBJECT WITH AGGREGATES
        for s in query.select:
            if s.aggregate not in aggregates:
                Log.error("Expecting all columns to have an aggregate: {{select}}", select=s)

        selects = DictList()
        for s in query.select:
            # "<AGG_EXPR> AS <quoted name>" FOR EACH SELECT
            selects.append(aggregates[s.aggregate].replace("{{code}}", s.value) + " AS " + self.db.quote_column(s.name))

        sql = expand_template("""
            SELECT
                {{selects}}
            FROM
                {{table}}
            {{where}}
        """, {
            "selects": SQL(",\n".join(selects)),
            "table": self._subquery(query["from"])[0],
            # NOTE(review): this branch reads query.filter while the scalar
            # branch below reads query.where -- confirm which is intended
            "where": self._where2sql(query.filter)
        })

        return sql, lambda sql: self.db.column(sql)[0]  # RETURNING SINGLE OBJECT WITH AGGREGATE VALUES
    else:
        # RETURN SINGLE VALUE
        s0 = query.select
        if s0.aggregate not in aggregates:
            Log.error("Expecting all columns to have an aggregate: {{select}}", select=s0)
        select = aggregates[s0.aggregate].replace("{{code}}", s0.value) + " AS " + self.db.quote_column(s0.name)

        sql = expand_template("""
            SELECT
                {{selects}}
            FROM
                {{table}}
            {{where}}
        """, {
            "selects": SQL(select),
            "table": self._subquery(query["from"])[0],
            "where": self._where2sql(query.where)
        })

        def post(sql):
            # FIRST COLUMN OF FIRST (ONLY) ROW
            result = self.db.column_query(sql)
            return result[0][0]

        return sql, post  # RETURN SINGLE VALUE
def _aggop(self, query):
    """
    SINGLE ROW RETURNED WITH AGGREGATES

    Build (sql, post_processor) for a query with no edges: every selected
    column must carry an aggregate, so exactly one row comes back.
    """
    if isinstance(query.select, list):
        # RETURN SINGLE OBJECT WITH AGGREGATES
        for s in query.select:
            if s.aggregate not in aggregates:
                Log.error("Expecting all columns to have an aggregate: {{select}}", select=s)

        selects = DictList()
        for s in query.select:
            # "<AGG_EXPR> AS <quoted name>" FOR EACH SELECT
            selects.append(aggregates[s.aggregate].replace("{{code}}", s.value) + " AS " + self.db.quote_column(s.name))

        sql = expand_template("""
            SELECT
                {{selects}}
            FROM
                {{table}}
            {{where}}
        """, {
            "selects": SQL(",\n".join(selects)),
            "table": self._subquery(query["from"])[0],
            # NOTE(review): this branch reads query.filter while the scalar
            # branch below reads query.where -- confirm which is intended
            "where": self._where2sql(query.filter)
        })

        return sql, lambda sql: self.db.column(sql)[0]  # RETURNING SINGLE OBJECT WITH AGGREGATE VALUES
    else:
        # RETURN SINGLE VALUE
        s0 = query.select
        if s0.aggregate not in aggregates:
            Log.error("Expecting all columns to have an aggregate: {{select}}", select=s0)
        select = aggregates[s0.aggregate].replace("{{code}}", s0.value) + " AS " + self.db.quote_column(s0.name)

        sql = expand_template("""
            SELECT
                {{selects}}
            FROM
                {{table}}
            {{where}}
        """, {
            "selects": SQL(select),
            "table": self._subquery(query["from"])[0],
            "where": self._where2sql(query.where)
        })

        def post(sql):
            # FIRST COLUMN OF FIRST (ONLY) ROW
            result = self.db.column_query(sql)
            return result[0][0]

        return sql, post  # RETURN SINGLE VALUE
def execute(
    self,
    command,
    param=None,
    retry=True     # IF command FAILS, JUST THROW ERROR
):
    """
    Execute one command against the connection, reconnecting on failure.

    NOTE(review): when retry=True the loop re-runs the command until it
    succeeds; a command that always fails never raises and never exits --
    confirm this is the intended behavior.
    """
    if param:
        command = expand_template(command, self.quote_param(param))

    output = None
    done = False
    while not done:
        try:
            with self.locker:
                if not self.connection:
                    self._connect()
                with Closer(self.connection.cursor()) as curs:
                    curs.execute(command)
                    if curs.rowcount >= 0:
                        output = curs.fetchall()
                self.connection.commit()
            done = True
        except Exception, e:
            # BEST-EFFORT CLEANUP, THEN REBUILD THE CONNECTION
            with suppress_exception:
                self.connection.rollback()
                # TODO: FIGURE OUT WHY rollback() DOES NOT HELP
                self.connection.close()
            self.connection = None
            self._connect()
            if not retry:
                Log.error("Problem with command:\n{{command|indent}}", command=command, cause=e)
def quote_value(self, value):
    """
    convert values to mysql code for the same
    mostly delegate directly to the mysql lib, but some exceptions exist
    """
    try:
        if value == None:
            return "NULL"
        elif isinstance(value, SQL):
            if not value.param:
                # value.template CAN BE MORE THAN A TEMPLATE STRING
                return self.quote_sql(value.template)
            param = {k: self.quote_sql(v) for k, v in value.param.items()}
            return expand_template(value.template, param)
        elif isinstance(value, basestring):
            return self.db.literal(value)
        elif isinstance(value, datetime):
            # EXPLICIT str_to_date() AVOIDS ANY SERVER-SIDE DATE PARSING AMBIGUITY
            return "str_to_date('" + value.strftime("%Y%m%d%H%M%S") + "', '%Y%m%d%H%i%s')"
        elif hasattr(value, '__iter__'):
            # NOTE(review): dicts also have __iter__, so the Mapping branch
            # below appears unreachable; both encode to JSON, so the result
            # is the same either way
            return self.db.literal(json_encode(value))
        elif isinstance(value, Mapping):
            return self.db.literal(json_encode(value))
        elif Math.is_number(value):
            return unicode(value)
        else:
            return self.db.literal(value)
    except Exception, e:
        Log.error("problem quoting SQL", e)
def fill_container(self, subtest, tjson=False):
    """
    RETURN SETTINGS THAT CAN BE USED TO POINT TO THE INDEX THAT'S FILLED

    Creates the test ES index, loads subtest.data into it, and rewrites
    subtest.query["from"] to point at the new index.
    """
    subtest = wrap(subtest)
    _settings = self._es_test_settings  # ALREADY COPIED AT setUp()
    # _settings.index = "testing_" + Random.hex(10).lower()
    # settings.type = "test_result"

    try:
        # LOAD THE SCHEMA TEMPLATE, EXPANDED WITH THE TEST METADATA
        url = "file://resources/schema/basic_schema.json.template?{{.|url}}"
        url = expand_template(url, {
            "type": _settings.type,
            "metadata": subtest.metadata
        })
        _settings.schema = jsons.ref.get(url)

        # MAKE CONTAINER
        container = self._es_cluster.get_or_create_index(tjson=tjson, settings=_settings)
        container.add_alias(_settings.index)

        # INSERT DATA
        container.extend([{"value": v} for v in subtest.data])
        container.flush()

        # ENSURE query POINTS TO CONTAINER
        frum = subtest.query["from"]
        if isinstance(frum, basestring):
            subtest.query["from"] = frum.replace(TEST_TABLE, _settings.index)
        else:
            Log.error("Do not know how to handle")
    except Exception, e:
        Log.error("can not load {{data}} into container", {"data": subtest.data}, e)
def execute(
    self,
    command,
    param=None,
    retry=True     # IF command FAILS, JUST THROW ERROR
):
    """
    Execute one command against the connection, reconnecting on failure.

    NOTE(review): when retry=True the loop re-runs the command until it
    succeeds; a command that always fails never raises and never exits --
    confirm this is the intended behavior.
    """
    if param:
        command = expand_template(command, self.quote_param(param))

    output = None
    done = False
    while not done:
        try:
            with self.locker:
                if not self.connection:
                    self._connect()
                with Closer(self.connection.cursor()) as curs:
                    curs.execute(command)
                    if curs.rowcount >= 0:
                        output = curs.fetchall()
                self.connection.commit()
            done = True
        except Exception, e:
            # BEST-EFFORT CLEANUP, THEN REBUILD THE CONNECTION
            try:
                self.connection.rollback()
                # TODO: FIGURE OUT WHY rollback() DOES NOT HELP
                self.connection.close()
            except Exception, f:
                pass
            self.connection = None
            self._connect()
            if not retry:
                Log.error("Problem with command:\n{{command|indent}}", command= command, cause=e)
def json2value(json_string, params={}, flexible=False, leaves=False): """ :param json_string: THE JSON :param params: STANDARD JSON PARAMS :param flexible: REMOVE COMMENTS :param leaves: ASSUME JSON KEYS ARE DOT-DELIMITED :return: Python value """ if isinstance(json_string, str): Log.error("only unicode json accepted") try: if flexible: # REMOVE """COMMENTS""", # COMMENTS, //COMMENTS, AND \n \r # DERIVED FROM https://github.com/jeads/datasource/blob/master/datasource/bases/BaseHub.py# L58 json_string = re.sub(r"\"\"\".*?\"\"\"", r"\n", json_string, flags=re.MULTILINE) json_string = "\n".join(remove_line_comment(l) for l in json_string.split("\n")) # ALLOW DICTIONARY'S NAME:VALUE LIST TO END WITH COMMA json_string = re.sub(r",\s*\}", r"}", json_string) # ALLOW LISTS TO END WITH COMMA json_string = re.sub(r",\s*\]", r"]", json_string) if params: # LOOKUP REFERENCES json_string = expand_template(json_string, params) try: value = wrap(json_decoder(unicode(json_string))) except Exception, e: Log.error("can not decode\n{{content}}", content=json_string, cause=e) if leaves: value = wrap_leaves(value) return value
def write(self, template, params):
    """
    Accumulate warning-level log lines; email them no more often than
    WAIT_TO_SEND_MORE after the last send.
    """
    with self.locker:
        # BUG FIX: original tested `...warning.template or ...warning.template`
        # (same expression twice); second operand should be the params
        if params.params.warning.template or params.params.warning.params:
            self.accumulation.append(expand_template(template, params))

        if Date.now() > self.last_sent + WAIT_TO_SEND_MORE:
            self._send_email()
def write(self, template, params):
    """
    Buffer interesting log lines and flush them by email on a schedule.
    """
    with self.locker:
        boring = params.context in [NOTE, ALARM]
        if not boring:  # DO NOT SEND THE BORING STUFF
            line = expand_template(template, params)
            self.accumulation.append(line)

        if Date.now() > self.next_send:
            self._send_email()
def quote_value(self, value):
    """
    convert values to mysql code for the same
    mostly delegate directly to the mysql lib, but some exceptions exist
    """
    try:
        if value == None:
            return "NULL"
        elif isinstance(value, SQL):
            if not value.param:
                # value.template CAN BE MORE THAN A TEMPLATE STRING
                return self.quote_sql(value.template)
            param = {k: self.quote_sql(v) for k, v in value.param.items()}
            return expand_template(value.template, param)
        elif isinstance(value, basestring):
            return self.db.literal(value)
        elif isinstance(value, datetime):
            # EXPLICIT str_to_date() AVOIDS ANY SERVER-SIDE DATE PARSING AMBIGUITY
            return "str_to_date('" + value.strftime(
                "%Y%m%d%H%M%S") + "', '%Y%m%d%H%i%s')"
        elif hasattr(value, '__iter__'):
            # NOTE(review): dicts also have __iter__, so the Mapping branch
            # below appears unreachable; both encode to JSON, so the result
            # is the same either way
            return self.db.literal(json_encode(value))
        elif isinstance(value, Mapping):
            return self.db.literal(json_encode(value))
        elif Math.is_number(value):
            return unicode(value)
        else:
            return self.db.literal(value)
    except Exception, e:
        Log.error("problem quoting SQL", e)
def time_delta_pusher(please_stop, appender, queue, interval):
    """
    appender - THE FUNCTION THAT ACCEPTS A STRING
    queue - FILLED WITH LOG ENTRIES {"template":template, "params":params} TO WRITE
    interval - timedelta
    USE IN A THREAD TO BATCH LOGS BY TIME INTERVAL
    """
    next_run = time() + interval

    while not please_stop:
        # WAKE ON SCHEDULE, OR IMMEDIATELY ON SHUTDOWN
        (Till(till=next_run) | please_stop).wait()
        next_run = time() + interval
        logs = queue.pop_all()
        if not logs:
            continue

        lines = []
        for log in logs:
            try:
                if log is Thread.STOP:
                    please_stop.go()
                    # FORCE THE NEXT ITERATION TO RUN IMMEDIATELY
                    next_run = time()
                else:
                    expanded = expand_template(log.get("template"), log.get("params"))
                    lines.append(expanded)
            except Exception, e:
                Log.warning("Trouble formatting logs", cause=e)
                # SWALLOW ERROR, GOT TO KEEP RUNNING
        try:
            appender(u"\n".join(lines) + u"\n")
        except Exception, e:
            # CAN NOT USE Log HERE; WRITE DIRECTLY TO stderr
            sys.stderr.write(b"Trouble with appender: " + str(e.message) + b"\n")
def column_query(self, sql, param=None):
    """
    RETURN RESULTS IN [column][row_num] GRID
    """
    self._execute_backlog()
    try:
        old_cursor = self.cursor
        if not old_cursor:  # ALLOW NON-TRANSACTIONAL READS
            # FIRST CURSOR ONLY PINS THE SESSION TIME ZONE TO UTC
            self.cursor = self.db.cursor()
            self.cursor.execute("SET TIME_ZONE='+00:00'")
            self.cursor.close()
            self.cursor = self.db.cursor()

        if param:
            sql = expand_template(sql, self.quote_param(param))
        sql = self.preamble + outdent(sql)
        if self.debug:
            Log.note("Execute SQL:\n{{sql}}", sql=indent(sql))

        self.cursor.execute(sql)
        grid = [[utf8_to_unicode(c) for c in row] for row in self.cursor]
        # columns = [utf8_to_unicode(d[0]) for d in coalesce(self.cursor.description, [])]
        # TRANSPOSE [row][column] INTO [column][row]
        result = zip(*grid)

        if not old_cursor:  # CLEANUP AFTER NON-TRANSACTIONAL READS
            self.cursor.close()
            self.cursor = None

        return result
    except Exception, e:
        if isinstance(e, InterfaceError) or e.message.find("InterfaceError") >= 0:
            Log.error("Did you close the db connection?", e)
        Log.error("Problem executing SQL:\n{{sql|indent}}", sql= sql, cause=e,stack_depth=1)
def execute_sql(host, username, password, sql, schema=None, param=None, settings=None):
    """EXECUTE MANY LINES OF SQL (FROM SQLDUMP FILE, MAYBE?)"""
    settings.schema = coalesce(settings.schema, settings.database)

    if param:
        # BORROW A CONNECTION ONLY TO QUOTE THE PARAMETERS
        with MySQL(settings) as temp:
            sql = expand_template(sql, temp.quote_param(param))

    # We have no way to execute an entire SQL file in bulk, so we
    # have to shell out to the commandline client.
    # NOTE(review): -p<password> exposes the password on the process
    # command line -- consider a defaults file instead
    args = [
        "mysql",
        "-h{0}".format(settings.host),
        "-u{0}".format(settings.username),
        "-p{0}".format(settings.password)
    ]
    if settings.schema:
        args.append("{0}".format(settings.schema))

    try:
        proc = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=-1)
        if isinstance(sql, unicode):
            sql = sql.encode("utf8")
        (output, _) = proc.communicate(sql)
    except Exception, e:
        Log.error("Can not call \"mysql\"", e)
def forall(self, sql, param=None, _execute=None):
    """
    Run sql and call _execute(row_dict) once per result row.
    """
    assert _execute
    num = 0  # ROW COUNTER (LOCAL BOOKKEEPING ONLY)

    self._execute_backlog()
    try:
        old_cursor = self.cursor
        if not old_cursor:  # ALLOW NON-TRANSACTIONAL READS
            self.cursor = self.db.cursor()

        if param:
            sql = expand_template(sql, self.quote_param(param))
        sql = self.preamble + outdent(sql)
        if self.debug:
            Log.note("Execute SQL:\n{{sql}}", sql=indent(sql))
        self.cursor.execute(sql)

        columns = tuple(
            [utf8_to_unicode(d[0]) for d in self.cursor.description])
        for r in self.cursor:
            num += 1
            # EACH ROW DELIVERED AS A wrapped {column: value} DICT
            _execute(
                wrap(dict(zip(columns, [utf8_to_unicode(c) for c in r]))))

        if not old_cursor:  # CLEANUP AFTER NON-TRANSACTIONAL READS
            self.cursor.close()
            self.cursor = None

    except Exception, e:
        Log.error("Problem executing SQL:\n{{sql|indent}}", sql=sql, cause=e, stack_depth=1)
def forall(self, sql, param=None, _execute=None):
    """
    Run sql and call _execute(row_dict) once per result row.
    """
    assert _execute
    num = 0  # ROW COUNTER (LOCAL BOOKKEEPING ONLY)

    self._execute_backlog()
    try:
        old_cursor = self.cursor
        if not old_cursor:  # ALLOW NON-TRANSACTIONAL READS
            self.cursor = self.db.cursor()

        if param:
            sql = expand_template(sql, self.quote_param(param))
        sql = self.preamble + outdent(sql)
        if self.debug:
            Log.note("Execute SQL:\n{{sql}}", sql= indent(sql))
        self.cursor.execute(sql)

        columns = tuple([utf8_to_unicode(d[0]) for d in self.cursor.description])
        for r in self.cursor:
            num += 1
            # EACH ROW DELIVERED AS A wrapped {column: value} DICT
            _execute(wrap(dict(zip(columns, [utf8_to_unicode(c) for c in r]))))

        if not old_cursor:  # CLEANUP AFTER NON-TRANSACTIONAL READS
            self.cursor.close()
            self.cursor = None

    except Exception, e:
        Log.error("Problem executing SQL:\n{{sql|indent}}", sql= sql, cause=e, stack_depth=1)
def write(self, template, params): try: with self.file_lock: self.file.append(expand_template(template, params)) except Exception, e: _Log.warning("Problem writing to file {{file}}, waiting...", file=file.name, cause=e) _Till(seconds=5).wait()
def format_trace(tbs, start=0):
    """
    Render a list of traceback frame dicts (keys: file, line, method) as
    standard 'File "...", line N, in method' text, skipping the first
    `start` frames.
    """
    rendered = [
        expand_template('File "{{file}}", line {{line}}, in {{method}}\n', frame)
        for frame in tbs[start:]
    ]
    return "".join(rendered)
def write(self, template, params):
    """
    Accumulate warning-level log lines; email them no more often than
    WAIT_TO_SEND_MORE after the last send.
    """
    with self.locker:
        # BUG FIX: original tested `...warning.template or ...warning.template`
        # (same expression twice); second operand should be the params
        if params.params.warning.template or params.params.warning.params:
            self.accumulation.append(expand_template(template, params))

        if Date.now() > self.last_sent + WAIT_TO_SEND_MORE:
            self._send_email()
def json2value(json_string, params={}, flexible=False, leaves=False):
    """
    :param json_string: THE JSON
    :param params: STANDARD JSON PARAMS
    :param flexible: REMOVE COMMENTS
    :param leaves: ASSUME JSON KEYS ARE DOT-DELIMITED
    :return: Python value
    """
    if isinstance(json_string, str):
        Log.error("only unicode json accepted")

    try:
        if flexible:
            # REMOVE """COMMENTS""", # COMMENTS, //COMMENTS, AND \n \r
            # DERIVED FROM https://github.com/jeads/datasource/blob/master/datasource/bases/BaseHub.py# L58
            json_string = re.sub(r"\"\"\".*?\"\"\"", r"\n", json_string, flags=re.MULTILINE)
            json_string = "\n".join(remove_line_comment(l) for l in json_string.split("\n"))
            # ALLOW DICTIONARY'S NAME:VALUE LIST TO END WITH COMMA
            json_string = re.sub(r",\s*\}", r"}", json_string)
            # ALLOW LISTS TO END WITH COMMA
            json_string = re.sub(r",\s*\]", r"]", json_string)

        if params:
            json_string = expand_template(json_string, params)  # LOOKUP REFERENCES

        value = wrap(json_decoder(json_string))

        if leaves:
            value = wrap_leaves(value)

        return value

    except Exception, e:
        e = Except.wrap(e)
        # POINT AT THE OFFENDING CHARACTER WHEN THE DECODER GIVES LINE/COLUMN
        if "Expecting '" in e and "' delimiter: line" in e:
            line_index = int(strings.between(e.message, " line ", " column ")) - 1
            column = int(strings.between(e.message, " column ", " ")) - 1
            line = json_string.split("\n")[line_index].replace("\t", " ")
            if column > 20:
                sample = "..." + line[column - 20:]
                pointer = " " + (" " * 20) + "^"
            else:
                sample = line
                pointer = (" " * column) + "^"

            if len(sample) > 43:
                sample = sample[:43] + "..."

            Log.error("Can not decode JSON at:\n\t" + sample + "\n\t" + pointer + "\n")

        # FALLBACK: DUMP THE (TRUNCATED) CONTENT AS HEX FOR DIAGNOSIS
        base_str = unicode2utf8(strings.limit(json_string, 1000))
        hexx_str = bytes2hex(base_str, " ")
        try:
            # NOTE(review): this concatenates a string with a generator
            # expression, which raises TypeError, so char_str always falls
            # back to " " below -- the join parentheses look misplaced
            char_str = " " + (" ".join(c.decode("latin1") if ord(c) >= 32 else ".") for c in base_str)
        except Exception:
            char_str = " "
        Log.error("Can not decode JSON:\n" + char_str + "\n" + hexx_str + "\n", e)
def assertAlmostEqualValue(test, expected, digits=None, places=None, msg=None, delta=None):
    """
    Snagged from unittest/case.py, then modified (Aug2014)

    NOTE(review): as transcribed, the digits-mismatch path computes
    standardMsg but never raises, so a failing comparison silently passes --
    confirm whether the raising tail was lost.
    """
    if test == expected:  # shortcut
        return

    if not Math.is_number(expected):
        # SOME SPECIAL CASES, EXPECTING EMPTY CONTAINERS IS THE SAME AS EXPECTING NULL
        if isinstance(expected, list) and len(expected) == 0 and test == None:
            return
        if isinstance(expected, Mapping) and not expected.keys() and test == None:
            return
        if test != expected:
            raise AssertionError(
                expand_template("{{test}} != {{expected}}", locals()))
        return

    # ENFORCE AT MOST ONE TOLERANCE PARAMETER
    num_param = 0
    if digits != None:
        num_param += 1
    if places != None:
        num_param += 1
    if delta != None:
        num_param += 1
    if num_param > 1:
        raise TypeError("specify only one of digits, places or delta")

    if digits is not None:
        try:
            # log10 RAISES ON ZERO DIFFERENCE; TREATED AS "CLOSE ENOUGH" BELOW
            diff = Math.log10(abs(test - expected))
            if diff < digits:
                return
        except Exception, e:
            pass

        standardMsg = expand_template(
            "{{test}} != {{expected}} within {{digits}} decimal places",
            locals())
def write(self, template, params):
    """
    Buffer interesting log lines and flush them by email on a schedule.
    """
    with self.locker:
        boring = params.context in [NOTE, ALARM]
        if not boring:  # SEND ONLY THE NOT BORING STUFF
            line = expand_template(template, params)
            self.accumulation.append(line)

        if Date.now() > self.next_send:
            self._send_email()
def execute(self, sql, param=None):
    """
    Queue one statement on the current transaction's backlog; flush when
    the batch is full (or immediately while debugging).
    """
    if self.transaction_level == 0:
        Log.error("Expecting transaction to be started before issuing queries")

    if param:
        sql = expand_template(sql, self.quote_param(param))
    self.backlog.append(outdent(sql))

    flush_now = self.debug or len(self.backlog) >= MAX_BATCH_SIZE
    if flush_now:
        self._execute_backlog()
def test_right_align(self):
    # Exercises expand_template filters: datetime, right_align, json,
    # indent, and dotted-path lookup.
    total = 123.45
    some_list = [10, 11, 14, 80]
    details = {"person": {"name": "Kyle Lahnakoski", "age": 40}}

    # NOTE(review): expected text presumably assumes the datetime filter
    # renders in UTC -- confirm it is not local-time dependent
    result = expand_template("it is currently {{now|datetime}}", {"now": 1420119241000})
    self.assertEqual(result, 'it is currently 2015-01-01 13:34:01')

    result = expand_template("Total: {{total|right_align(20)}}", {"total": total})
    self.assertEqual(result, 'Total:               123.45')

    result = expand_template("Summary:\n{{list|json|indent}}", {"list": some_list})
    self.assertEqual(result, 'Summary:\n\t[10, 11, 14, 80]')

    result = expand_template("Summary:\n{{list|indent}}", {"list": some_list})
    self.assertEqual(result, 'Summary:\n\t[10, 11, 14, 80]')

    # DOTTED PATHS REACH INTO NESTED DICTS
    result = expand_template("{{person.name}} is {{person.age}} years old", details)
    self.assertEqual(result, "Kyle Lahnakoski is 40 years old")
def execute(self, sql, param=None):
    """
    Queue one statement on the current transaction's backlog; flush when
    the batch is full (or immediately while debugging).
    """
    if self.transaction_level == 0:
        Log.error("Expecting transaction to be started before issuing queries")

    if param:
        sql = expand_template(sql, self.quote_param(param))
    self.backlog.append(outdent(sql))

    flush_now = self.debug or len(self.backlog) >= MAX_BATCH_SIZE
    if flush_now:
        self._execute_backlog()
def compileString2Term(edge):
    # Translate a string-valued edge into an ES script term, plus the
    # inverse mapping from term value back to a domain partition.
    if edge.esscript:
        Log.error("edge script not supported yet")

    value = edge.value
    if isKeyword(value):
        # SIMPLE FIELD NAME: READ IT VIA getDocValue(<quoted path>)
        value = strings.expand_template("getDocValue({{path}})", {"path": convert.string2quote(value)})
    else:
        Log.error("not handled")

    def fromTerm(value):
        # MAP A RETURNED TERM BACK TO ITS DOMAIN PARTITION
        return edge.domain.getPartByKey(value)

    return Dict(toTerm={"head": "", "body": value}, fromTerm=fromTerm)
def table2csv(table_data):
    """
    :param table_data: expecting a list of tuples
    :return: text in nice formatted csv
    """
    # Render every cell to JSON text first, so widths can be measured
    rendered = [
        tuple(value2json(cell, pretty=True) for cell in row)
        for row in table_data
    ]
    # One width per column: the widest rendered cell in that column
    widths = [max(len(cell) for cell in column) for column in zip(*rendered)]
    # Build a row template that left-aligns each column to its width
    row_template = ", ".join(
        "{{" + unicode(index) + "|left_align(" + unicode(width) + ")}}"
        for index, width in enumerate(widths)
    )
    return "\n".join(expand_template(row_template, row) for row in rendered)
def assertAlmostEqualValue(test, expected, digits=None, places=None, msg=None, delta=None):
    """
    Snagged from unittest/case.py, then modified (Aug2014)

    NOTE(review): as transcribed, the digits-mismatch path computes
    standardMsg but never raises, so a failing comparison silently passes --
    confirm whether the raising tail was lost.
    """
    if test == expected:  # shortcut
        return

    if not Math.is_number(expected):
        # SOME SPECIAL CASES, EXPECTING EMPTY CONTAINERS IS THE SAME AS EXPECTING NULL
        if isinstance(expected, list) and len(expected) == 0 and test == None:
            return
        if isinstance(expected, Mapping) and not expected.keys() and test == None:
            return
        if test != expected:
            raise AssertionError(expand_template("{{test}} != {{expected}}", locals()))
        return

    # ENFORCE AT MOST ONE TOLERANCE PARAMETER
    num_param = 0
    if digits != None:
        num_param += 1
    if places != None:
        num_param += 1
    if delta != None:
        num_param += 1
    if num_param > 1:
        raise TypeError("specify only one of digits, places or delta")

    if digits is not None:
        try:
            # log10 RAISES ON ZERO DIFFERENCE; TREATED AS "CLOSE ENOUGH" BELOW
            diff = Math.log10(abs(test - expected))
            if diff < digits:
                return
        except Exception, e:
            pass

        standardMsg = expand_template("{{test}} != {{expected}} within {{digits}} decimal places", locals())
def get_job_classification(self, branch, revision):
    # Walk Treeherder: result sets for the revision -> jobs -> notes,
    # logging one record per non-empty note.
    # NOTE(review): revision[0:12:] is just revision[:12] -- Treeherder is
    # presumably keyed on 12-char short hashes; confirm
    results = http.get_json(expand_template(RESULT_SET_URL, {"branch": branch, "revision": revision[0:12:]}))
    for r in results.results:
        jobs = http.get_json(expand_template(JOBS_URL, {"branch": branch, "result_set_id": r.id}))
        for j in jobs:
            notes = http.get_json(expand_template(NOTES_URL, {"branch": branch, "job_id": j.id}))
            for n in notes:
                if not n.note:
                    continue
                Log.note(
                    "{{note|json}}",
                    note={
                        "job_id": j.id,
                        "result_set_id": r.id,
                        "branch": branch,
                        "revision": r.revision,
                        "failure_classification_id": j.failure_classification_id,
                        "result": j.result,
                        "note_timestamp": n.timestamp,
                        "note": n.note
                    }
                )
def assertAlmostEqualValue(first, second, digits=None, places=None, msg=None, delta=None):
    """
    Snagged from unittest/case.py, then modified (Aug2014)

    Assert first is close to second, within `delta` (absolute) or
    `places`/`digits` (significant digits, default 18).  Raises
    AssertionError on mismatch, TypeError if both tolerances given.
    """
    if first == second:  # shortcut
        return

    places = places if places is not None else digits

    if delta is not None and places is not None:
        raise TypeError("specify delta or places not both")

    if delta is not None:
        if abs(first - second) <= delta:
            return

        standardMsg = expand_template("{{first}} != {{second}} within {{delta}} delta", {
            "first": first,
            "second": second,
            "delta": delta
        })
    else:
        if places is None:
            places = 18

        diff = log10(abs(first-second))
        if diff < Math.ceiling(log10(abs(first)))-places:
            return

        standardMsg = expand_template("{{first}} != {{second}} within {{places}} places", {
            "first": first,
            "second": second,
            # BUG FIX: key was "" so the template's {{places}} never resolved
            "places": places
        })

    raise AssertionError(nvl(msg, "") + ": (" + standardMsg + ")")
def compileString2Term(edge):
    # Translate a string-valued edge into an ES script term, plus the
    # inverse mapping from term value back to a domain partition.
    if edge.esscript:
        Log.error("edge script not supported yet")

    value = edge.value
    if isKeyword(value):
        # SIMPLE FIELD NAME: READ IT VIA getDocValue(<quoted path>)
        value = strings.expand_template("getDocValue({{path}})", {"path": convert.string2quote(value)})
    else:
        Log.error("not handled")

    def fromTerm(value):
        # MAP A RETURNED TERM BACK TO ITS DOMAIN PARTITION
        return edge.domain.getPartByKey(value)

    return Dict(toTerm={"head": "", "body": value}, fromTerm=fromTerm)
def add(self, data):
    """
    Queue one record for upload; return (link, count) where link is the
    public URL the record will be reachable at and count is its id.
    """
    data = wrap(data)
    uid, count = self.uid.advance()
    # BUILD THE PUBLIC URL THE RECORD WILL EVENTUALLY LIVE AT
    link = expand_template(
        LINK_PATTERN,
        {
            "region": self.bucket.settings.region,
            "bucket": self.bucket.settings.bucket,
            "uid": uid
        }
    )
    # STAMP ETL PROVENANCE ONTO THE RECORD BEFORE QUEUEING
    data.etl.id = count
    data.etl.source.href = link
    data[UID_PATH] = uid
    self.temp_queue.add(data)
    return link, count
def __str__(self): output = self.type + ": " + self.template + "\n" if self.params: output = expand_template(output, self.params) if self.trace: output += indent(format_trace(self.trace)) if self.cause: cause_strings = [] for c in listwrap(self.cause): try: cause_strings.append(unicode(c)) except Exception, e: pass output += "caused by\n\t" + "and caused by\n\t".join(cause_strings)
def __unicode__(self):
    """
    Render this exception as text: expanded message, then indented trace,
    then the chain of causes.
    """
    output = self.type + ": " + self.template + "\n"
    if self.params:
        output = expand_template(output, self.params)

    if self.trace:
        output += indent(format_trace(self.trace))

    if self.cause:
        cause_strings = []
        for c in listwrap(self.cause):
            # A CAUSE THAT CAN NOT BE RENDERED IS SKIPPED
            with suppress_exception:
                cause_strings.append(unicode(c))

        output += "caused by\n\t" + "and caused by\n\t".join(cause_strings)

    return output
def __str__(self): output = self.type + ": " + self.template + "\n" if self.params: output = expand_template(output, self.params) if self.trace: output += indent(format_trace(self.trace)) if self.cause: cause_strings = [] for c in listwrap(self.cause): try: cause_strings.append(unicode(c)) except Exception, e: pass output += "caused by\n\t" + "and caused by\n\t".join(cause_strings)
def __unicode__(self):
    """
    Render this exception as text: expanded message, then indented trace,
    then the chain of causes.
    """
    output = self.type + ": " + self.template + "\n"
    if self.params:
        output = expand_template(output, self.params)

    if self.trace:
        output += indent(format_trace(self.trace))

    if self.cause:
        cause_strings = []
        for c in listwrap(self.cause):
            # A CAUSE THAT CAN NOT BE RENDERED IS SKIPPED
            with suppress_exception:
                cause_strings.append(unicode(c))

        output += "caused by\n\t" + "and caused by\n\t".join(cause_strings)

    return output
def time_delta_pusher(please_stop, appender, queue, interval):
    """
    appender - THE FUNCTION THAT ACCEPTS A STRING
    queue - FILLED WITH LOG ENTRIES {"template":template, "params":params} TO WRITE
    interval - timedelta
    USE IN A THREAD TO BATCH LOGS BY TIME INTERVAL
    """
    if not isinstance(interval, timedelta):
        Log.error("Expecting interval to be a timedelta")

    next_run = datetime.utcnow() + interval

    while not please_stop:
        Thread.sleep(till=next_run)
        next_run = datetime.utcnow() + interval
        logs = queue.pop_all()
        if logs:
            lines = []
            for log in logs:
                try:
                    if log is Thread.STOP:
                        please_stop.go()
                        # FORCE THE NEXT ITERATION TO RUN IMMEDIATELY
                        next_run = datetime.utcnow()
                    else:
                        expanded = expand_template(log.get("template"), log.get("params"))
                        lines.append(expanded)
                except Exception, e:
                    Log.warning("Trouble formatting logs", e)
                    # SWALLOW ERROR, GOT TO KEEP RUNNING
            try:
                if DEBUG_LOGGING and please_stop:
                    sys.stdout.write("Call to appender with " + str(len(lines)) + " lines\n")
                appender(u"\n".join(lines) + u"\n")
                if DEBUG_LOGGING and please_stop:
                    sys.stdout.write("Done call to appender with " + str(len(lines)) + " lines\n")
            except Exception, e:
                # CAN NOT USE Log HERE; WRITE DIRECTLY TO stderr
                sys.stderr.write("Trouble with appender: " + str(e.message) + "\n")
def quote_sql(self, value, param=None):
    """
    USED TO EXPAND THE PARAMETERS TO THE SQL() OBJECT
    """
    try:
        if isinstance(value, SQL):
            if not param:
                return value
            param = {k: self.quote_sql(v) for k, v in param.items()}
            # NOTE(review): passes the SQL object itself to expand_template,
            # while quote_value passes value.template -- confirm expand_template
            # accepts an SQL instance here
            return expand_template(value, param)
        elif isinstance(value, basestring):
            # ALREADY SQL TEXT; PASS THROUGH UNQUOTED
            return value
        elif isinstance(value, Mapping):
            return self.db.literal(json_encode(value))
        elif hasattr(value, '__iter__'):
            # SEQUENCES BECOME A PARENTHESIZED, COMMA-SEPARATED LIST
            return "(" + ",".join([self.quote_sql(vv) for vv in value]) + ")"
        else:
            return unicode(value)
    except Exception, e:
        Log.error("problem quoting SQL", e)
def query(self, sql, param=None):
    """
    RETURN RESULTS IN [row_num][column] GRID
    """
    self._execute_backlog()
    try:
        old_cursor = self.cursor
        if not old_cursor:  # ALLOW NON-TRANSACTIONAL READS
            # FIRST CURSOR ONLY PINS THE SESSION TIME ZONE TO UTC
            self.cursor = self.db.cursor()
            self.cursor.execute("SET TIME_ZONE='+00:00'")
            self.cursor.close()
            self.cursor = self.db.cursor()

        if param:
            sql = expand_template(sql, self.quote_param(param))
        sql = self.preamble + outdent(sql)
        if self.debug:
            Log.note("Execute SQL:\n{{sql}}", sql=indent(sql))

        self.cursor.execute(sql)
        columns = [
            utf8_to_unicode(d[0])
            for d in coalesce(self.cursor.description, [])
        ]
        fixed = [[utf8_to_unicode(c) for c in row] for row in self.cursor]
        result = convert.table2list(columns, fixed)

        if not old_cursor:  # CLEANUP AFTER NON-TRANSACTIONAL READS
            self.cursor.close()
            self.cursor = None

        return result
    except Exception, e:
        if isinstance(
                e, InterfaceError) or e.message.find("InterfaceError") >= 0:
            Log.error("Did you close the db connection?", e)
        Log.error("Problem executing SQL:\n{{sql|indent}}",
                  sql=sql,
                  cause=e,
                  stack_depth=1)
def quote_sql(self, value, param=None):
    """
    USED TO EXPAND THE PARAMETERS TO THE SQL() OBJECT
    """
    try:
        if isinstance(value, SQL):
            if not param:
                return value
            param = {k: self.quote_sql(v) for k, v in param.items()}
            # NOTE(review): passes the SQL object itself to expand_template,
            # while quote_value passes value.template -- confirm expand_template
            # accepts an SQL instance here
            return expand_template(value, param)
        elif isinstance(value, basestring):
            # ALREADY SQL TEXT; PASS THROUGH UNQUOTED
            return value
        elif isinstance(value, Mapping):
            return self.db.literal(json_encode(value))
        elif hasattr(value, '__iter__'):
            # SEQUENCES BECOME A PARENTHESIZED, COMMA-SEPARATED LIST
            return "(" + ",".join([self.quote_sql(vv) for vv in value]) + ")"
        else:
            return unicode(value)
    except Exception, e:
        Log.error("problem quoting SQL", e)
def execute_sql(
    host,
    username,
    password,
    sql,
    schema=None,
    param=None,
    settings=None
):
    """EXECUTE MANY LINES OF SQL (FROM SQLDUMP FILE, MAYBE?)"""
    settings.schema = coalesce(settings.schema, settings.database)

    if param:
        # BORROW A CONNECTION ONLY TO QUOTE THE PARAMETERS
        with MySQL(settings) as temp:
            sql = expand_template(sql, temp.quote_param(param))

    # We have no way to execute an entire SQL file in bulk, so we
    # have to shell out to the commandline client.
    # NOTE(review): -p<password> exposes the password on the process
    # command line -- consider a defaults file instead
    args = [
        "mysql",
        "-h{0}".format(settings.host),
        "-u{0}".format(settings.username),
        "-p{0}".format(settings.password)
    ]
    if settings.schema:
        args.append("{0}".format(settings.schema))

    try:
        proc = subprocess.Popen(
            args,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            bufsize=-1
        )
        if isinstance(sql, unicode):
            sql = sql.encode("utf8")
        (output, _) = proc.communicate(sql)
    except Exception, e:
        Log.error("Can not call \"mysql\"", e)
def time_delta_pusher(please_stop, appender, queue, interval):
    """
    appender - THE FUNCTION THAT ACCEPTS A STRING
    queue - FILLED WITH LOG ENTRIES {"template":template, "params":params} TO WRITE
    interval - timedelta
    USE IN A THREAD TO BATCH LOGS BY TIME INTERVAL
    """
    if not isinstance(interval, timedelta):
        Log.error("Expecting interval to be a timedelta")

    next_run = datetime.utcnow() + interval

    while not please_stop:
        Thread.sleep(till=next_run)
        next_run = datetime.utcnow() + interval
        logs = queue.pop_all()
        if logs:
            lines = []
            for log in logs:
                try:
                    if log is Thread.STOP:
                        please_stop.go()
                        # FORCE THE NEXT ITERATION TO RUN IMMEDIATELY
                        next_run = datetime.utcnow()
                    else:
                        expanded = expand_template(log.get("template"), log.get("params"))
                        lines.append(expanded)
                except Exception, e:
                    Log.warning("Trouble formatting logs", e)
                    # SWALLOW ERROR, GOT TO KEEP RUNNING
            try:
                if DEBUG_LOGGING and please_stop:
                    sys.stdout.write("Call to appender with " + str(len(lines)) + " lines\n")
                appender(u"\n".join(lines) + u"\n")
                if DEBUG_LOGGING and please_stop:
                    sys.stdout.write("Done call to appender with " + str(len(lines)) + " lines\n")
            except Exception, e:
                # CAN NOT USE Log HERE; WRITE DIRECTLY TO stderr
                sys.stderr.write("Trouble with appender: " + str(e.message) + "\n")
def qb_expression_to_ruby(expr):
    """
    Translate a qb expression tree into equivalent Ruby (ES script) source.

    Literals map to Ruby literals; keywords to doc[...] lookups; operator
    dicts are dispatched through the multi/binary/unary/complex operator
    tables in that order.
    """
    if expr == None:
        return "nil"
    elif Math.is_number(expr):
        return unicode(expr)
    elif is_keyword(expr):
        return "doc[" + convert.string2quote(expr) + "].value"
    elif isinstance(expr, basestring):
        Log.error("{{name|quote}} is not a valid variable name", name=expr)
    elif isinstance(expr, CODE):
        # RAW CODE PASSES THROUGH UNTOUCHED
        return expr.code
    elif isinstance(expr, Date):
        return unicode(expr.unix)
    elif expr is True:
        return "true"
    elif expr is False:
        return "false"

    op, term = expr.items()[0]

    mop = ruby_multi_operators.get(op)
    if mop:
        if isinstance(term, list):
            if not term:
                return mop[1]  # RETURN DEFAULT
            else:
                output = mop[0].join(["(" + qb_expression_to_ruby(t) + ")" for t in term])
                return output
        elif isinstance(term, Mapping):
            a, b = term.items()[0]
            output = "(" + qb_expression_to_ruby(a) + ")" + mop[0] + "(" + qb_expression_to_ruby(b) + ")"
            return output
        else:
            # BUG FIX: original discarded this result (no return), falling
            # through to the binary-operator lookup below
            return qb_expression_to_ruby(term)

    bop = ruby_binary_operators.get(op)
    if bop:
        if isinstance(term, list):
            output = bop.join(["(" + qb_expression_to_ruby(t) + ")" for t in term])
            return output
        elif isinstance(term, Mapping):
            if op == "eq":
                # eq CAN ACCEPT A WHOLE OBJECT OF key:value PAIRS TO COMPARE
                output = " and ".join("(" + qb_expression_to_ruby(a) + ")" + bop + "(" + qb_expression_to_ruby(b) + ")" for a, b in term.items())
                return output
            else:
                a, b = term.items()[0]
                output = "(" + qb_expression_to_ruby(a) + ")" + bop + "(" + qb_expression_to_ruby(b) + ")"
                return output
        else:
            Log.error("Expecting binary term")

    uop = ruby_unary_operators.get(op)
    if uop:
        output = expand_template(uop, {"term": qb_expression_to_ruby(term)})
        return output

    cop = complex_operators.get(op)
    if cop:
        output = cop(term).to_ruby()
        return output

    Log.error("`{{op}}` is not a recognized operation", op= op)
def note(cls, template, params=None, stack_depth=0):
    """Forward a NOTE-level message to the wrapped moz_logger as a debug record.

    stack_depth is accepted for interface compatibility; it is not used here.
    """
    text = expand_template(template, params)
    cls.moz_logger.debug(text)
def assertAlmostEqualValue(test, expected, digits=None, places=None, msg=None, delta=None):
    """
    Snagged from unittest/case.py, then modified (Aug2014)
    """
    if expected == None:
        # None HAS NO EXPECTATIONS
        return
    if test == expected:
        # SHORTCUT: EXACT MATCH
        return

    if not Math.is_number(expected):
        # SOME SPECIAL CASES, EXPECTING EMPTY CONTAINERS IS THE SAME AS EXPECTING NULL
        if isinstance(expected, list) and len(expected) == 0 and test == None:
            return
        if isinstance(expected, Mapping) and not expected.keys() and test == None:
            return
        if test != expected:
            raise AssertionError(expand_template("{{test}} != {{expected}}", locals()))
        return

    # AT MOST ONE TOLERANCE PARAMETER MAY BE GIVEN
    if sum(1 for p in (digits, places, delta) if p != None) > 1:
        raise TypeError("specify only one of digits, places or delta")

    if digits is not None:
        with suppress_exception:
            if Math.log10(abs(test - expected)) < digits:
                return
        failure = expand_template("{{test}} != {{expected}} within {{digits}} decimal places", locals())
    elif delta is not None:
        if abs(test - expected) <= delta:
            return
        failure = expand_template("{{test}} != {{expected}} within {{delta}} delta", locals())
    else:
        if places is None:
            places = 15
        with suppress_exception:
            # COMPARE MAGNITUDE OF DIFFERENCE AGAINST SIGNIFICANT DIGITS OF test
            if Math.log10(abs(test - expected)) < Math.ceiling(Math.log10(abs(test))) - places:
                return
        failure = expand_template("{{test|json}} != {{expected|json}} within {{places}} places", locals())

    raise AssertionError(coalesce(msg, "") + ": (" + failure + ")")
def message(self):
    """Render this record's template with its stored params into the final text."""
    template, params = self.template, self.params
    return expand_template(template, params)
def message(self):
    """Expand the stored template with its parameters and return the result."""
    rendered = expand_template(self.template, self.params)
    return rendered
def _grouped(self, query, stacked=False): select = listwrap(query.select) # RETURN SINGLE OBJECT WITH AGGREGATES for s in select: if s.aggregate not in aggregates: Log.error( "Expecting all columns to have an aggregate: {{select}}", select=s) selects = DictList() groups = DictList() edges = query.edges for e in edges: if e.domain.type != "default": Log.error("domain of type {{type}} not supported, yet", type=e.domain.type) groups.append(e.value) selects.append(e.value + " AS " + self.db.quote_column(e.name)) for s in select: selects.append( aggregates[s.aggregate].replace("{{code}}", s.value) + " AS " + self.db.quote_column(s.name)) sql = expand_template( """ SELECT {{selects}} FROM {{table}} {{where}} GROUP BY {{groups}} """, { "selects": SQL(",\n".join(selects)), "groups": SQL(",\n".join(groups)), "table": self._subquery(query["from"])[0], "where": self._where2sql(query.where) }) def post_stacked(sql): # RETURN IN THE USUAL DATABASE RESULT SET FORMAT return self.db.query(sql) def post(sql): # FIND OUT THE default DOMAIN SIZES result = self.db.column_query(sql) num_edges = len(edges) for e, edge in enumerate(edges): domain = edge.domain if domain.type == "default": domain.type = "set" parts = set(result[e]) domain.partitions = [{ "index": i, "value": p } for i, p in enumerate(parts)] domain.map = {p: i for i, p in enumerate(parts)} else: Log.error("Do not know what to do here, yet") # FILL THE DATA CUBE maps = [(unwrap(e.domain.map), result[i]) for i, e in enumerate(edges)] cubes = DictList() for c, s in enumerate(select): data = Matrix(*[ len(e.domain.partitions) + (1 if e.allow_nulls else 0) for e in edges ]) for rownum, value in enumerate(result[c + num_edges]): coord = [m[r[rownum]] for m, r in maps] data[coord] = value cubes.append(data) if isinstance(query.select, list): return cubes else: return cubes[0] return sql, post if not stacked else post_stacked
def warning(cls, template, params=None, *args, **kwargs):
    """Emit a WARNING-level record through the wrapped moz_logger.

    Extra positional/keyword arguments are accepted for interface
    compatibility but are not forwarded.
    """
    text = expand_template(template, params)
    cls.moz_logger.warn(text)
def write(self, template, params):
    """Expand the template and write it, newline-terminated, to the stream."""
    line = expand_template(template, params)
    if isinstance(line, unicode):
        # STREAM EXPECTS BYTES; ENCODE UNICODE OUTPUT
        line = line.encode('utf8')
    self.stream.write(line + b"\n")
def _setop(self, query):
    """
    NO AGGREGATION, SIMPLE LIST COMPREHENSION

    RETURN (sql, post_processor) PAIR.
    """
    if isinstance(query.select, list):
        # RETURN BORING RESULT SET
        selects = DictList()
        for s in listwrap(query.select):
            if isinstance(s.value, Mapping):
                # FIX: .items IS A METHOD; IT MUST BE CALLED
                for k, v in s.value.items():
                    selects.append(v + " AS " + self.db.quote_column(s.name + "." + k))
            elif isinstance(s.value, list):
                # FIX: WAS `if` (SO Mappings ALSO HIT THE else BELOW) AND
                # APPENDED THE WHOLE LIST (s.value) INSTEAD OF EACH ELEMENT (ss)
                for i, ss in enumerate(s.value):
                    selects.append(ss + " AS " + self.db.quote_column(s.name + "," + str(i)))
            else:
                selects.append(s.value + " AS " + self.db.quote_column(s.name))

        sql = expand_template("""
            SELECT
                {{selects}}
            FROM
                {{table}}
            {{where}}
            {{sort}}
            {{limit}}
        """, {
            "selects": SQL(",\n".join(selects)),
            "table": self._subquery(query["from"])[0],
            "where": self._where2sql(query.where),
            "limit": self._limit2sql(query.limit),
            "sort": self._sort2sql(query.sort)
        })

        def post_process(sql):
            result = self.db.query(sql)
            for s in listwrap(query.select):
                if isinstance(s.value, Mapping):
                    # RE-ASSEMBLE THE FLATTENED "name.key" COLUMNS INTO A DICT
                    for r in result:
                        r[s.name] = {}
                        # FIX: ITERATE (key, value) PAIRS, NOT THE MAPPING ITSELF
                        for k, v in s.value.items():
                            r[s.name][k] = r[s.name + "." + k]
                            r[s.name + "." + k] = None
                elif isinstance(s.value, list):
                    # REWRITE AS TUPLE
                    for r in result:
                        r[s.name] = tuple(r[s.name + "," + str(i)] for i, ss in enumerate(s.value))
                        for i, ss in enumerate(s.value):
                            r[s.name + "," + str(i)] = None
            expand_json(result)
            return result

        return sql, post_process  # RETURN BORING RESULT SET
    else:
        # RETURN LIST OF VALUES
        if query.select.value == ".":
            select = "*"
        else:
            name = query.select.name
            select = query.select.value + " AS " + self.db.quote_column(name)

        sql = expand_template("""
            SELECT
                {{selects}}
            FROM
                {{table}}
            {{where}}
            {{sort}}
            {{limit}}
        """, {
            "selects": SQL(select),
            "table": self._subquery(query["from"])[0],
            "where": self._where2sql(query.where),
            "limit": self._limit2sql(query.limit),
            "sort": self._sort2sql(query.sort)
        })

        if query.select.value == ".":
            def post(sql):
                result = self.db.query(sql)
                expand_json(result)
                return result

            return sql, post
        else:
            return sql, lambda sql: [r[name] for r in self.db.query(sql)]  # RETURNING LIST OF VALUES
def unexpected(cls, template, params=None, cause=None):
    """Log an unexpected condition at ERROR level via the wrapped moz_logger.

    cause is accepted for interface compatibility; it is not forwarded here.
    """
    text = expand_template(template, params)
    cls.moz_logger.error(text)
def format_trace(tbs, start=0):
    """Render traceback entries (from index *start* onward) as one trace string."""
    line_template = 'File "{{file}}", line {{line}}, in {{method}}\n'
    return "".join(expand_template(line_template, d) for d in tbs[start:])
def println(cls, template, params=None):
    """Print-style output: route the expanded template to moz_logger at debug level."""
    text = expand_template(template, params)
    cls.moz_logger.debug(text)
def write(self, template, params):
    """Expand the log template and push the resulting line onto the queue."""
    line = expand_template(template, params)
    self.queue.add(line)
def sql(self):
    """Expand this object's SQL template with its stored parameters."""
    template, param = self.template, self.param
    return expand_template(template, param)
def write(self, template, params):
    """Append the expanded template to the file, serialized by file_lock."""
    line = expand_template(template, params)
    with self.file_lock:
        self.file.append(line)
def error(cls, template, params=None, cause=None, stack_depth=0):
    """Log at ERROR level via moz_logger, then delegate to the original handler."""
    text = expand_template(template, params)
    cls.moz_logger.error(text)
    cls._old_error(template, params, cause, stack_depth)