def _load_functions(self):
    """
    Best-effort load of the SQLite extension-function library (SQRT, etc.).

    Failure is non-fatal: the database still works without the extension, so
    we warn once (module-global flag) and carry on.
    """
    global _load_extension_warning_sent

    lib_dir = File.new_instance(sys.modules[__name__].__file__, "../..")
    full_path = File.new_instance(
        lib_dir, "vendor/sqlite/libsqlitefunctions.so").abspath
    try:
        caller_frame = get_stacktrace(0)[0]
        if self.upgrade:
            # windows wants the .so suffix; other platforms use the bare name
            rel_path = (
                "../../vendor/sqlite/libsqlitefunctions.so"
                if os.name == "nt"
                else "../../vendor/sqlite/libsqlitefunctions"
            )
            full_path = File.new_instance(caller_frame["file"], rel_path).abspath

        self.db.enable_load_extension(True)
        self.db.execute(
            text(SQL_SELECT + "load_extension" + sql_iso(quote_value(full_path))))
    except Exception as cause:
        if not _load_extension_warning_sent:
            _load_extension_warning_sent = True
            Log.warning(
                "Could not load {{file}}, doing without. (no SQRT for you!)",
                file=full_path,
                cause=cause,
            )
def __exit__(self, exc_type, exc_val, exc_tb):
    """Delete this file on a background thread so the exit never blocks."""
    from mo_threads import Thread

    Thread.run(
        "delete file " + self.name,
        delete_daemon,
        file=self,
        caller_stack=get_stacktrace(1),
    )
def query(self, command):
    """
    WILL BLOCK CALLING THREAD UNTIL THE command IS COMPLETED
    :param command: COMMAND FOR SQLITE
    :return: list OF RESULTS
    """
    if self.closed:
        Log.error("database is closed")

    done = _allocate_lock()
    done.acquire()
    output = Data()

    if self.get_trace:
        stack = get_stacktrace(1)
        # refuse a bare query while this thread already holds an open transaction
        this_thread = Thread.current()
        with self.locker:
            for txn in self.available_transactions:
                if txn.thread is this_thread:
                    Log.error(DOUBLE_TRANSACTION_ERROR)
    else:
        stack = None

    self.queue.add(CommandItem(command, output, done, stack, None))
    done.acquire()  # blocks until the worker thread releases it

    if output.exception:
        Log.error("Problem with Sqlite call", cause=output.exception)
    return output
def error(
        cls,
        template,  # human readable template
        default_params={},  # parameters for template (read-only; never mutated here)
        cause=None,  # possible cause
        stack_depth=0,
        **more_params):
    """
    raise an exception with a trace for the cause too

    :param template: *string* human readable string with placeholders for parameters
    :param default_params: *dict* parameters to fill in template
    :param cause: *Exception* for chaining
    :param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
    :param more_params: *any more parameters (which will overwrite default_params)
    :return:
    """
    if not is_text(template):
        Log.error("Log.error was expecting a unicode template")

    # tolerate an exception (or list of them) passed in the params slot
    if default_params and isinstance(listwrap(default_params)[0], BaseException):
        cause = default_params
        default_params = {}

    params = Data(dict(default_params, **more_params))

    if cause is None:  # FIX: identity comparison, was `cause == None`
        causes = None
    elif is_list(cause):
        causes = []
        for c in listwrap(cause):  # CAN NOT USE LIST-COMPREHENSION IN PYTHON3 (EXTRA STACK DEPTH FROM THE IN-LINED GENERATOR)
            causes.append(Except.wrap(c, stack_depth=1))
        causes = FlatList(causes)
    elif isinstance(cause, BaseException):
        causes = Except.wrap(cause, stack_depth=1)
    else:
        causes = None
        Log.error("can only accept Exception, or list of exceptions")

    trace = exceptions.get_stacktrace(stack_depth + 1)
    # NOTE: removed dead `add_to_trace` flag — it was hard-coded False, so the
    # `cause[0].trace.extend(trace[1:])` branch could never run

    e = Except(
        context=exceptions.ERROR,
        template=template,
        params=params,
        cause=causes,
        trace=trace,
    )
    raise_from_none(e)
def execute_tests(self, subtest, tjson=False, places=6):
    """Name the subtest after the calling test method, then fill the container and run queries."""
    subtest = wrap(subtest)
    # one frame up the stack is the test method that invoked us
    caller = get_stacktrace()[1]
    subtest.name = caller['method']

    if subtest.disable:
        return

    self.fill_container(subtest, tjson=tjson)
    self.send_queries(subtest)
def test_read_home(self):
    """
    Verify mo_json_config can read a config file addressed via the home directory.

    FIX: the `mo_json_config.get` call used to sit outside the try/finally, so
    a failure there leaked ~/___test_file.json; the try now covers it so the
    temp file is always deleted.
    """
    file = "~/___test_file.json"
    # copy a known fixture, located relative to this source file, into $HOME
    source = File.new_instance(
        get_stacktrace(0)[0]["file"], "../resources/simple.json")
    File.copy(File(source), File(file))
    try:
        content = mo_json_config.get("file:///" + file)
        self.assertEqual(content, {"test_key": "test_value"})
    finally:
        File(file).delete()
def query(self, query):
    """Submit `query` to the database worker and block until it completes."""
    if self.db.closed:
        Log.error("database is closed")

    done = _allocate_lock()
    done.acquire()
    output = Data()
    stack = get_stacktrace(1) if self.db.get_trace else None

    self.db.queue.add(CommandItem(query, output, done, stack, self))
    done.acquire()  # released by the worker when the command finishes

    if output.exception:
        Log.error("Problem with Sqlite call", cause=output.exception)
    return output
def warning(cls,
            template,
            default_params={},
            cause=None,
            stack_depth=0,
            log_context=None,
            **more_params):
    """
    :param template: *string* human readable string with placeholders for parameters
    :param default_params: *dict* parameters to fill in template
    :param cause: *Exception* for chaining
    :param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
    :param log_context: *dict* extra key:value pairs for your convenience
    :param more_params: *any more parameters (which will overwrite default_params)
    :return:
    """
    timestamp = datetime.utcnow()
    if not is_text(template):
        Log.error("Log.warning was expecting a unicode template")

    # tolerate an exception passed in the params slot
    if isinstance(default_params, BaseException):
        cause = default_params
        default_params = {}

    if "values" in more_params:
        Log.error("Can not handle a logging parameter by name `values`")

    params = Data(dict(default_params, **more_params))
    cause = unwraplist([Except.wrap(c) for c in listwrap(cause)])
    trace = exceptions.get_stacktrace(stack_depth + 1)

    e = Except(
        exceptions.WARNING,
        template=template,
        params=params,
        cause=cause,
        trace=trace,
    )
    Log._annotate(e, timestamp, stack_depth + 1)
def execute(self, command):
    """Queue `command` on this transaction; does not wait for it to run."""
    if self.end_of_life:
        Log.error("Transaction is dead")

    stack = get_stacktrace(1) if self.db.get_trace else None
    with self.locker:
        self.todo.append(CommandItem(command, None, None, stack, self))
def execute_tests(self, subtest, typed=True, places=6):
    """Name the subtest after the calling test method, then fill the container and run queries."""
    subtest = wrap(subtest)
    # one frame up the stack is the test method that invoked us
    caller = get_stacktrace()[1]
    subtest.name = text(caller['method'])

    self.fill_container(subtest, typed=typed)
    self.send_queries(subtest, places=places)
def __init__(self, *args, **kwargs):
    """Locate the test resources directory relative to this source file."""
    FuzzyTestCase.__init__(self, *args, **kwargs)
    this_file = get_stacktrace(0)[0]["file"]
    resources_dir = File.new_instance(this_file, "../resources")
    self.resources = "file:///" + resources_dir.abspath
def test_2edge_and_sort(self):
    """
    Two sorted edges: list and table formats must honor the sort; the cube
    format can not represent sorted edges, so that query is expected to fail.
    """
    test = {
        "data": [
            {"a": "c", "b": 0, "value": 1},
            {"a": "c", "b": 0, "value": 3},
            {"a": "c", "b": 1, "value": 4},
            {"a": "c", "b": 1, "value": 6},
            {"a": "a", "b": 1, "value": 7},
            {"a": "a", "value": 20},
            {"b": 1, "value": 21},
            {"value": 22},
            {"a": "a", "b": 0, "value": 8},
            {"a": "a", "b": 0, "value": 9},
            {"a": "a", "b": 1, "value": 10},
            {"a": "a", "b": 1, "value": 11},
        ],
        "query": {
            "from": TEST_TABLE,
            "edges": ["a", "b"],
            "sort": [{"a": "desc"}, {"b": "desc"}],
        },
        "expecting_list": {
            "meta": {"format": "list"},
            "data": [
                {"a": "c", "b": 1, "count": 2},
                {"a": "c", "b": 0, "count": 2},
                {"a": "c", "count": 0},
                {"a": "a", "b": 1, "count": 3},
                {"a": "a", "b": 0, "count": 2},
                {"a": "a", "count": 1},
                {"b": 1, "count": 1},
                {"b": 0, "count": 0},
                {"count": 1},
            ],
        },
        "expecting_table": {
            "meta": {"format": "table"},
            "header": ["a", "b", "count"],
            "data": [
                ["c", 1, 2],
                ["c", 0, 2],
                ["c", NULL, 0],
                ["a", 1, 3],
                ["a", 0, 2],
                ["a", NULL, 1],
                [NULL, 1, 1],
                [NULL, 0, 0],
                [NULL, NULL, 1],
            ],
        },
        "expecting_cube": {
            "meta": {"format": "cube"},
            "edges": [
                {
                    "name": "b",
                    "domain": {
                        "type": "set",
                        "partitions": [{"value": 0}, {"value": 1}],
                    },
                },
                {
                    "name": "a",
                    "domain": {
                        "type": "set",
                        "partitions": [{"value": "a"}, {"value": "c"}],
                    },
                },
            ],
            "data": {"count": [[2, 2, 0], [3, 2, 1], [1, 0, 1]]},
        },
    }

    subtest = wrap(test)
    subtest.name = get_stacktrace()[0]['method']

    self.utils.fill_container(test)
    test = wrap(test)

    self.utils.send_queries({
        "query": test.query,
        "expecting_list": test.expecting_list
    })
    self.utils.send_queries({
        "query": test.query,
        "expecting_table": test.expecting_table
    })

    # BUG FIX: Log.error() raises, and it previously sat INSIDE this try, so
    # the `except Exception: pass` swallowed the failure report and the test
    # could never fail even when the cube query wrongly succeeded.
    try:
        self.utils.send_queries({
            "query": test.query,
            "expecting_cube": test.expecting_cube
        })
    except Exception:
        return  # expected: sorting edges is not supported by the cube format
    Log.error("expecting error regarding sorting edges")
def __exit__(self, exc_type, exc_val, exc_tb):
    """Delete this directory on a background thread so the exit never blocks."""
    Thread.run(
        "delete dir " + self.name,
        delete_daemon,
        file=self,
        caller_stack=get_stacktrace(1),
    )