Example #1
    def error(
        cls,
        template,  # human readable template
        default_params={},  # parameters for template
        cause=None,  # plausible cause
        stack_depth=0,
        **more_params
    ):
        """
        raise an exception with a trace for the cause too

        :param template: *string* human readable string with placeholders for parameters
        :param default_params: *dict* parameters to fill in template
        :param cause: *Exception* for chaining
        :param stack_depth:  *int* how many calls you want popped off the stack to report the *true* caller
        :param more_params: *any* more parameters (which will overwrite default_params)
        :return:
        """
        if default_params and isinstance(listwrap(default_params)[0], BaseException):
            cause = default_params
            default_params = {}

        params = dict(unwrap(default_params), **more_params)

        add_to_trace = False
        cause = wrap(unwraplist([Except.wrap(c, stack_depth=1) for c in listwrap(cause)]))
        trace = exceptions.extract_stack(stack_depth + 1)

        if add_to_trace:
            cause[0].trace.extend(trace[1:])

        e = Except(exceptions.ERROR, template, params, cause, trace)
        raise e
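
A minimal usage sketch of the chaining described above (the caller and filename are hypothetical; the imports follow the pyLibrary.debugs paths used elsewhere in these examples):

    from pyLibrary.debugs.exceptions import Except
    from pyLibrary.debugs.logs import Log

    def read_config(filename):
        try:
            return open(filename, "rb").read()
        except Exception, e:
            e = Except.wrap(e)  # capture the original trace
            # placeholders are filled from keyword parameters; cause= chains the wrapped exception
            Log.error("Can not read config {{filename}}", filename=filename, cause=e)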
Example #2
    def unexpected(cls,
                   template,
                   default_params={},
                   cause=None,
                   stack_depth=0,
                   log_context=None,
                   **more_params):
        """
        :param template: *string* human readable string with placeholders for parameters
        :param default_params: *dict* parameters to fill in template
        :param cause: *Exception* for chaining
        :param stack_depth:  *int* how many calls you want popped off the stack to report the *true* caller
        :param log_context: *dict* extra key:value pairs for your convenience
        :param more_params: *any* more parameters (which will overwrite default_params)
        :return:
        """
        if isinstance(default_params, BaseException):
            cause = default_params
            default_params = {}

        params = dict(unwrap(default_params), **more_params)

        if cause and not isinstance(cause, Except):
            cause = Except(exceptions.UNEXPECTED,
                           unicode(cause),
                           trace=exceptions._extract_traceback(0))

        trace = exceptions.extract_stack(1)
        e = Except(exceptions.UNEXPECTED, template, params, cause, trace)
        Log.note("{{error}}",
                 error=e,
                 log_context=set_default({"context": exceptions.WARNING},
                                         log_context),
                 stack_depth=stack_depth + 1)
Example #3
    def warning(cls,
                template,
                default_params={},
                cause=None,
                stack_depth=0,
                log_context=None,
                **more_params):
        """
        :param template: *string* human readable string with placeholders for parameters
        :param default_params: *dict* parameters to fill in template
        :param cause: *Exception* for chaining
        :param stack_depth:  *int* how many calls you want popped off the stack to report the *true* caller
        :param log_context: *dict* extra key:value pairs for your convenience
        :param more_params: *any* more parameters (which will overwrite default_params)
        :return:
        """
        if isinstance(default_params, BaseException):
            cause = default_params
            default_params = {}

        if "values" in more_params.keys():
            Log.error("Can not handle a logging parameter by name `values`")
        params = dict(unwrap(default_params), **more_params)
        cause = unwraplist([Except.wrap(c) for c in listwrap(cause)])
        trace = exceptions.extract_stack(stack_depth + 1)

        e = Except(exceptions.WARNING, template, params, cause, trace)
        Log.note("{{error|unicode}}",
                 error=e,
                 log_context=set_default({"context": exceptions.WARNING},
                                         log_context),
                 stack_depth=stack_depth + 1)
Example #4
    def _worker(self, please_stop):
        if Sqlite.canonical:
            self.db = Sqlite.canonical
        else:
            self.db = sqlite3.connect(':memory:')

        try:
            while not please_stop:
                if DEBUG:
                    Log.note("begin pop")
                command, result, signal, trace = self.queue.pop()
                if DEBUG:
                    Log.note("done pop")

                if DEBUG:
                    Log.note("Running command\n{{command|indent}}",
                             command=command)
                with Timer("Run command", debug=DEBUG):
                    if signal is not None:
                        try:
                            curr = self.db.execute(command)
                            result.meta.format = "table"
                            result.data = curr.fetchall()
                        except Exception, e:
                            e = Except.wrap(e)
                            result.exception = Except(
                                ERROR,
                                "Problem with\n{{command|indent}}",
                                command=command,
                                cause=e)
                        finally:
                            signal.go()
Example #5
def utf82unicode(value):
    """
    WITH EXPLANATION FOR FAILURE
    """
    try:
        return value.decode("utf8")
    except Exception, e:
        if not _Log:
            _late_import()

        if not isinstance(value, basestring):
            _Log.error(
                "Can not _convert {{type}} to unicode because it's not a string",
                type=type(value).__name__)

        e = _Except.wrap(e)
        for i, c in enumerate(value):
            try:
                c.decode("utf8")
            except Exception, f:
                _Log.error(
                    "Can not _convert charcode {{c}} in string  index {{i}}",
                    i=i,
                    c=ord(c),
                    cause=[e, _Except.wrap(f)])
Example #6
    def error(
        cls,
        template,  # human readable template
        default_params={},  # parameters for template
        cause=None,  # plausible cause
        stack_depth=0,
        **more_params
    ):
        """
        raise an exception with a trace for the cause too
        """
        if default_params and isinstance(listwrap(default_params)[0], BaseException):
            cause = default_params
            default_params = {}

        params = dict(unwrap(default_params), **more_params)

        add_to_trace = False
        cause = unwraplist([Except.wrap(c, stack_depth=1) for c in listwrap(cause)])
        trace = exceptions.extract_stack(stack_depth + 1)

        if add_to_trace:
            cause[0].trace.extend(trace[1:])

        e = Except(exceptions.ERROR, template, params, cause, trace)
        raise e
Example #7
    def warning(
        cls,
        template,
        default_params={},
        cause=None,
        stack_depth=0,
        log_context=None,
        **more_params
    ):
        """
        :param template: *string* human readable string with placeholders for parameters
        :param default_params: *dict* parameters to fill in template
        :param cause: *Exception* for chaining
        :param stack_depth:  *int* how many calls you want popped off the stack to report the *true* caller
        :param log_context: *dict* extra key:value pairs for your convenience
        :param more_params: *any* more parameters (which will overwrite default_params)
        :return:
        """
        if isinstance(default_params, BaseException):
            cause = default_params
            default_params = {}

        if "values" in more_params.keys():
            Log.error("Can not handle a logging parameter by name `values`")
        params = dict(unwrap(default_params), **more_params)
        cause = unwraplist([Except.wrap(c) for c in listwrap(cause)])
        trace = exceptions.extract_stack(stack_depth + 1)

        e = Except(exceptions.WARNING, template, params, cause, trace)
        Log.note(
            "{{error|unicode}}",
            error=e,
            log_context=set_default({"context": exceptions.WARNING}, log_context),
            stack_depth=stack_depth + 1
        )
Example #8
def get_raw_json(path):
    active_data_timer = Timer("total duration")
    body = flask.request.get_data()
    try:
        with active_data_timer:
            args = wrap(Dict(**flask.request.args))
            limit = args.limit if args.limit else 10
            args.limit = None
            frum = wrap_from(path)
            result = jx.run(
                {
                    "from": path,
                    "where": {
                        "eq": args
                    },
                    "limit": limit,
                    "format": "list"
                }, frum)

            if isinstance(
                    result, Container
            ):  #TODO: REMOVE THIS CHECK, jx SHOULD ALWAYS RETURN Containers
                result = result.format("list")

        result.meta.active_data_response_time = active_data_timer.duration

        response_data = convert.unicode2utf8(
            convert.value2json(result.data, pretty=True))
        Log.note("Response is {{num}} bytes", num=len(response_data))
        return Response(response_data, status=200)
    except Exception, e:
        e = Except.wrap(e)
        return _send_error(active_data_timer, body, e)
Example #9
    def error(
        cls,
        template,  # human readable template
        default_params={},  # parameters for template
        cause=None,  # plausible cause
        stack_depth=0,
        **more_params
    ):
        """
        raise an exception with a trace for the cause too

        :param template: *string* human readable string with placeholders for parameters
        :param default_params: *dict* parameters to fill in template
        :param cause: *Exception* for chaining
        :param stack_depth:  *int* how many calls you want popped off the stack to report the *true* caller
        :param more_params: *any* more parameters (which will overwrite default_params)
        :return:
        """
        if default_params and isinstance(listwrap(default_params)[0], BaseException):
            cause = default_params
            default_params = {}

        params = dict(unwrap(default_params), **more_params)

        add_to_trace = False
        cause = wrap(unwraplist([Except.wrap(c, stack_depth=1) for c in listwrap(cause)]))
        trace = exceptions.extract_stack(stack_depth + 1)

        if add_to_trace:
            cause[0].trace.extend(trace[1:])

        e = Except(exceptions.ERROR, template, params, cause, trace)
        raise e
Example #10
    def _worker(self, please_stop):
        if Sqlite.canonical:
            self.db = Sqlite.canonical
        else:
            self.db = sqlite3.connect(':memory:')

        try:
            while not please_stop:
                if DEBUG:
                    Log.note("begin pop")
                command, result, signal, trace = self.queue.pop()
                if DEBUG:
                    Log.note("done pop")

                if DEBUG:
                    Log.note("Running command\n{{command|indent}}", command=command)
                with Timer("Run command", debug=DEBUG):
                    if signal is not None:
                        try:
                            curr = self.db.execute(command)
                            result.meta.format = "table"
                            result.data = curr.fetchall()
                        except Exception, e:
                            e = Except.wrap(e)
                            result.exception = Except(ERROR, "Problem with\n{{command|indent}}", command=command, cause=e)
                        finally:
                            signal.go()
Example #11
 def write(self, template, params):
     try:
         self.queue.add({"template": template, "params": params})
         return self
     except Exception, e:
         e = _Except.wrap(e)
         raise e  # OH NO!
Example #12
    def get_treeherder_job(self):
        try:
            with Timer("Process Request"):
                args = Dict(**flask.request.args)

                # IS THE branch/revision PENDING?

                result = self.get_markup(unwraplist(args.branch),
                                         unwraplist(args.revision),
                                         unwraplist(args.task_id),
                                         unwraplist(args.buildername),
                                         unwraplist(args.timestamp))

                response_data = convert.unicode2utf8(
                    convert.value2json(result))
                return Response(response_data,
                                status=200,
                                headers={
                                    "access-control-allow-origin": "*",
                                    "content-type": "text/plain"
                                })
        except Exception, e:
            e = Except.wrap(e)
            Log.warning("Could not process", cause=e)
            e = e.as_dict()

            return Response(convert.unicode2utf8(convert.value2json(e)),
                            status=400,
                            headers={
                                "access-control-allow-origin": "*",
                                "content-type": "application/json"
                            })
Example #13
def json2value(json_string, params={}, flexible=False, leaves=False):
    """
    :param json_string: THE JSON
    :param params: STANDARD JSON PARAMS
    :param flexible: REMOVE COMMENTS
    :param leaves: ASSUME JSON KEYS ARE DOT-DELIMITED
    :return: Python value
    """
    if isinstance(json_string, str):
        Log.error("only unicode json accepted")

    try:
        if flexible:
            # REMOVE """COMMENTS""", # COMMENTS, //COMMENTS, AND \n \r
            # DERIVED FROM https://github.com/jeads/datasource/blob/master/datasource/bases/BaseHub.py# L58
            json_string = re.sub(r"\"\"\".*?\"\"\"", r"\n", json_string, flags=re.MULTILINE)
            json_string = "\n".join(remove_line_comment(l) for l in json_string.split("\n"))
            # ALLOW DICTIONARY'S NAME:VALUE LIST TO END WITH COMMA
            json_string = re.sub(r",\s*\}", r"}", json_string)
            # ALLOW LISTS TO END WITH COMMA
            json_string = re.sub(r",\s*\]", r"]", json_string)

        if params:
            json_string = expand_template(json_string, params)


        # LOOKUP REFERENCES
        value = wrap(json_decoder(json_string))

        if leaves:
            value = wrap_leaves(value)

        return value

    except Exception, e:
        e = Except.wrap(e)
        if "Expecting '" in e and "' delimiter: line" in e:
            line_index = int(strings.between(e.message, " line ", " column ")) - 1
            column = int(strings.between(e.message, " column ", " ")) - 1
            line = json_string.split("\n")[line_index].replace("\t", " ")
            if column > 20:
                sample = "..." + line[column - 20:]
                pointer = "   " + (" " * 20) + "^"
            else:
                sample = line
                pointer = (" " * column) + "^"

            if len(sample) > 43:
                sample = sample[:43] + "..."

            Log.error("Can not decode JSON at:\n\t" + sample + "\n\t" + pointer + "\n")

        base_str = unicode2utf8(strings.limit(json_string, 1000))
        hexx_str = bytes2hex(base_str, " ")
        try:
            char_str = " " + ("  ".join(c.decode("latin1") if ord(c) >= 32 else ".") for c in base_str)
        except Exception:
            char_str = " "
        Log.error("Can not decode JSON:\n" + char_str + "\n" + hexx_str + "\n", e)
Example #14
def wrap_function(cache_store, func_):
    attr_name = "_cache_for_" + func_.__name__

    if func_.func_code.co_argcount > 0 and func_.func_code.co_varnames[
            0] == "self":
        using_self = True
        func = lambda self, *args: func_(self, *args)
    else:
        using_self = False
        func = lambda self, *args: func_(*args)

    def output(*args):
        with cache_store.locker:
            if using_self:
                self = args[0]
                args = args[1:]
            else:
                self = cache_store

            now = Date.now()
            try:
                _cache = getattr(self, attr_name)
            except Exception, _:
                _cache = {}
                setattr(self, attr_name, _cache)

            if Random.int(100) == 0:
                # REMOVE OLD CACHE
                _cache = {
                    k: v
                    for k, v in _cache.items() if v[0] == None or v[0] > now
                }
                setattr(self, attr_name, _cache)

            timeout, key, value, exception = _cache.get(
                args, (Null, Null, Null, Null))

        if now > timeout:
            value = func(self, *args)
            with cache_store.locker:
                _cache[args] = (now + cache_store.timeout, args, value, None)
            return value

        if value == None:
            if exception == None:
                try:
                    value = func(self, *args)
                    with cache_store.locker:
                        _cache[args] = (now + cache_store.timeout, args, value,
                                        None)
                    return value
                except Exception, e:
                    e = Except.wrap(e)
                    with cache_store.locker:
                        _cache[args] = (now + cache_store.timeout, args, None,
                                        e)
                    raise e
            else:
                raise exception
Example #15
def store_data(path):
    try:
        request = flask.request
        auth = request.headers.get('Authorization')

        if not auth:
            # USE PATTERN MATCHING AUTH
            for c in all_creds:
                if c.path == path:
                    return store_public_data(path, c)
            raise Log.error(
                "No authentication provided.  path={{path}} data.length={{length}}",
                path=path,
                length=len(request.get_data()),
            )

        try:
            receiver = Receiver(
                lookup_credentials,
                auth,
                request.url,
                request.method,
                content=request.get_data(),
                content_type=request.headers['Content-Type'],
                seen_nonce=seen_nonce
            )
        except Exception, e:
            e = Except.wrap(e)
            raise Log.error(
                "Authentication failed.  path={{path}} data.length={{length}}\n{{auth|indent}}",
                path=path,
                length=len(request.get_data()),
                auth=auth,
                cause=e
            )

        permissions = lookup_user(receiver.parsed_header["id"])
        if path not in listwrap(permissions.resources):
            Log.error("{{user}} not allowed access to {{resource}}", user=permissions.hawk.id, resource=path)

        link, id = submit_data(path, permissions, request.json)

        response_content = convert.unicode2utf8(convert.value2json({
            "link": link,
            "etl": {"id": id}
        }))
        receiver.respond(
            content=response_content,
            content_type=RESPONSE_CONTENT_TYPE
        )

        return Response(
            response_content,
            status=200,
            headers={
                b'Server-Authorization': receiver.response_header,
                b'content-type': RESPONSE_CONTENT_TYPE
            }
        )
Example #16
 def write(self, template, params):
     try:
         self.queue.add({"template": template, "params": params})
         return self
     except Exception, e:
         e = _Except.wrap(e)
         sys.stdout.write("IF YOU SEE THIS, IT IS LIKELY YOU FORGOT TO RUN Log.start() FIRST\n")
         raise e  # OH NO!
Example #17
    def fatal(
        cls,
        template,  # human readable template
        default_params={},  # parameters for template
        cause=None,  # plausible cause
        stack_depth=0,
        log_context=None,
        **more_params
    ):
        """
        SEND TO STDERR

        :param template: *string* human readable string with placeholders for parameters
        :param default_params: *dict* parameters to fill in template
        :param cause: *Exception* for chaining
        :param stack_depth:  *int* how many calls you want popped off the stack to report the *true* caller
        :param log_context: *dict* extra key:value pairs for your convenience
        :param more_params: *any* more parameters (which will overwrite default_params)
        :return:
        """
        if default_params and isinstance(listwrap(default_params)[0], BaseException):
            cause = default_params
            default_params = {}

        params = dict(unwrap(default_params), **more_params)

        cause = unwraplist([Except.wrap(c) for c in listwrap(cause)])
        trace = exceptions.extract_stack(stack_depth + 1)

        e = Except(exceptions.ERROR, template, params, cause, trace)
        str_e = unicode(e)

        error_mode = cls.error_mode
        with suppress_exception:
            if not error_mode:
                cls.error_mode = True
                Log.note(
                    "{{error|unicode}}",
                    error=e,
                    log_context=set_default({"context": exceptions.FATAL}, log_context),
                    stack_depth=stack_depth + 1
                )
        cls.error_mode = error_mode

        sys.stderr.write(str_e.encode('utf8'))
Example #18
 def write(self, template, params):
     try:
         self.queue.add({"template": template, "params": params})
         return self
     except Exception, e:
         e = _Except.wrap(e)
         sys.stdout.write(
             b"IF YOU SEE THIS, IT IS LIKELY YOU FORGOT TO RUN Log.start() FIRST\n"
         )
         raise e  # OH NO!
Example #19
 def output(*args, **kwargs):
     while True:
         try:
             return func(*args, **kwargs)
         except Exception, e:
             e = Except.wrap(e)
             if "Request limit exceeded" in e:
                 Log.warning("AWS Problem", cause=e)
                 continue
             else:
                 Log.error("Problem with call to AWS", cause=e)
Example #20
 def output(*args, **kwargs):
     while True:
         try:
             return func(*args, **kwargs)
         except Exception, e:
             e = Except.wrap(e)
             if "Request limit exceeded" in e:
                 Log.warning("AWS Problem", cause=e)
                 continue
             else:
                 Log.error("Problem with call to AWS", cause=e)
Example #21
    def _kill(self):
        try:
            self.service.kill()
        except Exception, e:
            ee = Except.wrap(e)
            if 'The operation completed successfully' in ee:
                return
            if 'No such process' in ee:
                return

            Log.warning("Failure to kill process {{process|quote}}", process=self.name, cause=ee)
Example #22
def wrap_function(cache_store, func_):
    attr_name = "_cache_for_" + func_.__name__

    if func_.func_code.co_argcount > 0 and func_.func_code.co_varnames[0] == "self":
        using_self = True
        func = lambda self, *args: func_(self, *args)
    else:
        using_self = False
        func = lambda self, *args: func_(*args)

    def output(*args):
        with cache_store.locker:
            if using_self:
                self = args[0]
                args = args[1:]
            else:
                self = cache_store

            now = Date.now()
            try:
                _cache = getattr(self, attr_name)
            except Exception, _:
                _cache = {}
                setattr(self, attr_name, _cache)

            if Random.int(100) == 0:
                # REMOVE OLD CACHE
                _cache = {k: v for k, v in _cache.items() if v[0] == None or v[0] > now}
                setattr(self, attr_name, _cache)

            timeout, key, value, exception = _cache.get(args, (Null, Null, Null, Null))

        if now > timeout:
            value = func(self, *args)
            with cache_store.locker:
                _cache[args] = (now + cache_store.timeout, args, value, None)
            return value

        if value == None:
            if exception == None:
                try:
                    value = func(self, *args)
                    with cache_store.locker:
                        _cache[args] = (now + cache_store.timeout, args, value, None)
                    return value
                except Exception, e:
                    e = Except.wrap(e)
                    with cache_store.locker:
                        _cache[args] = (now + cache_store.timeout, args, None, e)
                    raise e
            else:
                raise exception
Example #23
    def _kill(self):
        try:
            self.service.kill()
        except Exception, e:
            ee = Except.wrap(e)
            if 'The operation completed successfully' in ee:
                return
            if 'No such process' in ee:
                return

            Log.warning("Failure to kill process {{process|quote}}",
                        process=self.name,
                        cause=ee)
Example #24
def value2json(obj, pretty=False, sort_keys=False):
    try:
        json = json_encoder(obj, pretty=pretty)
        if json == None:
            Log.note(str(type(obj)) + " is not valid{{type}}JSON", type=" (pretty) " if pretty else " ")
            Log.error("Not valid JSON: " + str(obj) + " of type " + str(type(obj)))
        return json
    except Exception, e:
        e = Except.wrap(e)
        with suppress_exception:
            json = pypy_json_encode(obj)
            return json

        Log.error("Can not encode into JSON: {{value}}", value=repr(obj), cause=e)
Example #25
    def encode(self, value, pretty=False):
        if pretty:
            return pretty_json(value)

        try:
            scrubbed = scrub(value)
            return unicode(self.encoder.encode(scrubbed))
        except Exception, e:
            from pyLibrary.debugs.exceptions import Except
            from pyLibrary.debugs.logs import Log

            e = Except.wrap(e)
            Log.warning("problem serializing {{type}}", type=_repr(value), cause=e)
            raise e
Example #26
    def encode(self, value, pretty=False):
        if pretty:
            return pretty_json(value)

        try:
            scrubbed = scrub(value)
            return unicode(self.encoder.encode(scrubbed))
        except Exception, e:
            from pyLibrary.debugs.exceptions import Except
            from pyLibrary.debugs.logs import Log

            e = Except.wrap(e)
            Log.warning("problem serializing {{type}}", type=_repr(value), cause=e)
            raise e
Example #27
    def _got_result(self, data, message):
        data = wrap(data)
        data._meta.count = self.count
        self.count += 1

        if self.settings.debug:
            Log.note("{{data}}", data=data)
        if self.target_queue != None:
            try:
                self.target_queue.add(data)
                message.ack()
            except Exception, e:
                e = Except.wrap(e)
                if not self.target_queue.closed:  # EXPECTED TO HAPPEN, THIS THREAD MAY HAVE BEEN AWAY FOR A WHILE
                    raise e
Example #28
    def _got_result(self, data, message):
        data = wrap(data)
        data._meta.count = self.count
        self.count += 1

        if self.settings.debug:
            Log.note("{{data}}", data=data)
        if self.target_queue != None:
            try:
                self.target_queue.add(data)
                message.ack()
            except Exception, e:
                e = Except.wrap(e)
                if not self.target_queue.closed:  # EXPECTED TO HAPPEN, THIS THREAD MAY HAVE BEEN AWAY FOR A WHILE
                    raise e
Example #29
    def fatal(
        cls,
        template,  # human readable template
        default_params={},  # parameters for template
        cause=None,  # plausible cause
        stack_depth=0,
        log_context=None,
        **more_params
    ):
        """
        SEND TO STDERR

        :param template: *string* human readable string with placeholders for parameters
        :param default_params: *dict* parameters to fill in template
        :param cause: *Exception* for chaining
        :param stack_depth:  *int* how many calls you want popped off the stack to report the *true* caller
        :param log_context: *dict* extra key:value pairs for your convenience
        :param more_params: *any* more parameters (which will overwrite default_params)
        :return:
        """
        if default_params and isinstance(listwrap(default_params)[0], BaseException):
            cause = default_params
            default_params = {}

        params = dict(unwrap(default_params), **more_params)

        cause = unwraplist([Except.wrap(c) for c in listwrap(cause)])
        trace = exceptions.extract_stack(stack_depth + 1)

        e = Except(exceptions.ERROR, template, params, cause, trace)
        str_e = unicode(e)

        error_mode = cls.error_mode
        try:
            if not error_mode:
                cls.error_mode = True
                Log.note(
                    "{{error|unicode}}",
                    error=e,
                    log_context=set_default({"context": exceptions.FATAL}, log_context),
                    stack_depth=stack_depth + 1
                )
        except Exception:
            pass
        cls.error_mode = error_mode

        sys.stderr.write(str_e.encode('utf8'))
Example #30
 def worker(please_stop):
     while not please_stop:
         try:
             response = requests.get("http://169.254.169.254/latest/meta-data/spot/termination-time")
             if response.status_code not in [400, 404]:
                 Log.warning("Shutdown AWS Spot Node {{name}} {{type}}", name=machine_metadata.name, type=machine_metadata.aws_instance_type)
                 please_stop.go()
                 return
         except Exception, e:
             e = Except.wrap(e)
             if "Failed to establish a new connection: [Errno 10060]" in e or "A socket operation was attempted to an unreachable network" in e:
                 Log.warning("AWS Spot Detection has shutdown, probably not a spot node, (http://169.254.169.254 is unreachable)")
                 return
             else:
                 Log.warning("AWS shutdown detection has problems", cause=e)
             Thread.sleep(seconds=61, please_stop=please_stop)
         Thread.sleep(seconds=11, please_stop=please_stop)
Example #31
 def assertRaises(self, problem, function, *args, **kwargs):
     try:
         function(*args, **kwargs)
     except Exception, e:
         e = Except.wrap(e)
         if isinstance(problem, basestring):
             if problem in e:
                 return
             Log.error(
                 "expecting an exception returning {{problem|quote}} got something else instead",
                 problem=problem,
                 cause=e
             )
         elif not isinstance(e, problem):
             Log.error("expecting an exception of type {{type}} to be raised", type=problem)
         else:
             return
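
A hedged sketch of the string form, from inside a test class (the expected fragment matches json2value in Example #13; substring matching against the wrapped exception is assumed, as in the other examples here):

    def test_rejects_str(self):
        # the wrapped exception's message must contain the given fragment
        self.assertRaises("only unicode json accepted", json2value, "a str, not unicode")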
Example #32
def _get_attr(obj, path):
    if not path:
        return obj

    attr_name = path[0]

    if isinstance(obj, ModuleType):
        if attr_name in obj.__dict__:
            return _get_attr(obj.__dict__[attr_name], path[1:])
        elif attr_name in dir(obj):
            return _get_attr(obj[attr_name], path[1:])

        # TRY FILESYSTEM
        from pyLibrary.env.files import File
        possible_error = None
        if File.new_instance(File(obj.__file__).parent,
                             attr_name).set_extension("py").exists:
            try:
                # THIS CASE IS WHEN THE __init__.py DOES NOT IMPORT THE SUBDIR FILE
                # WE CAN STILL PUT THE PATH TO THE FILE IN THE from CLAUSE
                if len(path) == 1:
                    # GET MODULE OBJECT
                    output = __import__(obj.__name__ + "." + attr_name,
                                        globals(), locals(), [path[0]], 0)
                    return output
                else:
                    # GET VARIABLE IN MODULE
                    output = __import__(obj.__name__ + "." + attr_name,
                                        globals(), locals(), [path[1]], 0)
                    return _get_attr(output, path[1:])
            except Exception, e:
                from pyLibrary.debugs.exceptions import Except
                possible_error = Except.wrap(e)

        # TRY A CASE-INSENSITIVE MATCH
        attr_name = lower_match(attr_name, dir(obj))
        if not attr_name:
            from pyLibrary.debugs.logs import Log
            Log.warning(PATH_NOT_FOUND + ". Returning None.",
                        cause=possible_error)
        elif len(attr_name) > 1:
            from pyLibrary.debugs.logs import Log
            Log.error(AMBIGUOUS_PATH_FOUND + " {{paths}}", paths=attr_name)
        else:
            return _get_attr(obj[attr_name[0]], path[1:])
Example #33
 def _iter():
     g = 0
     out = DictList()
     try:
         for i, d in enumerate(data):
             out.append(d)
             if (i + 1) % max_size == 0:
                 yield g, out
                 g += 1
                 out = DictList()
         if out:
             yield g, out
     except Exception, e:
         e = Except.wrap(e)
         if out:
             # AT LEAST TRY TO RETURN WHAT HAS BEEN PROCESSED SO FAR
             yield g, out
         Log.error("Problem inside jx.groupby", e)
Example #34
 def assertRaises(self, problem, function, *args, **kwargs):
     try:
         function(*args, **kwargs)
     except Exception, e:
         e = Except.wrap(e)
         if isinstance(problem, basestring):
             if problem in e:
                 return
             Log.error(
                 "expecting an exception returning {{problem|quote}} got something else instead",
                 problem=problem,
                 cause=e)
         elif not isinstance(e, problem):
             Log.error(
                 "expecting an exception of type {{type}} to be raised",
                 type=problem)
         else:
             return
Example #35
 def _iter():
     g = 0
     out = DictList()
     try:
         for i, d in enumerate(data):
             out.append(d)
             if (i + 1) % max_size == 0:
                 yield g, out
                 g += 1
                 out = DictList()
         if out:
             yield g, out
     except Exception, e:
         e = Except.wrap(e)
         if out:
             # AT LEAST TRY TO RETURN WHAT HAS BEEN PROCESSED SO FAR
             yield g, out
         Log.error("Problem inside jx.groupby", e)
Example #36
def store_public_data(path, permissions):
    """
    :param path: THE BUCKET TO USE
    :param permissions: THE DATA PATTERN EXPECTED
    :return: LINK TO DATA
    """
    try:
        request = flask.request

        if request.content_length > permissions.max_size or len(request.get_data()) > permissions.max_size:
            Log.error("Not acceptable, too big")

        json_data = wrap(request.json)
        for k, _ in permissions.pattern.leaves():
            if not json_data[k]:
                Log.error("Not acceptable\n{{data|json}}", data=json_data)

        link, id = submit_data(path, permissions, request.json)

        response_content = convert.unicode2utf8(convert.value2json({
            "link": link,
            "etl": {"id": id}
        }))

        return Response(
            response_content,
            status=200,
            headers={
                'content-type': RESPONSE_CONTENT_TYPE
            }
        )

    except Exception, e:
        e = Except.wrap(e)
        Log.warning("Error", cause=e)

        return Response(
            RESPONSE_4XX,
            status=403,
            headers={
                'content-type': "text/plain"
            }
        )
Example #37
def utf82unicode(value):
    """
    WITH EXPLANATION FOR FAILURE
    """
    try:
        return value.decode("utf8")
    except Exception, e:
        if not _Log:
            _late_import()

        if not isinstance(value, basestring):
            _Log.error("Can not _convert {{type}} to unicode because it's not a string",  type= type(value).__name__)

        e = _Except.wrap(e)
        for i, c in enumerate(value):
            try:
                c.decode("utf8")
            except Exception, f:
                _Log.error("Can not _convert charcode {{c}} in string  index {{i}}", i=i, c=ord(c), cause=[e, _Except.wrap(f)])
Example #38
    def _got_result(self, data, message):
        global count

        data = wrap(data)
        with count_locker:
            Log.note("{{count}} from {{exchange}}", count=count, exchange=self.pulse.exchange)
            data._meta.count = count
            data._meta.exchange = self.pulse.exchange
            count += 1

        if self.settings.debug:
            Log.note("{{data}}",  data= data)
        if self.target_queue != None:
            try:
                self.target_queue.add(data)
                message.ack()
            except Exception, e:
                e = Except.wrap(e)
                if not self.target_queue.closed:  # EXPECTED TO HAPPEN, THIS THREAD MAY HAVE BEEN AWAY FOR A WHILE
                    raise e
Example #39
def request(method, url, zip=None, retry=None, **kwargs):
    """
    JUST LIKE requests.request() BUT WITH DEFAULT HEADERS AND FIXES
    DEMANDS data IS ONE OF:
    * A JSON-SERIALIZABLE STRUCTURE, OR
    * LIST OF JSON-SERIALIZABLE STRUCTURES, OR
    * None

    Parameters
     * zip - ZIP THE REQUEST BODY, IF BIG ENOUGH
     * json - JSON-SERIALIZABLE STRUCTURE
     * retry - {"times": x, "sleep": y} STRUCTURE

    THE BYTE_STRINGS (b"") ARE NECESSARY TO PREVENT httplib.py FROM **FREAKING OUT**
    IT APPEARS requests AND httplib.py SIMPLY CONCATENATE STRINGS BLINDLY, WHICH
    INCLUDES url AND headers
    """
    global _warning_sent
    if not default_headers and not _warning_sent:
        _warning_sent = True
        Log.warning(
            "The pyLibrary.env.http module was meant to add extra "
            "default headers to all requests, specifically the 'Referer' "
            "header with a URL to the project. Use the `pyLibrary.debug.constants.set()` "
            "function to set `pyLibrary.env.http.default_headers`")

    if isinstance(url, list):
        # TRY MANY URLS
        failures = []
        for remaining, u in jx.countdown(url):
            try:
                response = request(method, u, zip=zip, retry=retry, **kwargs)
                if Math.round(response.status_code,
                              decimal=-2) not in [400, 500]:
                    return response
                if not remaining:
                    return response
            except Exception, e:
                e = Except.wrap(e)
                failures.append(e)
        Log.error("Tried {{num}} urls", num=len(url), cause=failures)
Example #40
def _get_attr(obj, path):
    if not path:
        return obj

    attr_name = path[0]

    if isinstance(obj, ModuleType):
        if attr_name in obj.__dict__:
            return _get_attr(obj.__dict__[attr_name], path[1:])
        elif attr_name in dir(obj):
            return _get_attr(obj[attr_name], path[1:])

        # TRY FILESYSTEM
        from pyLibrary.env.files import File
        possible_error = None
        if File.new_instance(File(obj.__file__).parent, attr_name).set_extension("py").exists:
            try:
                # THIS CASE IS WHEN THE __init__.py DOES NOT IMPORT THE SUBDIR FILE
                # WE CAN STILL PUT THE PATH TO THE FILE IN THE from CLAUSE
                if len(path) == 1:
                    # GET MODULE OBJECT
                    output = __import__(obj.__name__ + "." + attr_name, globals(), locals(), [path[0]], 0)
                    return output
                else:
                    # GET VARIABLE IN MODULE
                    output = __import__(obj.__name__ + "." + attr_name, globals(), locals(), [path[1]], 0)
                    return _get_attr(output, path[1:])
            except Exception, e:
                from pyLibrary.debugs.exceptions import Except
                possible_error = Except.wrap(e)

        # TRY A CASE-INSENSITIVE MATCH
        attr_name = lower_match(attr_name, dir(obj))
        if not attr_name:
            from pyLibrary.debugs.logs import Log
            Log.warning(PATH_NOT_FOUND + ". Returning None.", cause=possible_error)
        elif len(attr_name) > 1:
            from pyLibrary.debugs.logs import Log
            Log.error(AMBIGUOUS_PATH_FOUND + " {{paths}}", paths=attr_name)
        else:
            return _get_attr(obj[attr_name[0]], path[1:])
Example #41
def request(method, url, zip=None, retry=None, **kwargs):
    """
    JUST LIKE requests.request() BUT WITH DEFAULT HEADERS AND FIXES
    DEMANDS data IS ONE OF:
    * A JSON-SERIALIZABLE STRUCTURE, OR
    * LIST OF JSON-SERIALIZABLE STRUCTURES, OR
    * None

    Parameters
     * zip - ZIP THE REQUEST BODY, IF BIG ENOUGH
     * json - JSON-SERIALIZABLE STRUCTURE
     * retry - {"times": x, "sleep": y} STRUCTURE

    THE BYTE_STRINGS (b"") ARE NECESSARY TO PREVENT httplib.py FROM **FREAKING OUT**
    IT APPEARS requests AND httplib.py SIMPLY CONCATENATE STRINGS BLINDLY, WHICH
    INCLUDES url AND headers
    """
    global _warning_sent
    if not default_headers and not _warning_sent:
        _warning_sent = True
        Log.warning(
            "The pyLibrary.env.http module was meant to add extra "
            "default headers to all requests, specifically the 'Referer' "
            "header with a URL to the project. Use the `pyLibrary.debug.constants.set()` "
            "function to set `pyLibrary.env.http.default_headers`"
        )

    if isinstance(url, list):
        # TRY MANY URLS
        failures = []
        for remaining, u in jx.countdown(url):
            try:
                response = request(method, u, zip=zip, retry=retry, **kwargs)
                if Math.round(response.status_code, decimal=-2) not in [400, 500]:
                    return response
                if not remaining:
                    return response
            except Exception, e:
                e = Except.wrap(e)
                failures.append(e)
        Log.error("Tried {{num}} urls", num=len(url), cause=failures)
Example #42
    def query(self, _query):
        try:
            query = QueryOp.wrap(_query, schema=self)

            for n in self.namespaces:
                query = n.convert(query)
            if self.typed:
                query = Typed().convert(query)

            for s in listwrap(query.select):
                if not aggregates1_4.get(s.aggregate):
                    Log.error(
                        "ES can not aggregate {{name}} because {{aggregate|quote}} is not a recognized aggregate",
                        name=s.name,
                        aggregate=s.aggregate,
                    )

            frum = query["from"]
            if isinstance(frum, QueryOp):
                result = self.query(frum)
                q2 = query.copy()
                q2.frum = result
                return jx.run(q2)

            if is_deepop(self._es, query):
                return es_deepop(self._es, query)
            if is_aggsop(self._es, query):
                return es_aggsop(self._es, frum, query)
            if is_setop(self._es, query):
                return es_setop(self._es, query)
            if es09_setop.is_setop(query):
                return es09_setop.es_setop(self._es, None, query)
            if es09_aggop.is_aggop(query):
                return es09_aggop.es_aggop(self._es, None, query)
            Log.error("Can not handle")
        except Exception, e:
            e = Except.wrap(e)
            if "Data too large, data for" in e:
                http.post(self._es.cluster.path + "/_cache/clear")
                Log.error("Problem (Tried to clear Elasticsearch cache)", e)
            Log.error("problem", e)
Example #43
    def query(self, _query):
        try:
            query = QueryOp.wrap(_query, schema=self)

            for n in self.namespaces:
                query = n.convert(query)
            if self.typed:
                query = Typed().convert(query)

            for s in listwrap(query.select):
                if not aggregates1_4.get(s.aggregate):
                    Log.error(
                        "ES can not aggregate {{name}} because {{aggregate|quote}} is not a recognized aggregate",
                        name=s.name,
                        aggregate=s.aggregate)

            frum = query["from"]
            if isinstance(frum, QueryOp):
                result = self.query(frum)
                q2 = query.copy()
                q2.frum = result
                return jx.run(q2)

            if is_deepop(self._es, query):
                return es_deepop(self._es, query)
            if is_aggsop(self._es, query):
                return es_aggsop(self._es, frum, query)
            if is_setop(self._es, query):
                return es_setop(self._es, query)
            if es09_setop.is_setop(query):
                return es09_setop.es_setop(self._es, None, query)
            if es09_aggop.is_aggop(query):
                return es09_aggop.es_aggop(self._es, None, query)
            Log.error("Can not handle")
        except Exception, e:
            e = Except.wrap(e)
            if "Data too large, data for" in e:
                http.post(self._es.cluster.path + "/_cache/clear")
                Log.error("Problem (Tried to clear Elasticsearch cache)", e)
            Log.error("problem", e)
Example #44
    def _got_result(self, data, message):
        global count

        data = wrap(data)
        with count_locker:
            Log.note("{{count}} from {{exchange}}",
                     count=count,
                     exchange=self.pulse.exchange)
            data._meta.count = count
            data._meta.exchange = self.pulse.exchange
            count += 1

        if self.settings.debug:
            Log.note("{{data}}", data=data)
        if self.target_queue != None:
            try:
                self.target_queue.add(data)
                message.ack()
            except Exception, e:
                e = Except.wrap(e)
                if not self.target_queue.closed:  # EXPECTED TO HAPPEN, THIS THREAD MAY HAVE BEEN AWAY FOR A WHILE
                    raise e
Example #45
    def output(*args):
        with cache_store.locker:
            if using_self:
                self = args[0]
                args = args[1:]
            else:
                self = cache_store

            now = Date.now()
            try:
                _cache = getattr(self, attr_name)
            except Exception, _:
                _cache = {}
                setattr(self, attr_name, _cache)

            if Random.int(100) == 0:
                # REMOVE OLD CACHE
                _cache = {k: v for k, v in _cache.items() if v[0] == None or v[0] > now}
                setattr(self, attr_name, _cache)

            timeout, key, value, exception = _cache.get(args, (Null, Null, Null, Null))

            if now > timeout:
                value = func(self, *args)
                _cache[args] = (now + cache_store.timeout, args, value, None)
                return value

            if value == None:
                if exception == None:
                    try:
                        value = func(self, *args)
                        _cache[args] = (now + cache_store.timeout, args, value, None)
                        return value
                    except Exception, e:
                        e = Except.wrap(e)
                        _cache[args] = (now + cache_store.timeout, args, None, e)
                        raise e
                else:
                    raise exception
Example #46
def find_query(hash):
    """
    FIND QUERY BY HASH, RETURN Response OBJECT
    :param hash:
    :return: Response OBJECT
    """
    try:
        hash = hash.split("/")[0]
        query = query_finder.find(hash)

        if not query:
            return Response(b'{"type": "ERROR", "template": "not found"}',
                            status=404)
        else:
            return Response(convert.unicode2utf8(query), status=200)
    except Exception, e:
        e = Except.wrap(e)
        Log.warning("problem finding query with hash={{hash}}",
                    hash=hash,
                    cause=e)
        return Response(convert.unicode2utf8(convert.value2json(e)),
                        status=400)
Example #47
    def fatal(
        cls,
        template,  # human readable template
        default_params={},  # parameters for template
        cause=None,  # plausible cause
        stack_depth=0,
        **more_params
    ):
        """
        SEND TO STDERR
        """
        if default_params and isinstance(listwrap(default_params)[0], BaseException):
            cause = default_params
            default_params = {}

        params = dict(unwrap(default_params), **more_params)

        cause = unwraplist([Except.wrap(c) for c in listwrap(cause)])
        trace = exceptions.extract_stack(stack_depth + 1)

        e = Except(exceptions.ERROR, template, params, cause, trace)
        str_e = unicode(e)

        error_mode = cls.error_mode
        try:
            if not error_mode:
                cls.error_mode = True
                Log.note(
                    "{{error}}",
                    error=e,
                    log_context={"context": exceptions.WARNING},
                    stack_depth=stack_depth + 1
                )
        except Exception:
            pass
        cls.error_mode = error_mode

        sys.stderr.write(str_e.encode('utf8'))
Example #48
 def worker(please_stop):
     while not please_stop:
         try:
             response = requests.get(
                 "http://169.254.169.254/latest/meta-data/spot/termination-time"
             )
             if response.status_code not in [400, 404]:
                 Log.warning("Shutdown AWS Spot Node {{name}} {{type}}",
                             name=machine_metadata.name,
                             type=machine_metadata.aws_instance_type)
                 please_stop.go()
                 return
         except Exception, e:
             e = Except.wrap(e)
             if "Failed to establish a new connection: [Errno 10060]" in e or "A socket operation was attempted to an unreachable network" in e:
                 Log.warning(
                     "AWS Spot Detection has shutdown, probably not a spot node, (http://169.254.169.254 is unreachable)"
                 )
                 return
             else:
                 Log.warning("AWS shutdown detection has problems", cause=e)
             Thread.sleep(seconds=61, please_stop=please_stop)
         Thread.sleep(seconds=11, please_stop=please_stop)
Example #49
    def get_treeherder_job(self):
        try:
            with Timer("Process Request"):
                args = Dict(**flask.request.args)

                # IS THE branch/revision PENDING?

                result = self.get_markup(
                    unwraplist(args.branch),
                    unwraplist(args.revision),
                    unwraplist(args.task_id),
                    unwraplist(args.buildername),
                    unwraplist(args.timestamp)
                )

                response_data = convert.unicode2utf8(convert.value2json(result))
                return Response(
                    response_data,
                    status=200,
                    headers={
                        "access-control-allow-origin": "*",
                        "content-type": "text/plain"
                    }
                )
        except Exception, e:
            e = Except.wrap(e)
            Log.warning("Could not process", cause=e)
            e = e.as_dict()

            return Response(
                convert.unicode2utf8(convert.value2json(e)),
                status=400,
                headers={
                    "access-control-allow-origin": "*",
                    "content-type": "application/json"
                }
            )
Example #50
    def warning(
        cls,
        template,
        default_params={},
        cause=None,
        stack_depth=0,
        log_context=None,
        **more_params
    ):
        if isinstance(default_params, BaseException):
            cause = default_params
            default_params = {}

        params = dict(unwrap(default_params), **more_params)
        cause = unwraplist([Except.wrap(c) for c in listwrap(cause)])
        trace = exceptions.extract_stack(stack_depth + 1)

        e = Except(exceptions.WARNING, template, params, cause, trace)
        Log.note(
            "{{error|unicode}}",
            error=e,
            log_context=set_default({"context": exceptions.WARNING}, log_context),
            stack_depth=stack_depth + 1
        )
Example #51
def int_list_packer(term, values):
    """
    return singletons, ranges and exclusions
    """
    DENSITY = 10  # a range can have holes, this is inverse of the hole density
    MIN_RANGE = 20  # min members before a range is allowed to be used

    singletons = set()
    ranges = []
    exclude = set()

    sorted = jx.sort(values)

    last = sorted[0]
    curr_start = last
    curr_excl = set()

    for v in sorted[1::]:
        if v <= last + 1:
            pass
        elif v - last > 3:
            # big step, how do we deal with it?
            if last == curr_start:
                # not a range yet, so just add as singleton
                singletons.add(last)
            elif last - curr_start - len(curr_excl) < MIN_RANGE or (
                (last - curr_start) < len(curr_excl) * DENSITY):
                # small ranges are singletons, sparse ranges are singletons
                singletons |= set(range(curr_start, last + 1))
                singletons -= curr_excl
            else:
                # big enough, and dense enough range
                ranges.append({"gte": curr_start, "lte": last})
                exclude |= curr_excl
            curr_start = v
            curr_excl = set()
        else:
            if 1 + last - curr_start >= len(curr_excl) * DENSITY:
                # high density, keep track of excluded and continue
                add_me = set(range(last + 1, v))
                curr_excl |= add_me
            elif 1 + last - curr_start - len(curr_excl) < MIN_RANGE:
                # not big enough, convert range to singletons
                new_singles = set(range(curr_start, last + 1)) - curr_excl
                singletons = singletons | new_singles

                curr_start = v
                curr_excl = set()
            else:
                ranges.append({"gte": curr_start, "lte": last})
                exclude |= curr_excl
                curr_start = v
                curr_excl = set()
        last = v

    if last == curr_start:
        # not a range yet, so just add as singleton
        singletons.add(last)
    elif last - curr_start - len(curr_excl) < MIN_RANGE or (
        (last - curr_start) < len(curr_excl) * DENSITY):
        # small ranges are singletons, sparse ranges are singletons
        singletons |= set(range(curr_start, last + 1))
        singletons -= curr_excl
    else:
        # big enough, and dense enough range
        ranges.append({"gte": curr_start, "lte": last})
        exclude |= curr_excl

    if ranges:
        r = {"or": [{"range": {term: r}} for r in ranges]}
        if exclude:
            r = {"and": [r, {"not": {"terms": {term: jx.sort(exclude)}}}]}
        if singletons:
            return {"or": [{"terms": {term: jx.sort(singletons)}}, r]}
        else:
            return r
    else:
        raise Except("no packing possible")
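A worked trace may help here; the field name "build.id" and the input values are hypothetical, and the output shape follows from the branches above:

# Hedged worked example (field name "build.id" is hypothetical):
# int_list_packer("build.id", list(range(1, 31)) + [100]) traces as:
#   1..30 is a dense run of 30 members (>= MIN_RANGE, no holes), so it becomes a range
#   100 is a big step away from 30, so it is kept as a singleton
# giving an ES-style filter of the form:
#   {"or": [
#       {"terms": {"build.id": [100]}},
#       {"or": [{"range": {"build.id": {"gte": 1, "lte": 30}}}]}
#   ]}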
Example #52
0
            _to_ascii_dict(headers)
        else:
            _to_ascii_dict(headers)
    except Exception, e:
        Log.error("Request setup failure on {{url}}", url=url, cause=e)

    errors = []
    for r in range(retry.times):
        if r:
            Thread.sleep(retry.sleep)

        try:
            return session.request(method=method, url=url, **kwargs)
        except Exception, e:
            errors.append(Except.wrap(e))

    if " Read timed out." in errors[0]:
        Log.error("Tried {{times}} times: Timeout failure (timeout was {{timeout}}", timeout=timeout, times=retry.times, cause=errors[0])
    else:
        Log.error("Tried {{times}} times: Request failure of {{url}}", url=url, times=retry.times, cause=errors[0])


def _to_ascii_dict(headers):
    if headers is None:
        return
    for k, v in copy(headers).items():
        if isinstance(k, unicode):
            del headers[k]
            if isinstance(v, unicode):
                headers[k.encode("ascii")] = v.encode("ascii")
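The snippet above is truncated, but its retry loop is the core idea. A rough, self-contained sketch of the same pattern with the plain requests library; the function and parameter names (times, sleep_seconds) are illustrative, not the original API:

# Minimal retry sketch, assuming the standard `requests` library.
import time
import requests

def request_with_retry(method, url, times=3, sleep_seconds=1, **kwargs):
    errors = []
    for attempt in range(times):
        if attempt:
            time.sleep(sleep_seconds)        # back off between attempts
        try:
            return requests.request(method=method, url=url, **kwargs)
        except Exception as e:
            errors.append(e)                 # remember failures for reporting
    # all attempts failed; surface the first error, as the code above does
    raise RuntimeError("tried %d times: request failure of %s: %r" % (times, url, errors[0]))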
Example #53
0
            _Log.note("reading file {{path}}", path=path)
        content = File(path).read()
    except Exception, e:
        content = None
        _Log.error("Could not read file {{filename}}", filename=path, cause=e)

    try:
        new_value = _convert.json2value(content,
                                        params=ref.query,
                                        flexible=True,
                                        leaves=True)
    except Exception, e:
        if not _Except:
            _late_import()

        e = _Except.wrap(e)
        try:
            new_value = _convert.ini2value(content)
        except Exception, f:
            raise _Log.error("Can not read {{file}}", file=path, cause=e)
    new_value = _replace_ref(new_value, ref)
    return new_value


def get_http(ref, url):
    from pyLibrary.env import http

    params = url.query
    new_value = _convert.json2value(http.get(ref),
                                    params=params,
                                    flexible=True,
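The fall-back order above (try JSON first, then INI) can be sketched with only the standard library; the helper name read_config is hypothetical, and this sketch does no moustache-template expansion:

# Standard-library sketch of "try JSON first, fall back to INI" (Python 3 configparser).
import json
import configparser

def read_config(path):
    with open(path) as f:
        content = f.read()
    try:
        return json.loads(content)
    except ValueError:
        parser = configparser.ConfigParser()
        parser.read_string(content)   # raises if the content is not INI either
        return {s: dict(parser.items(s)) for s in parser.sections()}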
Example #54
0
        if params:
            # LOOKUP REFERENCES
            json_string = expand_template(json_string, params)

        try:
            value = wrap(json_decoder(unicode(json_string)))
        except Exception, e:
            Log.error("can not decode\n{{content}}", content=json_string, cause=e)

        if leaves:
            value = wrap_leaves(value)

        return value

    except Exception, e:
        e = Except.wrap(e)

        if not json_string.strip():
            Log.error("JSON string is only whitespace")

        c = e
        while "Expecting '" in c.cause and "' delimiter: line" in c.cause:
            c = c.cause

        if "Expecting '" in c and "' delimiter: line" in c:
            line_index = int(strings.between(c.message, " line ", " column ")) - 1
            column = int(strings.between(c.message, " column ", " ")) - 1
            line = json_string.split("\n")[line_index].replace("\t", " ")
            if column > 20:
                sample = "..." + line[column - 20:]
                pointer = "   " + (" " * 20) + "^"
Example #55
0
        else:
            _to_ascii_dict(headers)
    except Exception, e:
        Log.error("Request setup failure on {{url}}", url=url, cause=e)

    errors = []
    for r in range(retry.times):
        if r:
            Thread.sleep(retry.sleep)

        try:
            if DEBUG:
                Log.note("http {{method}} to {{url}}", method=method, url=url)
            return session.request(method=method, url=url, **kwargs)
        except Exception, e:
            errors.append(Except.wrap(e))

    if " Read timed out." in errors[0]:
        Log.error(
            "Tried {{times}} times: Timeout failure (timeout was {{timeout}}",
            timeout=timeout,
            times=retry.times,
            cause=errors[0])
    else:
        Log.error("Tried {{times}} times: Request failure of {{url}}",
                  url=url,
                  times=retry.times,
                  cause=errors[0])


def _to_ascii_dict(headers):
Example #56
0
class Sqlite(object):
    """
    Allows multi-threaded access
    Loads extension functions (like SQRT)
    """

    canonical = None

    def __init__(self, db=None):
        """
        :param db:  Optional, wrap a sqlite db in a thread
        :return: Multithread-safe database
        """
        if not _upgraded:
            _upgrade()

        self.db = None
        self.queue = Queue("sql commands")  # HOLD (command, result, signal, trace) TUPLES
        self.worker = Thread.run("sqlite db thread", self._worker)
        self.get_trace = DEBUG

    def execute(self, command):
        """
        COMMANDS WILL BE EXECUTED IN THE ORDER THEY ARE GIVEN
        BUT CAN INTERLEAVE WITH OTHER THREAD COMMANDS
        :param command: COMMAND FOR SQLITE
        :return: None
        """
        if self.get_trace:
            trace = extract_stack(1)
        else:
            trace = None
        self.queue.add((command, None, None, trace))

    def query(self, command):
        """
        WILL BLOCK CALLING THREAD UNTIL THE command IS COMPLETED
        :param command: COMMAND FOR SQLITE
        :return: list OF RESULTS
        """
        signal = Signal()
        result = Dict()
        self.queue.add((command, result, signal, None))
        signal.wait_for_go()
        if result.exception:
            Log.error("Problem with Sqlite call", cause=result.exception)
        return result

    def _worker(self, please_stop):
        if Sqlite.canonical:
            self.db = Sqlite.canonical
        else:
            self.db = sqlite3.connect(':memory:')
            try:
                full_path = File(
                    "pyLibrary/vendor/sqlite/libsqlitefunctions.so").abspath
                # self.db.execute("SELECT sqlite3_enable_load_extension(1)")
                self.db.enable_load_extension(True)
                self.db.execute("SELECT load_extension('" + full_path + "')")
            except Exception, e:
                Log.warning(
                    "loading sqlite extension functions failed, doing without. (no SQRT for you!)",
                    cause=e)

        try:
            while not please_stop:
                if DEBUG:
                    Log.note("begin pop")
                command, result, signal, trace = self.queue.pop()
                if DEBUG:
                    Log.note("done pop")

                if DEBUG:
                    Log.note("Running command\n{{command|indent}}",
                             command=command)
                with Timer("Run command", debug=DEBUG):
                    if signal is not None:
                        try:
                            curr = self.db.execute(command)
                            result.meta.format = "table"
                            result.header = [d[0] for d in curr.description] if curr.description else None
                            result.data = curr.fetchall()
                        except Exception, e:
                            e = Except.wrap(e)
                            result.exception = Except(
                                ERROR,
                                "Problem with\n{{command|indent}}",
                                command=command,
                                cause=e)
                        finally:
Example #57
0
                    if signal is not None:
                        try:
                            curr = self.db.execute(command)
                            result.meta.format = "table"
                            result.header = [d[0] for d in curr.description] if curr.description else None
                            result.data = curr.fetchall()
                        except Exception, e:
                            e = Except.wrap(e)
                            result.exception = Except(
                                ERROR,
                                "Problem with\n{{command|indent}}",
                                command=command,
                                cause=e)
                        finally:
                            signal.go()
                    else:
                        try:
                            self.db.execute(command)
                        except Exception, e:
                            e = Except.wrap(e)
                            e.cause = Except(type=ERROR,
                                             template="Bad call to Sqlite",
                                             trace=trace)
                            Log.warning("Failure to execute", cause=e)

        except Exception, e:
            Log.error("Problem with sql thread", e)
        finally:
            self.db.close()
Example #58
0
                            result.meta.format = "table"
                            result.header = [d[0] for d in curr.description] if curr.description else None
                            result.data = curr.fetchall()
                            if DEBUG and result.data:
                                text = convert.table2csv(list(result.data))
                                Log.note("Result:\n{{data}}", data=text)
                        except Exception, e:
                            e = Except.wrap(e)
                            result.exception = Except(ERROR, "Problem with\n{{command|indent}}", command=command, cause=e)
                        finally:
                            signal.go()
                    else:
                        try:
                            self.db.execute(command)
                        except Exception, e:
                            e = Except.wrap(e)
                            e.cause = Except(
                                type=ERROR,
                                template="Bad call to Sqlite",
                                trace=trace
                            )
                            Log.warning("Failure to execute", cause=e)

        except Exception, e:
            Log.error("Problem with sql thread", e)
        finally:
            if DEBUG:
                Log.note("Database is closed")
            self.db.close()

    def quote_column(self, column_name, table=None):
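Examples #56 through #58 are overlapping views of the same wrapper; the underlying pattern is a single worker thread that owns the sqlite3 connection and drains a command queue. A stripped-down, standard-library sketch of that pattern (class and method names are illustrative; no extension loading or tracing):

# Minimal "one thread owns the connection" sketch.
import sqlite3
import threading
try:
    import queue           # Python 3
except ImportError:
    import Queue as queue  # Python 2

class ThreadedSqlite(object):
    def __init__(self, path=":memory:"):
        self._queue = queue.Queue()     # holds (sql, reply_queue) pairs
        self._thread = threading.Thread(target=self._worker, args=(path,))
        self._thread.daemon = True
        self._thread.start()

    def query(self, sql):
        reply = queue.Queue(maxsize=1)  # per-call channel, playing the role of Signal/Dict above
        self._queue.put((sql, reply))
        ok, payload = reply.get()       # block until the worker has run the command
        if not ok:
            raise payload
        return payload

    def _worker(self, path):
        db = sqlite3.connect(path)      # connection is created and used only on this thread
        while True:
            sql, reply = self._queue.get()
            try:
                reply.put((True, db.execute(sql).fetchall()))
            except Exception as e:
                reply.put((False, e))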
Example #59
0
    def extend(self, records):
        """
        records - MUST HAVE FORM OF
            [{"value":value}, ... {"value":value}] OR
            [{"json":json}, ... {"json":json}]
            OPTIONAL "id" PROPERTY IS ALSO ACCEPTED
        """
        if self.settings.read_only:
            Log.error("Index opened in read only mode, no changes allowed")
        lines = []
        try:
            for r in records:
                id = r.get("id")

                if id == None:
                    id = random_id()

                if "json" in r:
                    json_bytes = r["json"].encode("utf8")
                elif "value" in r:
                    json_bytes = convert.value2json(r["value"]).encode("utf8")
                else:
                    json_bytes = None
                    Log.error("Expecting every record given to have \"value\" or \"json\" property")

                lines.append(b'{"index":{"_id": ' + convert.value2json(id).encode("utf8") + b'}}')
                if self.settings.tjson:
                    lines.append(json2typed(json_bytes.decode('utf8')).encode('utf8'))
                else:
                    lines.append(json_bytes)
            del records

            if not lines:
                return

            with Timer("Add {{num}} documents to {{index}}", {"num": len(lines) / 2, "index":self.settings.index}, debug=self.debug):
                try:
                    data_bytes = b"\n".join(lines) + b"\n"
                except Exception, e:
                    Log.error("can not make request body from\n{{lines|indent}}", lines=lines, cause=e)

                response = self.cluster.post(
                    self.path + "/_bulk",
                    data=data_bytes,
                    headers={"Content-Type": "text"},
                    timeout=self.settings.timeout,
                    retry=self.settings.retry
                )
                items = response["items"]

                fails = []
                if self.cluster.version.startswith("0.90."):
                    for i, item in enumerate(items):
                        if not item.index.ok:
                            fails.append(i)
                elif any(map(self.cluster.version.startswith, ["1.4.", "1.5.", "1.6.", "1.7."])):
                    for i, item in enumerate(items):
                        if item.index.status not in [200, 201]:
                            fails.append(i)
                else:
                    Log.error("version not supported {{version}}", version=self.cluster.version)

                if fails:
                    Log.error("Problems with insert", cause=[
                        Except(
                            template="{{status}} {{error}} (and {{some}} others) while loading line id={{id}} into index {{index|quote}}:\n{{line}}",
                            status=items[i].index.status,
                            error=items[i].index.error,
                            some=len(fails) - 1,
                            line=strings.limit(lines[fails[0] * 2 + 1], 500 if not self.debug else 100000),
                            index=self.settings.index,
                            id=items[i].index._id
                        )
                        for i in fails
                    ])

        except Exception, e:
            if e.message.startswith("sequence item "):
                Log.error("problem with {{data}}", data=repr(lines[int(e.message[14:16].strip())]), cause=e)
            Log.error("problem sending to ES", e)