Example #1
    def schedule_hash(cls, message):
        """
        Create a unique identifier for this message for storing
        and referencing later

        Args:
            message (str): The serialized message passed to the scheduler

        Returns:
            int: unique hash for the job
        """

        # Get the job portion of the message
        msg = deserialize(message[3])[1]

        # Use json to create the hash string, sorting the keys.
        schedule_hash_items = json.dumps(
            {'args': msg['args'],
             'kwargs': msg['kwargs'],
             'class_args': msg['class_args'],
             'class_kwargs': msg['class_kwargs'],
             'path': msg['path'],
             'callable': msg['callable']},
            sort_keys=True)

        # Hash the canonical (sorted-keys) JSON string of the identifying dict
        schedule_hash = emq_hash(
            schedule_hash_items.encode('utf-8')).hexdigest()

        return schedule_hash
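Note that the hash above is deterministic because json.dumps(sort_keys=True) produces a canonical string before hashing. A minimal, self-contained sketch of the same idea, assuming hashlib.sha1 as a stand-in for emq_hash (eventmq's actual hash function may differ):

import hashlib
import json

def schedule_hash(job):
    # sort_keys=True makes the JSON string, and therefore the digest,
    # independent of dict insertion order
    canonical = json.dumps(job, sort_keys=True)
    return hashlib.sha1(canonical.encode('utf-8')).hexdigest()

print(schedule_hash({'path': 'myapp.tasks', 'callable': 'send_email',
                     'args': [], 'kwargs': {}}))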
Example #2
    def load_jobs(self):
        """
        Loads the jobs that need to be scheduled
        """
        raw_jobs = (
            # ('* * * * *', 'eventmq.scheduler.test_job'),
        )
        ts = int(timestamp())
        for job in raw_jobs:
            # Create the croniter iterator
            c = croniter(job[0])
            path = '.'.join(job[1].split('.')[:-1])
            callable_ = job[1].split('.')[-1]

            msg = ['run', {
                'path': path,
                'callable': callable_
            }]

            # Get the next time this job should be run
            c_next = next(c)
            if ts >= c_next:
                # If the next execution time has passed move the iterator to
                # the following time
                c_next = next(c)

            cron_hash = self.cron_hash(caller_id=CRON_CALLER_ID,
                                       path=path,
                                       callable_=callable_)

            self.cron_jobs[cron_hash] = [c_next, json.dumps(msg), c, None]

        try:
            interval_job_list = self.redis_server.lrange('interval_jobs',
                                                         0,
                                                         -1)
            if interval_job_list is not None:
                for i in interval_job_list:
                    logger.debug('Restoring job with hash %s' % i)
                    if (self.redis_server.get(i)):
                        self.load_job_from_redis(
                            message=deserialize(self.redis_server.get(i)))
                    else:
                        logger.warning('Expected scheduled job in redis, '
                                       'but none was found with hash %s' % i)
        except redis.ConnectionError:
            logger.warning('Could not contact redis server')
        except Exception as e:
            logger.warning(str(e))
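For reference, the cron half of this loader reduces to the following croniter usage; the cron expression and printed output are illustrative only:

from time import time
from croniter import croniter

c = croniter('*/5 * * * *', time())
next_run = next(c)        # unix timestamp (float) of the next matching time
if time() >= next_run:    # already passed; advance to the following occurrence
    next_run = next(c)
print('next run at', next_run)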
Example #3
async def dispatch_pure(request: str, methods: Methods, *, context: Any,
                        convert_camel_case: bool, debug: bool) -> Response:
    try:
        deserialized = validate(deserialize(request), schema)
    except JSONDecodeError as exc:
        return InvalidJSONResponse(data=str(exc), debug=debug)
    except ValidationError as exc:
        return InvalidJSONRPCResponse(data=None, debug=debug)
    return await call_requests(
        create_requests(deserialized,
                        context=context,
                        convert_camel_case=convert_camel_case),
        methods,
        debug=debug,
    )
Example #4
    def schedule_hash(self, message):
        """
        Create a unique identifier for this message for storing
        and referencing later
        """
        # Items to use for uniquely identifying this scheduled job
        # TODO: Pass caller_id in a more rigid place
        msg = deserialize(message[3])[1]
        schedule_hash_items = {'caller_id': msg['class_args'][0],
                               'path': msg['path'],
                               'callable': msg['callable']}

        # Hash the sorted, immutable set of items in our identifying dict
        schedule_hash = str(hash(tuple(frozenset(sorted(
            schedule_hash_items.items())))))

        return schedule_hash
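Unlike Example #1, this variant relies on Python's built-in hash(), which is salted per process for strings (PYTHONHASHSEED), so the value is only stable within a single interpreter run. A small, self-contained illustration of the construction, with made-up values:

items = {'caller_id': 'abc123', 'path': 'myapp.tasks', 'callable': 'send_email'}
# sorted() fixes the item order; frozenset/tuple make the collection hashable
schedule_hash = str(hash(tuple(frozenset(sorted(items.items())))))
print(schedule_hash)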
Example #5
    def send(
        self,
        request: Union[str, Dict, List],
        trim_log_values: bool = False,
        validate_against_schema: bool = True,
        **kwargs: Any
    ) -> Response:
        """
        Send a request, passing the whole JSON-RPC request object.

        After sending, logs, validates and parses.

        >>> client.send('{"jsonrpc": "2.0", "method": "ping", "id": 1}')
        <Response[1]>

        Args:
            request: The JSON-RPC request. Can be either a JSON-encoded string or a
                Request/Notification object.
            trim_log_values: Abbreviate the log entries of requests and responses.
            validate_against_schema: Validate response against the JSON-RPC schema.
            kwargs: Clients can use this to configure a single request. For example,
                HTTPClient passes this through to `requests.Session.send()`.

        Returns:
            A Response object, or None in the case of a Notification.
        """
        # We need both the serialized and deserialized version of the request
        if isinstance(request, str):
            request_text = request
            request_deserialized = deserialize(request)
        else:
            request_text = serialize(request)
            request_deserialized = request
        batch = isinstance(request_deserialized, list)
        response_expected = batch or "id" in request_deserialized
        self.log_request(request_text, trim_log_values=trim_log_values)
        response = self.send_message(
            request_text, response_expected=response_expected, **kwargs
        )
        self.log_response(response, trim_log_values=trim_log_values)
        self.validate_response(response)
        response.data = parse(
            response.text, batch=batch, validate_against_schema=validate_against_schema
        )
        # If received a single error response, raise
        if isinstance(response.data, ErrorResponse):
            raise ReceivedErrorResponseError(response.data)
        return response
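The batch/notification check in send() can be read in isolation as a small helper; the helper name below is illustrative and not part of jsonrpcclient:

import json

def response_expected(request_text: str) -> bool:
    deserialized = json.loads(request_text)
    # a batch always expects a response; a single request expects one only
    # if it carries an "id" member, otherwise it is a notification
    return isinstance(deserialized, list) or "id" in deserialized

assert response_expected('{"jsonrpc": "2.0", "method": "ping", "id": 1}')
assert not response_expected('{"jsonrpc": "2.0", "method": "ping"}')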
Example #6
def parse(
    response_text: str,
    *,
    batch: bool,
    validate_against_schema: bool = True
) -> Union[JSONRPCResponse, List[JSONRPCResponse]]:
    """
    Parses response text, returning JSONRPCResponse objects.

    Args:
        response_text: JSON-RPC response string.
        batch: If the response_text is an empty string, this determines how to parse.
        validate_against_schema: Validate against the json-rpc schema.

    Returns:
        Either a JSONRPCResponse, or a list of them.

    Raises:
        json.JSONDecodeError: The response was not valid JSON.
        jsonschema.ValidationError: The response was not a valid JSON-RPC response
            object.
    """
    # If the response is empty, we can't deserialize it; an empty string is valid
    # JSON-RPC, but not valid JSON.
    if not response_text:
        if batch:
            # An empty string is a valid response to a batch request, when there were
            # only notifications in the batch.
            return []
        else:
            # An empty string is valid response to a Notification request.
            return NotificationResponse()

    # If a string, ensure it's json-deserializable
    deserialized = deserialize(response_text)

    # Validate the response against the Response schema (raises
    # jsonschema.ValidationError if invalid)
    if validate_against_schema:
        validator.validate(deserialized)

    # Batch response
    if isinstance(deserialized, list):
        return [get_response(r) for r in deserialized if "id" in r]
    # Single response
    return get_response(deserialized)
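The empty-string special case exists because an empty body is a legitimate JSON-RPC response to a notification (or to an all-notification batch) but is not parseable JSON, as this quick check shows:

import json

try:
    json.loads("")
except json.JSONDecodeError as exc:
    print("empty string is not valid JSON:", exc)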
Example #7
 def load_jobs(self):
     """
     Loads the jobs that need to be scheduled
     """
     try:
         interval_job_list = self.redis_server.lrange(
             'interval_jobs', 0, -1)
         if interval_job_list is not None:
             for i in interval_job_list:
                 logger.debug('Restoring job with hash %s' % i)
                 if (self.redis_server.get(i)):
                     self.load_job_from_redis(
                         message=deserialize(self.redis_server.get(i)))
                 else:
                     logger.warning('Expected scheduled job in redis, '
                                    'but none was found with hash %s' % i)
     except redis.ConnectionError:
         logger.warning('Could not contact redis server')
     except Exception as e:
         logger.warning(str(e))
Example #8
 def load_jobs(self):
     """
     Loads the jobs from redis that need to be scheduled
     """
     if self.redis_server:
         try:
             interval_job_list = self.redis_server.lrange(
                 'interval_jobs', 0, -1)
             if interval_job_list is not None:
                 for i in interval_job_list:
                     logger.debug('Restoring job with hash %s' % i)
                     if self.redis_server.get(i):
                         self.load_job_from_redis(
                             message=deserialize(self.redis_server.get(i)))
                     else:
                         logger.warning(
                             'Expected scheduled job in redis, but none '
                             'was found with hash {}'.format(i))
         except Exception as e:
             logger.warning(str(e), exc_info=True)
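Stripped of the scheduler plumbing, the restore pattern in these load_jobs variants comes down to the following redis-py and json calls (assumes a reachable local redis instance; the key layout follows the examples):

import json
import redis

r = redis.Redis()
for job_hash in r.lrange('interval_jobs', 0, -1):
    raw = r.get(job_hash)
    if raw:
        job_message = json.loads(raw)
        # hand job_message back to the scheduler here
    else:
        print('No job stored for hash', job_hash)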
Example #9
def dispatch_pure(
    request: str,
    methods: Methods,
    *,
    context: Any,
    convert_camel_case: bool,
    debug: bool,
) -> Response:
    """调用请求调用指定的方法
    Pure version of dispatch - no logging, no optional parameters.

    Does two things:
        1. Deserializes and validates the string.
        2. Calls each request.

    Args:
        request: The incoming request string.
        methods: Collection of methods that can be called.
        context: If specified, will be the first positional argument in all requests.
        convert_camel_case: Will convert the method name/any named params to snake case.
        debug: Include more information in error responses.
    Returns:
        A Response.
    """
    try:
        # Validate the deserialized JSON request against the JSON-RPC request schema
        deserialized = validate(deserialize(request), schema)
    except JSONDecodeError as exc:
        return InvalidJSONResponse(data=str(exc), debug=debug)
    except ValidationError as exc:
        return InvalidJSONRPCResponse(data=None, debug=debug)
    # Call the handler registered for the request's method
    return call_requests(
        create_requests(deserialized,
                        context=context,
                        convert_camel_case=convert_camel_case),
        methods,
        debug=debug,
    )
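The deserialize-then-validate step above can be reproduced with jsonschema directly; the toy schema below stands in for the bundled JSON-RPC request schema:

import json
import jsonschema

toy_schema = {"type": "object", "required": ["jsonrpc", "method"]}

def check(request: str) -> dict:
    deserialized = json.loads(request)             # may raise json.JSONDecodeError
    jsonschema.validate(deserialized, toy_schema)  # may raise ValidationError
    return deserialized

check('{"jsonrpc": "2.0", "method": "ping", "id": 1}')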
Example #10
"""Parse response text, returning JSONRPCResponse objects."""
from json import loads as deserialize
from typing import Any, Dict, List, Union

import jsonschema  # type: ignore
from pkg_resources import resource_string

from .response import (
    ErrorResponse,
    JSONRPCResponse,
    NotificationResponse,
    SuccessResponse,
)

schema = deserialize(resource_string(__name__, "response-schema.json").decode())


def get_response(response: Dict[str, Any]) -> JSONRPCResponse:
    """
    Converts a deserialized response into a JSONRPCResponse object.

    The dictionary can be either an error or a success response, never a notification.

    Args:
        response: Deserialized response dictionary. We can assume the response is valid
            JSON-RPC here, since it passed the jsonschema validation.
    """
    if "error" in response:
        return ErrorResponse(**response)
    return SuccessResponse(**response)
Example #11
    def _start_event_loop(self):
        """
        Starts the actual event loop. Usually called by :meth:`Scheduler.start`
        """
        while True:
            if self.received_disconnect:
                break

            ts_now = int(timestamp())
            m_now = monotonic()
            events = self.poller.poll()

            if events.get(self.outgoing) == POLLIN:
                msg = self.outgoing.recv_multipart()
                self.process_message(msg)

            # TODO: distribute me!
            for hash_, cron in self.cron_jobs.items():
                # If the time is now, or passed
                if cron[0] <= ts_now:
                    msg = cron[1]
                    queue = cron[3]

                    # Run the msg
                    logger.debug("Time is: %s; Schedule is: %s - Running %s" %
                                 (ts_now, cron[0], msg))

                    self.send_request(msg, queue=queue)

                    # Update the next time to run
                    cron[0] = next(cron[2])
                    logger.debug("Next execution will be in %ss" %
                                 seconds_until(cron[0]))

            cancel_jobs = []
            for k, v in self.interval_jobs.iteritems():
                # TODO: Refactor this entire loop to be readable by humankind
                # The schedule time has elapsed
                if v[0] <= m_now:
                    msg = v[1]
                    queue = v[3]

                    logger.debug("Time is: %s; Schedule is: %s - Running %s" %
                                 (ts_now, v[0], msg))

                    # v[4] is the current remaining run_count
                    if v[4] != INFINITE_RUN_COUNT:
                        # If run_count was 0, we cancel the job
                        if v[4] <= 0:
                            cancel_jobs.append(k)
                        else:
                            # Decrement run_count
                            v[4] -= 1
                            # Persist the change to redis
                            try:
                                message = deserialize(self.redis_server.get(k))
                                new_headers = []
                                for header in message[1].split(','):
                                    if 'run_count:' in header:
                                        new_headers.append(
                                            'run_count:{}'.format(v[4]))
                                    else:
                                        new_headers.append(header)
                                message[1] = ",".join(new_headers)
                                self.redis_server.set(k, serialize(message))
                            except Exception as e:
                                logger.warning('Unable to update key in redis '
                                               'server: {}'.format(e))
                            # Perform the request since run_count still > 0
                            self.send_request(msg, queue=queue)
                            v[0] = next(v[2])
                    else:
                        # Scheduled job runs indefinitely:
                        # send the job and update the next schedule time
                        self.send_request(msg, queue=queue)
                        v[0] = next(v[2])

            for job in cancel_jobs:
                try:
                    logger.debug(
                        'Cancelling job due to run_count: {}'.format(job))
                    self.redis_server.delete(job)
                    self.redis_server.lrem('interval_jobs', 0, job)
                except Exception as e:
                    logger.warning('Unable to update key in redis '
                                   'server: {}'.format(e))
                del self.interval_jobs[job]

            if not self.maybe_send_heartbeat(events):
                break
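The run_count bookkeeping in the loop above rewrites one entry of a comma-separated header string. Isolated into a helper (the header format follows the example; the sample header names are illustrative):

def update_run_count(headers, remaining):
    new_headers = []
    for header in headers.split(','):
        if 'run_count:' in header:
            new_headers.append('run_count:{}'.format(remaining))
        else:
            new_headers.append(header)
    return ','.join(new_headers)

assert update_run_count('guarantee,run_count:5', 4) == 'guarantee,run_count:4'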
Example #12
    ExceptionResponse,
    InvalidJSONResponse,
    InvalidJSONRPCResponse,
    InvalidParamsResponse,
    MethodNotFoundResponse,
    NotificationResponse,
    Response,
    SuccessResponse,
)
from .exceptions import MethodNotFoundError, InvalidParamsError, ApiError

request_logger = logging.getLogger(__name__ + ".request")
response_logger = logging.getLogger(__name__ + ".response")

# Prepare the jsonschema validator
schema = deserialize(resource_string(__name__, "request-schema.json"))
klass = validator_for(schema)
klass.check_schema(schema)
validator = klass(schema)

DEFAULT_REQUEST_LOG_FORMAT = "--> %(message)s"
DEFAULT_RESPONSE_LOG_FORMAT = "<-- %(message)s"

config = ConfigParser(default_section="dispatch")
config.read([".jsonrpcserverrc", os.path.expanduser("~/.jsonrpcserverrc")])


def add_handlers() -> Tuple[logging.Handler, logging.Handler]:
    # Request handler
    request_handler = logging.StreamHandler()
    request_handler.setFormatter(