def execute(self, subscription, messages, **kwargs):
    """Deliver each message to the mail subscriber via a sendmail command.

    :param subscription: subscription dict; 'subscriber' is a mailto URL
        whose path is the recipient address and whose query string may
        carry per-subscription options (e.g. ?subject=...).
    :param messages: list of message dicts to deliver.
    :param kwargs: must include 'conf', providing
        conf.notification.smtp_command.
    """
    subscriber = urllib_parse.urlparse(subscription['subscriber'])
    params = urllib_parse.parse_qs(subscriber.query)
    # Normalize query keys to lower case. NOTE: parse_qs maps each key
    # to a *list* of values.
    params = dict((k.lower(), v) for k, v in params.items())
    conf = kwargs.get('conf')
    try:
        for message in messages:
            # One sendmail process per message; the rendered message is
            # written to its stdin below.
            p = subprocess.Popen(conf.notification.smtp_command.split(' '),
                                 stdin=subprocess.PIPE)
            # NOTE(Eva-i): Unfortunately this will add 'queue_name' key to
            # our original messages(dicts) which will be later consumed in
            # the storage controller. It seems safe though.
            message['queue_name'] = subscription['source']
            msg = text.MIMEText(json.dumps(message))
            msg["to"] = subscriber.path
            msg["from"] = subscription['options'].get('from', '')
            subject_opt = subscription['options'].get('subject', '')
            # BUG FIX: parse_qs values are lists, so unwrap the first
            # element; previously the raw list was set as the header.
            msg["subject"] = params.get('subject', [subject_opt])[0]
            p.communicate(msg.as_string())
    except OSError as err:
        LOG.exception(
            _LE('Failed to create process for sendmail, '
                'because %s.') % str(err))
    except Exception as exc:
        LOG.exception(_LE('Failed to send email because %s.') % str(exc))
def transport(self):
    """Load and return the configured transport driver.

    Websocket transports are constructed with the API object; all other
    transports receive the storage and control drivers instead.

    :raises errors.InvalidDriver: if the driver cannot be loaded.
    """
    name = self.driver_conf.transport
    LOG.debug(u'Loading transport driver: %s', name)

    # FIXME(vkmc): Find a better way to init args
    if name == 'websocket':
        invoke_args = [self.conf, self.api, self.cache]
    else:
        invoke_args = [self.conf, self.storage, self.cache, self.control]

    try:
        manager = driver.DriverManager('zaqar.transport',
                                       name,
                                       invoke_on_load=True,
                                       invoke_args=invoke_args)
    except RuntimeError as exc:
        LOG.exception(exc)
        LOG.error(_LE(u'Failed to load transport driver zaqar.transport.'
                      u'%(driver)s with args %(args)s'),
                  {'driver': name, 'args': invoke_args})
        raise errors.InvalidDriver(exc)
    return manager.driver
def wrapper(self, *args, **kwargs):
    """Retry ``func`` on AutoReconnect with exponential backoff.

    Raises the last AutoReconnect once the configured maximum number of
    attempts has been exhausted.
    """
    # TODO(kgriffs): Figure out a way to not have to rely on the
    # presence of `mongodb_conf`
    max_attempts = self.driver.mongodb_conf.max_reconnect_attempts
    sleep_sec = self.driver.mongodb_conf.reconnect_sleep

    last_ex = None
    for attempt in range(max_attempts):
        try:
            # On success this returns immediately; the unreachable
            # `break` that used to follow the return has been removed.
            return func(self, *args, **kwargs)
        except errors.AutoReconnect as ex:
            LOG.warning(
                _LW(u'Caught AutoReconnect, retrying the '
                    'call to {0}').format(func))
            last_ex = ex
            time.sleep(sleep_sec * (2 ** attempt))

    LOG.error(
        _LE(u'Caught AutoReconnect, maximum attempts '
            'to {0} exceeded.').format(func))
    raise last_ex
def transport(self):
    """Load and return the configured transport driver.

    :raises errors.InvalidDriver: if the driver cannot be loaded.
    """
    transport_name = self.driver_conf.transport
    LOG.debug(u'Loading transport driver: %s', transport_name)

    # Websocket transports take the API object; everything else is
    # handed the storage/control drivers.
    if transport_name == consts.TRANSPORT_WEBSOCKET:
        args = [self.conf, self.api, self.cache]
    else:
        args = [self.conf, self.storage, self.cache, self.control]

    try:
        loaded = driver.DriverManager('zaqar.transport',
                                      transport_name,
                                      invoke_on_load=True,
                                      invoke_args=args).driver
    except RuntimeError as exc:
        LOG.exception(exc)
        LOG.error(_LE(u'Failed to load transport driver zaqar.transport.'
                      u'%(driver)s with args %(args)s'),
                  {'driver': transport_name, 'args': args})
        raise errors.InvalidDriver(exc)
    return loaded
def _migrate_up(self, engine, version, with_data=False):
    """migrate up to a new version of the db.

    We allow for data insertion and post checks at every
    migration version with special _pre_upgrade_### and
    _check_### functions in the main test.
    """
    # NOTE(sdague): try block is here because it's impossible to debug
    # where a failed data migration happens otherwise
    check_version = version
    try:
        data = None
        if with_data:
            # Optional per-version hook that seeds data before the
            # upgrade runs.
            pre_upgrade = getattr(
                self, "_pre_upgrade_%s" % check_version, None)
            if pre_upgrade is not None:
                data = pre_upgrade(engine)

        self._migrate(engine, version, 'upgrade')
        self.assertEqual(version, self._get_version_from_db(engine))

        if with_data:
            # Optional per-version hook that validates the upgraded
            # schema/data.
            check = getattr(self, "_check_%s" % check_version, None)
            if check is not None:
                check(engine, data)
    except Exception:
        LOG.error(
            _LE("Failed to migrate to version {version} on engine "
                "{engine}").format(version=version, engine=engine))
        raise
def post(self, queue_name, messages, client_uuid, project=None):
    """Send messages to the subscribers."""
    if not self.subscription_controller:
        LOG.error(_LE('Failed to get subscription controller.'))
        return
    if isinstance(self.subscription_controller,
                  pooling.SubscriptionController):
        # Pooled controllers are not notified here (same behavior as
        # before: silently do nothing).
        return

    # The notification type is identical for every subscriber, so stamp
    # the messages once (lazily, on the first deliverable subscriber)
    # instead of re-stamping inside every loop iteration.
    stamped = False
    marker = None
    while True:
        subscribers = self.subscription_controller.list(
            queue_name, project, marker=marker)
        for sub in next(subscribers):
            # Lazy %-formatting instead of eager string interpolation.
            LOG.debug("Notifying subscriber %r", sub)
            s_type = urllib_parse.urlparse(sub['subscriber']).scheme
            # If the subscriber doesn't contain 'confirmed', it
            # means that this kind of subscriber was created before
            # the confirm feature be introduced into Zaqar. We
            # should allow them be subscribed.
            if (self.require_confirmation and
                    not sub.get('confirmed', True)):
                LOG.info(_LI('The subscriber %s is not '
                             'confirmed.'), sub['subscriber'])
                continue
            if not stamped:
                for msg in messages:
                    msg['Message_Type'] = MessageType.Notification.name
                stamped = True
            self._execute(s_type, sub, messages)
        # The generator yields the next-page marker after the page of
        # subscribers; an empty marker means the last page.
        marker = next(subscribers)
        if not marker:
            break
def post(self, queue_name, messages, client_uuid, project=None):
    """Send messages to the subscribers."""
    if self.subscription_controller:
        if isinstance(self.subscription_controller,
                      pooling.SubscriptionController):
            # Pooled controllers are not handled here.
            return
        marker = None
        while True:
            page = self.subscription_controller.list(
                queue_name, project, marker=marker)
            for sub in next(page):
                LOG.debug("Notifying subscriber %r" % (sub, ))
                scheme = urllib_parse.urlparse(sub['subscriber']).scheme
                # Subscriptions created before the confirmation feature
                # existed carry no 'confirmed' key; treat them as
                # confirmed so they keep receiving notifications.
                unconfirmed = (self.require_confirmation and
                               not sub.get('confirmed', True))
                if unconfirmed:
                    LOG.info(
                        _LI('The subscriber %s is not '
                            'confirmed.'), sub['subscriber'])
                    continue
                for msg in messages:
                    msg['Message_Type'] = MessageType.Notification.name
                self._execute(scheme, sub, messages)
            # The generator yields the next-page marker after the page.
            marker = next(page)
            if not marker:
                break
    else:
        LOG.error(_LE('Failed to get subscription controller.'))
def load_storage_driver(conf, cache, storage_type=None,
                        control_mode=False, control_driver=None):
    """Loads a storage driver and returns it.

    The driver's initializer will be passed conf and cache as
    its positional args.

    :param conf: Configuration instance to use for loading the
        driver. Must include a 'drivers' group.
    :param cache: Cache instance that the driver can (optionally)
        use to reduce latency for some operations.
    :param storage_type: The storage_type to load. If None, then
        the `drivers` option will be used.
    :param control_mode: (Default False). Determines which
        driver type to load; if False, the data driver is
        loaded. If True, the control driver is loaded.
    :param control_driver: (Default None). The control driver
        instance to pass to the storage driver. Needed to access
        the queue controller, mainly.
    """
    if control_mode:
        mode = 'control'
        storage_type = storage_type or conf['drivers'].management_store
    else:
        mode = 'data'
        storage_type = storage_type or conf['drivers'].message_store

    driver_type = 'zaqar.{0}.storage'.format(mode)
    _invoke_args = [conf, cache]
    if control_driver is not None:
        _invoke_args.append(control_driver)

    try:
        mgr = driver.DriverManager(driver_type,
                                   storage_type,
                                   invoke_on_load=True,
                                   invoke_args=_invoke_args)
        if conf.profiler.enabled:
            if ((mode == "control" and
                    conf.profiler.trace_management_store) or
                    (mode == "data" and
                        conf.profiler.trace_message_store)):
                trace_name = '{0}_{1}_driver'.format(storage_type, mode)
                return profiler.trace_cls(trace_name,
                                          trace_private=True)(mgr.driver)
        # BUG FIX: previously the plain driver was only returned when
        # profiling was disabled; with profiling enabled but the
        # relevant trace flag off, the function fell through and
        # implicitly returned None.
        return mgr.driver
    except Exception as exc:
        LOG.error(
            _LE('Failed to load "{}" driver for "{}"').format(
                driver_type, storage_type))
        LOG.exception(exc)
        raise errors.InvalidDriver(exc)
def execute(self, subscription, messages, **kwargs):
    """Deliver messages (including confirmation emails) via sendmail.

    Confirmation-type messages are rendered from the configured email
    templates; ordinary notifications are serialized as JSON.

    :param subscription: subscription dict; 'subscriber' is a mailto URL.
    :param messages: list of message dicts; 'Message_Type' selects the
        confirmation vs. notification rendering path.
    :param kwargs: must include 'conf', providing the notification group.
    """
    subscriber = urllib_parse.urlparse(subscription['subscriber'])
    params = urllib_parse.parse_qs(subscriber.query)
    # Normalize query keys; parse_qs maps each key to a *list*.
    params = dict((k.lower(), v) for k, v in params.items())
    conf_n = kwargs.get('conf').notification
    try:
        for message in messages:
            p = subprocess.Popen(conf_n.smtp_command.split(' '),
                                 stdin=subprocess.PIPE)
            # Pick the confirmation template (if any) for this message;
            # the two confirmation branches were duplicated before and
            # are merged here.
            mtype = message.get('Message_Type')
            if mtype == MessageType.SubscriptionConfirmation.name:
                content = conf_n.subscription_confirmation_email_template
            elif mtype == MessageType.UnsubscribeConfirmation.name:
                content = conf_n.unsubscribe_confirmation_email_template
            else:
                content = None

            if content is not None:
                # Send confirmation email to subscriber.
                msg = self._make_confirmation_email(content['body'],
                                                    subscription,
                                                    message, conf_n)
                msg["to"] = subscriber.path
                msg["from"] = content['sender']
                msg["subject"] = content['topic']
            else:
                # NOTE(Eva-i): Unfortunately this will add 'queue_name' key
                # to our original messages(dicts) which will be later
                # consumed in the storage controller. It seems safe though.
                message['queue_name'] = subscription['source']
                msg = text.MIMEText(json.dumps(message))
                msg["to"] = subscriber.path
                msg["from"] = subscription['options'].get('from', '')
                subject_opt = subscription['options'].get('subject', '')
                # BUG FIX: parse_qs values are lists — unwrap the first
                # element instead of leaking the list into the header.
                msg["subject"] = params.get('subject', [subject_opt])[0]
            p.communicate(msg.as_string())
            LOG.debug("Send mail successfully: %s", msg.as_string())
    except OSError as err:
        LOG.exception(
            _LE('Failed to create process for sendmail, '
                'because %s.') % str(err))
    except Exception as exc:
        LOG.exception(_LE('Failed to send email because %s.') % str(exc))
def execute(self, subscription, messages, **kwargs):
    """Deliver each message to the mail subscriber via a sendmail command.

    :param subscription: subscription dict; 'subscriber' is a mailto URL.
    :param messages: list of message dicts to deliver.
    :param kwargs: must include 'conf', providing
        conf.notification.smtp_command.
    """
    subscriber = urllib_parse.urlparse(subscription['subscriber'])
    params = urllib_parse.parse_qs(subscriber.query)
    # Normalize query keys; parse_qs maps each key to a *list* of values.
    params = dict((k.lower(), v) for k, v in params.items())
    conf = kwargs.get('conf')
    try:
        for message in messages:
            p = subprocess.Popen(conf.notification.smtp_command.split(' '),
                                 stdin=subprocess.PIPE)
            msg = text.MIMEText(json.dumps(message))
            msg["to"] = subscriber.path
            msg["from"] = subscription['options'].get('from', '')
            subject_opt = subscription['options'].get('subject', '')
            # BUG FIX: parse_qs values are lists — unwrap the first
            # element instead of setting the raw list as the header.
            msg["subject"] = params.get('subject', [subject_opt])[0]
            p.communicate(msg.as_string())
    except OSError as err:
        LOG.error(_LE('Failed to create process for sendmail, '
                      'because %s') % str(err))
    except Exception as exc:
        LOG.exception(_LE('Failed to send email'))
        LOG.exception(exc)
def on_post(self, req, resp, project_id, queue_name):
    """Create a pre-signed URL for the given queue.

    Deserializes the request body, validates the keys and requested
    paths, and responds with the signed-URL document as JSON.
    """
    LOG.debug(u'Pre-Signed URL Creation for queue: %(queue)s, '
              u'project: %(project)s',
              {'queue': queue_name, 'project': project_id})

    try:
        document = wsgi_utils.deserialize(req.stream, req.content_length)
    except ValueError as ex:
        LOG.debug(ex)
        raise wsgi_errors.HTTPBadRequestAPI(six.text_type(ex))

    unknown = set(document.keys()) - _KNOWN_KEYS
    if unknown:
        msg = six.text_type(_LE('Unknown keys: %s') % unknown)
        raise wsgi_errors.HTTPBadRequestAPI(msg)

    key = self._conf.signed_url.secret_key
    paths = document.pop('paths', None)
    if paths:
        invalid = set(paths) - _VALID_PATHS
        if invalid:
            msg = six.text_type(_LE('Invalid paths: %s') % invalid)
            raise wsgi_errors.HTTPBadRequestAPI(msg)
        # NOTE(review): req.path[:-6] apparently strips a fixed-length
        # route suffix — confirm against the registered route.
        paths = [os.path.join(req.path[:-6], p) for p in paths]
    else:
        # Default to the queue's messages resource.
        paths = [os.path.join(req.path[:-6], 'messages')]

    try:
        data = urls.create_signed_url(key, paths,
                                      project=project_id, **document)
    except ValueError as err:
        raise wsgi_errors.HTTPBadRequestAPI(str(err))

    resp.body = utils.to_json(data)
def execute(self, subscription, messages, **kwargs):
    """Deliver each message to the mail subscriber via a sendmail command.

    :param subscription: subscription dict; 'subscriber' is a mailto URL
        whose query string may carry a per-subscription subject.
    :param messages: list of message dicts to deliver.
    :param kwargs: must include 'conf', providing
        conf.notification.smtp_command.
    """
    subscriber = urllib_parse.urlparse(subscription['subscriber'])
    params = urllib_parse.parse_qs(subscriber.query)
    # parse_qs maps every key to a list of values; keys are lowered here.
    params = dict((k.lower(), v) for k, v in params.items())
    conf = kwargs.get('conf')
    try:
        for message in messages:
            p = subprocess.Popen(conf.notification.smtp_command.split(' '),
                                 stdin=subprocess.PIPE)
            # NOTE(Eva-i): Unfortunately this will add 'queue_name' key to
            # our original messages(dicts) which will be later consumed in
            # the storage controller. It seems safe though.
            message['queue_name'] = subscription['source']
            msg = text.MIMEText(json.dumps(message))
            msg["to"] = subscriber.path
            msg["from"] = subscription['options'].get('from', '')
            subject_opt = subscription['options'].get('subject', '')
            # BUG FIX: unwrap the first list element from parse_qs so a
            # plain string (not a list) is set as the Subject header.
            msg["subject"] = params.get('subject', [subject_opt])[0]
            p.communicate(msg.as_string())
    except OSError as err:
        LOG.exception(_LE('Failed to create process for sendmail, '
                          'because %s.') % str(err))
    except Exception as exc:
        LOG.exception(_LE('Failed to send email because %s.') % str(exc))
def load_storage_driver(conf, cache, storage_type=None,
                        control_mode=False, control_driver=None):
    """Loads a storage driver and returns it.

    The driver's initializer will be passed conf and cache as
    its positional args.

    :param conf: Configuration instance to use for loading the
        driver. Must include a 'drivers' group.
    :param cache: Cache instance that the driver can (optionally)
        use to reduce latency for some operations.
    :param storage_type: The storage_type to load. If None, then
        the `drivers` option will be used.
    :param control_mode: (Default False). Determines which
        driver type to load; if False, the data driver is
        loaded. If True, the control driver is loaded.
    :param control_driver: (Default None). The control driver
        instance to pass to the storage driver. Needed to access
        the queue controller, mainly.
    """
    if control_mode:
        mode = 'control'
        if not storage_type:
            storage_type = conf['drivers'].management_store
    else:
        mode = 'data'
        if not storage_type:
            storage_type = conf['drivers'].message_store

    driver_type = 'zaqar.{0}.storage'.format(mode)

    invoke_args = [conf, cache]
    if control_driver is not None:
        invoke_args.append(control_driver)

    try:
        manager = driver.DriverManager(driver_type,
                                       storage_type,
                                       invoke_on_load=True,
                                       invoke_args=invoke_args)
        return manager.driver
    except Exception as exc:
        LOG.error(_LE('Failed to load "{}" driver for "{}"').format(
            driver_type, storage_type))
        LOG.exception(exc)
        raise errors.InvalidDriver(exc)
def post(self, queue_name, messages, client_uuid, project=None):
    """Send messages to the subscribers."""
    controller = self.subscription_controller
    if controller and not isinstance(controller,
                                     pooling.SubscriptionController):
        subscribers = controller.list(queue_name, project)
        # The generator's first item is the page of subscriptions.
        for sub in next(subscribers):
            scheme = urllib_parse.urlparse(sub['subscriber']).scheme
            data_driver = controller.driver
            # Load the notification task matching the subscriber scheme
            # and hand delivery off to the executor.
            mgr = driver.DriverManager('zaqar.notification.tasks',
                                       scheme,
                                       invoke_on_load=True)
            self.executor.submit(mgr.driver.execute, sub, messages,
                                 conf=data_driver.conf)
    else:
        LOG.error(_LE('Failed to get subscription controller.'))
def send_confirm_notification(self, queue, subscription, conf,
                              project=None, expires=None,
                              api_version=None):
    """Send a subscription-confirmation message to the subscriber.

    Builds a signed confirm URL for the subscription and delivers a
    confirmation payload through the task matching the subscriber's
    scheme. Does nothing (with an error log) if no secret key is set.
    """
    key = conf.signed_url.secret_key
    if not key:
        LOG.error(_LE("Can't send confirm notification due to the value of"
                      " secret_key option is None"))
        return
    url = "/%s/queues/%s/subscriptions/%s/confirm" % (api_version, queue,
                                                      subscription['id'])
    pre_url = urls.create_signed_url(key, [url], project=project,
                                     expires=expires, methods=['PUT'])
    message_type = MessageType.SubscriptionConfirmation.name

    messages = {}
    endpoint_dict = auth.get_public_endpoint()
    if endpoint_dict:
        # Expose both WSGI and websocket confirm URLs when the public
        # endpoints are known.
        wsgi_endpoint = endpoint_dict.get('zaqar', None)
        if wsgi_endpoint:
            wsgi_subscribe_url = urllib_parse.urljoin(wsgi_endpoint, url)
            messages['WSGISubscribeURL'] = wsgi_subscribe_url
        websocket_endpoint = endpoint_dict.get('zaqar-websocket', None)
        if websocket_endpoint:
            websocket_subscribe_url = urllib_parse.urljoin(
                websocket_endpoint, url)
            messages['WebSocketSubscribeURL'] = websocket_subscribe_url
    messages.update({'Message_Type': message_type,
                     'Message': 'You have chosen to subscribe to the '
                                'queue: %s' % queue,
                     'URL-Signature': pre_url['signature'],
                     'URL-Methods': pre_url['methods'][0],
                     'URL-Paths': pre_url['paths'][0],
                     'X-Project-ID': pre_url['project'],
                     'URL-Expires': pre_url['expires'],
                     'SubscribeBody': {'confirmed': True},
                     'UnsubscribeBody': {'confirmed': False}})
    s_type = urllib_parse.urlparse(subscription['subscriber']).scheme
    # BUG FIX: the two string fragments previously concatenated to
    # "The requestbody is" — a space was missing.
    LOG.info(_LI('Begin to send %(type)s confirm notification. The request'
                 ' body is %(messages)s'),
             {'type': s_type, 'messages': messages})

    self._execute(s_type, subscription, [messages], conf)
def execute(self, subscription, messages, headers=None, **kwargs):
    """POST each message to the subscription's webhook endpoint.

    Default headers send JSON; per-subscription 'post_headers' are
    merged on top. If 'post_data' is configured, the literal token
    '"$zaqar_message$"' inside it is replaced with the message JSON.
    """
    if headers is None:
        headers = {'Content-Type': 'application/json'}
    headers.update(subscription['options'].get('post_headers', {}))
    try:
        for msg in messages:
            # NOTE(Eva-i): Unfortunately this will add 'queue_name' key to
            # our original messages(dicts) which will be later consumed in
            # the storage controller. It seems safe though.
            msg['queue_name'] = subscription['source']
            options = subscription['options']
            if 'post_data' in options:
                payload = options['post_data'].replace(
                    '"$zaqar_message$"', json.dumps(msg))
            else:
                payload = json.dumps(msg)
            # NOTE(review): no timeout is passed, so a slow subscriber
            # can block this worker indefinitely — confirm intended.
            requests.post(subscription['subscriber'],
                          data=payload,
                          headers=headers)
    except Exception as e:
        LOG.exception(_LE('webhook task got exception: %s.') % str(e))
def wrapper(self, *args, **kwargs):
    """Retry ``func`` on redis ConnectionError with exponential backoff.

    Raises the last ConnectionError once the configured maximum number
    of attempts has been exhausted.
    """
    # TODO(prashanthr_) : Try to reuse this utility. Violates DRY
    # Can pass config parameters into the decorator and create a
    # storage level utility.
    max_attempts = self.driver.redis_conf.max_reconnect_attempts
    sleep_sec = self.driver.redis_conf.reconnect_sleep

    last_ex = None
    for attempt in range(max_attempts):
        try:
            return func(self, *args, **kwargs)
        except redis.exceptions.ConnectionError as ex:
            # NOTE(kgriffs): redis-py will retry once itself,
            # but if the command cannot be sent the second time after
            # disconnecting and reconnecting, the error is raised
            # and we will catch it here.
            #
            # NOTE(kgriffs): When using a sentinel, if a master fails
            # the initial retry will gracefully fail over to the
            # new master if the sentinel failover delay is low enough;
            # if the delay is too long, then redis-py will get a
            # MasterNotFoundError (a subclass of ConnectionError) on
            # it's retry, which will then just get raised and caught
            # here, in which case we will keep retrying until the
            # sentinel completes the failover and stops raising
            # MasterNotFoundError.
            # Use except-as + last_ex instead of sys.exc_info(), for
            # consistency with the mongodb retry wrapper.
            last_ex = ex
            LOG.warning(
                _LW(u'Caught ConnectionError, retrying the '
                    'call to {0}').format(func))
            time.sleep(sleep_sec * (2 ** attempt))

    LOG.error(
        _LE(u'Caught ConnectionError, maximum attempts '
            'to {0} exceeded.').format(func))
    raise last_ex
def wrapper(self, *args, **kwargs):
    """Retry ``func`` on AutoReconnect with exponential backoff.

    Raises the last AutoReconnect once the configured maximum number of
    attempts has been exhausted.
    """
    # TODO(kgriffs): Figure out a way to not have to rely on the
    # presence of `mongodb_conf`
    max_attempts = self.driver.mongodb_conf.max_reconnect_attempts
    sleep_sec = self.driver.mongodb_conf.reconnect_sleep

    last_ex = None
    for attempt in range(max_attempts):
        try:
            # `return` exits immediately on success; the unreachable
            # `break` that used to follow it has been removed.
            return func(self, *args, **kwargs)
        except errors.AutoReconnect as ex:
            LOG.warning(_LW(u'Caught AutoReconnect, retrying the '
                            'call to {0}').format(func))
            last_ex = ex
            time.sleep(sleep_sec * (2 ** attempt))

    LOG.error(_LE(u'Caught AutoReconnect, maximum attempts '
                  'to {0} exceeded.').format(func))
    raise last_ex
def wrapper(self, *args, **kwargs):
    """Retry ``func`` on redis ConnectionError with exponential backoff.

    Raises the last ConnectionError once the configured maximum number
    of attempts has been exhausted.
    """
    # TODO(prashanthr_) : Try to reuse this utility. Violates DRY
    # Can pass config parameters into the decorator and create a
    # storage level utility.
    max_attempts = self.driver.redis_conf.max_reconnect_attempts
    sleep_sec = self.driver.redis_conf.reconnect_sleep

    last_ex = None
    for attempt in range(max_attempts):
        try:
            return func(self, *args, **kwargs)
        except redis.exceptions.ConnectionError as ex:
            # NOTE(kgriffs): redis-py will retry once itself,
            # but if the command cannot be sent the second time after
            # disconnecting and reconnecting, the error is raised
            # and we will catch it here.
            #
            # NOTE(kgriffs): When using a sentinel, if a master fails
            # the initial retry will gracefully fail over to the
            # new master if the sentinel failover delay is low enough;
            # if the delay is too long, then redis-py will get a
            # MasterNotFoundError (a subclass of ConnectionError) on
            # it's retry, which will then just get raised and caught
            # here, in which case we will keep retrying until the
            # sentinel completes the failover and stops raising
            # MasterNotFoundError.
            # Use except-as + last_ex instead of sys.exc_info(), for
            # consistency with the mongodb retry wrapper.
            last_ex = ex
            LOG.warning(_LW(u'Caught ConnectionError, retrying the '
                            'call to {0}').format(func))
            time.sleep(sleep_sec * (2 ** attempt))

    LOG.error(_LE(u'Caught ConnectionError, maximum attempts '
                  'to {0} exceeded.').format(func))
    raise last_ex
def send_confirm_notification(self, queue, subscription, conf,
                              project=None, expires=None,
                              api_version=None, is_unsubscribed=False):
    """Send a subscription (un)confirmation message to the subscriber.

    Builds a signed PUT URL for the subscription's confirm resource and
    delivers a confirmation payload through the notification task that
    matches the subscriber's scheme. Does nothing when confirmation is
    disabled or no secret key is configured.
    """
    # NOTE(flwang): If the confirmation feature isn't enabled, just do
    # nothing. Here we're getting the require_confirmation from conf
    # object instead of using self.require_confirmation, because the
    # variable from self object really depends on the kwargs when
    # initializing the NotifierDriver object. See bug 1655812 for more
    # information.
    if not conf.notification.require_confirmation:
        return
    key = conf.signed_url.secret_key
    if not key:
        LOG.error(
            _LE("Can't send confirm notification due to the value of"
                " secret_key option is None"))
        return
    url = "/%s/queues/%s/subscriptions/%s/confirm" % (api_version, queue,
                                                      subscription['id'])
    # Pre-sign the confirm URL so the subscriber can PUT without a token.
    pre_url = urls.create_signed_url(key, [url], project=project,
                                     expires=expires, methods=['PUT'])
    message = None
    if is_unsubscribed:
        message_type = MessageType.UnsubscribeConfirmation.name
        message = ('You have unsubscribed successfully to the queue: %s, '
                   'you can resubscribe it by using confirmed=True.'
                   % queue)
    else:
        message_type = MessageType.SubscriptionConfirmation.name
        message = 'You have chosen to subscribe to the queue: %s' % queue

    messages = {}
    # Include public confirm URLs for whichever endpoints are registered.
    endpoint_dict = auth.get_public_endpoint()
    if endpoint_dict:
        wsgi_endpoint = endpoint_dict.get('zaqar', None)
        if wsgi_endpoint:
            wsgi_subscribe_url = urllib_parse.urljoin(wsgi_endpoint, url)
            messages['WSGISubscribeURL'] = wsgi_subscribe_url
        websocket_endpoint = endpoint_dict.get('zaqar-websocket', None)
        if websocket_endpoint:
            websocket_subscribe_url = urllib_parse.urljoin(
                websocket_endpoint, url)
            messages['WebSocketSubscribeURL'] = websocket_subscribe_url
    # The signature pieces let the subscriber authenticate its confirm
    # request; Subscribe/UnsubscribeBody are the PUT payloads to use.
    messages.update({
        'Message_Type': message_type,
        'Message': message,
        'URL-Signature': pre_url['signature'],
        'URL-Methods': pre_url['methods'][0],
        'URL-Paths': pre_url['paths'][0],
        'X-Project-ID': pre_url['project'],
        'URL-Expires': pre_url['expires'],
        'SubscribeBody': {
            'confirmed': True
        },
        'UnsubscribeBody': {
            'confirmed': False
        }
    })
    s_type = urllib_parse.urlparse(subscription['subscriber']).scheme
    LOG.info(
        _LI('Begin to send %(type)s confirm/unsubscribe notification.'
            ' The request body is %(messages)s'), {
                'type': s_type,
                'messages': messages
            })

    self._execute(s_type, subscription, [messages], conf)
def create_signed_url(key, paths, project=None, expires=None, methods=None):
    """Creates a signed url for the specified path

    This function will create a pre-signed URL for `path` using the
    specified `options` or the default ones. The signature will be the
    hex value of the hmac created using `key`

    :param key: A string to use as a `key` for the hmac generation.
    :param paths: A list of strings representing URL paths.
    :param project: (Default None) The ID of the project this URL belongs
        to.
    :param methods: (Default ['GET']) A list of methods that will be
        supported by the generated URL.
    :params expires: (Default time() + 86400) The expiration date for
        the generated URL.
    """
    methods = methods or ['GET']

    if key is None:
        raise ValueError(_LE('The `key` can\'t be None'))

    if not isinstance(paths, list) or not paths:
        raise ValueError(_LE('`paths` must be a non-empty list'))

    if not isinstance(methods, list):
        raise ValueError(_LE('`methods` should be a list'))

    # NOTE(flaper87): The default expiration time is 1day
    # Evaluate whether this should be configurable. We may
    # also want to have a "maximum" expiration time. Food
    # for thoughts.
    if expires is not None:
        # NOTE(flaper87): Verify if the format is correct
        # and normalize the value to UTC.
        check_expires = None
        try:
            check_expires = int(expires)
        except ValueError:
            pass
        # BUG FIX: compare against None rather than truthiness, so that
        # integer-like values such as "0" are also rejected instead of
        # slipping through to the ISO-8601 parser below.
        if check_expires is not None:
            raise ValueError(_LE('`expires` should be date format, '
                                 'for example 2016-01-01T00:00:00, '
                                 'not integer value: %s') % check_expires)
        parsed = timeutils.parse_isotime(expires)
        expires = timeutils.normalize_time(parsed)
    else:
        delta = datetime.timedelta(days=1)
        expires = timeutils.utcnow() + delta

    if expires <= timeutils.utcnow():
        raise ValueError(_LE('`expires` is lower than the current time'))

    methods = sorted(methods)
    paths = sorted(paths)
    expires_str = expires.strftime(_DATE_FORMAT)
    # NOTE: raw string — the separators are literal backslash-n
    # sequences, part of the established signature format.
    hmac_body = six.b(r'%(paths)s\n%(methods)s\n%(project)s\n%(expires)s' %
                      {'paths': ','.join(paths),
                       'methods': ','.join(methods),
                       'project': project, 'expires': expires_str})

    if not isinstance(key, six.binary_type):
        key = six.binary_type(key.encode('utf-8'))

    return {'paths': paths,
            'methods': methods,
            'project': project,
            'expires': expires_str,
            'signature': hmac.new(key, hmac_body,
                                  hashlib.sha256).hexdigest()}
def create_signed_url(key, paths, project=None, expires=None, methods=None):
    """Creates a signed url for the specified path

    This function will create a pre-signed URL for `path` using the
    specified `options` or the default ones. The signature will be the
    hex value of the hmac created using `key`

    :param key: A string to use as a `key` for the hmac generation.
    :param paths: A list of strings representing URL paths.
    :param project: (Default None) The ID of the project this URL belongs
        to.
    :param methods: (Default ['GET']) A list of methods that will be
        supported by the generated URL.
    :params expires: (Default time() + 86400) The expiration date for
        the generated URL.
    """
    methods = methods or ['GET']

    if key is None:
        raise ValueError(_LE('The `key` can\'t be None'))

    if not isinstance(paths, list) or not paths:
        raise ValueError(_LE('`paths` must be a non-empty list'))

    if not isinstance(methods, list):
        raise ValueError(_LE('`methods` should be a list'))

    # NOTE(flaper87): The default expiration time is 1day
    # Evaluate whether this should be configurable. We may
    # also want to have a "maximum" expiration time. Food
    # for thoughts.
    if expires is not None:
        # NOTE(flaper87): Verify if the format is correct
        # and normalize the value to UTC.
        check_expires = None
        try:
            check_expires = int(expires)
        except ValueError:
            pass
        # BUG FIX: use an explicit None comparison so that integer-like
        # values such as "0" are also rejected; the previous truthiness
        # test let them fall through to the ISO-8601 parser.
        if check_expires is not None:
            raise ValueError(
                _LE('`expires` should be date format, '
                    'for example 2016-01-01T00:00:00, '
                    'not integer value: %s') % check_expires)
        parsed = timeutils.parse_isotime(expires)
        expires = timeutils.normalize_time(parsed)
    else:
        delta = datetime.timedelta(days=1)
        expires = timeutils.utcnow() + delta

    if expires <= timeutils.utcnow():
        raise ValueError(_LE('`expires` is lower than the current time'))

    methods = sorted(methods)
    paths = sorted(paths)
    expires_str = expires.strftime(_DATE_FORMAT)
    # NOTE: raw string — the separators are literal backslash-n
    # sequences, part of the established signature format.
    hmac_body = six.b(
        r'%(paths)s\n%(methods)s\n%(project)s\n%(expires)s' % {
            'paths': ','.join(paths),
            'methods': ','.join(methods),
            'project': project,
            'expires': expires_str
        })

    if not isinstance(key, six.binary_type):
        key = six.binary_type(key.encode('utf-8'))

    return {
        'paths': paths,
        'methods': methods,
        'project': project,
        'expires': expires_str,
        'signature': hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
    }
def send_confirm_notification(self, queue, subscription, conf,
                              project=None, expires=None,
                              api_version=None):
    """Send a subscription-confirmation message to the subscriber.

    Builds a signed PUT URL for the subscription's confirm resource and
    delivers a confirmation payload through the task matching the
    subscriber's scheme. Logs an error and returns when no secret key
    is configured.
    """
    key = conf.signed_url.secret_key
    if not key:
        LOG.error(
            _LE("Can't send confirm notification due to the value of"
                " secret_key option is None"))
        return
    url = "/%s/queues/%s/subscriptions/%s/confirm" % (api_version, queue,
                                                      subscription['id'])
    pre_url = urls.create_signed_url(key, [url], project=project,
                                     expires=expires, methods=['PUT'])
    message_type = MessageType.SubscriptionConfirmation.name

    messages = {}
    # Include public confirm URLs for whichever endpoints are registered.
    endpoint_dict = auth.get_public_endpoint()
    if endpoint_dict:
        wsgi_endpoint = endpoint_dict.get('zaqar', None)
        if wsgi_endpoint:
            wsgi_subscribe_url = urllib_parse.urljoin(wsgi_endpoint, url)
            messages['WSGISubscribeURL'] = wsgi_subscribe_url
        websocket_endpoint = endpoint_dict.get('zaqar-websocket', None)
        if websocket_endpoint:
            websocket_subscribe_url = urllib_parse.urljoin(
                websocket_endpoint, url)
            messages['WebSocketSubscribeURL'] = websocket_subscribe_url
    messages.update({
        'Message_Type': message_type,
        'Message': 'You have chosen to subscribe to the '
                   'queue: %s' % queue,
        'URL-Signature': pre_url['signature'],
        'URL-Methods': pre_url['methods'][0],
        'URL-Paths': pre_url['paths'][0],
        'X-Project-ID': pre_url['project'],
        'URL-Expires': pre_url['expires'],
        'SubscribeBody': {
            'confirmed': True
        },
        'UnsubscribeBody': {
            'confirmed': False
        }
    })
    s_type = urllib_parse.urlparse(subscription['subscriber']).scheme
    # BUG FIX: the two string fragments previously concatenated to
    # "The requestbody is" — a space was missing.
    LOG.info(
        _LI('Begin to send %(type)s confirm notification. The request'
            ' body is %(messages)s'), {
                'type': s_type,
                'messages': messages
            })

    self._execute(s_type, subscription, [messages], conf)