def _index_query(self, order=None, offset=0, limit=10, tree=False, depth=10, **kwargs):
    """Query a list of categories"""
    # When a tree is requested, only root categories seed the query;
    # children are attached later by self._expand.
    if asbool(tree):
        query = Category.query.roots()
    else:
        query = Category.query
    # Default sort when the caller supplies no ordering.
    if not order:
        order = 'id asc'
    query = query.order_by(get_order_by(order, order_columns))
    start = int(offset)
    # Clamp limit and depth to the configured API maximums so callers
    # cannot request arbitrarily large result sets or trees.
    limit = min(int(limit), int(app_globals.settings['api_media_max_results']))
    depth = min(int(depth), int(app_globals.settings['api_tree_max_depth']))
    # get the total of all the matches
    count = query.count()
    query = query.offset(start).limit(limit)
    categories = self._expand(query.all(), asbool(tree), depth)
    return dict(
        categories=categories,
        count=count,
    )
def raw_send_email(sender, recipients, message):
    """Send *message* to *recipients* over SMTP, honouring the 'hold_emails'
    config flag and the ututi_force_emails_to environment whitelist.

    When emails are held, the message is appended to mail_queue instead of
    being sent. If a whitelist is set while holding, whitelisted recipients
    still receive the email.
    """
    # Accept a single address as well as a list of addresses.
    if isinstance(recipients, basestring):
        recipients = [recipients]
    hold_emails = asbool(config.get('hold_emails', False))
    # Comma-separated whitelist from the environment; drop empty entries.
    force_emails_to = aslist(os.environ.get("ututi_force_emails_to", []), ',', strip=True)
    force_emails_to = [e for e in force_emails_to if e]
    if hold_emails and force_emails_to:
        # Keep only whitelisted recipients; if any remain, send anyway.
        recipients = [address for address in recipients if address in force_emails_to]
        if recipients:
            hold_emails = False
    log.debug("Recipients: %r" % recipients)
    log.debug("Hold emails: %r" % asbool(config.get('hold_emails', False)))
    # Send the message via SMTP to localhost:25
    if not hold_emails:
        # send the email if we are not told to hold it
        server = config.get('smtp_host', 'localhost')
        smtp = SMTP(server)
        try:
            smtp.sendmail(config['ututi_email_from'], recipients, message)
        except SMTPRecipientsRefused:
            # Best-effort: log the failed delivery details, do not re-raise.
            log.warn(sender)
            log.warn(recipients)
            log.warn(repr(message))
        finally:
            smtp.quit()
    else:
        # Held messages are queued; presumably flushed elsewhere - confirm.
        mail_queue.append(EmailInfo(sender, recipients, message))
def convert(self, param, param_name, value, has_simple_io_config, date_time_format=None):
    """Coerce a raw SIO *value* to the Python type implied by *param*
    (Boolean/CSV/Integer/Unicode/UTC) or by naming conventions
    (bool prefixes, int names/suffixes). Raises ZatoException on any
    conversion error.
    """
    try:
        # Boolean-by-convention or explicit Boolean params first.
        if any(param_name.startswith(prefix) for prefix in self.bool_parameter_prefixes) or isinstance(param, Boolean):
            value = asbool(value or None) # value can be an empty string and asbool chokes on that
        if value and value is not None: # Can be a 0
            if isinstance(param, Boolean):
                value = asbool(value)
            elif isinstance(param, CSV):
                value = value.split(',')
            elif isinstance(param, Integer):
                value = int(value)
            elif isinstance(param, Unicode):
                value = unicode(value)
            elif isinstance(param, UTC):
                # Strip the UTC offset suffix from ISO timestamps.
                value = value.replace('+00:00', '')
            else:
                # No explicit type - fall back to the int naming conventions.
                if value and value != ZATO_NONE and has_simple_io_config:
                    if any(param_name==elem for elem in self.int_parameters) or \
                       any(param_name.endswith(suffix) for suffix in self.int_parameter_suffixes):
                        value = int(value)
        if date_time_format and isinstance(value, datetime):
            value = value.strftime(date_time_format)
        # An empty CSV normalizes to an empty list, not ''.
        if isinstance(param, CSV) and not value:
            value = []
        return value
    except Exception, e:
        msg = 'Conversion error, param:[{}], param_name:[{}], repr(value):[{}], e:[{}]'.format(
            param, param_name, repr(value), format_exc(e))
        logger.error(msg)
        raise ZatoException(msg=msg)
def _index_query(self, order=None, offset=0, limit=10, tree=False, depth=10, **kwargs):
    """Query a list of categories"""
    # Root categories only when a tree is requested; _expand fills children.
    if asbool(tree):
        query = Category.query.roots()
    else:
        query = Category.query
    # Default ordering when none is supplied.
    if not order:
        order = 'id asc'
    query = query.order_by(get_order_by(order, order_columns))
    start = int(offset)
    # Enforce configured API ceilings on page size and tree depth.
    limit = min(int(limit), int(app_globals.settings['api_media_max_results']))
    depth = min(int(depth), int(app_globals.settings['api_tree_max_depth']))
    # get the total of all the matches
    count = query.count()
    query = query.offset(start).limit(limit)
    categories = self._expand(query.all(), asbool(tree), depth)
    return dict(
        categories = categories,
        count = count,
    )
def set_up_pickup(self):
    """Normalize every stanza in self.pickup_config (booleans, absolute
    paths, list-valued keys, compiled glob patterns), drop empty stanzas,
    add the internal hot-deploy stanza and start the PickupManager.
    """
    empty = []
    # Fix up booleans and paths
    for stanza, stanza_config in self.pickup_config.items():
        # user_config_items is empty by default
        if not stanza_config:
            empty.append(stanza)
            continue
        stanza_config.read_on_pickup = asbool(stanza_config.get('read_on_pickup', True))
        stanza_config.parse_on_pickup = asbool(stanza_config.get('parse_on_pickup', True))
        stanza_config.delete_after_pickup = asbool(stanza_config.get('delete_after_pickup', True))
        stanza_config.case_insensitive = asbool(stanza_config.get('case_insensitive', True))
        stanza_config.pickup_from = absolutize(stanza_config.pickup_from, self.base_dir)
        stanza_config.is_service_hot_deploy = False
        # move_processed_to is optional; resolve it only if present.
        mpt = stanza_config.get('move_processed_to')
        stanza_config.move_processed_to = absolutize(mpt, self.base_dir) if mpt else None
        # services/topics may be a scalar or a list - always store a list.
        services = stanza_config.get('services') or []
        stanza_config.services = [services] if not isinstance(services, list) else services
        topics = stanza_config.get('topics') or []
        stanza_config.topics = [topics] if not isinstance(topics, list) else topics
        # Compile glob patterns once, up front.
        flags = globre.EXACT
        if stanza_config.case_insensitive:
            flags |= IGNORECASE
        patterns = stanza_config.patterns
        stanza_config.patterns = [patterns] if not isinstance(patterns, list) else patterns
        stanza_config.patterns = [globre.compile(elem, flags) for elem in stanza_config.patterns]
        # A missing directory is only warned about, not treated as fatal.
        if not os.path.exists(stanza_config.pickup_from):
            logger.warn('Pickup dir `%s` does not exist (%s)', stanza_config.pickup_from, stanza)
    for item in empty:
        del self.pickup_config[item]
    # Ok, now that we have configured everything that pickup.conf had
    # we still need to make it aware of services and how to pick them up from FS.
    stanza = 'zato_internal_service_hot_deploy'
    stanza_config = Bunch({
        'pickup_from': self.hot_deploy_config.pickup_dir,
        'patterns': [globre.compile('*.py', globre.EXACT | IGNORECASE)],
        'read_on_pickup': False,
        'parse_on_pickup': False,
        'delete_after_pickup': self.hot_deploy_config.delete_after_pickup,
        'is_service_hot_deploy': True,
    })
    self.pickup_config[stanza] = stanza_config
    self.pickup = PickupManager(self, self.pickup_config)
    # Run the pickup loop in its own greenlet so this call does not block.
    spawn_greenlet(self.pickup.run)
def convert(self, param, param_name, value, has_simple_io_config, is_xml, date_time_format=None):
    """Coerce a raw SIO *value* to the Python type implied by *param*.

    Like the simpler convert variant but also handles List params, which
    are translated to/from XML when *is_xml* is True and passed through
    unchanged for JSON. Raises ZatoException on any conversion error.
    """
    try:
        # Boolean-by-convention or explicit Boolean params first.
        if any(param_name.startswith(prefix) for prefix in self.bool_parameter_prefixes) or isinstance(param, Boolean):
            value = asbool(value or None) # value can be an empty string and asbool chokes on that
        if value and value is not None: # Can be a 0
            if isinstance(param, Boolean):
                value = asbool(value)
            elif isinstance(param, CSV):
                value = value.split(',')
            elif isinstance(param, List):
                if is_xml:
                    # We are parsing XML to create a SIO request
                    if isinstance(value, EtreeElement):
                        return [elem.text for elem in value.getchildren()]
                    # We are producing XML out of an SIO response
                    else:
                        wrapper = Element(param_name)
                        for item_value in value:
                            xml_item = Element('item')
                            wrapper.append(xml_item)
                            wrapper.item[-1] = item_value
                        return wrapper
                # This is a JSON list
                return value
            elif isinstance(param, Integer):
                value = int(value)
            elif isinstance(param, Unicode):
                value = unicode(value)
            elif isinstance(param, UTC):
                # Strip the UTC offset suffix from ISO timestamps.
                value = value.replace('+00:00', '')
            else:
                # No explicit type - fall back to int naming conventions.
                if value and value != ZATO_NONE and has_simple_io_config:
                    if any(param_name==elem for elem in self.int_parameters) or \
                       any(param_name.endswith(suffix) for suffix in self.int_parameter_suffixes):
                        value = int(value)
        if date_time_format and isinstance(value, datetime):
            value = value.strftime(date_time_format)
        # An empty CSV normalizes to an empty list, not ''.
        if isinstance(param, CSV) and not value:
            value = []
        return value
    except Exception, e:
        msg = 'Conversion error, param:[{}], param_name:[{}], repr(value):[{}], e:[{}]'.format(
            param, param_name, repr(value), format_exc(e))
        logger.error(msg)
        raise ZatoException(msg=msg)
def serialize(dataset):
    """Render the requested WMS layers of *dataset* to a PNG and return it
    as a single-element list of bytes.

    NOTE(review): this reads several names from an enclosing scope
    (figsize, dpi, query, time, bbox, w, h, cmap, environ, self) -
    presumably it is a closure defined inside a response method; confirm
    against the surrounding file.
    """
    fix_map_attributes(dataset)
    fig = Figure(figsize=figsize, dpi=dpi)
    ax = fig.add_axes([0.0, 0.0, 1.0, 1.0])
    # Set transparent background; found through http://sparkplot.org/browser/sparkplot.py.
    if asbool(query.get('TRANSPARENT', 'true')):
        fig.figurePatch.set_alpha(0.0)
        ax.axesPatch.set_alpha(0.0)
    # Plot requested grids (or all if none requested).
    layers = [layer for layer in query.get('LAYERS', '').split(',') if layer] or [var.id for var in walk(dataset, GridType)]
    for layer in layers:
        # Dotted layer names are resolved by successive item lookups.
        names = [dataset] + layer.split('.')
        grid = reduce(operator.getitem, names)
        if is_valid(grid, dataset):
            self._plot_grid(dataset, grid, time, bbox, (w, h), ax, cmap)
    # Save to buffer.
    ax.axis( [bbox[0], bbox[2], bbox[1], bbox[3]] )
    ax.axis('off')
    canvas = FigureCanvas(fig)
    output = StringIO()
    # Optionally convert to paletted png
    paletted = asbool(environ.get('pydap.responses.wms.paletted', 'false'))
    if paletted:
        # Read image
        buf, size = canvas.print_to_buffer()
        im = Image.frombuffer('RGBA', size, buf, 'raw', 'RGBA', 0, 1)
        # Find number of colors
        colors = im.getcolors(256)
        # Only convert if the number of colors is less than 256
        if colors is not None:
            ncolors = len(colors)
            # Get alpha band
            alpha = im.split()[-1]
            # Convert to paletted image
            im = im.convert("RGB")
            im = im.convert("P", palette=Image.ADAPTIVE, colors=ncolors)
            # Set all pixel values below ncolors to 1 and the rest to 0
            mask = Image.eval(alpha, lambda a: 255 if a <=128 else 0)
            # Paste the color of index ncolors and use alpha as a mask
            im.paste(ncolors, mask)
            # Truncate palette to actual size to save space
            im.palette.palette = im.palette.palette[:3*(ncolors+1)]
            im.save(output, 'png', optimize=False, transparency=ncolors)
        else:
            # Too many colors - keep the full RGBA PNG.
            canvas.print_png(output)
    else:
        canvas.print_png(output)
    if hasattr(dataset, 'close'):
        dataset.close()
    return [ output.getvalue() ]
def __init__(self, application, global_conf=None, debug=NoDefault,
             error_email=None, error_log=None,
             show_exceptions_in_wsgi_errors=NoDefault,
             from_address=None, smtp_server=None,
             smtp_username=None, smtp_password=None, smtp_use_tls=False,
             error_subject_prefix=None, error_message=None, xmlhttp_key=None):
    """Error-reporting middleware constructor.

    Explicit keyword arguments win; anything left as None/NoDefault is
    filled in from *global_conf* (Paste-style string config, coerced with
    paste.util.converters).
    """
    from paste.util import converters
    self.application = application
    # @@: global_conf should be handled elsewhere in a separate
    # function for the entry point
    if global_conf is None:
        global_conf = {}
    if debug is NoDefault:
        debug = converters.asbool(global_conf.get('debug'))
    if show_exceptions_in_wsgi_errors is NoDefault:
        show_exceptions_in_wsgi_errors = converters.asbool(
            global_conf.get('show_exceptions_in_wsgi_errors'))
    self.debug_mode = converters.asbool(debug)
    if error_email is None:
        # First of several conventional admin-address keys wins.
        error_email = (global_conf.get('error_email')
                       or global_conf.get('admin_email')
                       or global_conf.get('webmaster_email')
                       or global_conf.get('sysadmin_email'))
    self.error_email = converters.aslist(error_email)
    self.error_log = error_log
    self.show_exceptions_in_wsgi_errors = show_exceptions_in_wsgi_errors
    if from_address is None:
        from_address = global_conf.get('error_from_address', 'errors@localhost')
    self.from_address = from_address
    if smtp_server is None:
        smtp_server = global_conf.get('smtp_server', 'localhost')
    self.smtp_server = smtp_server
    self.smtp_username = smtp_username or global_conf.get('smtp_username')
    self.smtp_password = smtp_password or global_conf.get('smtp_password')
    self.smtp_use_tls = smtp_use_tls or converters.asbool(
        global_conf.get('smtp_use_tls'))
    self.error_subject_prefix = error_subject_prefix or ''
    if error_message is None:
        error_message = global_conf.get('error_message')
    self.error_message = error_message
    if xmlhttp_key is None:
        xmlhttp_key = global_conf.get('xmlhttp_key', '_')
    self.xmlhttp_key = xmlhttp_key
def handle(self):
    """Update an existing HTTSOAPAudit row (looked up by CID) with the
    response outcome, timestamp, headers and payload from the request
    payload, then commit.
    """
    with closing(self.odb.session()) as session:
        payload_req = self.request.payload
        # .one() raises if the CID is unknown - the audit row must exist.
        item = session.query(HTTSOAPAudit).filter_by(cid=payload_req['cid']).one()
        item.invoke_ok = asbool(payload_req['invoke_ok'])
        item.auth_ok = asbool(payload_req['auth_ok'])
        item.resp_time = parse(payload_req['resp_time'])
        # Stored as UTF-8 bytes rather than unicode.
        item.resp_headers = payload_req['resp_headers'].encode('utf-8')
        item.resp_payload = payload_req['resp_payload'].encode('utf-8')
        session.add(item)
        session.commit()
def __init__(
    self,
    application,
    global_conf=None,
    debug=NoDefault,
    error_email=None,
    error_log=None,
    show_exceptions_in_wsgi_errors=NoDefault,
    from_address=None,
    smtp_server=None,
    error_subject_prefix=None,
    error_message=None,
    xmlhttp_key=None,
):
    """Error-reporting middleware constructor (no SMTP auth/TLS options).

    Explicit keyword arguments win; anything left as None/NoDefault is
    filled in from *global_conf* (Paste-style string config).
    """
    from paste.util import converters

    self.application = application
    # @@: global_conf should be handled elsewhere in a separate
    # function for the entry point
    if global_conf is None:
        global_conf = {}
    if debug is NoDefault:
        debug = converters.asbool(global_conf.get("debug"))
    if show_exceptions_in_wsgi_errors is NoDefault:
        show_exceptions_in_wsgi_errors = converters.asbool(global_conf.get("show_exceptions_in_wsgi_errors"))
    self.debug_mode = converters.asbool(debug)
    if error_email is None:
        # First of several conventional admin-address keys wins.
        error_email = (
            global_conf.get("error_email")
            or global_conf.get("admin_email")
            or global_conf.get("webmaster_email")
            or global_conf.get("sysadmin_email")
        )
    self.error_email = converters.aslist(error_email)
    self.error_log = error_log
    self.show_exceptions_in_wsgi_errors = show_exceptions_in_wsgi_errors
    if from_address is None:
        from_address = global_conf.get("error_from_address", "errors@localhost")
    self.from_address = from_address
    if smtp_server is None:
        smtp_server = global_conf.get("smtp_server", "localhost")
    self.smtp_server = smtp_server
    self.error_subject_prefix = error_subject_prefix or ""
    if error_message is None:
        error_message = global_conf.get("error_message")
    self.error_message = error_message
    if xmlhttp_key is None:
        xmlhttp_key = global_conf.get("xmlhttp_key", "_")
    self.xmlhttp_key = xmlhttp_key
def __init__(self, application, global_conf=None, debug=NoDefault,
             error_email=None, error_log=None,
             show_exceptions_in_wsgi_errors=NoDefault,
             from_address=None, smtp_server=None,
             smtp_username=None, smtp_password=None, smtp_use_tls=False,
             error_subject_prefix=None, error_message=None, xmlhttp_key=None):
    """Error-reporting middleware constructor (SMTP auth/TLS variant).

    Explicit keyword arguments win; anything left as None/NoDefault is
    filled in from *global_conf* (Paste-style string config).
    """
    from paste.util import converters
    self.application = application
    # @@: global_conf should be handled elsewhere in a separate
    # function for the entry point
    if global_conf is None:
        global_conf = {}
    if debug is NoDefault:
        debug = converters.asbool(global_conf.get('debug'))
    if show_exceptions_in_wsgi_errors is NoDefault:
        show_exceptions_in_wsgi_errors = converters.asbool(global_conf.get('show_exceptions_in_wsgi_errors'))
    self.debug_mode = converters.asbool(debug)
    if error_email is None:
        # First of several conventional admin-address keys wins.
        error_email = (global_conf.get('error_email')
                       or global_conf.get('admin_email')
                       or global_conf.get('webmaster_email')
                       or global_conf.get('sysadmin_email'))
    self.error_email = converters.aslist(error_email)
    self.error_log = error_log
    self.show_exceptions_in_wsgi_errors = show_exceptions_in_wsgi_errors
    if from_address is None:
        from_address = global_conf.get('error_from_address', 'errors@localhost')
    self.from_address = from_address
    if smtp_server is None:
        smtp_server = global_conf.get('smtp_server', 'localhost')
    self.smtp_server = smtp_server
    self.smtp_username = smtp_username or global_conf.get('smtp_username')
    self.smtp_password = smtp_password or global_conf.get('smtp_password')
    self.smtp_use_tls = smtp_use_tls or converters.asbool(global_conf.get('smtp_use_tls'))
    self.error_subject_prefix = error_subject_prefix or ''
    if error_message is None:
        error_message = global_conf.get('error_message')
    self.error_message = error_message
    if xmlhttp_key is None:
        xmlhttp_key = global_conf.get('xmlhttp_key', '_')
    self.xmlhttp_key = xmlhttp_key
def files(self, id=None, slug=None, secret_key=None, **kwargs):
    """List all files related to specific media.

    :param id: A :attr:`mediacore.model.media.Media.id` for lookup
    :type id: int
    :param slug: A :attr:`mediacore.model.media.Media.slug` for lookup
    :type slug: str
    :param api_key: The api access key if required in settings
    :type api_key: unicode or None
    :raises webob.exc.HTTPNotFound: If the media doesn't exist.
    :returns: JSON dict
    """
    # Reject the call when an API key is required but missing/wrong.
    if asbool(app_globals.settings['api_secret_key_required']) \
       and secret_key != app_globals.settings['api_secret_key']:
        return dict(error='Authentication Error')
    query = Media.query.published()
    # id takes precedence over slug for lookup.
    if id:
        query = query.filter_by(id=id)
    else:
        query = query.filter_by(slug=slug)
    try:
        media = query.one()
    except orm.exc.NoResultFound:
        return dict(error='No match found')
    return dict(
        files = [self._file_info(f, media) for f in media.files],
    )
def index(self, order=None, offset=0, limit=10, api_key=None, **kwargs):
    """Query for a flat list of categories.

    :param id: An :attr:`id <mediacore.model.media.Category.id>` for lookup
    :type id: int
    :param name: A :attr:`name <mediacore.model.media.Category.name>` for lookup
    :type name: str
    :param slug: A :attr:`slug <mediacore.model.media.Category.slug>` for lookup
    :type slug: str
    :param order: A column name and 'asc' or 'desc', seperated by a space.
        The column name can be any one of the returned columns. Defaults to
        newest category first (id desc).
    :type order: str
    :param offset: Where in the complete resultset to start returning
        results. Defaults to 0, the very beginning. This is useful if you've
        already fetched the first 50 results and want to fetch the next 50
        and so on.
    :type offset: int
    :param limit: Number of results to return in each query. Defaults to 10.
        The maximum allowed value defaults to 50 and is set via
        :attr:`app_globals.settings['api_media_max_results']`.
    :type limit: int
    :param api_key: The api access key if required in settings
    :type api_key: unicode or None
    :rtype: JSON-ready dict
    :returns: The returned dict has the following fields:

        count (int)
            The total number of results that match this query.
        categories (list of dicts)
            A list of **category_info** dicts, as generated by the
            :meth:`_info <mediacore.controllers.api.categories.CategoriesController._info>`
            method. The number of dicts in this list will be the lesser of
            the number of matched items and the requested limit.
    """
    # Reject the call when an API key is required but missing/wrong.
    if asbool(app_globals.settings['api_secret_key_required']) \
       and api_key != app_globals.settings['api_secret_key']:
        return dict(error='Authentication Error')
    # A direct lookup by id/slug/name bypasses the flat listing.
    if any(key in kwargs for key in ('id', 'slug', 'name')):
        kwargs['offset'] = offset
        kwargs['limit'] = limit
        kwargs['tree'] = False
        return self._get_query(**kwargs)
    return self._index_query(order, offset, limit, tree=False)
def init(self, *ignored_args, **ignored_kwargs):
    """Configure the embedded gunicorn application from Zato's config.

    Wires Zato lifecycle hooks into gunicorn, copies ``gunicorn_*`` keys
    from the main config into gunicorn's settings (extracting host/port
    from ``bind``), stores the remaining keys in ``self.zato_config``,
    propagates deployment-lock settings to the WSGI app and, when
    ``use_tls`` is enabled, configures gunicorn's TLS settings.

    Raises ValueError if ``main.gunicorn_bind`` has no port.
    """
    self.cfg.set('post_fork', self.zato_wsgi_app.post_fork) # Initializes a worker
    self.cfg.set('on_starting', self.zato_wsgi_app.on_starting) # Generates the deployment key
    self.cfg.set('worker_exit', self.zato_wsgi_app.worker_exit) # Cleans up after the worker

    for k, v in self.config_main.items():
        if k.startswith('gunicorn') and v:
            k = k.replace('gunicorn_', '')
            if k == 'bind':
                if not ':' in v:
                    # Bug fix: the message has two placeholders, so both need
                    # an argument - .format(v) alone raised IndexError here
                    # instead of the intended ValueError.
                    raise ValueError('No port found in main.gunicorn_bind `{}`, such as `{}:17010`'.format(v, v))
                else:
                    host, port = v.split(':')
                    self.zato_host = host
                    self.zato_port = port
            self.cfg.set(k, v)
        else:
            # Non-gunicorn keys are kept in zato_config;
            # deployment-lock values are integers.
            if 'deployment_lock' in k:
                v = int(v)
            self.zato_config[k] = v

    for name in('deployment_lock_expires', 'deployment_lock_timeout'):
        setattr(self.zato_wsgi_app, name, self.zato_config[name])

    if asbool(self.crypto_config.use_tls):
        # Map config names onto the ssl module's PROTOCOL_*/CERT_* constants.
        self.cfg.set('ssl_version', getattr(ssl, 'PROTOCOL_{}'.format(self.crypto_config.tls_protocol)))
        self.cfg.set('ciphers', self.crypto_config.tls_ciphers)
        self.cfg.set('cert_reqs', getattr(ssl, 'CERT_{}'.format(self.crypto_config.tls_client_certs.upper())))
        self.cfg.set('ca_certs', absjoin(self.repo_location, self.crypto_config.ca_certs_location))
        self.cfg.set('keyfile', absjoin(self.repo_location, self.crypto_config.priv_key_location))
        self.cfg.set('certfile', absjoin(self.repo_location, self.crypto_config.cert_location))
        self.cfg.set('do_handshake_on_connect', True)

    self.zato_wsgi_app.has_gevent = 'gevent' in self.cfg.settings['worker_class'].value
def add_startup_jobs(self): sleep( 40 ) # To make sure that at least one server is running if the environment was started from quickstart scripts cluster_conf = self.config.main.cluster add_startup_jobs(cluster_conf.id, self.odb, self.startup_jobs, asbool(cluster_conf.stats_enabled))
def slow_response_details(req, cid, service_name):
    """Django-style view rendering the details of one slow service response.

    Invokes zato.service.slow-response.get, converts timestamps to the
    user's timezone and syntax-highlights the request/response payloads
    (pretty-printed when ?pretty_print is truthy).
    """
    item = None
    service = _get_service(req, service_name)
    pretty_print = asbool(req.GET.get('pretty_print'))
    input_dict = {
        'cid': cid,
        'name': service_name,
    }
    response = req.zato.client.invoke('zato.service.slow-response.get', input_dict)
    if response.has_data:
        cid = response.data.cid
        # ZATO_NONE means no slow response is stored for this CID.
        if cid != ZATO_NONE:
            item = SlowResponse()
            item.cid = response.data.cid
            # Timestamps are UTC; '+00:00' is appended before conversion
            # to the user's profile timezone.
            item.req_ts = from_utc_to_user(response.data.req_ts + '+00:00', req.zato.user_profile)
            item.resp_ts = from_utc_to_user(response.data.resp_ts + '+00:00', req.zato.user_profile)
            item.proc_time = response.data.proc_time
            item.service_name = service_name
            item.threshold = service.slow_threshold
            for name in ('req', 'resp'):
                value = getattr(response.data, name)
                if value:
                    # Dicts are serialized to JSON; otherwise the data
                    # format is detected from the raw value.
                    if isinstance(value, dict):
                        value = dumps(value)
                        data_format = 'json'
                    else:
                        data_format = known_data_format(value)
                    if data_format:
                        if pretty_print:
                            value = get_pretty_print(value, data_format)
                        attr_name = name + '_html'
                        setattr(item, attr_name, highlight(value, data_format_lexer[data_format](), HtmlFormatter(linenos='table')))
                    else:
                        # Regular raw value
                        setattr(item, name, value)
                        # We do not have an HTML version but we need to populate it anyway for pretty-print toggling
                        setattr(item, name + '_html', value)
    return_data = {
        'cluster_id': req.zato.cluster_id,
        'service': service,
        'item': item,
        # The template link toggles to the opposite pretty-print mode.
        'pretty_print': not pretty_print,
    }
    return TemplateResponse(req, 'zato/service/slow-response-details.html', return_data)
def init(self, *ignored_args, **ignored_kwargs): self.cfg.set('post_fork', self.zato_wsgi_app.post_fork) # Initializes a worker self.cfg.set( 'on_starting', self.zato_wsgi_app.on_starting) # Generates the deployment key self.cfg.set( 'worker_exit', self.zato_wsgi_app.worker_exit) # Cleans up after the worker for k, v in self.config_main.items(): if k.startswith('gunicorn') and v: k = k.replace('gunicorn_', '') if k == 'bind': if not ':' in v: raise ValueError( 'No port found in main.gunicorn_bind [{v}]; a proper value is, for instance, [{v}:17010]' .format(v=v)) else: host, port = v.split(':') self.zato_host = host self.zato_port = port self.cfg.set(k, v) else: if 'deployment_lock' in k: v = int(v) self.zato_config[k] = v for name in ('deployment_lock_expires', 'deployment_lock_timeout'): setattr(self.zato_wsgi_app, name, self.zato_config[name]) # TLS is new in 2.0 and we need to assume it's not enabled. In Zato 2.1 or later # this will be changed to assume that we are always over TLS by default. if asbool(self.crypto_config.get('use_tls', False)): self.cfg.set( 'ssl_version', getattr(ssl, 'PROTOCOL_{}'.format(self.crypto_config.tls_protocol))) self.cfg.set('ciphers', self.crypto_config.tls_ciphers) self.cfg.set( 'cert_reqs', getattr( ssl, 'CERT_{}'.format( self.crypto_config.tls_client_certs.upper()))) self.cfg.set( 'ca_certs', absjoin(self.repo_location, self.crypto_config.ca_certs_location)) self.cfg.set( 'keyfile', absjoin(self.repo_location, self.crypto_config.priv_key_location)) self.cfg.set( 'certfile', absjoin(self.repo_location, self.crypto_config.cert_location)) self.cfg.set('do_handshake_on_connect', True) self.zato_wsgi_app.has_gevent = 'gevent' in self.cfg.settings[ 'worker_class'].value
def tree(self, depth=10, secret_key=None, **kwargs):
    """Query for an expanded tree of categories.

    :param id: A :attr:`mediacore.model.media.Category.id` to lookup the parent node
    :type id: int
    :param name: A :attr:`mediacore.model.media.Category.name` to lookup the parent node
    :type name: str
    :param slug: A :attr:`mediacore.model.media.Category.slug` to lookup the parent node
    :type slug: str
    :param depth: Number of level deep in children to expand. Defaults to 10.
        The maximum allowed value defaults to 10 and is set via
        :attr:`app_globals.settings['api_tree_max_depth']`.
    :type limit: int
    :param api_key: The api access key if required in settings
    :type api_key: unicode or None
    :returns: JSON dict
    """
    # Reject the call when an API key is required but missing/wrong.
    if asbool(app_globals.settings['api_secret_key_required']) \
       and secret_key != app_globals.settings['api_secret_key']:
        return dict(error='Authentication Error')
    # A lookup by id/slug/name expands the tree under that node only.
    if any(key in kwargs for key in ('id', 'slug', 'name')):
        kwargs['depth'] = depth
        kwargs['tree'] = True
        return self._get_query(**kwargs)
    return self._index_query(depth=depth, tree=True)
def details(req, source_type, cluster_id, msg_id, topic_name):
    """Django-style view rendering the details of one pub/sub message.

    Invokes zato.pubsub.message.get and converts the UTC timestamps to
    the user's profile timezone before rendering the template.
    """
    item = None
    pretty_print = asbool(req.GET.get('pretty_print'))
    input_dict = {
        'cluster_id': cluster_id,
        'msg_id': msg_id,
    }
    response = req.zato.client.invoke('zato.pubsub.message.get', input_dict)
    if response.has_data:
        item = Message()
        # Copy the scalar attributes verbatim; missing ones become None.
        for name in('topic', 'producer', 'priority', 'mime_type', 'expiration',
            'creation_time_utc', 'expire_at_utc', 'payload'):
            setattr(item, name, getattr(response.data, name, None))
        # Timestamps are UTC; '+00:00' is appended before conversion.
        item.creation_time = from_utc_to_user(item.creation_time_utc+'+00:00', req.zato.user_profile)
        item.expire_at = from_utc_to_user(item.expire_at_utc+'+00:00', req.zato.user_profile)
    return_data = {
        'cluster_id': req.zato.cluster_id,
        'item': item,
        # The template link toggles to the opposite pretty-print mode.
        'pretty_print': not pretty_print,
        'msg_id': msg_id,
        'topic_name': topic_name,
        'source_type': source_type,
        'sub_key': req.GET.get('sub_key')
    }
    return TemplateResponse(req, 'zato/pubsub/message/details.html', return_data)
def read_config(self, config):
    """Read a True/False matching config: an 'order' meta key sets which
    value is checked first, every other key is bucketed by its boolean
    value into self.items, and a lone '*' bucket is remembered as a
    special case.
    """
    self.config = config
    order = config.get('order', FALSE_TRUE)
    # order1 is checked first at match time, order2 second.
    self.order1, self.order2 = (True, False) if order == TRUE_FALSE else (False, True)
    for key, value in config.items():
        # Ignore meta key(s)
        if key == 'order':
            continue
        value = asbool(value)
        # Add new items
        self.items[value].append(key)
    # Now sort everything lexicographically, the way it will be used in run-time
    for key in self.items:
        self.items[key] = list(reversed(sorted(self.items[key])))
    # If one bucket is empty and the other is just '*', every input
    # matches that bucket - record it so matching can short-circuit.
    for empty, non_empty in ((True, False), (False, True)):
        if not self.items[empty] and '*' in self.items[non_empty]:
            self.special_case = non_empty
            break
def init(self):
    """Build the Redis connection for the KVDB, either via Redis Sentinel
    (when use_redis_sentinels is enabled) or directly from the individual
    connection settings, and hand the connection to the Lua container.
    """
    config = {}
    has_sentinel = asbool(self.config.get('use_redis_sentinels', False))

    if has_sentinel:
        # Sentinel mode requires both the sentinel list and a master name.
        sentinels = self._parse_sentinels(self.config.get('redis_sentinels'))
        if not sentinels:
            raise ValueError('kvdb.redis_sentinels must be provided')
        sentinel_master = self.config.get('redis_sentinels_master', None)
        if not sentinel_master:
            raise ValueError('kvdb.redis_sentinels_master must be provided')
        config['sentinels'] = sentinels
        config['sentinel_master'] = sentinel_master
    else:
        # Direct-connection mode - copy only the settings actually provided.
        if self.config.get('host'):
            config['host'] = self.config.host
        if self.config.get('port'):
            config['port'] = int(self.config.port)
        if self.config.get('unix_socket_path'):
            config['unix_socket_path'] = self.config.unix_socket_path
        if self.config.get('db'):
            config['db'] = int(self.config.db)
        if self.config.get('password'):
            # Stored encrypted - decrypt before use.
            config['password'] = self.decrypt_func(self.config.password)
        if self.config.get('socket_timeout'):
            config['socket_timeout'] = float(self.config.socket_timeout)
        if self.config.get('connection_pool'):
            # Dotted-path to a connection pool class, resolved dynamically.
            split = self.config.connection_pool.split('.')
            module, class_name = split[:-1], split[-1]
            mod = import_module(module)
            config['connection_pool'] = getattr(mod, class_name)
        if self.config.get('charset'):
            config['charset'] = self.config.charset
        if self.config.get('errors'):
            config['errors'] = self.config.errors

    self.conn_class = self._get_connection_class(has_sentinel)

    if has_sentinel:
        instance = self.conn_class(config['sentinels'], config.get('password'), config.get('socket_timeout'))
        self.conn = instance.master_for(config['sentinel_master'])
    else:
        self.conn = self.conn_class(**config)

    # Lua scripts run against the same connection.
    self.lua_container.kvdb = self.conn
def validate_input(self):
    """Reject the request with a 404-style NotFound unless publishing the
    API spec is enabled in the server's config.
    """
    if not asbool(self.server.fs_server_config.apispec.pub_enabled):
        # Note that we are using the same format that regular 404 does
        raise NotFound(
            self.cid,
            '[{}] Unknown URL:[{}] or SOAP action:[]'.format(
                self.cid, self.wsgi_environ['zato.channel_item']['url_path']))
def require_api_key_if_necessary(func, *args, **kwargs):
    """Wrapper that enforces the API secret key before calling *func*.

    NOTE(review): the (func, *args, **kwargs) signature suggests this is
    used with a decorator library that passes the wrapped callable first
    - confirm against the call sites.
    """
    api_key = kwargs.get('api_key')
    # Only enforce the key when the settings say one is required.
    if asbool(request.settings['api_secret_key_required']) \
       and api_key != request.settings['api_secret_key']:
        return dict(error='Authentication Error')
    return func(*args, **kwargs)
def convert_config(config):
    """Return a new dict mirroring *config*, with the values of keys
    listed in BOOL_CONFIG coerced to booleans via asbool; all other
    values are copied through unchanged.
    """
    return {
        key: asbool(value) if key in BOOL_CONFIG else value
        for key, value in config.items()
    }
def get_data(self, session):
    """Return the HTTP/SOAP connection list for the requested cluster,
    connection kind and transport, with opaque attributes expanded.
    Whether internal objects are included follows the server's
    misc.return_internal_objects setting.
    """
    return elems_with_opaque(
        self._search(
            http_soap_list, session, self.request.input.cluster_id,
            self.request.input.connection, self.request.input.transport,
            asbool(self.server.fs_server_config.misc.return_internal_objects),
            False))
def convert(self, param, param_name, value, has_simple_io_config, date_time_format=None):
    """Coerce a raw SIO *value* to the Python type implied by *param*
    (Boolean/CSV/Integer/Unicode/UTC) or by naming conventions.
    Raises ZatoException on any conversion error.
    """
    try:
        # Boolean-by-convention or explicit Boolean params first.
        if any(
                param_name.startswith(prefix)
                for prefix in self.bool_parameter_prefixes) or isinstance(
                    param, Boolean):
            value = asbool(
                value
                or None)  # value can be an empty string and asbool chokes on that
        if value and value is not None:  # Can be a 0
            if isinstance(param, Boolean):
                value = asbool(value)
            elif isinstance(param, CSV):
                value = value.split(',')
            elif isinstance(param, Integer):
                value = int(value)
            elif isinstance(param, Unicode):
                value = unicode(value)
            elif isinstance(param, UTC):
                # Strip the UTC offset suffix from ISO timestamps.
                value = value.replace('+00:00', '')
            else:
                # No explicit type - fall back to int naming conventions.
                if value and value != ZATO_NONE and has_simple_io_config:
                    if any(param_name==elem for elem in self.int_parameters) or \
                       any(param_name.endswith(suffix) for suffix in self.int_parameter_suffixes):
                        value = int(value)
        if date_time_format and isinstance(value, datetime):
            value = value.strftime(date_time_format)
        # An empty CSV normalizes to an empty list, not ''.
        if isinstance(param, CSV) and not value:
            value = []
        return value
    except Exception, e:
        msg = 'Conversion error, param:[{}], param_name:[{}], repr(value):[{}], e:[{}]'.format(
            param, param_name, repr(value), format_exc(e))
        logger.error(msg)
        raise ZatoException(msg=msg)
def request_response(req, service_name):
    """Django-style view rendering a service's stored sample request and
    response.

    Invokes zato.service.get-request-response, base64-decodes the samples,
    syntax-highlights them when their data format is recognized
    (pretty-printed when ?pretty_print is truthy) and converts the sample
    timestamps to the user's timezone.
    """
    service = Service(name=service_name)
    pretty_print = asbool(req.GET.get('pretty_print'))
    input_dict = {'name': service_name, 'cluster_id': req.zato.cluster_id}
    service_response = req.zato.client.invoke('zato.service.get-request-response', input_dict)

    if service_response.ok:
        # Samples are stored base64-encoded; missing ones decode to ''.
        request = b64decode(service_response.data.sample_req if service_response.data.sample_req else '')
        request = request.decode('utf8')
        request_data_format = known_data_format(request)
        if request_data_format:
            if pretty_print:
                request = get_pretty_print(request, request_data_format)
            service.sample_req_html = highlight(request, data_format_lexer[request_data_format](), HtmlFormatter(linenos='table'))

        response = b64decode(service_response.data.sample_resp if service_response.data.sample_resp else '')
        response = response.decode('utf8')
        response_data_format = known_data_format(response)
        if response_data_format:
            if pretty_print:
                response = get_pretty_print(response, response_data_format)
            service.sample_resp_html = highlight(response, data_format_lexer[response_data_format](), HtmlFormatter(linenos='table'))

        service.sample_req = request
        service.sample_resp = response

        # Timestamps are UTC; convert to the user's profile timezone.
        ts = {}
        for name in ('req', 'resp'):
            full_name = 'sample_{}_ts'.format(name)
            value = getattr(service_response.data, full_name, '')
            if value:
                value = from_utc_to_user(value + '+00:00', req.zato.user_profile)
            ts[full_name] = value

        service.id = service_response.data.service_id
        service.sample_cid = service_response.data.sample_cid
        service.sample_req_ts = ts['sample_req_ts']
        service.sample_resp_ts = ts['sample_resp_ts']
        service.sample_req_resp_freq = service_response.data.sample_req_resp_freq

    return_data = {
        'cluster_id': req.zato.cluster_id,
        'service': service,
        # The template link toggles to the opposite pretty-print mode.
        'pretty_print': not pretty_print,
    }
    return TemplateResponse(req, 'zato/service/request-response.html', return_data)
def init(self):
    """Build the Redis connection for the KVDB, either via Redis Sentinel
    (when use_redis_sentinels is enabled) or directly from the individual
    connection settings.
    """
    config = {}
    has_sentinel = asbool(self.config.get('use_redis_sentinels', False))

    if has_sentinel:
        # Sentinel mode requires both the sentinel list and a master name.
        sentinels = self._parse_sentinels(self.config.get('redis_sentinels'))
        if not sentinels:
            raise ValueError('kvdb.redis_sentinels must be provided')
        sentinel_master = self.config.get('redis_sentinels_master', None)
        if not sentinel_master:
            raise ValueError('kvdb.redis_sentinels_master must be provided')
        config['sentinels'] = sentinels
        config['sentinel_master'] = sentinel_master
    else:
        # Direct-connection mode - copy only the settings actually provided.
        if self.config.get('host'):
            config['host'] = self.config.host
        if self.config.get('port'):
            config['port'] = int(self.config.port)
        if self.config.get('unix_socket_path'):
            config['unix_socket_path'] = self.config.unix_socket_path
        if self.config.get('db'):
            config['db'] = int(self.config.db)
        if self.config.get('password'):
            # Stored encrypted - decrypt before use.
            config['password'] = self.decrypt_func(self.config.password)
        if self.config.get('socket_timeout'):
            config['socket_timeout'] = float(self.config.socket_timeout)
        if self.config.get('connection_pool'):
            # Dotted-path to a connection pool class, resolved dynamically.
            split = self.config.connection_pool.split('.')
            module, class_name = split[:-1], split[-1]
            mod = import_module(module)
            config['connection_pool'] = getattr(mod, class_name)
        if self.config.get('charset'):
            config['charset'] = self.config.charset
        if self.config.get('errors'):
            config['errors'] = self.config.errors

    self.conn_class = self._get_connection_class(has_sentinel)

    if has_sentinel:
        instance = self.conn_class(config['sentinels'], config.get('password'), config.get('socket_timeout'))
        self.conn = instance.master_for(config['sentinel_master'])
    else:
        self.conn = self.conn_class(**config)
def run(base_dir):
    """Bootstrap and run a Zato server installed under *base_dir*.

    Configures logging, loads server.conf, wires the parallel server into a
    gunicorn application, clears stale KVDB locks, optionally enables the
    WSGI profiler middleware and finally starts serving.
    """
    os.chdir(base_dir)

    # We're doing it here even if someone doesn't use PostgreSQL at all
    # so we're not suprised when someone suddenly starts using PG.
    # TODO: Make sure it's registered for each of the subprocess
    psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
    psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)

    repo_location = os.path.join(base_dir, 'config', 'repo')

    # Configure the logging first, before configuring the actual server.
    # NOTE(review): logging.addLevelName is documented as (level, levelName);
    # passing the name first works on Python 2 only because the internal
    # _levelNames map is symmetric - confirm before porting to Python 3.
    logging.addLevelName('TRACE1', TRACE1)
    logging.config.fileConfig(os.path.join(repo_location, 'logging.conf'))

    config = get_config(repo_location, 'server.conf')
    app_context = get_app_context(config)
    crypto_manager = get_crypto_manager(repo_location, app_context, config)

    parallel_server = app_context.get_object('parallel_server')
    zato_gunicorn_app = ZatoGunicornApplication(parallel_server, repo_location, config.main, config.crypto)

    # Hand the server everything it needs before gunicorn takes over.
    parallel_server.crypto_manager = crypto_manager
    parallel_server.odb_data = config.odb
    parallel_server.host = zato_gunicorn_app.zato_host
    parallel_server.port = zato_gunicorn_app.zato_port
    parallel_server.repo_location = repo_location
    parallel_server.base_dir = base_dir
    parallel_server.fs_server_config = config
    parallel_server.startup_jobs = app_context.get_object('startup_jobs')
    parallel_server.app_context = app_context

    # Remove all locks possibly left over by previous server instances
    clear_locks(app_context.get_object('kvdb'), config.main.token, config.kvdb, crypto_manager.decrypt)

    # Turn the repo dir into an actual repository and commit any new/modified files
    RepoManager(repo_location).ensure_repo_consistency()

    # This is new in 1.2 so is optional
    profiler_enabled = config.get('profiler', {}).get('enabled', False)

    if asbool(profiler_enabled):
        profiler_dir = os.path.abspath(os.path.join(base_dir, config.profiler.profiler_dir))
        # Wrap the WSGI entry point so every request is profiled.
        parallel_server.on_wsgi_request = ProfileMiddleware(
            parallel_server.on_wsgi_request,
            log_filename = os.path.join(profiler_dir, config.profiler.log_filename),
            cachegrind_filename = os.path.join(profiler_dir, config.profiler.cachegrind_filename),
            discard_first_request = config.profiler.discard_first_request,
            flush_at_shutdown = config.profiler.flush_at_shutdown,
            path = config.profiler.url_path,
            unwind = config.profiler.unwind)

    # Run the app at last
    zato_gunicorn_app.run()
def convert_sio(cid, param, param_name, value, has_simple_io_config, is_xml, bool_parameter_prefixes, int_parameters,
        int_parameter_suffixes, encrypt_func, date_time_format=None, data_format=ZATO_NONE, from_sio_to_external=False,
        special_values=(ZATO_NONE, ZATO_SEC_USE_RBAC), _is_bool=is_bool, _is_int=is_int, _is_secret=is_secret):
    """Convert a single SimpleIO value to the Python type implied by its
    parameter definition (bool/ForceType/int/secret), re-raising conversion
    failures as ZatoException tagged with the request's *cid*.

    The trailing underscore-prefixed keyword arguments bind the helper
    predicates at definition time - callers are not expected to override them.
    """
    try:
        if _is_bool(param, param_name, bool_parameter_prefixes):
            value = asbool(value or None) # value can be an empty string and asbool chokes on that

        if value is not None:
            if isinstance(param, ForceType):
                # ForceType subclasses know how to convert themselves.
                value = param.convert(value, param_name, data_format, from_sio_to_external)
            else:
                # Empty string sent in lieu of integers are equivalent to None,
                # as though they were never sent - this is need for internal metaclasses
                if value == '' and _is_int(param_name, int_parameters, int_parameter_suffixes):
                    value = None

                # Only plain truthy values outside the special sentinels are
                # converted further, and only when SIO config is in effect.
                if value and (value not in special_values) and has_simple_io_config:
                    if _is_int(param_name, int_parameters, int_parameter_suffixes):
                        value = int(value)
                    elif _is_secret(param_name):
                        # It will be None in SIO responses
                        if encrypt_func:
                            value = encrypt_func(value)
        return value
    except Exception, e:
        # Reportable exceptions keep their type but gain the correlation id;
        # anything else is wrapped in a ZatoException with full context.
        if isinstance(e, Reportable):
            e.cid = cid
            raise
        else:
            msg = 'Conversion error, param:`{}`, param_name:`{}`, repr:`{}`, type:`{}`, e:`{}`'.format(
                param, param_name, repr(value), type(value), format_exc(e))
            logger.error(msg)
            raise ZatoException(msg=msg)
def impl(): try: servers = [ elem.strip() for elem in config.servers.splitlines() ] cache = _MemcachedClient(servers, asbool(config.is_debug), **parse_extra_into_dict(config.extra)) self._add_cache(config, cache) except Exception, e: logger.warn(format_exc(e))
def make_filter(app, global_conf, forceStart=False):
    """Paste filter factory for the long-request watchdog.

    Re-reads the deployment INI file named in ``global_conf['__file__']``,
    copies every option from its [cipher.longrequest] section into this
    module's globals, optionally starts the watcher thread and wraps *app*
    in a ThreadpoolCatcher.
    """
    config = ConfigParser.RawConfigParser()
    config.optionxform = str  # keep option names case-sensitive
    config.read(global_conf["__file__"])
    # Escalating duration thresholds, one per log level.
    if config.has_option("cipher.longrequest", "duration-level-1"):
        global DURATION_LEVEL_1
        DURATION_LEVEL_1 = config.getint("cipher.longrequest", "duration-level-1")
    if config.has_option("cipher.longrequest", "duration-level-2"):
        global DURATION_LEVEL_2
        DURATION_LEVEL_2 = config.getint("cipher.longrequest", "duration-level-2")
    if config.has_option("cipher.longrequest", "duration-level-3"):
        global DURATION_LEVEL_3
        DURATION_LEVEL_3 = config.getint("cipher.longrequest", "duration-level-3")
    if config.has_option("cipher.longrequest", "tick"):
        global TICK
        TICK = config.getint("cipher.longrequest", "tick")
    if config.has_option("cipher.longrequest", "initial-delay"):
        global INITIAL_DELAY
        INITIAL_DELAY = config.getint("cipher.longrequest", "initial-delay")
    if config.has_option("cipher.longrequest", "finished-log-level"):
        global FINISHED_LOG_LEVEL
        value = config.get("cipher.longrequest", "finished-log-level").lower()
        if value == "info":
            FINISHED_LOG_LEVEL = logging.INFO
        elif value == "warn":
            FINISHED_LOG_LEVEL = logging.WARN
        # NOTE(review): plain `if` where an `elif` was probably intended -
        # behaviorally equivalent here since the values are mutually
        # exclusive, but worth normalizing for consistency.
        if value == "error":
            FINISHED_LOG_LEVEL = logging.ERROR
    if config.has_option("cipher.longrequest", "verbose"):
        global VERBOSE_LOG
        VERBOSE_LOG = asbool(config.get("cipher.longrequest", "verbose"))
    # Collect numbered exclude-url-1..N regex options until the first gap.
    global IGNORE_URLS
    i = 1
    while config.has_option("cipher.longrequest", "exclude-url-%i" % i):
        url = config.get("cipher.longrequest", "exclude-url-%i" % i)
        patt = re.compile(url)  # no flags, use `(?iLmsux)`
        IGNORE_URLS.append(patt)
        i += 1
    # forceStart wins; otherwise the INI's start-thread option decides.
    start = forceStart
    if not forceStart:
        if config.has_option("cipher.longrequest", "start-thread"):
            start = config.getboolean("cipher.longrequest", "start-thread")
    if start:
        startThread(None, None, None, None)
    return ThreadpoolCatcher(app)
def setup_app(command, conf, vars):
    """Initialize the ututi application: load the environment, optionally
    reset the database, then seed dictionaries and defaults."""
    load_environment(conf.global_conf, conf.local_conf)
    wants_reset = asbool(conf.get("reset_database", "false"))
    if wants_reset:
        reset_db(meta.engine)
    initialize_dictionaries(meta.engine)
    initialize_db_defaults(meta.engine)
def request_response(req, service_name):
    """Render the stored sample request/response pair for *service_name*.

    Older admin-client variant: invokes the SOAP admin service and reads the
    samples from lxml-style ``.text`` nodes, base64-decoding, optionally
    pretty-printing and syntax-highlighting them for the template.
    """
    service = Service(name=service_name)
    pretty_print = asbool(req.GET.get('pretty_print'))
    input_dict = {
        'name': service_name,
        'cluster_id': req.zato.cluster_id
    }
    zato_message, soap_response = invoke_admin_service(req.zato.cluster, 'zato:service.get-request-response', input_dict)

    if zato_path('response.item').get_from(zato_message) is not None:
        item = zato_message.response.item

        # Samples are stored base64-encoded; fall back to '' when absent.
        request = (item.sample_req.text if item.sample_req.text else '').decode('base64')
        request_data_format = known_data_format(request)
        if request_data_format:
            if pretty_print:
                request = get_pretty_print(request, request_data_format)
            service.sample_req_html = highlight(request, data_format_lexer[request_data_format](), HtmlFormatter(linenos='table'))

        response = (item.sample_resp.text if item.sample_resp.text else '').decode('base64')
        response_data_format = known_data_format(response)
        if response_data_format:
            if pretty_print:
                response = get_pretty_print(response, response_data_format)
            service.sample_resp_html = highlight(response, data_format_lexer[response_data_format](), HtmlFormatter(linenos='table'))

        service.sample_req = request
        service.sample_resp = response

        # Convert the UTC timestamps (when present) to the user's timezone.
        ts = {}
        for name in('req', 'resp'):
            full_name = 'sample_{}_ts'.format(name)
            value = getattr(item, full_name).text or ''
            if value:
                value = from_utc_to_user(value+'+00:00', req.zato.user_profile)
            ts[full_name] = value

        service.id = item.service_id.text
        service.sample_cid = item.sample_cid.text
        service.sample_req_ts = ts['sample_req_ts']
        service.sample_resp_ts = ts['sample_resp_ts']
        service.sample_req_resp_freq = item.sample_req_resp_freq.text

    return_data = {
        'cluster_id': req.zato.cluster_id,
        'service': service,
        # Inverted on purpose - presumably feeds a pretty-print toggle link
        # in the template; confirm against the template before changing.
        'pretty_print': not pretty_print,
    }
    return TemplateResponse(req, 'zato/service/request-response.html', return_data)
def normalizeconfig(config):
    """Convert the string representation of config parameters into
    programmable types.

    It is assumed that all config parameters are at least initialized with a
    default value (via ``defaultconfig``); values already of the right type
    pass through unchanged.
    """
    # Layer the caller's settings on top of the defaults.
    config_ = dict(defaultconfig.items())
    config_.update(config)
    config = config_

    config['devmod'] = asbool(config.get('devmod', DEVMOD))
    config['strict_undefined'] = asbool(config['strict_undefined'])
    config['module_directory'] = config['module_directory'] or None

    # Comma-separated string options become lists of stripped, non-empty
    # items. The original used three copies of this block with bare
    # `except: pass`; the guarded code can only raise AttributeError (value
    # is already a list, so no .split) or KeyError (key absent), so the
    # handler is narrowed to exactly those.
    for key in ('directories', 'escape_filters', 'plugin_packages'):
        try:
            config[key] = [x.strip() for x in config[key].split(',') if x.strip()]
        except (AttributeError, KeyError):
            pass

    booleans = [
        'include_skin', 'obfuscatemail', 'nested', 'nested.paragraph',
        'stripscript', 'ashtml', 'memcache', 'text_as_hashkey'
    ]
    for x in booleans:
        config[x] = asbool(config[x])
    return config
def convert(self, param, param_name, value, has_simple_io_config, date_time_format=None):
    """Convert a raw SIO *value* to the Python type implied by the parameter
    definition (*param*), honoring bool-prefix naming conventions, integer
    name lists/suffixes and an optional datetime output format.
    """
    if any(param_name.startswith(prefix) for prefix in self.bool_parameter_prefixes):
        # BUG FIX: value can be an empty string and asbool chokes on that -
        # coerce '' to None first (same guard the newer version of this
        # method in this file already applies).
        value = asbool(value or None)

    if isinstance(param, Boolean):
        # Same empty-string guard as above.
        value = asbool(value or None)
    elif isinstance(param, Integer):
        value = int(value)
    elif isinstance(param, Unicode):
        value = unicode(value)
    elif isinstance(param, UTC):
        # Strip the UTC offset suffix, leaving a naive timestamp string.
        value = value.replace('+00:00', '')
    else:
        # Untyped parameters may still be integers by naming convention.
        if value and value != ZATO_NONE and has_simple_io_config:
            if any(param_name==elem for elem in self.int_parameters) or \
               any(param_name.endswith(suffix) for suffix in self.int_parameter_suffixes):
                value = int(value)

    if date_time_format and isinstance(value, datetime):
        value = value.strftime(date_time_format)

    return value
def audit_set_state(req, **kwargs): try: request = {'id':kwargs['id'], 'audit_enabled': not asbool(req.POST['audit_enabled'])} response = req.zato.client.invoke('zato.http-soap.set-audit-state', request) if not response.ok: raise Exception(response.details) return HttpResponse('OK') except Exception, e: msg = format_exc(e) logger.error(msg) return HttpResponseServerError(msg)
def __init__(self, app, mapper, global_conf=None, **params):
    """Keep the wrapped *app*, the status-code *mapper* and extra params."""
    if global_conf is None:
        global_conf = {}
    # @@: global_conf shouldn't really come in here, only in a
    # separate make_status_based_forward function
    self.debug = converters.asbool(global_conf.get("debug", False)) if global_conf else False
    self.application = app
    self.mapper = mapper
    self.global_conf = global_conf
    self.params = params
def __init__(self, app, mapper, global_conf=None, **params):
    """Store the wrapped app, the status mapper and any leftover settings."""
    # @@: global_conf shouldn't really come in here, only in a
    # separate make_status_based_forward function
    if global_conf is None:
        global_conf = {}
    debug = False
    if global_conf:
        debug = converters.asbool(global_conf.get('debug', False))
    self.debug = debug
    self.application = app
    self.mapper = mapper
    self.global_conf = global_conf
    self.params = params
def request_response(req, service_name):
    """Render the stored sample request/response pair for *service_name*."""
    service = Service(name=service_name)
    pretty_print = asbool(req.GET.get('pretty_print'))

    service_response = req.zato.client.invoke(
        'zato.service.get-request-response',
        {'name': service_name, 'cluster_id': req.zato.cluster_id})

    if service_response.ok:
        data = service_response.data

        request = (data.sample_req if data.sample_req else '').decode('base64')
        request_data_format = known_data_format(request)
        if request_data_format:
            if pretty_print:
                request = get_pretty_print(request, request_data_format)
            service.sample_req_html = highlight(
                request, data_format_lexer[request_data_format](), HtmlFormatter(linenos='table'))

        response = (data.sample_resp if data.sample_resp else '').decode('base64')
        response_data_format = known_data_format(response)
        if response_data_format:
            if pretty_print:
                response = get_pretty_print(response, response_data_format)
            service.sample_resp_html = highlight(
                response, data_format_lexer[response_data_format](), HtmlFormatter(linenos='table'))

        service.sample_req = request
        service.sample_resp = response

        # Convert the UTC timestamps (when present) to the user's timezone.
        ts = {}
        for name in ('req', 'resp'):
            full_name = 'sample_{}_ts'.format(name)
            value = getattr(data, full_name, '')
            ts[full_name] = from_utc_to_user(value + '+00:00', req.zato.user_profile) if value else value

        service.id = data.service_id
        service.sample_cid = data.sample_cid
        service.sample_req_ts = ts['sample_req_ts']
        service.sample_resp_ts = ts['sample_resp_ts']
        service.sample_req_resp_freq = data.sample_req_resp_freq

    # pretty_print is inverted - presumably feeds a toggle link in the template.
    return TemplateResponse(req, 'zato/service/request-response.html', {
        'cluster_id': req.zato.cluster_id,
        'service': service,
        'pretty_print': not pretty_print,
    })
def init_jobs(self):
    """Register startup jobs and, when configured, start the scheduler's jobs."""
    # Wait so that at least one server is running if the environment
    # was started from quickstart scripts.
    sleep(initial_sleep)
    cluster = self.config.main.cluster
    stats_enabled = asbool(cluster.stats_enabled)
    add_startup_jobs(cluster.id, self.odb, self.startup_jobs, stats_enabled)

    # Actually start jobs now, including any added above
    if self._add_scheduler_jobs:
        add_scheduler_jobs(self.api, self.odb, cluster.id, spawn=False)
def make_cgi_application(global_conf, script, path=None, include_os_environ=None, query_string=None):
    """
    Paste app factory: proxy requests to the CGI script ``script``.

    ``path`` is the search path used when the script name is not absolute;
    when not given it is looked up in the configuration ('path'/'PATH'),
    and failing that ``$PATH`` is used.
    """
    search_path = path
    if search_path is None:
        search_path = global_conf.get('path') or global_conf.get('PATH')
    return CGIApplication(
        script,
        path=search_path,
        include_os_environ=converters.asbool(include_os_environ),
        query_string=query_string)
def robots(self):
    """Serve robots.txt; in testing mode everything is disallowed."""
    response.headers['Content-Type'] = 'text/plain'
    if asbool(config.get('testing', False)):
        return 'User-agent: *\nDisallow: /'
    lines = [
        'User-agent: *',
        'Allow: /',
        '',
        'User-agent: Googlebot',
        'Disallow: /passwords',
        'Disallow: /news/hourly',
        'Disallow: /news/weekly',
        'Allow: /',
    ]
    return '\n'.join(lines)
def send_sms(number, text, sender, recipient=None, parent=None):
    """Send sms using the vertex sms gateway."""
    from ututi.model import SMS
    msg = SMS(recipient_number=number,
              message_text=text,
              sender=sender,
              recipient=recipient)
    if parent:
        msg.outgoing_group_message = parent
    meta.Session.add(msg)
    log.debug("%s -> %r" % (number, text))
    # When outgoing mail is held (mirrors the e-mail path), queue the SMS
    # instead of letting it go out right away.
    if asbool(config.get('hold_emails', False)):
        sms_queue.append((msg.recipient_number, msg.message_text))
def __init__(self, app, config=None, loglevel='DEBUG', **kwargs):
    """Stores logging statements per request, and includes a bar on the page
    that shows the logging statements

    ''loglevel''
        Default log level for messages that should be caught.
        Note: the root logger's log level also matters!  If you do
        logging.getLogger('').setLevel(logging.INFO), no DEBUG messages
        will make it to Logview's handler anyway.
    """
    self.app = app
    tmpl_dir = os.path.join(here_dir, 'templates')
    self.mako = TemplateLookup(directories=[tmpl_dir])
    # BUG FIX: the declared default config=None used to crash below at
    # config.update(kwargs) - fall back to an empty dict so the middleware
    # can be constructed without an explicit config.
    if config is None:
        config = {}
    self.inupy_config = config
    # Keyword arguments override config entries (note: this mutates the
    # caller's dict when one is passed in, as before).
    config.update(kwargs)
    if loglevel is None:
        # Inherit the root logger's current level.
        self.loglevel = logging.getLogger('').level
    elif isinstance(loglevel, basestring):
        # A level name such as 'DEBUG' - resolve it to its numeric value.
        self.loglevel = getattr(logging, loglevel)
    else:
        self.loglevel = loglevel
    self.keep_tracebacks = asbool(config.get(
        'keep_tracebacks', RequestHandler.keep_tracebacks))
    self.keep_tracebacks_limit = int(config.get(
        'keep_tracebacks_limit', RequestHandler.keep_tracebacks_limit))
    self.skip_first_n_frames = int(config.get(
        'skip_first_n_frames', RequestHandler.skip_first_n_frames))
    self.skip_last_n_frames = int(config.get(
        'skip_last_n_frames', RequestHandler.skip_last_n_frames))
    # Attach one handler instance to the root logger so it sees records
    # from every logger in the process.
    reqhandler = RequestHandler()
    reqhandler.setLevel(self.loglevel)
    reqhandler.keep_tracebacks = self.keep_tracebacks
    reqhandler.keep_tracebacks_limit = self.keep_tracebacks_limit
    reqhandler.skip_first_n_frames = self.skip_first_n_frames
    reqhandler.skip_last_n_frames = self.skip_last_n_frames
    logging.getLogger('').addHandler(reqhandler)
    self.reqhandler = reqhandler
    # Logview's own logger does not propagate, so its messages are only
    # captured by the handler attached above.
    self.logger = logging.getLogger(__name__)
    self.logger.propagate = False
    self.logger.setLevel(self.loglevel)
    self.logger.addHandler(reqhandler)
def slow_response_details(req, cid, service_name):
    """Show the details of one slow response, identified by *cid*, for
    *service_name* - timestamps are localized to the user's timezone and the
    request/response payloads are optionally pretty-printed and highlighted.
    """
    item = None
    service = _get_service(req, service_name)
    pretty_print = asbool(req.GET.get("pretty_print"))
    input_dict = {"cid": cid, "name": service_name}
    response = req.zato.client.invoke("zato.service.slow-response.get", input_dict)
    if response.has_data:
        cid = response.data.cid
        # A ZATO_NONE cid means no slow response was stored for this id.
        if cid != ZATO_NONE:
            item = SlowResponse()
            item.cid = response.data.cid
            item.req_ts = from_utc_to_user(response.data.req_ts + "+00:00", req.zato.user_profile)
            item.resp_ts = from_utc_to_user(response.data.resp_ts + "+00:00", req.zato.user_profile)
            item.proc_time = response.data.proc_time
            item.service_name = service_name
            item.threshold = service.slow_threshold
            # Produce a highlighted HTML rendering of each payload we can
            # recognize; dicts are serialized to JSON first.
            for name in ("req", "resp"):
                value = getattr(response.data, name)
                if value:
                    # value = value.decode('base64')
                    if isinstance(value, dict):
                        value = dumps(value)
                        data_format = "json"
                    else:
                        data_format = known_data_format(value)
                    if data_format:
                        if pretty_print:
                            value = get_pretty_print(value, data_format)
                        attr_name = name + "_html"
                        setattr(
                            item,
                            attr_name,
                            highlight(value, data_format_lexer[data_format](), HtmlFormatter(linenos="table")),
                        )
    return_data = {
        "cluster_id": req.zato.cluster_id,
        "service": service,
        "item": item,
        # Inverted on purpose - presumably feeds a pretty-print toggle link
        # in the template; confirm against the template before changing.
        "pretty_print": not pretty_print,
    }
    return TemplateResponse(req, "zato/service/slow-response-details.html", return_data)
def slow_response_details(req, cid, service_name):
    """Show the details of a single slow service response."""
    item = None
    service = _get_service(req, service_name)
    pretty_print = asbool(req.GET.get('pretty_print'))

    response = req.zato.client.invoke(
        'zato.service.slow-response.get', {'cid': cid, 'name': service_name})

    if response.has_data and response.data.cid != ZATO_NONE:
        item = SlowResponse()
        item.cid = response.data.cid
        item.req_ts = from_utc_to_user(response.data.req_ts + '+00:00', req.zato.user_profile)
        item.resp_ts = from_utc_to_user(response.data.resp_ts + '+00:00', req.zato.user_profile)
        item.proc_time = response.data.proc_time
        item.service_name = service_name
        item.threshold = service.slow_threshold

        # Attach a highlighted HTML rendering of each recognizable payload.
        for name in ('req', 'resp'):
            value = getattr(response.data, name)
            if not value:
                continue
            #value = value.decode('base64')
            if isinstance(value, dict):
                value = dumps(value)
                data_format = 'json'
            else:
                data_format = known_data_format(value)
            if data_format:
                if pretty_print:
                    value = get_pretty_print(value, data_format)
                html = highlight(value, data_format_lexer[data_format](), HtmlFormatter(linenos='table'))
                setattr(item, name + '_html', html)

    return TemplateResponse(req, 'zato/service/slow-response-details.html', {
        'cluster_id': req.zato.cluster_id,
        'service': service,
        'item': item,
        'pretty_print': not pretty_print,
    })
def index(self, order=None, offset=0, limit=10, secret_key=None, **kwargs):
    """Query for a flat list of categories.

    Accepts optional ``id``/``name``/``slug`` lookups (passed via kwargs),
    an ``order`` of the form "column asc|desc" (defaulting to newest first),
    plus ``offset``/``limit`` paging. ``limit`` is capped by
    ``app_globals.settings['api_media_max_results']``. When
    ``api_secret_key_required`` is enabled, ``secret_key`` must match the
    configured ``api_secret_key``.

    :returns: JSON dict
    """
    settings = app_globals.settings
    if asbool(settings['api_secret_key_required']) \
            and secret_key != settings['api_secret_key']:
        return dict(error='Authentication Error')

    # Direct lookups delegate to the single-item query path.
    if any(key in kwargs for key in ('id', 'slug', 'name')):
        kwargs.update(offset=offset, limit=limit, tree=False)
        return self._get_query(**kwargs)

    return self._index_query(order, offset, limit, tree=False)
def __init__(self, app, global_conf=None, display=NoDefault, logdir=None, context=5, format="html"):
    """Remember the wrapped *app* and normalize the display setting."""
    if global_conf is None:
        global_conf = {}
    # Fall back to the deployment-wide 'debug' flag, then coerce any
    # string value ('true'/'false') to a real boolean.
    wants_display = global_conf.get('debug') if display is NoDefault else display
    if isinstance(wants_display, basestring):
        wants_display = converters.asbool(wants_display)
    self.app = app
    self.display = wants_display
    self.logdir = logdir
    self.context = int(context)
    self.format = format
def _authenticate(self):
    """Authenticate the current request, returning an OAuthAccessToken.

    Two paths: (1) a bearer token from the Authorization header or the
    ``access_token`` parameter - allowed only over HTTPS (or in tests/debug);
    (2) a full OAuth 1.0 signed request verified against the stored consumer
    and access tokens. Raises HTTPUnauthorized on any failure, except an
    unknown consumer key which returns None.
    """
    bearer_token_prefix = 'Bearer '
    auth = request.headers.get('Authorization')
    if auth and auth.startswith(bearer_token_prefix):
        access_token = auth[len(bearer_token_prefix):]
    else:
        access_token = request.params.get('access_token')
    if access_token:
        # handle bearer tokens
        # skip https check if auth invoked from tests
        testing = request.environ.get('paste.testing', False)
        debug = asbool(config.get('debug', False))
        # Accept direct HTTPS or common reverse-proxy forwarding headers.
        if not any((testing,
                    request.scheme == 'https',
                    request.environ.get('HTTP_X_FORWARDED_SSL') == 'on',
                    request.environ.get('HTTP_X_FORWARDED_PROTO') == 'https',
                    debug)):
            request.environ['tg.status_code_redirect'] = True
            raise exc.HTTPUnauthorized('HTTPS is required to use bearer tokens %s' % request.environ)
        access_token = M.OAuthAccessToken.query.get(api_key=access_token)
        if not (access_token and access_token.is_bearer):
            request.environ['tg.status_code_redirect'] = True
            raise exc.HTTPUnauthorized
        return access_token
    # Full OAuth 1.0 signature verification path.
    req = oauth.Request.from_request(
        request.method,
        request.url.split('?')[0],
        headers=request.headers,
        parameters=dict(request.params),
        query_string=request.query_string
    )
    consumer_token = M.OAuthConsumerToken.query.get(api_key=req['oauth_consumer_key'])
    access_token = M.OAuthAccessToken.query.get(api_key=req['oauth_token'])
    if consumer_token is None:
        # NOTE(review): unknown consumer returns None while an unknown access
        # token raises - confirm callers rely on this asymmetry.
        log.error('Invalid consumer token')
        return None
    if access_token is None:
        log.error('Invalid access token')
        raise exc.HTTPUnauthorized
    consumer = consumer_token.consumer
    try:
        self.server.verify_request(req, consumer, access_token.as_token())
    except oauth.Error as e:
        log.error('Invalid signature %s %s', type(e), e)
        raise exc.HTTPUnauthorized
    return access_token
def audit_set_state(req, **kwargs): try: request = { 'id': kwargs['id'], 'audit_enabled': not asbool(req.POST['audit_enabled']) } response = req.zato.client.invoke('zato.http-soap.set-audit-state', request) if not response.ok: raise Exception(response.details) return HttpResponse('OK') except Exception, e: msg = format_exc(e) logger.error(msg) return HttpResponseServerError(msg)
def get(self, id=None, slug=None, api_key=None, format="json", **kwargs):
    """Expose info on a specific published media item, looked up by ID or slug.

    Returns the media_info dict produced by :meth:`_info`, an MRSS rendering
    when ``format == "mrss"``, or a dict with an ``error`` key when
    authentication fails, the format is unknown or nothing matches.
    """
    settings = request.settings
    if asbool(settings['api_secret_key_required']) \
            and api_key != settings['api_secret_key']:
        return dict(error=AUTHERROR)

    if format not in ("json", "mrss"):
        return dict(error=INVALIDFORMATERROR % format)

    # Only published media are reachable through the API.
    query = Media.query.published()
    query = query.filter_by(id=id) if id else query.filter_by(slug=slug)

    try:
        media = query.one()
    except orm.exc.NoResultFound:
        return dict(error="No match found")

    if format == "mrss":
        request.override_template = "sitemaps/mrss.xml"
        return dict(
            media=[media],
            title="Media Entry",
        )

    return self._info(media, include_embed=True)
def make_filter(app, global_conf,
                logger_name='wsgi',
                format=None,
                logging_level=logging.INFO,
                setup_console_handler=True,
                set_logger_level=logging.DEBUG):
    """Paste filter factory wrapping *app* in a TransLogger."""
    from paste.util.converters import asbool

    def _as_level(level):
        # Config files hand levels in as names such as 'INFO'; map those to
        # the numeric values the logging module expects.
        if isinstance(level, str):
            return logging._levelNames[level]
        return level

    return TransLogger(
        app,
        format=format or None,
        logging_level=_as_level(logging_level),
        logger_name=logger_name,
        setup_console_handler=asbool(setup_console_handler),
        set_logger_level=_as_level(set_logger_level))