def screenshots(kwargs, host, args):
    '''Loop through all entries that don't have a thumbnail and create it.'''
    try:
        # Get ID for all entries without a thumbnail
        pending = gramex.data.filter(url=var.FORMS_URL, table=var.FORMS_TABLE, args=args)
        width, height = 300, 300    # TODO: Change dimensions later
        for index, row in pending.iterrows():
            id = row[var.FORMS_ID]
            url = f'{host}/form/{id}'
            # TODO: Use delay='renderComplete'
            content = capture.png(url, selector='.container', width=width, height=height,
                                  delay=1000)
            # Save under GRAMEXDATA/uifactory/thumbnail/<id>.png, cropped to width and height
            target = os.path.join(var.GRAMEXDATA, 'uifactory', 'thumbnail', f'{id}.png')
            Image.open(BytesIO(content)).crop((0, 0, width, height)).save(target)
            # Update the database with the thumbnail filename
            gramex.data.update(
                url=var.FORMS_URL, table=var.FORMS_TABLE, id=var.FORMS_ID,
                args={var.FORMS_ID: [id], 'thumbnail': [f'thumbnail/{id}.png']})
    # Exceptions in a thread are not logged by default. Log them explicitly on console.
    # Otherwise, we won't know WHY something failed
    except Exception:
        app_log.exception('Screenshot failed')
        raise
def initialize(self, **kwargs):
    # self.request.arguments does not handle unicode keys well.
    # In Py2, it returns a str (not unicode). In Py3, it returns latin-1 unicode.
    # Convert this to proper unicode using UTF-8 and store in self.args
    self.args = {}
    for k in self.request.arguments:
        key = (k if isinstance(k, six.binary_type) else k.encode('latin-1')).decode('utf-8')
        # Invalid unicode (e.g. ?x=%f4) throws HTTPError. This disrupts even
        # error handlers. So if there's invalid unicode, log & continue.
        try:
            self.args[key] = self.get_arguments(k)
        except HTTPError:
            app_log.exception('Invalid URL argument %s' % k)
    self._session, self._session_json = None, 'null'
    if self.cache:
        self.cachefile = self.cache()
        self.original_get = self.get
        self.get = self._cached_get
    if self._set_xsrf:
        self.xsrf_token
    # Set the method to the ?x-http-method-override argument or the
    # X-HTTP-Method-Override header if they exist
    if 'x-http-method-override' in self.args:
        self.request.method = self.args.pop('x-http-method-override')[0].upper()
    elif 'X-HTTP-Method-Override' in self.request.headers:
        self.request.method = self.request.headers['X-HTTP-Method-Override'].upper()
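# Illustrative only (not part of the handler above): a client can tunnel a PUT through
# POST using either the query argument or the header that initialize() checks.
# The URL below is a placeholder, not a real endpoint.
import requests

requests.post('https://example.com/api?x-http-method-override=PUT', data={'x': '1'})
requests.post('https://example.com/api', data={'x': '1'},
              headers={'X-HTTP-Method-Override': 'PUT'})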
def cache(conf):
    '''Set up caches'''
    for name, config in conf.items():
        cache_type = config['type']
        if cache_type not in _cache_defaults:
            app_log.warning('cache: %s has unknown type %s', name, config.type)
            continue
        config = merge(dict(config), _cache_defaults[cache_type], mode='setdefault')
        if cache_type == 'memory':
            info.cache[name] = urlcache.MemoryCache(
                maxsize=config['size'], getsizeof=gramex.cache.sizeof)
        elif cache_type == 'disk':
            path = config.get('path', '.cache-' + name)
            info.cache[name] = urlcache.DiskCache(
                path, size_limit=config['size'], eviction_policy='least-recently-stored')
            atexit.register(info.cache[name].close)
        elif cache_type == 'redis':
            path = config['path'] if 'path' in config else None
            try:
                info.cache[name] = urlcache.RedisCache(path=path, maxsize=config['size'])
            except Exception:
                app_log.exception('cache: %s cannot connect to redis', name)
        # if default: true, make this the default cache for gramex.cache.{open,query}
        if config.get('default'):
            for key in ['_OPEN_CACHE', '_QUERY_CACHE']:
                val = gramex.cache.set_cache(info.cache[name], getattr(gramex.cache, key))
                setattr(gramex.cache, key, val)
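# A minimal sketch (not taken from a real config) of the mapping cache() expects,
# using only keys the function above reads: type, size, path and default.
_example_cache_conf = {
    'memory': {'type': 'memory', 'size': 500000000, 'default': True},
    'big-disk': {'type': 'disk', 'path': '.cache-big-disk', 'size': 1000000000},
}
# cache(_example_cache_conf) would register both caches in info.cache and make
# 'memory' the default cache for gramex.cache.open / gramex.cache.query.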
def alert(conf):
    from . import scheduler
    _stop_all_tasks(info.alert)
    schedule_keys = 'minutes hours dates months weekdays years startup utc'.split()
    for name, alert in conf.items():
        _key = cache_key('alert', alert)
        if _key in _cache:
            task = info.alert[name] = _cache[_key]
            task.call_later()
            continue
        app_log.info('Initialising alert: %s', name)
        schedule = {key: alert[key] for key in schedule_keys if key in alert}
        if 'thread' in alert:
            schedule['thread'] = alert['thread']
        schedule['function'] = create_alert(name, alert)
        if schedule['function'] is not None:
            try:
                _cache[_key] = scheduler.Task(name, schedule, info.threadpool,
                                              ioloop=info._main_ioloop)
                info.alert[name] = _cache[_key]
            except Exception:
                app_log.exception('Failed to initialize alert: %s', name)
def _fit(model, x, y, path=None, name=None):
    app_log.info('Starting training...')
    try:
        getattr(model, 'partial_fit', model.fit)(x, y)
        app_log.info('Done training...')
        joblib.dump(model, path)
        app_log.info(f'{name}: Model saved at {path}.')
    except Exception as exc:
        app_log.exception(exc)
    return model
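# A minimal sketch of calling _fit() with made-up data. LogisticRegression has no
# partial_fit, so the plain fit() branch runs; the model is then persisted via joblib.
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression

X, y = make_classification(n_samples=100, n_features=4, random_state=0)
trained = _fit(LogisticRegression(), X, y, path='model.pkl', name='demo')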
def header_callback(self, line):
    try:
        if self.headers is None:
            start_line = parse_response_start_line(line)
            self.http_version, self.status_code, self.http_reason = start_line
            self.headers = HTTPHeaders()
        else:
            self.headers.parse_line(line)
    except Exception:
        app_log.exception('Cannot parse header %s' % line)
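# Illustrative only: tornado.httputil.parse_response_start_line() splits the first
# response line into (version, code, reason), which header_callback() unpacks above.
from tornado.httputil import parse_response_start_line

version, code, reason = parse_response_start_line('HTTP/1.1 200 OK')
print(version, code, reason)    # HTTP/1.1 200 OK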
def get(self, *path_args, **path_kwargs):
    meta, futures = AttrDict(), AttrDict()
    for key, dataset in self.datasets.items():
        meta[key] = AttrDict()
        opt = self._options(dataset, self.args, path_args, path_kwargs, key)
        opt.filter_kwargs.pop('id', None)
        # Run query in a separate thread
        futures[key] = gramex.service.threadpool.submit(
            self.data_filter_method, args=opt.args, meta=meta[key], **opt.filter_kwargs)
        # gramex.data.filter() should set the schema only on first load. Pop it once done
        dataset.pop('schema', None)
    result = AttrDict()
    for key, val in futures.items():
        try:
            result[key] = yield val
        except ValueError as e:
            app_log.exception('%s: filter failed' % self.name)
            raise HTTPError(BAD_REQUEST, reason=e.args[0])
        except Exception as e:
            app_log.exception('%s: filter failed' % self.name)
            raise HTTPError(INTERNAL_SERVER_ERROR, reason=repr(e))
        modify = self.datasets[key].get('modify', None)
        if callable(modify):
            result[key] = modify(data=result[key], key=key, handler=self)
    # modify the result for multiple datasets
    if hasattr(self, 'modify_all'):
        result = self.modify_all(data=result, key=None, handler=self)
    format_options = self.set_format(opt.fmt, meta)
    format_options['args'] = opt.args
    params = {k: v[0] for k, v in opt.args.items() if len(v) > 0}
    for key, val in format_options.items():
        if isinstance(val, str):
            format_options[key] = val.format(**params)
        # In PY2, the values are binary. TODO: ensure that format values are in Unicode
        elif isinstance(val, bytes):
            format_options[key] = val.decode('utf-8').format(**params)
    if opt.download:
        self.set_header('Content-Disposition', 'attachment;filename=%s' % opt.download)
    if opt.meta_header:
        self.set_meta_headers(meta)
    result = result['data'] if self.single else result
    # If modify has changed the content type from a dataframe, write it as-is
    if isinstance(result, (pd.DataFrame, dict)):
        self.write(gramex.data.download(result, **format_options))
    elif result:
        self.write(result)
def _start(self):
    '''
    Check if capture is already running at ``url``. If not, start ``cmd`` and
    check again. Print logs from ``cmd``.
    '''
    self.started = False
    script = self.engine.script
    try:
        # Check if capture.js is at the url specified
        app_log.info('Pinging %s at %s', script, self.url)
        r = requests.get(self.url, timeout=self.timeout)
        self._validate_server(r)
        self.started = True
    except requests.ReadTimeout:
        # If capture.js doesn't respond immediately, we haven't started
        app_log.error('url: %s timed out', self.url)
    except requests.ConnectionError:
        # Try starting the process again
        app_log.info('Starting %s via %s', script, self.cmd)
        self.close()
        # self.cmd is taken from the YAML configuration. Safe to run
        self.proc = Popen(shlex.split(self.cmd), stdout=PIPE, stderr=STDOUT)    # nosec
        self.proc.poll()
        atexit.register(self.close)
        # TODO: what if readline() does not return quickly?
        line = self.proc.stdout.readline().strip()
        if not self.first_line_re.search(line):
            return app_log.error('cmd: %s invalid. Returned "%s"', self.cmd, line)
        app_log.info('Pinging %s at %s', script, self.url)
        try:
            r = requests.get(self.url, timeout=self.timeout)
            self._validate_server(r)
            pid = self.proc.pid
            app_log.info(line.decode('utf-8') + ' live (pid=%s)', pid)
            self.started = True
            # Keep logging capture.js output until proc is killed by another thread
            while hasattr(self, 'proc'):
                line = self.proc.stdout.readline().strip()
                if len(line) == 0:
                    app_log.info('%s terminated: pid=%d', script, pid)
                    self.started = False
                    break
                # Capture won't print anything, unless there's a problem, or if debug is on.
                # So log it at warning level not info.
                app_log.warning(line.decode('utf-8'))
        except Exception:
            app_log.exception('Ran %s. But %s not at %s', self.cmd, script, self.url)
    except Exception:
        app_log.exception('Cannot start Capture')
def _migrate_h5(self, old_store_path):
    try:
        old_store = HDF5Store(old_store_path, flush=5)
        old_info = [(key, old_store.load(key)) for key in old_store.keys()]
        for key, val in old_info:
            self.store.dump(key, val)
        self.store.flush()
        old_store.close()
        os.remove(old_store_path)
    except Exception:
        import sys
        app_log.exception('FATAL: Cannot migrate: {}'.format(old_store_path))
        sys.exit(1)
def push():
    for app, app_config in info.gramexlog.apps.items():
        for item in app_config.queue:
            item['_index'] = app_config.get('index', app)
        try:
            helpers.bulk(app_config.conn, app_config.queue)
            app_config.queue.clear()
        except Exception:
            # TODO: If the connection broke, re-create it
            # This generic exception should be caught for thread to continue its execution
            app_log.exception('gramexlog: push to %s failed', app)
    if 'handle' in info.gramexlog:
        ioloop.remove_timeout(info.gramexlog.handle)
    # Call again after flush seconds
    info.gramexlog.handle = ioloop.call_later(flush, push)
def _stream(self, data):
    buf = self.buf
    buf.extend(data)
    while len(buf):
        index = buf.find(b'\r\n')
        if index < 0:
            break
        data = bytes(buf[:index])
        del buf[:index + 2]
        # Ignore stall warnings
        if len(data) == 0:
            continue
        try:
            self.process_bytes(data)
        except Exception:
            app_log.exception('TwitterStream could not process: %s' % data)
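# A standalone sketch (made-up sample bytes) of the CR-LF framing that _stream()
# implements: complete messages are extracted and processed, a trailing partial
# message stays buffered, and blank keep-alive lines are skipped.
buf = bytearray()
for chunk in (b'{"id": 1}\r\n{"id"', b': 2}\r\n\r\n'):
    buf.extend(chunk)
    while len(buf):
        index = buf.find(b'\r\n')
        if index < 0:
            break
        message = bytes(buf[:index])
        del buf[:index + 2]
        if message:
            print(message)    # b'{"id": 1}', then b'{"id": 2}'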
def process_bytes(self, data):
    try:
        text = six.text_type(data, encoding='utf-8')
        message = json.loads(text)
    except UnicodeError:
        app_log.error('TwitterStream unicode error: %s', data)
        return
    except ValueError:
        # When rate limited, text="Exceeded connection limit for user"
        app_log.error('TwitterStream non-JSON data: %s', text)
        return
    # Process the message (which is usually, but not always, a tweet)
    try:
        self.process_json(message)
    except Exception:
        app_log.exception('TwitterStream could not process message: %s' % text)
def _google_translate(q, source, target, key):
    import requests
    params = {'q': q, 'target': target, 'key': key}
    if source:
        params['source'] = source
    try:
        r = requests.post(
            'https://translation.googleapis.com/language/translate/v2', data=params)
    except requests.RequestException:
        return app_log.exception('Cannot connect to Google Translate')
    response = r.json()
    if 'error' in response:
        return app_log.error('Google Translate API error: %s', response['error'])
    return {
        'q': q,
        't': [t['translatedText'] for t in response['data']['translations']],
        'source': [t.get('detectedSourceLanguage', params.get('source', None))
                   for t in response['data']['translations']],
        'target': [target] * len(q),
    }
def schedule(conf):
    '''Set up the Gramex PeriodicCallback scheduler'''
    # Create tasks running on ioloop for the given schedule, store it in info.schedule
    from . import scheduler
    _stop_all_tasks(info.schedule)
    for name, sched in conf.items():
        _key = cache_key('schedule', sched)
        if _key in _cache:
            task = info.schedule[name] = _cache[_key]
            task.call_later()
            continue
        try:
            app_log.info('Initialising schedule:%s', name)
            _cache[_key] = scheduler.Task(name, sched, info.threadpool,
                                          ioloop=info._main_ioloop)
            info.schedule[name] = _cache[_key]
        except Exception as e:
            app_log.exception(e)
def _write_custom_error(self, status_code, **kwargs):
    if status_code in self.error:
        try:
            result = self.error[status_code]['function'](
                status_code=status_code, kwargs=kwargs, handler=self)
            headers = self.error[status_code].get('conf', {}).get('headers', {})
            self._write_headers(headers.items())
            # result may be a generator / list from build_transform,
            # or a str/bytes/unicode from Template.generate. Handle both
            if isinstance(result, (six.string_types, six.binary_type)):
                self.write(result)
            else:
                for item in result:
                    self.write(item)
            return
        except Exception:
            app_log.exception('url:%s.error.%d error handler raised an exception:',
                              self.name, status_code)
    # If error was not written, use the default error
    self._write_error(status_code, **kwargs)
def _predict(self, data=None, score_col=''):
    if data is None:
        data = self._parse_data(False)
    data = self._transform(data, drop_duplicates=False)
    self.model = cache.open(self.model_path, joblib.load)
    try:
        target = data.pop(score_col)
        metric = self.get_argument('_metric', False)
        if metric:
            scorer = get_scorer(metric)
            return scorer(self.model, data, target)
        return self.model.score(data, target)
    except KeyError:
        # Set data in the same order as the transformer requests
        try:
            data = data[self.model.named_steps['transform']._feature_names_in]
            data[self.get_opt('target_col', '_prediction')] = self.model.predict(data)
        except Exception as exc:
            app_log.exception(exc)
        return data
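# Illustrative sketch of the _metric branch above: sklearn's get_scorer() turns a
# metric name into a callable scorer(estimator, X, y). The data here is made up.
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import get_scorer

X, y = make_classification(n_samples=100, n_features=4, random_state=0)
clf = LogisticRegression().fit(X, y)
print(get_scorer('accuracy')(clf, X, y))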
def log(conf):
    '''Set up logging using Python's standard logging.config.dictConfig()'''
    # Create folders for log filenames mentioned by active handlers
    active_handlers = set(conf.get('root', {}).get('handlers', []))
    for logger in conf.get('loggers', {}).values():
        active_handlers |= set(logger.get('handlers', []))
    for handler, handler_conf in conf.get('handlers', {}).items():
        if handler in active_handlers:
            filename = handler_conf.get('filename', None)
            if filename is not None:
                folder = os.path.dirname(os.path.abspath(handler_conf.filename))
                if not os.path.exists(folder):
                    try:
                        os.makedirs(folder)
                    except OSError:
                        app_log.exception('log: %s: cannot create folder %s', handler, folder)
    try:
        logging.config.dictConfig(conf)
    except (ValueError, TypeError, AttributeError, ImportError):
        app_log.exception('Error in log: configuration')
def init(force_reload=False, **kwargs):
    '''
    Update Gramex configurations and start / restart the instance.

    ``gramex.init()`` can be called any time to refresh configuration files.
    ``gramex.init(key=val)`` adds ``val`` as a configuration layer named ``key``.
    If ``val`` is a Path, it is converted into a PathConfig. (If ``val`` is a
    directory, its ``gramex.yaml`` is used.)

    Services are re-initialised if their configurations have changed. Service
    callbacks are always re-run (even if the configuration hasn't changed.)
    '''
    try:
        setup_secrets(paths['base'] / '.secrets.yaml')
    except Exception as e:
        app_log.exception(e)

    # Reset variables
    variables.clear()
    variables.update(setup_variables())

    # Initialise configuration layers with provided configurations
    # AttrDicts are updated as-is. Paths are converted to PathConfig
    paths.update(kwargs)
    for key, val in paths.items():
        if isinstance(val, Path):
            if val.is_dir():
                val = val / 'gramex.yaml'
            val = PathConfig(val)
        config_layers[key] = val

    # Locate all config files
    config_files = set()
    for path_config in config_layers.values():
        if hasattr(path_config, '__info__'):
            for pathinfo in path_config.__info__.imports:
                config_files.add(pathinfo.path)
    config_files = list(config_files)

    # Add config file folders to sys.path
    sys.path[:] = _sys_path + [str(path.absolute().parent) for path in config_files]

    from . import services
    globals()['service'] = services.info    # gramex.service = gramex.services.info

    # Override final configurations
    appconfig.clear()
    appconfig.update(+config_layers)
    # --settings.debug => log.root.level = DEBUG
    if appconfig.app.get('settings', {}).get('debug', False):
        appconfig.log.root.level = logging.DEBUG

    # Set up a watch on config files (including imported files)
    if appconfig.app.get('watch', True):
        from services import watcher
        watcher.watch('gramex-reconfig', paths=config_files, on_modified=lambda event: init())

    # Run all valid services. (The "+" before config_chain merges the chain)
    # Services may return callbacks to be run at the end
    for key, val in appconfig.items():
        if key not in conf or conf[key] != val or force_reload:
            if hasattr(services, key):
                app_log.debug('Loading service: %s', key)
                conf[key] = prune_keys(val, {'comment'})
                callback = getattr(services, key)(conf[key])
                if callable(callback):
                    callbacks[key] = callback
            else:
                app_log.error('No service named %s', key)

    # Run the callbacks. Specifically, the app service starts the Tornado ioloop
    for key in (+config_layers).keys():
        if key in callbacks:
            app_log.debug('Running callback: %s', key)
            callbacks[key]()
def run_alert(callback=None, args=None):
    '''
    Runs the configured alert. If a callback is specified, calls the callback
    with all email arguments. Else sends the email.
    If args= is specified, add it as data['args'].
    '''
    app_log.info('alert: %s running', name)
    data, each, fail = {'config': alert, 'args': {} if args is None else args}, [], []
    try:
        load_datasets(data, each)
    except Exception as e:
        app_log.exception('alert: %s data processing failed', name)
        fail.append({'error': e})

    retval = []
    for index, row in each:
        data['index'], data['row'], data['config'] = index, row, alert
        try:
            retval.append(AttrDict(index=index, row=row, mail=create_mail(data)))
        except Exception as e:
            app_log.exception('alert: %s[%s] templating (row=%r)', name, index, row)
            fail.append({'index': index, 'row': row, 'error': e})

    callback = mailer.mail if not callable(callback) else callback
    done = []
    for v in retval:
        try:
            callback(**v.mail)
        except Exception as e:
            fail.append({'index': v.index, 'row': v.row, 'mail': v.mail, 'error': e})
            app_log.exception('alert: %s[%s] delivery (row=%r)', name, v.index, v.row)
        else:
            done.append(v)
            event = {
                'alert': name, 'service': service, 'from': mailer.email or '',
                'to': '', 'cc': '', 'bcc': '', 'subject': '',
                'datetime': datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%SZ"),
            }
            event.update({k: v for k, v in v.mail.items() if k in event})
            event['attachments'] = ', '.join(v.mail.get('attachments', []))
            alert_logger.info(event)

    # Run notifications
    args = {'done': done, 'fail': fail}
    for notification_name in alert.get('notify', []):
        notify = info.alert.get(notification_name)
        if notify is not None:
            notify.run(callback=callback, args=args)
        else:
            app_log.error('alert: %s.notify: alert %s not defined', name, notification_name)
    return args
def translate(*q, **kwargs):
    '''
    Translate strings using the Google Translate API. Example::

        translate('Hello', 'World', source='en', target='de', key='...')

    returns a DataFrame::

        source  target  q       t
        en      de      Hello   ...
        en      de      World   ...

    The results can be cached via a ``cache={...}`` that has parameters for
    :py:func:`gramex.data.filter`. Example::

        translate('Hello', key='...', cache={'url': 'translate.xlsx'})

    :arg str q: one or more strings to translate
    :arg str source: 2-letter source language (e.g. en, fr, es, hi, cn, etc).
        If empty or None, auto-detects source
    :arg str target: 2-letter target language (e.g. en, fr, es, hi, cn, etc).
    :arg str key: Google Translate API key
    :arg dict cache: kwargs for :py:func:`gramex.data.filter`. Has keys such as
        url (required), table (for databases), sheet_name (for Excel), etc.

    Reference: https://cloud.google.com/translate/docs/apis
    '''
    import gramex.data
    source = kwargs.pop('source', None)
    target = kwargs.pop('target', None)
    key = kwargs.pop('key', None)
    cache = kwargs.pop('cache', None)
    api = kwargs.pop('api', 'google')
    if cache is not None:
        if not isinstance(cache, dict):
            raise ValueError('cache= must be a FormHandler dict config, not %r' % cache)

    # Store data in cache with fixed columns: source, target, q, t
    result = pd.DataFrame(columns=['source', 'target', 'q', 't'])
    if not q:
        return result
    original_q = q

    # Fetch from cache, if any
    if cache:
        try:
            args = {'q': q, 'target': [target] * len(q)}
            if source:
                args['source'] = [source] * len(q)
            with _translate_cache_lock:
                result = gramex.data.filter(args=args, **cache)
        except Exception:
            app_log.exception('Cannot query %r in translate cache: %r', args, dict(cache))
        # Remove already cached results from q
        q = [v for v in q if v not in set(result.get('q', []))]

    if len(q):
        new_data = translate_api[api](q, source, target, key)
        if new_data is not None:
            result = result.append(pd.DataFrame(new_data), sort=False)
            if cache:
                with _translate_cache_lock:
                    gramex.data.insert(id=['source', 'target', 'q'], args=new_data, **cache)

    # Sort results by q
    result['order'] = result['q'].map(original_q.index)
    result.sort_values('order', inplace=True)
    result.drop_duplicates(subset=['q'], inplace=True)
    del result['order']

    return result
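# Hypothetical usage, following the docstring above (requires a valid Google Translate
# API key and network access); cache= persists results through gramex.data, so repeated
# calls skip the API for already translated strings.
df = translate('Hello', 'World', source='en', target='de',
               key='...', cache={'url': 'translate.xlsx'})
# df has columns source, target, q, t: one row per input string.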
def url(conf):
    '''Set up the tornado web app URL handlers'''
    handlers = []
    # Sort the handlers in descending order of priority
    specs = sorted(conf.items(), key=_sort_url_patterns, reverse=True)
    for name, spec in specs:
        _key = cache_key('url', spec)
        if _key in _cache:
            handlers.append(_cache[_key])
            continue
        if 'handler' not in spec:
            app_log.error('url: %s: no handler specified', name)
            continue
        app_log.debug('url: %s (%s) %s', name, spec.handler, spec.get('priority', ''))
        urlspec = AttrDict(spec)
        handler = locate(spec.handler, modules=['gramex.handlers'])
        if handler is None:
            app_log.error('url: %s: ignoring missing handler %s', name, spec.handler)
            continue

        # Create a subclass of the handler with additional attributes.
        class_vars = {'name': name, 'conf': spec}
        # If there's a cache section, get the cache method for use by BaseHandler
        if 'cache' in urlspec:
            class_vars['cache'] = _cache_generator(urlspec['cache'], name=name)
        else:
            class_vars['cache'] = None
        # PY27 type() requires the class name to be a string, not unicode
        urlspec.handler = type(str(spec.handler), (handler, ), class_vars)

        # If there's a setup method, call it to initialize the class
        kwargs = urlspec.get('kwargs', {})
        if hasattr(handler, 'setup'):
            try:
                urlspec.handler.setup_default_kwargs()
                urlspec.handler.setup(**kwargs)
            except Exception:
                app_log.exception('url: %s: setup exception in handler %s', name, spec.handler)
                # Since we can't set up the handler, all requests must report the error instead
                class_vars['exc_info'] = sys.exc_info()
                error_handler = locate('SetupFailedHandler', modules=['gramex.handlers'])
                urlspec.handler = type(str(spec.handler), (error_handler, ), class_vars)
                urlspec.handler.setup(**kwargs)

        try:
            handler_entry = tornado.web.URLSpec(
                name=name,
                pattern=_url_normalize(urlspec.pattern),
                handler=urlspec.handler,
                kwargs=kwargs,
            )
        except re.error:
            app_log.error('url: %s: pattern: %s is invalid', name, urlspec.pattern)
            continue
        except Exception:
            app_log.exception('url: %s: invalid', name)
            continue
        _cache[_key] = handler_entry
        handlers.append(handler_entry)

    info.app.clear_handlers()
    info.app.add_handlers('.*$', handlers)
def run_alert(callback=None):
    '''
    Runs the configured alert. If a callback is specified, calls the callback
    with all email arguments. Else sends the email.
    '''
    app_log.info('alert: %s running', name)
    data = {'config': alert}
    for key, dataset in datasets.items():
        # Allow raw data in lists as-is. Treat dicts as {url: ...}
        data[key] = dataset if isinstance(dataset, list) else gramex.data.filter(**dataset)

    result = condition(**data)
    # Avoiding isinstance(result, pd.DataFrame) to avoid importing pandas
    if type(result).__name__ == 'DataFrame':
        data['data'] = result
    elif isinstance(result, dict):
        data.update(result)
    elif not result:
        app_log.debug('alert: %s stopped. condition = %s', name, result)
        return

    each = [(None, None)]
    if 'each' in alert:
        each_data = data[alert['each']]
        if isinstance(each_data, dict):
            each = list(each_data.items())
        elif isinstance(each_data, list):
            each = list(enumerate(each_data))
        elif hasattr(each_data, 'iterrows'):
            each = list(each_data.iterrows())
        else:
            app_log.error('alert: %s: each: requires data.%s to be a dict/list/DataFrame',
                          name, alert['each'])
            return

    kwargslist = []
    for index, row in each:
        data['index'], data['row'], data['config'] = index, row, alert
        # Generate email content
        kwargs = {}
        kwargslist.append(kwargs)
        for key in ['bodyfile', 'htmlfile', 'markdownfile']:
            target = key.replace('file', '')
            if key in templates and target not in templates:
                path = templates[key].generate(**data).decode('utf-8')
                tmpl = gramex.cache.open(path, 'template')
                kwargs[target] = tmpl.generate(**data).decode('utf-8')
        try:
            for key in ['to', 'cc', 'bcc', 'from', 'subject', 'body', 'html', 'markdown']:
                if key in templates:
                    tmpl = templates[key]
                    if isinstance(tmpl, list):
                        kwargs[key] = []
                        for subtmpl in tmpl:
                            kwargs[key].append(subtmpl.generate(**data).decode('utf-8'))
                    else:
                        kwargs[key] = tmpl.generate(**data).decode('utf-8')
        except Exception:
            # If any template raises an exception, log it and continue with next email
            app_log.exception('alert: %s(#%s).%s: Template exception', name, index, key)
            continue

        headers = {}
        # user: {id: ...} creates an X-Gramex-User header to mimic the user
        if 'user' in alert:
            user = json.dumps(alert['user'], ensure_ascii=True, separators=(',', ':'))
            headers['X-Gramex-User'] = tornado.web.create_signed_value(
                info.app.settings['cookie_secret'], 'user', user)

        if 'markdown' in kwargs:
            kwargs['html'] = _markdown_convert(kwargs.pop('markdown'))

        if 'images' in templates:
            kwargs['images'] = {}
            for cid, val in templates['images'].items():
                urlpath = val.generate(**data).decode('utf-8')
                urldata = urlfetch(urlpath, info=True, headers=headers)
                if urldata['content_type'].startswith('image/'):
                    kwargs['images'][cid] = urldata['name']
                else:
                    with io.open(urldata['name'], 'rb') as temp_file:
                        bytestoread = 80
                        first_line = temp_file.read(bytestoread)
                        app_log.error('alert: %s: %s: %d (%s) not an image: %s\n%r',
                                      name, cid, urldata['r'].status_code,
                                      urldata['content_type'], urlpath, first_line)

        if 'attachments' in templates:
            kwargs['attachments'] = [
                urlfetch(attachment.generate(**data).decode('utf-8'), headers=headers)
                for attachment in templates['attachments']
            ]

        if callable(callback):
            return callback(**kwargs)

        # Email recipient. TODO: run this in a queue. (Anand)
        mailer.mail(**kwargs)

        # Log the event
        event = {
            'alert': name, 'service': service, 'from': mailer.email or '',
            'to': '', 'cc': '', 'bcc': '', 'subject': '',
            'datetime': datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%SZ"),
        }
        event.update({k: v for k, v in kwargs.items() if k in event})
        event['attachments'] = ', '.join(kwargs.get('attachments', []))
        alert_logger.info(event)

    return kwargslist