def test_collector(self):
    start = time.time()

    def a(end):
        while time.time() < end:
            pass
        c(time.time() + 0.1)

    def b(end):
        while time.time() < end:
            pass
        c(time.time() + 0.1)

    def c(end):
        while time.time() < end:
            pass

    collector = Collector(interval=0.01, mode='prof')
    collector.start()
    a(time.time() + 0.1)
    b(time.time() + 0.2)
    c(time.time() + 0.3)
    end = time.time()
    collector.stop()
    elapsed = end - start
    self.assertTrue(0.8 < elapsed < 0.9, elapsed)
    counts = self.filter_stacks(collector)
    expected = {
        ('a', 'test_collector'): 10,
        ('c', 'a', 'test_collector'): 10,
        ('b', 'test_collector'): 20,
        ('c', 'b', 'test_collector'): 10,
        ('c', 'test_collector'): 30,
    }
    self.check_counts(counts, expected)
    # cost depends on stack depth; for this tiny test I see 40-80usec
    time_per_sample = float(collector.sample_time) / collector.samples_taken
    self.assertTrue(time_per_sample < 0.000100, time_per_sample)
def plop_profile(app, seconds=10, filename=None):
    if platform == 'win':
        return
    if not filename:
        filename = '%s-%s.plop' % ('client', time.strftime('%Y%m%d-%H%M-%S'))
    from plop.collector import Collector
    collector = Collector()

    @assert_message_queue
    def stop_profile():
        TRACE('Done profiling')
        collector.stop()
        if collector.samples_taken:
            path = os.path.expanduser(u'~/dropbox_profiles')
            app.safe_makedirs(path)
            path = os.path.join(path, filename)
            with open(path, 'w') as f:
                f.write(repr(dict(collector.stack_counts)))
            TRACE('profile output saved to %s', path)
            overhead = float(collector.sample_time) / collector.samples_taken
            TRACE('overhead was %s per sample (%s%%)', overhead, overhead / collector.interval)
            app.ui_kit.show_bubble(Bubble(BubbleKind.DEBUG_BUBBLE_LONG,
                                          u'Profile data saved to %s. Overhead was %s per sample (%s%%)'
                                          % (path, overhead, overhead / collector.interval),
                                          u'Love bdarns!'))
        else:
            app.ui_kit.show_bubble(Bubble(BubbleKind.DEBUG_BUBBLE_LONG, u'No data collected.', u'Miss bdarns!'))

    collector.start()
    TRACE('Profiling for %f seconds', seconds)
    app.ui_kit.call_later(seconds, stop_profile)
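# A minimal sketch (not from any of the projects above) of reading back a profile saved
# as repr(dict(collector.stack_counts)), as plop_profile() writes it. It assumes the file
# holds a Python dict literal mapping sampled stack tuples to sample counts; the helper
# names and the `limit` parameter are hypothetical.
import ast
import operator

def load_plop_counts(path):
    # The file is a plain Python literal: {stack: count, ...}, so literal_eval suffices.
    with open(path) as f:
        return ast.literal_eval(f.read())

def print_hottest_stacks(path, limit=10):
    counts = load_plop_counts(path)
    # Show the most frequently sampled stacks first.
    for stack, n in sorted(counts.items(), key=operator.itemgetter(1), reverse=True)[:limit]:
        print(n, stack)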
def test_collect_threads(self):
    start = time.time()

    def a(end):
        while time.time() < end:
            pass

    def thread1_func():
        a(time.time() + 0.2)

    def thread2_func():
        a(time.time() + 0.3)

    collector = Collector(interval=0.01, mode='prof')
    collector.start()
    thread1 = threading.Thread(target=thread1_func)
    thread2 = threading.Thread(target=thread2_func)
    thread1.start()
    thread2.start()
    a(time.time() + 0.1)
    # Busy-wait (rather than just join) so this thread keeps burning CPU while the
    # workers run; is_alive() replaces the removed isAlive() alias.
    while thread1.is_alive():
        pass
    while thread2.is_alive():
        pass
    thread1.join()
    thread2.join()
    end = time.time()
    collector.stop()
    elapsed = end - start
    self.assertTrue(0.3 < elapsed < 0.4, elapsed)
    counts = self.filter_stacks(collector)
    expected = {
        ('a', 'test_collect_threads'): 10,
        ('a', 'thread1_func'): 20,
        ('a', 'thread2_func'): 30,
    }
    self.check_counts(counts, expected)
def prepare(self):
    starlight.data.reset_statistics()
    self.did_trigger_update = starlight.update.check_version()
    super().prepare()

    if self.get_argument("profile", None) and os.environ.get("ALLOW_PROFILING"):
        self.collector = Collector()
        self.collector.start()
def wrapper(*args, **kwargs):
    log = os.path.join(log_to, log_file)
    plop = Collector()
    plop.start()
    result = function(*args, **kwargs)
    plop.stop()
    with open(log, 'a') as f:
        f.write(repr(dict(plop.stack_counts)))
    return result
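# The wrapper above closes over log_to, log_file, and function, which come from an
# enclosing decorator that is not shown. A plausible enclosing decorator factory is
# sketched here under that assumption; the name plop_profile_to is hypothetical, and the
# try/finally is an addition so samples are written even if the wrapped call raises.
import functools
import os
from plop.collector import Collector

def plop_profile_to(log_to, log_file):
    def decorator(function):
        @functools.wraps(function)
        def wrapper(*args, **kwargs):
            log = os.path.join(log_to, log_file)
            plop = Collector()
            plop.start()
            try:
                return function(*args, **kwargs)
            finally:
                plop.stop()
                with open(log, 'a') as f:
                    f.write(repr(dict(plop.stack_counts)))
        return wrapper
    return decorator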
class ProfileHandler(RequestHandler):
    @asynchronous
    def get(self):
        self.collector = Collector()
        self.collector.start()
        IOLoop.instance().add_timeout(datetime.timedelta(seconds=60), self.finish_profile)

    def finish_profile(self):
        self.collector.stop()
        formatter = PlopFormatter()
        self.finish(formatter.format(self.collector))
class ProfileHandler(RequestHandler):
    @asynchronous
    def get(self):
        self.collector = Collector()
        self.collector.start()
        IOLoop.instance().add_timeout(datetime.timedelta(seconds=60), self.finish_profile)

    def finish_profile(self):
        self.collector.stop()
        self.finish(repr(dict(self.collector.stack_counts)))
class ProfileHandler(tornado.web.RequestHandler):
    async def get(self):
        self.collector = Collector()
        self.collector.start()
        return await self.finish_profile()

    async def finish_profile(self):
        time.sleep(1)
        self.collector.stop()
        formatter = PlopFormatter()
        self.finish(formatter.format(self.collector))
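# time.sleep(1) in the handler above blocks Tornado's event loop for the whole sampling
# window. A non-blocking variant is sketched below as an assumption about intent, not the
# original author's code: it awaits asyncio.sleep so other requests can still be served
# while the collector samples. The class name AsyncProfileHandler is hypothetical.
import asyncio

import tornado.web
from plop.collector import Collector, PlopFormatter

class AsyncProfileHandler(tornado.web.RequestHandler):
    async def get(self):
        self.collector = Collector()
        self.collector.start()
        await asyncio.sleep(1)  # yield to the event loop while samples accumulate
        self.collector.stop()
        formatter = PlopFormatter()
        self.finish(formatter.format(self.collector))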
def initialize(self):
    self.session = self.application.my_settings.get("db_session")()
    self.graph = Graph()

    if self.get_argument("_profile", False):
        self.perf_collector = Collector()
        self.perf_trace_uuid = str(uuid4())
        self.perf_collector.start()
    else:
        self.perf_collector = None
        self.perf_trace_uuid = None

    self._request_start_time = datetime.utcnow()
    stats.incr("requests")
    stats.incr("requests_{}".format(self.__class__.__name__))
def __call__(self, environ, start_response):
    if not self.should_profile():
        return self.application(environ, start_response)

    collector = Collector()
    collector.start()
    start = time.time()
    try:
        return self.application(environ, start_response)
    finally:
        stop = time.time()
        collector.stop()
        try:
            self.save_data(environ, start, stop, collector)
        except Exception as e:
            self.logger.exception(e)
def start_profile(callback):
    ProfileHandler.waiters.append(callback)
    if ProfileHandler.profile_running:
        return
    logging.info('starting profiler, will run for %ds' % ProfileHandler.DURATION)
    ProfileHandler.profile_running = True

    collector = Collector()
    collector.start(ProfileHandler.DURATION)

    yield gen.Task(IOLoop.current().add_timeout,
                   datetime.timedelta(seconds=ProfileHandler.DURATION))

    collector.stop()
    data = repr(dict(collector.stack_counts))
    for waiter in ProfileHandler.waiters:
        waiter(data)
    logging.info('finished profile collection')
    ProfileHandler.profile_running = False
def prepare(self):
    starlight.data.reset_statistics()
    starlight.check_version()
    super().prepare()

    if self.get_argument("profile", None) and os.environ.get("ALLOW_PROFILING"):
        self.collector = Collector()
        self.collector.start()
class HandlerSyncedWithMaster(tornado.web.RequestHandler):
    def prepare(self):
        starlight.data.reset_statistics()
        starlight.check_version()
        super().prepare()

        if self.get_argument("profile", None) and os.environ.get("ALLOW_PROFILING"):
            self.collector = Collector()
            self.collector.start()

    def finish(self, *args, **kw):
        super().finish(*args, **kw)

        if self.get_argument("profile", None) and os.environ.get("ALLOW_PROFILING"):
            self.collector.stop()
            formatter = PlopFormatter(max_stacks=9001)
            if self.collector.samples_taken:
                formatter.store(self.collector,
                                "{0}_{1}.profile".format(self.__class__.__name__, time.time()))
def initialize(self, *args: Any, **kwargs: Any) -> None:
    self.graph = Graph()
    self.session = self.settings["session"]()  # type: Session
    self.template_engine = self.settings["template_engine"]  # type: FrontendTemplateEngine
    self.plugins = get_plugin_proxy()
    session_factory = SingletonSessionFactory(self.session)
    self.usecase_factory = create_graph_usecase_factory(settings(), self.plugins, session_factory)

    if self.get_argument("_profile", False):
        self.perf_collector = Collector()
        self.perf_trace_uuid = str(uuid4())  # type: Optional[str]
        self.perf_collector.start()
    else:
        self.perf_collector = None
        self.perf_trace_uuid = None

    self._request_start_time = datetime.utcnow()
class HandlerSyncedWithMaster(tornado.web.RequestHandler):
    def prepare(self):
        starlight.data.reset_statistics()
        self.did_trigger_update = starlight.update.check_version()
        super().prepare()

        if self.get_argument("profile", None) and os.environ.get("ALLOW_PROFILING"):
            self.collector = Collector()
            self.collector.start()

    def finish(self, *args, **kw):
        super().finish(*args, **kw)

        if self.get_argument("profile", None) and os.environ.get("ALLOW_PROFILING"):
            self.collector.stop()
            formatter = PlopFormatter(max_stacks=9001)
            if self.collector.samples_taken:
                formatter.store(self.collector,
                                "{0}_{1}.profile".format(self.__class__.__name__, time.time()))
def initialize(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    self.graph = Graph()
    self.session = kwargs["session"]()  # type: Session
    self.template_env = kwargs["template_env"]  # type: Environment
    self.usecase_factory = kwargs["usecase_factory"]  # type: UseCaseFactory

    if self.get_argument("_profile", False):
        self.perf_collector = Collector()
        self.perf_trace_uuid = str(uuid4())  # type: Optional[str]
        self.perf_collector.start()
    else:
        self.perf_collector = None
        self.perf_trace_uuid = None

    self._request_start_time = datetime.utcnow()
    stats.log_rate("requests", 1)
    stats.log_rate("requests_{}".format(self.__class__.__name__), 1)
def disabled_test_collect_threads(self):
    start = time.time()

    def a(end):
        while time.time() < end:
            pass

    def thread1_func():
        a(time.time() + 0.2)

    def thread2_func():
        a(time.time() + 0.3)

    collector = Collector(interval=0.01, mode='prof')
    collector.start()
    thread1 = threading.Thread(target=thread1_func)
    thread2 = threading.Thread(target=thread2_func)
    thread1.start()
    thread2.start()
    a(time.time() + 0.1)
    # Busy-wait so the main thread keeps accumulating CPU time while the workers run;
    # is_alive() replaces the removed isAlive() alias.
    while thread1.is_alive():
        pass
    while thread2.is_alive():
        pass
    thread1.join()
    thread2.join()
    end = time.time()
    collector.stop()
    elapsed = end - start
    self.assertTrue(0.3 < elapsed < 0.4, elapsed)
    counts = self.filter_stacks(collector)
    expected = {
        ('a', 'test_collect_threads'): 10,
        ('a', 'thread1_func'): 20,
        ('a', 'thread2_func'): 30,
    }
    self.check_counts(counts, expected)
class PlopProfileHandler(RequestHandler):
    def __init__(self, application, request, **kwargs):
        super(PlopProfileHandler, self).__init__(application, request, **kwargs)
        self.interval = 60
        self.output = '/tmp/plop.out'
        self.collector = Collector()

    @asynchronous
    def get(self):
        self.interval = int(self.get_argument('interval', self.interval))
        self.output = self.get_argument('output', self.output)
        self.collector.start()
        tornado.ioloop.IOLoop.instance().add_timeout(
            datetime.timedelta(seconds=self.interval), self.finish_profile)

    def finish_profile(self):
        log.debug('stop profile using interval={0} and output={1}'.format(
            self.interval, self.output))
        self.collector.stop()
        with open(self.output, 'w') as f:
            stats = repr(dict(self.collector.stack_counts))
            f.write(stats)
        self.finish(stats)
class PlopProfileHandler(RequestHandler):
    def __init__(self, application, request, **kwargs):
        super(PlopProfileHandler, self).__init__(application, request, **kwargs)
        self.interval = 60
        self.output = '/tmp/plop.out'
        self.collector = Collector()

    @asynchronous
    def get(self):
        self.interval = int(self.get_argument('interval', self.interval))
        self.output = self.get_argument('output', self.output)
        self.collector.start()
        tornado.ioloop.IOLoop.instance().add_timeout(
            datetime.timedelta(seconds=self.interval), self.finish_profile)

    def finish_profile(self):
        log.debug('stop profile using interval={0} and output={1}'.format(
            self.interval, self.output))
        self.collector.stop()
        with open(self.output, 'w') as f:
            stats = repr(dict(self.collector.stack_counts))
            f.write(stats)
        self.finish(stats)
def initialize(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    self.graph = Graph()
    self.session = self.settings["session"]()  # type: Session
    self.template_engine = self.settings["template_engine"]  # type: FrontendTemplateEngine
    self.plugins = get_plugin_proxy()
    session_factory = SingletonSessionFactory(self.session)
    self.usecase_factory = create_graph_usecase_factory(settings(), self.plugins, session_factory)

    if self.get_argument("_profile", False):
        self.perf_collector = Collector()
        self.perf_trace_uuid = str(uuid4())  # type: Optional[str]
        self.perf_collector.start()
    else:
        self.perf_collector = None
        self.perf_trace_uuid = None

    self._request_start_time = datetime.utcnow()
    stats.log_rate("requests", 1)
    stats.log_rate("requests_{}".format(self.__class__.__name__), 1)
    logging.error("initialized")
def __call__(self, environ, start_response):
    if not self.should_profile():
        return self.application(environ, start_response)

    collector = Collector()
    collector.start()
    start = time.time()
    try:
        return self.application(environ, start_response)
    finally:
        stop = time.time()
        collector.stop()
        try:
            self.save_data(environ, start, stop, collector)
        except Exception as e:
            self.logger.exception(e)
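# The WSGI middleware above delegates persistence to self.save_data(), which is not part
# of the snippet. A sketch of what such a method could look like follows; the output
# directory, filename pattern, and details are assumptions rather than the original
# project's implementation.
import os

def save_data(self, environ, start, stop, collector):
    # One profile file per request, named after the request path and wall-clock duration.
    path_part = environ.get('PATH_INFO', '/').strip('/').replace('/', '.') or 'root'
    filename = '%s-%dms-%d.plop' % (path_part, int((stop - start) * 1000), int(start))
    out_dir = os.path.expanduser('~/.plop.profiles')
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)
    with open(os.path.join(out_dir, filename), 'w') as f:
        f.write(repr(dict(collector.stack_counts)))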
def initialize(self, *args, **kwargs):
    # type: (*Any, **Any) -> None
    self.graph = Graph()
    self.session = self.settings["session"]()  # type: Session
    self.template_engine = self.settings["template_engine"]  # type: FrontendTemplateEngine
    self.plugins = get_plugin_proxy()
    session_factory = SingletonSessionFactory(self.session)
    self.usecase_factory = create_graph_usecase_factory(
        settings(), self.plugins, session_factory
    )

    if self.get_argument("_profile", False):
        self.perf_collector = Collector()
        self.perf_trace_uuid = str(uuid4())  # type: Optional[str]
        self.perf_collector.start()
    else:
        self.perf_collector = None
        self.perf_trace_uuid = None

    self._request_start_time = datetime.utcnow()
    stats.log_rate("requests", 1)
    stats.log_rate("requests_{}".format(self.__class__.__name__), 1)
class GrouperHandler(SentryHandler): def initialize(self, *args, **kwargs): # type: (*Any, **Any) -> None self.graph = Graph() self.session = self.settings["session"]() # type: Session self.template_engine = self.settings["template_engine"] # type: FrontendTemplateEngine self.plugins = get_plugin_proxy() session_factory = SingletonSessionFactory(self.session) self.usecase_factory = create_graph_usecase_factory( settings(), self.plugins, session_factory ) if self.get_argument("_profile", False): self.perf_collector = Collector() self.perf_trace_uuid = str(uuid4()) # type: Optional[str] self.perf_collector.start() else: self.perf_collector = None self.perf_trace_uuid = None self._request_start_time = datetime.utcnow() stats.log_rate("requests", 1) stats.log_rate("requests_{}".format(self.__class__.__name__), 1) def set_default_headers(self): # type: () -> None self.set_header("Content-Security-Policy", self.settings["template_engine"].csp_header()) self.set_header("Referrer-Policy", "same-origin") def write_error(self, status_code, **kwargs): # type: (int, **Any) -> None """Override for custom error page.""" message = kwargs.get("message", "Unknown error") if status_code >= 500 and status_code < 600: template = self.template_engine.get_template("errors/5xx.html") self.write( template.render({"is_active": self.is_active, "static_url": self.static_url}) ) else: template = self.template_engine.get_template("errors/generic.html") self.write( template.render( { "status_code": status_code, "message": message, "is_active": self.is_active, "trace_uuid": self.perf_trace_uuid, "static_url": self.static_url, } ) ) self.finish() def is_refresh(self): # type: () -> bool """Indicates whether the refresh argument for this handler has been set to yes. This is used to force a refresh of the cached graph so that we don't show inconsistent state to the user. Returns: a boolean indicating whether this handler should refresh the graph """ return self.get_argument("refresh", "no").lower() == "yes" # The refresh argument can be added to any page. If the handler for that # route calls this function, it will sync its graph from the database if # requested. def handle_refresh(self): # type: () -> None if self.is_refresh(): self.graph.update_from_db(self.session) def redirect(self, url, *args, **kwargs): # type: (str, *Any, **Any) -> None if self.is_refresh(): url = urljoin(url, "?refresh=yes") alerts = kwargs.pop("alerts", []) # type: List[Alert] self.set_alerts(alerts) super(GrouperHandler, self).redirect(url, *args, **kwargs) def get_or_create_user(self, username): # type: (str) -> Optional[User] """Retrieve or create the User object for the authenticated user. This is done in a separate method called by prepare instead of in the magic Tornado get_current_user method because exceptions thrown by the latter are caught by Tornado and not propagated to the caller, and we want to use exceptions to handle invalid users and then return an error page in prepare. """ if not username: return None # Users must be fully qualified if not re.match("^{}$".format(USERNAME_VALIDATION), username): raise InvalidUser("{} does not match {}".format(username, USERNAME_VALIDATION)) # User must exist in the database and be active try: user, created = User.get_or_create(self.session, username=username) if created: logging.info("Created new user %s", username) self.session.commit() # Because the graph doesn't initialize until the updates table # is populated, we need to refresh the graph here in case this # is the first update. 
self.graph.update_from_db(self.session) except sqlalchemy.exc.OperationalError: # Failed to connect to database or create user, try to reconfigure the db. This invokes # the fetcher to try to see if our URL string has changed. Session.configure(bind=get_db_engine(settings().database)) raise DatabaseFailure() # service accounts are, by definition, not interactive users if user.is_service_account: raise InvalidUser("{} is a service account".format(username)) return user def prepare(self): # type: () -> None username = self.request.headers.get(settings().user_auth_header) try: user = self.get_or_create_user(username) except InvalidUser as e: self.baduser(str(e)) self.finish() return if user and user.enabled: self.current_user = user else: self.baduser("{} is not an active account".format(username)) self.finish() def on_finish(self): # type: () -> None if self.perf_collector: self.perf_collector.stop() record_trace(self.session, self.perf_collector, self.perf_trace_uuid) self.session.close() # log request duration duration = datetime.utcnow() - self._request_start_time duration_ms = int(duration.total_seconds() * 1000) stats.log_rate("duration_ms", duration_ms) stats.log_rate("duration_ms_{}".format(self.__class__.__name__), duration_ms) # log response status code response_status = self.get_status() stats.log_rate("response_status_{}".format(response_status), 1) stats.log_rate("response_status_{}_{}".format(self.__class__.__name__, response_status), 1) def update_qs(self, **kwargs): # type: (**Any) -> str qs = self.request.arguments.copy() qs.update(kwargs) return "?" + urlencode(sorted(qs.items()), True) def is_active(self, test_path): # type: (str) -> str path = self.request.path if path == test_path: return "active" return "" def get_template_namespace(self): # type: () -> Dict[str, Any] namespace = super(GrouperHandler, self).get_template_namespace() namespace.update( { "update_qs": self.update_qs, "is_active": self.is_active, "perf_trace_uuid": self.perf_trace_uuid, "xsrf_form": self.xsrf_form_html, "alerts": self.get_alerts(), "static_url": self.static_url, } ) return namespace def render_template(self, template_name, **kwargs): # type: (str, **Any) -> Text template = self.template_engine.get_template(template_name) content = template.render(kwargs) return content def render(self, template_name, **kwargs): # type: (str, **Any) -> None defaults = self.get_template_namespace() context = {} context.update(defaults) context.update(kwargs) # Merge alerts context["alerts"] = [] context["alerts"].extend(defaults.get("alerts", [])) context["alerts"].extend(kwargs.get("alerts", [])) self.write(self.render_template(template_name, **context)) def set_alerts(self, alerts): # type: (List[Alert]) -> None if len(alerts) > 0: self.set_cookie("_alerts", _serialize_alerts(alerts)) else: self.clear_cookie("_alerts") def get_alerts(self): # type: () -> List[Alert] serialized_alerts = self.get_cookie("_alerts", default="[]") alerts = _deserialize_alerts(serialized_alerts) self.clear_cookie("_alerts") return alerts def get_form_alerts(self, errors): # type: (Dict[str, List[str]]) -> List[Alert] alerts = [] for field, field_errors in iteritems(errors): for error in field_errors: alerts.append(Alert("danger", error, field)) return alerts def raise_and_log_exception(self, exc): # type: (Exception) -> None try: raise exc except Exception: self.log_exception(*sys.exc_info()) def log_message(self, message, **kwargs): # type: (str, **Any) -> None if getattr(self, "captureMessage", None): self.captureMessage(message, 
**kwargs) else: logging.info("{}, kwargs={}".format(message, kwargs)) def badrequest(self): # type: () -> None self.set_status(400) self.raise_and_log_exception(tornado.web.HTTPError(400)) self.render("errors/badrequest.html") def baduser(self, message): # type: (str) -> None self.set_status(403) self.raise_and_log_exception(tornado.web.HTTPError(403)) how_to_get_help = settings().how_to_get_help self.render("errors/baduser.html", message=message, how_to_get_help=how_to_get_help) def forbidden(self): # type: () -> None self.set_status(403) self.raise_and_log_exception(tornado.web.HTTPError(403)) self.render("errors/forbidden.html", how_to_get_help=settings().how_to_get_help) def notfound(self): # type: () -> None self.set_status(404) self.raise_and_log_exception(tornado.web.HTTPError(404)) self.render("errors/notfound.html") def get_sentry_user_info(self): # type: () -> Dict[str, Optional[str]] if self.current_user: return {"username": self.current_user.username} else: return {"username": None}
class GrouperHandler(RequestHandler): def initialize(self, *args: Any, **kwargs: Any) -> None: self.graph = Graph() self.session = self.settings["session"]() # type: Session self.template_engine = self.settings[ "template_engine"] # type: FrontendTemplateEngine self.plugins = get_plugin_proxy() session_factory = SingletonSessionFactory(self.session) self.usecase_factory = create_graph_usecase_factory( settings(), self.plugins, session_factory) if self.get_argument("_profile", False): self.perf_collector = Collector() self.perf_trace_uuid = str(uuid4()) # type: Optional[str] self.perf_collector.start() else: self.perf_collector = None self.perf_trace_uuid = None self._request_start_time = datetime.utcnow() def set_default_headers(self) -> None: self.set_header("Content-Security-Policy", self.settings["template_engine"].csp_header()) self.set_header("Referrer-Policy", "same-origin") def log_exception( self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: if isinstance(exc_value, HTTPError): status_code = exc_value.status_code else: status_code = 500 self.plugins.log_exception(self.request, status_code, exc_type, exc_value, exc_tb) super().log_exception(exc_type, exc_value, exc_tb) def write_error(self, status_code: int, **kwargs: Any) -> None: """Override for custom error page.""" message = kwargs.get("message", "Unknown error") if status_code >= 500 and status_code < 600: template = self.template_engine.get_template("errors/5xx.html") self.write( template.render({ "is_active": self.is_active, "static_url": self.static_url })) else: template = self.template_engine.get_template("errors/generic.html") self.write( template.render({ "status_code": status_code, "message": message, "is_active": self.is_active, "trace_uuid": self.perf_trace_uuid, "static_url": self.static_url, })) self.finish() def is_refresh(self) -> bool: """Indicates whether the refresh argument for this handler has been set to yes. This is used to force a refresh of the cached graph so that we don't show inconsistent state to the user. Returns: a boolean indicating whether this handler should refresh the graph """ return self.get_argument("refresh", "no").lower() == "yes" # The refresh argument can be added to any page. If the handler for that # route calls this function, it will sync its graph from the database if # requested. def handle_refresh(self) -> None: if self.is_refresh(): self.graph.update_from_db(self.session) def redirect(self, url: str, *args: Any, **kwargs: Any) -> None: if self.is_refresh(): url = urljoin(url, "?refresh=yes") alerts = kwargs.pop("alerts", []) # type: List[Alert] self.set_alerts(alerts) super().redirect(url, *args, **kwargs) def get_or_create_user(self, username: str) -> Optional[User]: """Retrieve or create the User object for the authenticated user. This is done in a separate method called by prepare instead of in the magic Tornado get_current_user method because exceptions thrown by the latter are caught by Tornado and not propagated to the caller, and we want to use exceptions to handle invalid users and then return an error page in prepare. 
""" if not username: return None # Users must be fully qualified if not re.match("^{}$".format(USERNAME_VALIDATION), username): raise InvalidUser("{} does not match {}".format( username, USERNAME_VALIDATION)) # User must exist in the database and be active user, created = User.get_or_create(self.session, username=username) if created: logging.info("Created new user %s", username) self.session.commit() # Because the graph doesn't initialize until the updates table is populated, we need to # refresh the graph here in case this is the first update. self.graph.update_from_db(self.session) # service accounts are, by definition, not interactive users if user.is_service_account: raise InvalidUser("{} is a service account".format(username)) return user def get_path_argument(self, name: str) -> str: """Get a URL path argument. Parallel to get_request_argument() and get_body_argument(), this uses path_kwargs to find an argument to the handler, undo any URL quoting, and return it. Use this uniformly instead of kwargs for all handler get() and post() methods to handle escaping properly. """ value: str = self.path_kwargs[name] return unquote(value) def prepare(self) -> None: username = self.request.headers.get(settings().user_auth_header) try: user = self.get_or_create_user(username) except InvalidUser as e: self.baduser(str(e)) self.finish() return if user and user.enabled: self.current_user = user else: self.baduser("{} is not an active account".format(username)) self.finish() def on_finish(self) -> None: if self.perf_collector: self.perf_collector.stop() record_trace(self.session, self.perf_collector, self.perf_trace_uuid) self.session.close() handler = self.__class__.__name__ duration_ms = int( (datetime.utcnow() - self._request_start_time).total_seconds() * 1000) response_status = self.get_status() self.plugins.log_request(handler, response_status, duration_ms) def update_qs(self, **kwargs: Any) -> str: qs = self.request.arguments.copy() qs.update(kwargs) return "?" 
+ urlencode(sorted(qs.items()), True) def is_active(self, test_path: str) -> str: path = self.request.path if path == test_path: return "active" return "" def get_template_namespace(self) -> Dict[str, Any]: namespace = super().get_template_namespace() namespace.update({ "alerts": self.get_alerts(), "is_active": self.is_active, "static_url": self.static_url, "perf_trace_uuid": self.perf_trace_uuid, "update_qs": self.update_qs, "xsrf_form": self.xsrf_form_html, }) return namespace def render_template(self, template_name: str, **kwargs: Any) -> str: template = self.template_engine.get_template(template_name) content = template.render(kwargs) return content def render(self, template_name: str, **kwargs: Any) -> None: defaults = self.get_template_namespace() context = {} context.update(defaults) context.update(kwargs) # Merge alerts context["alerts"] = [] context["alerts"].extend(defaults.get("alerts", [])) context["alerts"].extend(kwargs.get("alerts", [])) self.write(self.render_template(template_name, **context)) def render_template_class( self, template: BaseTemplate, alerts: Optional[List[Alert]] = None) -> Future[None]: return self.finish(template.render(self, alerts)) def set_alerts(self, alerts: Sequence[Alert]) -> None: if len(alerts) > 0: self.set_cookie("_alerts", _serialize_alerts(alerts)) else: self.clear_cookie("_alerts") def get_alerts(self) -> List[Alert]: serialized_alerts = self.get_cookie("_alerts", default="[]") alerts = _deserialize_alerts(serialized_alerts) self.clear_cookie("_alerts") return alerts def get_form_alerts(self, errors: Dict[str, List[str]]) -> List[Alert]: alerts = [] for field, field_errors in errors.items(): for error in field_errors: alerts.append(Alert("danger", error, field)) return alerts def raise_and_log_exception(self, exc: Exception) -> None: try: raise exc except Exception: self.log_exception(*sys.exc_info()) def log_message(self, message: str, **kwargs: Any) -> None: logging.info("{}, kwargs={}".format(message, kwargs)) def badrequest(self) -> None: self.set_status(400) self.raise_and_log_exception(HTTPError(400)) self.render("errors/badrequest.html") def baduser(self, message) -> None: self.set_status(403) self.raise_and_log_exception(HTTPError(403)) how_to_get_help = settings().how_to_get_help self.render("errors/baduser.html", message=message, how_to_get_help=how_to_get_help) def forbidden(self) -> None: self.set_status(403) self.raise_and_log_exception(HTTPError(403)) self.render("errors/forbidden.html", how_to_get_help=settings().how_to_get_help) def notfound(self) -> None: self.set_status(404) self.raise_and_log_exception(HTTPError(404)) self.render("errors/notfound.html")
class GrouperHandler(RequestHandler): def initialize(self): self.session = self.application.my_settings.get("db_session")() self.graph = Graph() if self.get_argument("_profile", False): self.perf_collector = Collector() self.perf_trace_uuid = str(uuid4()) self.perf_collector.start() else: self.perf_collector = None self.perf_trace_uuid = None self._request_start_time = datetime.utcnow() stats.incr("requests") stats.incr("requests_{}".format(self.__class__.__name__)) def write_error(self, status_code, **kwargs): """Override for custom error page.""" if status_code >= 500 and status_code < 600: template = self.application.my_settings[ "template_env"].get_template("errors/5xx.html") self.write(template.render({"is_active": self.is_active})) else: template = self.application.my_settings[ "template_env"].get_template("errors/generic.html") self.write( template.render({ "status_code": status_code, "message": self._reason, "is_active": self.is_active, "trace_uuid": self.perf_trace_uuid, })) self.finish() def is_refresh(self): # type: () -> bool """Indicates whether the refresh argument for this handler has been set to yes. This is used to force a refresh of the cached graph so that we don't show inconsistent state to the user. Returns: a boolean indicating whether this handler should refresh the graph """ return self.get_argument("refresh", "no").lower() == "yes" # The refresh argument can be added to any page. If the handler for that # route calls this function, it will sync its graph from the database if # requested. def handle_refresh(self): if self.is_refresh(): self.graph.update_from_db(self.session) def redirect(self, url, *args, **kwargs): if self.is_refresh(): url = urlparse.urljoin(url, "?refresh=yes") self.set_alerts(kwargs.pop("alerts", [])) return super(GrouperHandler, self).redirect(url, *args, **kwargs) def get_current_user(self): username = self.request.headers.get(settings.user_auth_header) if not username: return # Users must be fully qualified if not re.match("^{}$".format(USERNAME_VALIDATION), username): raise InvalidUser() try: user, created = User.get_or_create(self.session, username=username) if created: logging.info("Created new user %s", username) self.session.commit() # Because the graph doesn't initialize until the updates table # is populated, we need to refresh the graph here in case this # is the first update. self.graph.update_from_db(self.session) except sqlalchemy.exc.OperationalError: # Failed to connect to database or create user, try to reconfigure the db. This invokes # the fetcher to try to see if our URL string has changed. Session.configure(bind=get_db_engine(get_database_url(settings))) raise DatabaseFailure() return user def prepare(self): if not self.current_user or not self.current_user.enabled: self.forbidden() self.finish() return def on_finish(self): if self.perf_collector: self.perf_collector.stop() perf_profile.record_trace(self.session, self.perf_collector, self.perf_trace_uuid) self.session.close() # log request duration duration = datetime.utcnow() - self._request_start_time duration_ms = int(duration.total_seconds() * 1000) stats.incr("duration_ms", duration_ms) stats.incr("duration_ms_{}".format(self.__class__.__name__), duration_ms) # log response status code response_status = self.get_status() stats.incr("response_status_{}".format(response_status)) stats.incr("response_status_{}_{}".format(self.__class__.__name__, response_status)) def update_qs(self, **kwargs): qs = self.request.arguments.copy() qs.update(kwargs) return "?" 
+ urllib.urlencode(qs, True) def is_active(self, test_path): path = self.request.path if path == test_path: return "active" return "" def get_template_namespace(self): namespace = super(GrouperHandler, self).get_template_namespace() namespace.update({ "update_qs": self.update_qs, "is_active": self.is_active, "perf_trace_uuid": self.perf_trace_uuid, "xsrf_form": self.xsrf_form_html, "alerts": self.get_alerts(), }) return namespace def render_template(self, template_name, **kwargs): template = self.application.my_settings["template_env"].get_template( template_name) content = template.render(kwargs) return content def render(self, template_name, **kwargs): defaults = self.get_template_namespace() context = {} context.update(defaults) context.update(kwargs) # Merge alerts context["alerts"] = defaults.get("alerts", []) + kwargs.get( "alerts", []) self.write(self.render_template(template_name, **context)) def set_alerts(self, alerts): # type: (List[Alert]) -> None if len(alerts) > 0: self.set_cookie("_alerts", _serialize_alerts(alerts)) else: self.clear_cookie("_alerts") def get_alerts(self): # type: () -> List[Alert] serialized_alerts = self.get_cookie("_alerts", default="[]") alerts = _deserialize_alerts(serialized_alerts) self.clear_cookie("_alerts") return alerts def get_form_alerts(self, errors): alerts = [] for field, field_errors in errors.items(): for error in field_errors: alerts.append(Alert("danger", error, field)) return alerts def raise_and_log_exception(self, exc): try: raise exc except Exception: self.log_exception(*sys.exc_info()) def log_message(self, message, **kwargs): if self.captureMessage: self.captureMessage(message, **kwargs) else: logging.info("{}, kwargs={}".format(message, kwargs)) # TODO(gary): Add json error responses. def badrequest(self, format_type=None): self.set_status(400) self.raise_and_log_exception(tornado.web.HTTPError(400)) self.render("errors/badrequest.html") def forbidden(self, format_type=None): self.set_status(403) self.raise_and_log_exception(tornado.web.HTTPError(403)) self.render("errors/forbidden.html") def notfound(self, format_type=None): self.set_status(404) self.raise_and_log_exception(tornado.web.HTTPError(404)) self.render("errors/notfound.html") def get_sentry_user_info(self): user = self.get_current_user() return { 'username': user.username, }
def __init__(self, application, request, **kwargs):
    super(PlopProfileHandler, self).__init__(application, request, **kwargs)
    self.interval = 60
    self.output = '/tmp/plop.out'
    self.collector = Collector()
def get(self):
    self.collector = Collector()
    self.collector.start()
    IOLoop.instance().add_timeout(datetime.timedelta(seconds=60), self.finish_profile)
def get_and_run_plop_collector():
    from plop.collector import Collector
    collector = Collector()
    collector.start()
    return collector
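# A counterpart sketch for get_and_run_plop_collector() above: stop the collector and
# persist what it sampled. The function name and default path are hypothetical.
def stop_and_save_plop_collector(collector, path='plop.out'):
    collector.stop()
    if not collector.samples_taken:
        return None
    # stack_counts maps each sampled stack to the number of times it was observed.
    with open(path, 'w') as f:
        f.write(repr(dict(collector.stack_counts)))
    return path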
async def get(self):
    self.collector = Collector()
    self.collector.start()
    return await self.finish_profile()
def main(): """ Set up environment for processing """ import argparse parser = argparse.ArgumentParser(description='X12 Validation') parser.add_argument('--config-file', '-c', action='store', dest="configfile", default=None) parser.add_argument( '--log-file', '-l', action='store', dest="logfile", default=None) #parser.add_argument( # '--map-path', '-m', action='store', dest="map_path", default=None) parser.add_argument('--verbose', '-v', action='count') parser.add_argument('--debug', '-d', action='store_true') parser.add_argument('--quiet', '-q', action='store_true') parser.add_argument('--html', '-H', action='store_true') parser.add_argument('--exclude-external-codes', '-x', action='append', dest="exclude_external", default=[], help='External Code Names to ignore') parser.add_argument('--charset', '-s', choices=( 'b', 'e'), help='Specify X12 character set: b=basic, e=extended') #parser.add_argument('--background', '-b', action='store_true') #parser.add_argument('--test', '-t', action='store_true') parser.add_argument('--profile', action='store_true', help='Profile the code with plop') parser.add_argument('--version', action='version', version='{prog} {version}'.format(prog=parser.prog, version=__version__)) parser.add_argument('input_files', nargs='*') args = parser.parse_args() logger = logging.getLogger('pyx12') formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s') stdout_hdlr = logging.StreamHandler() stdout_hdlr.setFormatter(formatter) logger.addHandler(stdout_hdlr) logger.setLevel(logging.INFO) param = pyx12.params.params(args.configfile) if args.debug: logger.setLevel(logging.DEBUG) param.set('debug', True) if args.verbose > 0: logger.setLevel(logging.DEBUG) if args.quiet: logger.setLevel(logging.ERROR) fd_997 = None fd_html = None flag_997 = True param.set('exclude_external_codes', ','.join(args.exclude_external)) #if args.map_path: # param.set('map_path', args.map_path) if args.logfile: try: hdlr = logging.FileHandler(args.logfile) hdlr.setFormatter(formatter) logger.addHandler(hdlr) except IOError: logger.exception('Could not open log file: %s' % (args.logfile)) for src_filename in args.input_files: try: if not os.path.isfile(src_filename): logger.error('Could not open file "%s"' % (src_filename)) continue #fd_src = open(src_filename, 'U') if flag_997: fd_997 = tempfile.TemporaryFile() if args.html: if os.path.splitext(src_filename)[1] == '.txt': target_html = os.path.splitext(src_filename)[0] + '.html' else: target_html = src_filename + '.html' fd_html = open(target_html, 'w') if args.profile: from plop.collector import Collector p = Collector() p.start() if pyx12.x12n_document.x12n_document( param=param, src_file=src_filename, fd_997=fd_997, fd_html=fd_html, fd_xmldoc=None): sys.stderr.write('%s: OK\n' % (src_filename)) else: sys.stderr.write('%s: Failure\n' % (src_filename)) #import profile #prof_str = 'pyx12.x12n_document.x12n_document(param, src_filename, ' \ # + 'fd_997, fd_html, None, xslt_files)' #print prof_str #print param #profile.run(prof_str, 'pyx12.prof') p.stop() try: pfile = os.path.splitext(os.path.basename( src_filename))[0] + '.plop.out' pfull = os.path.join(os.path.expanduser( '~/.plop.profiles'), pfile) print(pfull) with open(pfull, 'w') as fdp: fdp.write(repr(dict(p.stack_counts))) except Exception: logger.exception('Failed to write profile data') sys.stderr.write('%s: bad profile save\n' % (src_filename)) else: if pyx12.x12n_document.x12n_document( param=param, src_file=src_filename, fd_997=fd_997, fd_html=fd_html, fd_xmldoc=None): 
sys.stderr.write('%s: OK\n' % (src_filename)) else: sys.stderr.write('%s: Failure\n' % (src_filename)) if flag_997 and fd_997.tell() != 0: fd_997.seek(0) if os.path.splitext(src_filename)[1] == '.txt': target_997 = os.path.splitext(src_filename)[0] + '.997' else: target_997 = src_filename + '.997' codecs.open(target_997, mode='w', encoding='ascii').write(fd_997.read()) if fd_997: fd_997.close() if fd_html: fd_html.close() except IOError: logger.exception('Could not open files') return False except KeyboardInterrupt: print("\n[interrupt]") return True
def main(): """ Set up environment for processing """ parser = argparse.ArgumentParser(description='X12 Validation') parser.add_argument('--config-file', '-c', action='store', dest="configfile", default=None) parser.add_argument('--log-file', '-l', action='store', dest="logfile", default=None) parser.add_argument('--map-path', '-m', action='store', dest="map_path", default=None, type=check_map_path_arg) parser.add_argument('--verbose', '-v', action='count') parser.add_argument('--debug', '-d', action='store_true') parser.add_argument('--quiet', '-q', action='store_true') parser.add_argument('--html', '-H', action='store_true') parser.add_argument('--exclude-external-codes', '-x', action='append', dest="exclude_external", default=[], help='External Code Names to ignore') parser.add_argument('--charset', '-s', choices=('b', 'e'), help='Specify X12 character set: b=basic, e=extended') #parser.add_argument('--background', '-b', action='store_true') #parser.add_argument('--test', '-t', action='store_true') parser.add_argument('--profile', action='store_true', help='Profile the code with plop') parser.add_argument('input_files', nargs='*') args = parser.parse_args() logger = logging.getLogger('pyx12') formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s') stdout_hdlr = logging.StreamHandler() stdout_hdlr.setFormatter(formatter) logger.addHandler(stdout_hdlr) logger.setLevel(logging.INFO) param = params.params(args.configfile) if args.debug: logger.setLevel(logging.DEBUG) param.set('debug', True) if args.verbose and args.verbose > 0: logger.setLevel(logging.DEBUG) if args.quiet: logger.setLevel(logging.ERROR) fd_997 = None fd_html = None flag_997 = True param.set('exclude_external_codes', ','.join(args.exclude_external)) if args.map_path: param.set('map_path', args.map_path) if args.logfile: try: hdlr = logging.FileHandler(args.logfile) hdlr.setFormatter(formatter) logger.addHandler(hdlr) except IOError: logger.exception('Could not open log file: %s' % (args.logfile)) for src_filename in args.input_files: try: if not os.path.isfile(src_filename): logger.error('Could not open file "%s"' % (src_filename)) continue #fd_src = open(src_filename, 'U') if flag_997: fd_997 = tempfile.TemporaryFile() if args.html: if os.path.splitext(src_filename)[1] == '.txt': target_html = os.path.splitext(src_filename)[0] + '.html' else: target_html = src_filename + '.html' fd_html = open(target_html, 'w') if args.profile: from plop.collector import Collector p = Collector() p.start() if x12n_document.x12n_document(param=param, src_file=src_filename, fd_997=fd_997, fd_html=fd_html, fd_xmldoc=None, map_path=args.map_path): sys.stderr.write('%s: OK\n' % (src_filename)) else: sys.stderr.write('%s: Failure\n' % (src_filename)) #import profile #prof_str = 'pyx12.x12n_document.x12n_document(param, src_filename, ' \ # + 'fd_997, fd_html, None, None)' #print prof_str #print param #profile.run(prof_str, 'pyx12.prof') p.stop() try: pfile = os.path.splitext( os.path.basename(src_filename))[0] + '.plop.out' pfull = os.path.join( os.path.expanduser('~/.plop.profiles'), pfile) print(pfull) with open(pfull, 'w') as fdp: fdp.write(repr(dict(p.stack_counts))) except Exception: logger.exception('Failed to write profile data') sys.stderr.write('%s: bad profile save\n' % (src_filename)) else: if x12n_document.x12n_document(param=param, src_file=src_filename, fd_997=fd_997, fd_html=fd_html, fd_xmldoc=None, map_path=args.map_path): sys.stderr.write('%s: OK\n' % (src_filename)) else: sys.stderr.write('%s: Failure\n' % 
(src_filename)) if flag_997 and fd_997.tell() != 0: fd_997.seek(0) if os.path.splitext(src_filename)[1] == '.txt': target_997 = os.path.splitext(src_filename)[0] + '.997' else: target_997 = src_filename + '.997' codecs.open(target_997, mode='w', encoding='ascii').write(fd_997.read()) if fd_997: fd_997.close() if fd_html: fd_html.close() except IOError: logger.exception('Could not open files') return False except KeyboardInterrupt: print("\n[interrupt]") return True
class GrouperHandler(RequestHandler): def initialize(self): self.session = self.application.my_settings.get("db_session")() self.graph = Graph() if self.get_argument("_profile", False): self.perf_collector = Collector() self.perf_trace_uuid = str(uuid4()) self.perf_collector.start() else: self.perf_collector = None self.perf_trace_uuid = None self._request_start_time = datetime.utcnow() stats.incr("requests") stats.incr("requests_{}".format(self.__class__.__name__)) def write_error(self, status_code, **kwargs): """Override for custom error page.""" if status_code >= 500 and status_code < 600: template = self.application.my_settings["template_env"].get_template("errors/5xx.html") self.write(template.render({"is_active": self.is_active})) else: template = self.application.my_settings["template_env"].get_template("errors/generic.html") self.write( template.render( { "status_code": status_code, "message": self._reason, "is_active": self.is_active, "trace_uuid": self.perf_trace_uuid, } ) ) self.finish() def is_refresh(self): # type: () -> bool """Indicates whether the refresh argument for this handler has been set to yes. This is used to force a refresh of the cached graph so that we don't show inconsistent state to the user. Returns: a boolean indicating whether this handler should refresh the graph """ return self.get_argument("refresh", "no").lower() == "yes" # The refresh argument can be added to any page. If the handler for that # route calls this function, it will sync its graph from the database if # requested. def handle_refresh(self): if self.is_refresh(): self.graph.update_from_db(self.session) def redirect(self, url, *args, **kwargs): if self.is_refresh(): url = urlparse.urljoin(url, "?refresh=yes") return super(GrouperHandler, self).redirect(url, *args, **kwargs) def get_current_user(self): username = self.request.headers.get(settings.user_auth_header) if not username: return # Users must be fully qualified if not re.match("^{}$".format(USERNAME_VALIDATION), username): raise InvalidUser() try: user, created = User.get_or_create(self.session, username=username) if created: logging.info("Created new user %s", username) self.session.commit() # Because the graph doesn't initialize until the updates table # is populated, we need to refresh the graph here in case this # is the first update. self.graph.update_from_db(self.session) except sqlalchemy.exc.OperationalError: # Failed to connect to database or create user, try to reconfigure the db. This invokes # the fetcher to try to see if our URL string has changed. Session.configure(bind=get_db_engine(get_database_url(settings))) raise DatabaseFailure() return user def prepare(self): if not self.current_user or not self.current_user.enabled: self.forbidden() self.finish() return def on_finish(self): if self.perf_collector: self.perf_collector.stop() perf_profile.record_trace(self.session, self.perf_collector, self.perf_trace_uuid) self.session.close() # log request duration duration = datetime.utcnow() - self._request_start_time duration_ms = int(duration.total_seconds() * 1000) stats.incr("duration_ms", duration_ms) stats.incr("duration_ms_{}".format(self.__class__.__name__), duration_ms) # log response status code response_status = self.get_status() stats.incr("response_status_{}".format(response_status)) stats.incr("response_status_{}_{}".format(self.__class__.__name__, response_status)) def update_qs(self, **kwargs): qs = self.request.arguments.copy() qs.update(kwargs) return "?" 
+ urllib.urlencode(qs, True) def is_active(self, test_path): path = self.request.path if path == test_path: return "active" return "" def get_template_namespace(self): namespace = super(GrouperHandler, self).get_template_namespace() namespace.update( { "update_qs": self.update_qs, "is_active": self.is_active, "perf_trace_uuid": self.perf_trace_uuid, "xsrf_form": self.xsrf_form_html, "alerts": [], } ) return namespace def render_template(self, template_name, **kwargs): template = self.application.my_settings["template_env"].get_template(template_name) content = template.render(kwargs) return content def render(self, template_name, **kwargs): context = {} context.update(self.get_template_namespace()) context.update(kwargs) self.write(self.render_template(template_name, **context)) def get_form_alerts(self, errors): alerts = [] for field, field_errors in errors.items(): for error in field_errors: alerts.append(Alert("danger", error, field)) return alerts def raise_and_log_exception(self, exc): try: raise exc except Exception: self.log_exception(*sys.exc_info()) def log_message(self, message, **kwargs): if self.captureMessage: self.captureMessage(message, **kwargs) else: logging.info("{}, kwargs={}".format(message, kwargs)) # TODO(gary): Add json error responses. def badrequest(self, format_type=None): self.set_status(400) self.raise_and_log_exception(tornado.web.HTTPError(400)) self.render("errors/badrequest.html") def forbidden(self, format_type=None): self.set_status(403) self.raise_and_log_exception(tornado.web.HTTPError(403)) self.render("errors/forbidden.html") def notfound(self, format_type=None): self.set_status(404) self.raise_and_log_exception(tornado.web.HTTPError(404)) self.render("errors/notfound.html") def get_sentry_user_info(self): user = self.get_current_user() return {"username": user.name}
def process_view(self, request, view_func, _, __):
    'process a single view, adding the collector'
    request.collector = Collector()
    request.collector.start()
    request.plop_filename = self.get_filename(view_func)
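# process_view() above only starts the collector and records a filename; the middleware
# also needs a response hook to stop sampling and write the data out. A sketch under that
# assumption (the original project's process_response is not shown here):
def process_response(self, request, response):
    collector = getattr(request, 'collector', None)
    if collector is not None:
        collector.stop()
        with open(request.plop_filename, 'w') as f:
            f.write(repr(dict(collector.stack_counts)))
    return response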
def main():
    """
    Set up environment for processing
    """
    parser = argparse.ArgumentParser(description="X12 Validation")
    parser.add_argument("--config-file", "-c", action="store", dest="configfile", default=None)
    parser.add_argument("--log-file", "-l", action="store", dest="logfile", default=None)
    parser.add_argument("--map-path", "-m", action="store", dest="map_path", default=None, type=check_map_path_arg)
    parser.add_argument("--verbose", "-v", action="count")
    parser.add_argument("--debug", "-d", action="store_true")
    parser.add_argument("--quiet", "-q", action="store_true")
    parser.add_argument("--html", "-H", action="store_true")
    parser.add_argument(
        "--exclude-external-codes",
        "-x",
        action="append",
        dest="exclude_external",
        default=[],
        help="External Code Names to ignore",
    )
    parser.add_argument("--charset", "-s", choices=("b", "e"), help="Specify X12 character set: b=basic, e=extended")
    # parser.add_argument('--background', '-b', action='store_true')
    # parser.add_argument('--test', '-t', action='store_true')
    parser.add_argument("--profile", action="store_true", help="Profile the code with plop")
    parser.add_argument(
        "--version", action="version", version="{prog} {version}".format(prog=parser.prog, version=__version__)
    )
    parser.add_argument("input_files", nargs="*")
    args = parser.parse_args()

    logger = logging.getLogger("pyx12")
    formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
    stdout_hdlr = logging.StreamHandler()
    stdout_hdlr.setFormatter(formatter)
    logger.addHandler(stdout_hdlr)
    logger.setLevel(logging.INFO)

    param = pyx12.params.params(args.configfile)
    if args.debug:
        logger.setLevel(logging.DEBUG)
        param.set("debug", True)
    # args.verbose is None when -v is not given, so guard before comparing.
    if args.verbose and args.verbose > 0:
        logger.setLevel(logging.DEBUG)
    if args.quiet:
        logger.setLevel(logging.ERROR)

    fd_997 = None
    fd_html = None
    flag_997 = True

    param.set("exclude_external_codes", ",".join(args.exclude_external))
    if args.map_path:
        param.set("map_path", args.map_path)

    if args.logfile:
        try:
            hdlr = logging.FileHandler(args.logfile)
            hdlr.setFormatter(formatter)
            logger.addHandler(hdlr)
        except IOError:
            logger.exception("Could not open log file: %s" % (args.logfile))

    for src_filename in args.input_files:
        try:
            if not os.path.isfile(src_filename):
                logger.error('Could not open file "%s"' % (src_filename))
                continue
            if flag_997:
                fd_997 = tempfile.TemporaryFile()
            if args.html:
                if os.path.splitext(src_filename)[1] == ".txt":
                    target_html = os.path.splitext(src_filename)[0] + ".html"
                else:
                    target_html = src_filename + ".html"
                fd_html = open(target_html, "w")
            if args.profile:
                from plop.collector import Collector

                p = Collector()
                p.start()
                if pyx12.x12n_document.x12n_document(
                    param=param,
                    src_file=src_filename,
                    fd_997=fd_997,
                    fd_html=fd_html,
                    fd_xmldoc=None,
                    map_path=args.map_path,
                ):
                    sys.stderr.write("%s: OK\n" % (src_filename))
                else:
                    sys.stderr.write("%s: Failure\n" % (src_filename))
                # import profile
                # prof_str = 'pyx12.x12n_document.x12n_document(param, src_filename, ' \
                #     + 'fd_997, fd_html, None, None)'
                # print prof_str
                # print param
                # profile.run(prof_str, 'pyx12.prof')
                p.stop()
                try:
                    pfile = os.path.splitext(os.path.basename(src_filename))[0] + ".plop.out"
                    pfull = os.path.join(os.path.expanduser("~/.plop.profiles"), pfile)
                    print(pfull)
                    with open(pfull, "w") as fdp:
                        fdp.write(repr(dict(p.stack_counts)))
                except Exception:
                    logger.exception("Failed to write profile data")
                    sys.stderr.write("%s: bad profile save\n" % (src_filename))
            else:
                if pyx12.x12n_document.x12n_document(
                    param=param,
                    src_file=src_filename,
                    fd_997=fd_997,
                    fd_html=fd_html,
                    fd_xmldoc=None,
                    map_path=args.map_path,
                ):
                    sys.stderr.write("%s: OK\n" % (src_filename))
                else:
                    sys.stderr.write("%s: Failure\n" % (src_filename))
            if flag_997 and fd_997.tell() != 0:
                fd_997.seek(0)
                if os.path.splitext(src_filename)[1] == ".txt":
                    target_997 = os.path.splitext(src_filename)[0] + ".997"
                else:
                    target_997 = src_filename + ".997"
                codecs.open(target_997, mode="w", encoding="ascii").write(fd_997.read())
            if fd_997:
                fd_997.close()
            if fd_html:
                fd_html.close()
        except IOError:
            logger.exception("Could not open files")
            return False
        except KeyboardInterrupt:
            print("\n[interrupt]")

    return True