Example #1
class GRRSeleniumTest(test_lib.GRRBaseTest, acl_test_lib.AclTestMixin):
  """Baseclass for selenium UI tests."""

  # Default duration (in seconds) for WaitUntil.
  duration = 5

  # Time to wait between polls for WaitUntil.
  sleep_time = 0.2

  # This is the global selenium handle.
  driver = None

  # Base url of the Admin UI
  base_url = None

  @staticmethod
  def _TearDownSelenium():
    """Tear down Selenium session."""
    try:
      if GRRSeleniumTest.driver:
        GRRSeleniumTest.driver.quit()
    except Exception as e:  # pylint: disable=broad-except
      logging.exception(e)

  @staticmethod
  def _SetUpSelenium(port):
    """Set up Selenium session."""
    atexit.register(GRRSeleniumTest._TearDownSelenium)
    GRRSeleniumTest.base_url = ("http://localhost:%s" % port)

    options = webdriver.ChromeOptions()
    prefs = {
        "profile.content_settings.exceptions.clipboard": {
            f"{GRRSeleniumTest.base_url},*": {
                "setting": 1,
            },
        },
    }
    options.add_experimental_option("prefs", prefs)

    if flags.FLAGS.chrome_binary_path:
      options.binary_location = flags.FLAGS.chrome_binary_path

    options.add_argument("--disable-notifications")

    if flags.FLAGS.use_headless_chrome:
      options.add_argument("--headless")
      options.add_argument("--window-size=1400,1080")

    if flags.FLAGS.disable_chrome_sandboxing:
      options.add_argument("--no-sandbox")

    # pylint: disable=unreachable
    os.environ.pop("http_proxy", None)

    if flags.FLAGS.chrome_driver_path:
      GRRSeleniumTest.driver = webdriver.Chrome(
          flags.FLAGS.chrome_driver_path, chrome_options=options)
    else:
      GRRSeleniumTest.driver = webdriver.Chrome(chrome_options=options)

    # TODO(user): Hack! This is needed to allow downloads in headless mode.
    # Remove this code when upstream Python ChromeDriver implementation has
    # send_command implemented.
    #
    # See
    # https://stackoverflow.com/questions/45631715/downloading-with-chrome-headless-and-selenium
    # and the code in setUp().
    # pylint: disable=protected-access
    GRRSeleniumTest.driver.command_executor._commands["send_command"] = (
        "POST", "/session/$sessionId/chromium/send_command")
    # pylint: enable=protected-access
    # pylint: enable=unreachable

  _selenium_set_up_lock = threading.RLock()
  _selenium_set_up_done = False

  # Cached jQuery source to be injected into pages on Open (for selectors
  # support).
  _jquery_source = None

  @classmethod
  def setUpClass(cls):
    super(GRRSeleniumTest, cls).setUpClass()
    with GRRSeleniumTest._selenium_set_up_lock:
      if not GRRSeleniumTest._selenium_set_up_done:

        port = portpicker.pick_unused_port()
        logging.info("Picked free AdminUI port %d.", port)

        # Start up a server in another thread
        GRRSeleniumTest._server_trd = wsgiapp_testlib.ServerThread(
            port, name="SeleniumServerThread")
        GRRSeleniumTest._server_trd.StartAndWaitUntilServing()
        GRRSeleniumTest._SetUpSelenium(port)

        jquery_path = package.ResourcePath(
            "grr-response-test",
            "grr_response_test/test_data/jquery_3.1.0.min.js")
        with open(jquery_path, mode="r", encoding="utf-8") as fd:
          GRRSeleniumTest._jquery_source = fd.read()

        GRRSeleniumTest._selenium_set_up_done = True

  def InstallACLChecks(self):
    """Installs AccessControlManager and stubs out SendEmail."""
    acrwac = api_call_router_with_approval_checks

    # Clear the cache of the approvals-based router.
    acrwac.ApiCallRouterWithApprovalChecks.ClearCache()

    name = compatibility.GetName(acrwac.ApiCallRouterWithApprovalChecks)
    config_overrider = test_lib.ConfigOverrider({"API.DefaultRouter": name})
    config_overrider.Start()
    self.addCleanup(config_overrider.Stop)
    # Make sure ApiAuthManager is initialized with this configuration setting.
    api_auth_manager.InitializeApiAuthManager()

  def _CheckJavascriptErrors(self):
    errors = self.driver.execute_script(
        "return (() => {const e = window.grrInterceptedJSErrors_ || []; "
        "window.grrInterceptedJSErrors_ = []; return e;})();")

    msgs = []
    for e in errors:
      msg = "[javascript]: %s" % e
      logging.error(msg)
      msgs.append(msg)

    if msgs:
      self.fail("Javascript error encountered during test: %s" %
                "\n\t".join(msgs))

  def DisableHttpErrorChecks(self):
    self.ignore_http_errors = True
    return DisabledHttpErrorChecksContextManager(self)

  def GetHttpErrors(self):
    return self.driver.execute_script(
        "return (() => {const e = window.grrInterceptedHTTPErrors_ || []; "
        "window.grrInterceptedHTTPErrors_ = []; return e;})();")

  def _CheckHttpErrors(self):
    if self.ignore_http_errors:
      return

    msgs = []
    for e in self.GetHttpErrors():
      try:
        msg = e["data"]["traceBack"]
      except (TypeError, KeyError):
        msg = "[http]: {!r}".format(e)

      logging.error(msg)
      msgs.append(msg)

    if msgs:
      self.fail("HTTP request failed during test: %s" % "\n\t".join(msgs))

  def CheckBrowserErrors(self):
    self._CheckJavascriptErrors()
    self._CheckHttpErrors()

  def WaitUntil(self, condition_cb, *args):
    self.CheckBrowserErrors()

    for _ in range(int(self.duration / self.sleep_time)):
      try:
        res = condition_cb(*args)
        if res:
          return res

      # Raise in case of a test-related error (i.e. failing assertion).
      except self.failureException:
        raise
      # The element might not exist yet and selenium could raise here. (Also
      # Selenium raises Exception not StandardError).
      except Exception as e:  # pylint: disable=broad-except
        logging.warning("Selenium raised %s", utils.SmartUnicode(e))

      self.CheckBrowserErrors()
      time.sleep(self.sleep_time)

    self.fail(
        "condition %s %s not met, body is: %s" %
        (condition_cb, args, self.driver.find_element_by_tag_name("body").text))

  def _FindElements(self, selector):
    selector_type, effective_selector = selector.split("=", 1)
    if selector_type != "css":
      raise ValueError(
          "Only CSS selector is supported for querying multiple elements.")

    elems = self.driver.execute_script(
        "return $(\"" + effective_selector.replace("\"", "\\\"") + "\");")
    return [e for e in elems if e.is_displayed()]

  def _FindElement(self, selector):
    try:
      selector_type, effective_selector = selector.split("=", 1)
    except ValueError:
      effective_selector = selector
      selector_type = None

    if selector_type == "css":
      elems = self.driver.execute_script(
          "return $(\"" + effective_selector.replace("\"", "\\\"") + "\");")
      elems = [e for e in elems if e.is_displayed()]

      if not elems:
        raise exceptions.NoSuchElementException()
      else:
        return elems[0]

    elif selector_type == "link":
      links = self.driver.find_elements_by_partial_link_text(effective_selector)
      for l in links:
        if l.text.strip() == effective_selector:
          return l
      raise exceptions.NoSuchElementException()

    elif selector_type == "xpath":
      return self.driver.find_element_by_xpath(effective_selector)

    elif selector_type == "id":
      return self.driver.find_element_by_id(effective_selector)

    elif selector_type == "name":
      return self.driver.find_element_by_name(effective_selector)

    elif selector_type is None:
      if effective_selector.startswith("//"):
        return self.driver.find_element_by_xpath(effective_selector)
      else:
        return self.driver.find_element_by_id(effective_selector)
    else:
      raise ValueError("unknown selector type %s" % selector_type)

  @SeleniumAction
  def Open(self, url):
    # In GRR Selenium tests calling Open() implies page refresh.
    # We make sure that browser/webdriver is not confused by the fact that
    # only the fragment part of the URL (after the '#' symbol) changes.
    # It's important to not confuse WebDriver since it tends to get stuck
    # when confused.
    self.driver.get("data:.")
    self.driver.get(self.base_url + url)

    jquery_present = self.driver.execute_script(
        "return window.$ !== undefined;")
    if not jquery_present:
      self.driver.execute_script(GRRSeleniumTest._jquery_source)

  @SeleniumAction
  def Refresh(self):
    self.driver.refresh()

  @SeleniumAction
  def Back(self):
    self.driver.back()

  @SeleniumAction
  def Forward(self):
    self.driver.forward()

  def WaitUntilNot(self, condition_cb, *args):
    self.WaitUntil(lambda: not condition_cb(*args))

  def GetPageTitle(self):
    return self.driver.title

  def IsElementPresent(self, target):
    try:
      self._FindElement(target)
      return True
    except exceptions.NoSuchElementException:
      return False

  def GetCurrentUrlPath(self):
    url = urlparse.urlparse(self.driver.current_url)

    result = url.path
    if url.fragment:
      result += "#" + url.fragment

    return result

  def GetElement(self, target):
    try:
      return self._FindElement(target)
    except exceptions.NoSuchElementException:
      return None

  def GetVisibleElement(self, target):
    try:
      element = self._FindElement(target)
      if element.is_displayed():
        return element
    except exceptions.NoSuchElementException:
      pass

    return None

  def IsTextPresent(self, text):
    return self.AllTextsPresent([text])

  def AllTextsPresent(self, texts):
    body = self.driver.find_element_by_tag_name("body").text
    for text in texts:
      if utils.SmartUnicode(text) not in body:
        return False
    return True

  def IsVisible(self, target):
    element = self.GetElement(target)
    return element and element.is_displayed()

  def GetText(self, target):
    element = self.WaitUntil(self.GetVisibleElement, target)
    return element.text.strip()

  def GetValue(self, target):
    return self.GetAttribute(target, "value")

  def GetAttribute(self, target, attribute):
    element = self.WaitUntil(self.GetVisibleElement, target)
    return element.get_attribute(attribute)

  def GetClipboard(self):
    return self.GetJavaScriptValue(
        "return await navigator.clipboard.readText();")

  def IsUserNotificationPresent(self, contains_string):
    self.Click("css=#notification_button")
    self.WaitUntil(self.IsElementPresent, "css=grr-user-notification-dialog")
    self.WaitUntilNot(
        self.IsElementPresent,
        "css=grr-user-notification-dialog:contains('Loading...')")

    notifications_text = self.GetText("css=grr-user-notification-dialog")
    self.Click("css=grr-user-notification-dialog button:contains('Close')")

    return contains_string in notifications_text

  def GetJavaScriptValue(self, js_expression):
    return self.driver.execute_script(js_expression)

  def _WaitForAjaxCompleted(self):
    self.WaitUntilEqual(
        [], self.GetJavaScriptValue,
        "return (window.$ && $('body') && $('body').injector && "
        "$('body').injector().get('$http').pendingRequests) || []")

  @SeleniumAction
  def Type(self, target, text, end_with_enter=False):
    element = self.WaitUntil(self.GetVisibleElement, target)
    element.clear()
    element.send_keys(text)
    if end_with_enter:
      element.send_keys(keys.Keys.ENTER)

    # We experienced that Selenium sometimes swallows the last character of the
    # text sent. Raising an exception here will just retry in that case.
    if not end_with_enter:
      if text != self.GetValue(target):
        raise exceptions.WebDriverException("Send_keys did not work correctly.")

  @SeleniumAction
  def Click(self, target):
    # Selenium clicks elements by obtaining their position and then issuing a
    # click action in the middle of this area. This may lead to misclicks when
    # elements are moving. Make sure that they are stationary before issuing
    # the click action (specifically, using the bootstrap "fade" class that
    # slides dialogs in is highly discouraged in combination with .Click()).

    # Since Selenium does not know when the page is ready after AJAX calls, we
    # need to wait for AJAX completion here to be sure that all event handlers
    # are attached to their respective DOM elements.
    self._WaitForAjaxCompleted()

    element = self.WaitUntil(self.GetVisibleElement, target)
    element.click()

  @SeleniumAction
  def MoveMouseTo(self, target):
    self._WaitForAjaxCompleted()
    element = self.WaitUntil(self.GetVisibleElement, target)
    action_chains.ActionChains(self.driver).move_to_element(element).perform()

  @SeleniumAction
  def ScrollToBottom(self):
    self.driver.execute_script(
        "window.scrollTo(0, document.body.scrollHeight);")

  @SeleniumAction
  def DoubleClick(self, target):
    # Selenium clicks elements by obtaining their position and then issuing a
    # click action in the middle of this area. This may lead to misclicks when
    # elements are moving. Make sure that they are stationary before issuing
    # the click action (specifically, using the bootstrap "fade" class that
    # slides dialogs in is highly discouraged in combination with
    # .DoubleClick()).

    # Since Selenium does not know when the page is ready after AJAX calls, we
    # need to wait for AJAX completion here to be sure that all event handlers
    # are attached to their respective DOM elements.
    self._WaitForAjaxCompleted()

    element = self.WaitUntil(self.GetVisibleElement, target)
    action_chains.ActionChains(self.driver).double_click(element).perform()

  @SeleniumAction
  def Select(self, target, label):
    element = self.WaitUntil(self.GetVisibleElement, target)
    select.Select(element).select_by_visible_text(label)

  def GetSelectedLabel(self, target):
    element = self.WaitUntil(self.GetVisibleElement, target)
    return select.Select(element).first_selected_option.text.strip()

  def IsChecked(self, target):
    return self.WaitUntil(self.GetVisibleElement, target).is_selected()

  def GetCssCount(self, target):
    if not target.startswith("css="):
      raise ValueError("invalid target for GetCssCount: " + target)

    return len(self._FindElements(target))

  def WaitUntilEqual(self, target, condition_cb, *args):
    condition_value = None
    for _ in range(int(self.duration / self.sleep_time)):
      try:
        condition_value = condition_cb(*args)
        if condition_value == target:
          return True

      # Raise in case of a test-related error (i.e. failing assertion).
      except self.failureException:
        raise
      # The element might not exist yet and selenium could raise here. (Also
      # Selenium raises Exception not StandardError).
      except Exception as e:  # pylint: disable=broad-except
        logging.warning("Selenium raised %s", utils.SmartUnicode(e))

      time.sleep(self.sleep_time)

    self.fail("condition %s(%s) not met (expected=%r, got_last_time=%r)" %
              (condition_cb, args, target, condition_value))

  def WaitUntilContains(self, target, condition_cb, *args):
    data = ""
    target = utils.SmartUnicode(target)

    for _ in range(int(self.duration / self.sleep_time)):
      try:
        data = condition_cb(*args)
        if target in data:
          return True

      # Raise in case of a test-related error (i.e. failing assertion).
      except self.failureException:
        raise
      # The element might not exist yet and selenium could raise here.
      except Exception as e:  # pylint: disable=broad-except
        logging.warning("Selenium raised %s", utils.SmartUnicode(e))

      time.sleep(self.sleep_time)

    self.fail("condition not met. got: %r, does not contain: %s" %
              (data, target))

  def setUp(self):
    super(GRRSeleniumTest, self).setUp()

    # Used by CheckHttpErrors
    self.ignore_http_errors = False

    self.token.username = u"gui_user"
    webauth.WEBAUTH_MANAGER.SetUserName(self.token.username)

    # Make the user use the advanced gui so we can test it.
    data_store.REL_DB.WriteGRRUser(
        self.token.username, ui_mode=api_user.GUISettings.UIMode.ADVANCED)

    artifact_patcher = ar_test_lib.PatchDatastoreOnlyArtifactRegistry()
    artifact_patcher.start()
    self.addCleanup(artifact_patcher.stop)

    self.InstallACLChecks()

    if flags.FLAGS.use_headless_chrome:
      params = {
          "cmd": "Page.setDownloadBehavior",
          "params": {
              "behavior": "allow",
              "downloadPath": self.temp_dir
          }
      }
      result = self.driver.execute("send_command", params)
      if result["status"] != 0:
        raise RuntimeError("can't set Page.setDownloadBehavior: %s" % result)

  def tearDown(self):
    self.CheckBrowserErrors()
    super(GRRSeleniumTest, self).tearDown()

  def WaitForNotification(self, username):
    sleep_time = 0.2
    iterations = 50
    for _ in range(iterations):
      try:
        pending_notifications = data_store.REL_DB.ReadUserNotifications(
            username, state=rdf_objects.UserNotification.State.STATE_PENDING)
        if pending_notifications:
          return
      except IOError:
        pass
      time.sleep(sleep_time)
    self.fail("Notification for user %s never sent." % username)
Example #2
areg_2dim_formula = re.compile(r'(.+)\*(.)(.+)\*(.)(.*)')
reset_value_pattern = re.compile(r'^\s*(\S+)\s+RESET_VALUE\s+(\S+).*$')
field_reset_value_pattern = re.compile(
    r'^\s*(\S+)\s+FLD_RESET_VALUE\s+(\S+).*$')

# ----------------------------------------------------------------------------
# General global variables
# ----------------------------------------------------------------------------

symbols = {}
silent = False
warnings = 0
saved_warnings = None
print_arg = ''
allow_duplicate_field_values = True
lock = threading.RLock()

# ============================================================================
# LogInfo
#
# Prints a message unless "silent" mode is in use.
# ============================================================================


def LogInfo(log):
    if not silent:
        print(log)


# ============================================================================
# LogWarning
Example #3
    # Sync up with the node
    def sync_with_ping(self, timeout=60):
        self.send_message(msg_ping(nonce=self.ping_counter))
        test_function = lambda: self.last_message.get(
            "pong") and self.last_message["pong"].nonce == self.ping_counter
        wait_until(test_function, timeout=timeout, lock=mininode_lock)
        self.ping_counter += 1


# One lock for synchronizing all data access between the network event loop (see
# NetworkThread below) and the thread running the test logic.  For simplicity,
# P2PConnection acquires this lock whenever delivering a message to a P2PInterface.
# This lock should be acquired in the thread running the test logic to synchronize
# access to any data shared with the P2PInterface or P2PConnection.
mininode_lock = threading.RLock()


class NetworkThread(threading.Thread):
    network_event_loop = None

    def __init__(self):
        super().__init__(name="NetworkThread")
        # There is only one event loop and no more than one thread must be created
        assert not self.network_event_loop

        NetworkThread.network_event_loop = asyncio.new_event_loop()

    def run(self):
        """Start the network thread."""
        self.network_event_loop.run_forever()
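The comment above mininode_lock says the test thread must take the same lock before touching any state that the network event loop also mutates. A hedged sketch of test-side usage, assuming a P2PInterface-like peer object with a last_message dict:

def read_last_pong(peer):
    # Read shared state under the same RLock the network thread holds
    # while delivering messages to the interface.
    with mininode_lock:
        return peer.last_message.get("pong")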
Example #4
 def __init__(self, max_size=0):
     self._init(max_size)
     self.max_size = max_size
     self._lock = threading.RLock()

 def lock(self, name):
     name = 'lock_' + name
     if not hasattr(self, name):
         setattr(self, name, threading.RLock())
     return getattr(self, name)
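Note that the lock(self, name) helper above creates the named RLock with an unsynchronized hasattr/setattr check, so two threads racing on a new name could each build their own lock. A sketch of one way to guard the creation step with a meta-lock (illustrative, not from the original source):

import threading

class NamedLocks:
    def __init__(self):
        self._creation_lock = threading.Lock()
        self._locks = {}

    def lock(self, name):
        # setdefault under a meta-lock guarantees exactly one RLock per name.
        with self._creation_lock:
            return self._locks.setdefault(name, threading.RLock())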
Example #6
    def __init__(self,
                 interval=0.5,
                 max_tries=3,
                 initial_configs=None,
                 event_loop=None,
                 logger_log_level=logging.WARNING):
        """Create a LoggerRunner.
    ```
    interval - number of seconds to sleep between checking/updating loggers

    max_tries - number of times to try a dead logger config. If zero, then
                never stop retrying.

    initial_configs - optional dict of configs to start up on creation.

    event_loop - Optional event loop, if we are instantiated in a thread
                that does not have its own.

    logger_log_level - At what logging level our component loggers
                should operate.
    ```
    """
        logging.info('Starting LoggerRunner')
        # Map logger name to config, process running it, and any errors
        self.logger_configs = {}
        self.processes = {}
        self.errors = {}
        self.num_tries = {}
        self.failed_loggers = set()

        # We want to remember that we've shut down and erased a logger so
        # that the next time we're asked for a status update, we can add
        # one last notice that it's not running. Without it, the most
        # recent status report for it will be its previous state.
        self.disappeared_loggers = set()

        self.interval = interval
        self.max_tries = max_tries
        self.logger_log_level = logger_log_level

        self.quit_flag = False

        # If we've had an event loop passed in, use it
        self.event_loop = event_loop or asyncio.get_event_loop()
        if event_loop:
            asyncio.set_event_loop(event_loop)

        # Set the signal handler so that an external break will get
        # translated into a KeyboardInterrupt. But signal only works if
        # we're in the main thread - catch if we're not, and just assume
        # everything's going to be okay and we'll get shut down with a proper
        # "quit()" call otherwise.
        try:
            signal.signal(signal.SIGTERM, kill_handler)
        except ValueError:
            logging.info('LoggerRunner not running in main thread; '
                         'shutting down with Ctrl-C may not work.')

        # Don't let other threads mess with data while we're
        # messing. Re-entrant so that we don't have to worry about
        # re-acquiring when, for example set_configs() calls set_config().
        self.config_lock = threading.RLock()

        # Only let one call to check_loggers() proceed at a time
        self.check_loggers_lock = threading.Lock()

        # If we were given any initial configs, set 'em up
        if initial_configs:
            self.set_configs(initial_configs)
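The config_lock comment spells out why an RLock is chosen here: set_configs() is expected to call set_config(), and both acquire the same lock from the same thread. A minimal sketch of that nesting (the method bodies are placeholders, not the real LoggerRunner logic):

import threading

class ConfigHolder:
    def __init__(self):
        self.config_lock = threading.RLock()
        self.configs = {}

    def set_configs(self, new_configs):
        with self.config_lock:                  # first acquisition
            for name, config in new_configs.items():
                self.set_config(name, config)   # re-acquires the same RLock

    def set_config(self, name, config):
        with self.config_lock:                  # would deadlock with a plain Lock
            self.configs[name] = config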
Example #7
 def __init__(self):
     self.lock = threading.RLock()
     global log
     log = logging.getLogger("cloudscheduler")
Example #8
from mistral import context
from mistral.db.sqlalchemy import base as b
from mistral.db.sqlalchemy import model_base as mb
from mistral.db.sqlalchemy import sqlite_lock
from mistral.db import utils as m_dbutils
from mistral.db.v2.sqlalchemy import filters as db_filters
from mistral.db.v2.sqlalchemy import models
from mistral import exceptions as exc
from mistral.services import security
from mistral import utils
from mistral.workflow import states

CONF = cfg.CONF
LOG = logging.getLogger(__name__)

_SCHEMA_LOCK = threading.RLock()
_initialized = False


def get_backend():
    """Consumed by openstack common code.

    The backend is this module itself.
    :return: Name of db backend.
    """
    return sys.modules[__name__]


def setup_db():
    global _initialized
Example #9
    def __init__(self):
        """Initialize logging for modRana
        Logging is quite important, as one never knows when a bug shows
        up, and it is important to know what modRana was doing at that time.
        """

        self._log_folder_path = None
        self._file_handler = None
        self._log_file_enabled = False
        self._log_file_enabled_lock = threading.RLock()
        self._log_file_path = None
        self._log_file_compression = False
        self._compressed_log_file = None
        self._log_file_override = False

        # create main modRana logger (root logger)
        self._root_modrana_logger = logging.getLogger('')
        self._root_modrana_logger.setLevel(logging.DEBUG)
        # name is undocumented and might blow up in the future ^_-
        self._root_modrana_logger.name = ""

        # create log for the core modules
        self._core_logger = logging.getLogger('core')
        self._core_logger.setLevel(logging.DEBUG)

        # create a log for non-core/standalone/feature specific modules
        # (BaseModule subclasses)
        self._mod_logger = logging.getLogger('mod')
        self._mod_logger.setLevel(logging.DEBUG)

        # as we set the root logger to accept even debug messages,
        # we need to explicitly tell urllib3 to skip debug level
        # messages
        urllib3_logger = logging.getLogger("urllib3")
        urllib3_logger.setLevel(logging.ERROR)

        # create console handler that prints everything to stdout
        # (as was done previously by just using print)
        self._console_handler = logging.StreamHandler()
        self._console_handler.setLevel(logging.DEBUG)

        # create formatters and add them to the handlers
        # omit some stuff when printing to console to make the logs fit to terminal windows
        console_formatter = logging.Formatter('%(levelname)s %(name)s: %(message)s')
        # file viewers should be able to handle longer lines
        self._console_handler.setFormatter(console_formatter)

        # also, the file log can't be created and opened at once as modRana needs to load
        # and consult the persistent settings database first, but we don't want to lose
        # any early log messages
        # * solution -> MemoryLogHandler stores the log messages and is either flushed to the
        #   log file or discarded when modRana discovers that logging to file is disabled
        self._memory_handler = logging.handlers.MemoryHandler(capacity = 1024*10)
        self._memory_handler.setLevel(logging.DEBUG)

        # now we attach the console and memory handlers to the root modRana logger
        # * like this all messages should arrive in the handlers
        self._root_modrana_logger.addHandler(self._console_handler)
        self._root_modrana_logger.addHandler(self._memory_handler)

        # check if we are running on Android - if we do (by checking qrc usage), check
        # if the Android debug log folder exists - if it does, enable the log file at once
        # This is done because Android, being such a mess, needs the possibility to
        # easily enable early logging to debug all the breakage. :)
        if qrc.is_qrc:
            # check for the debug log folder (note that this folder is intentionally
            # different from the folder used to store debug logs enabled by the user)
            if os.path.isdir(ANDROID_SPECIAL_LOG_FOLDER):
                self._root_modrana_logger.debug("special Android log folder (%s) detected", ANDROID_SPECIAL_LOG_FOLDER)
                self._root_modrana_logger.debug("enabling early Android log file")
                self.log_folder_path = ANDROID_SPECIAL_LOG_FOLDER
                self.enable_log_file()
                self._log_file_override = True
                self._root_modrana_logger.debug("early Android log file enabled")
Example #10
        self.timeout_int = timeout_int

    def __repr__(self):
        return "<User timeout '%s' in %s>" % (self.user_id, self.chat_id)


Welcome.__table__.create(checkfirst=True)
WelcomeButtons.__table__.create(checkfirst=True)
GoodbyeButtons.__table__.create(checkfirst=True)
CleanServiceSetting.__table__.create(checkfirst=True)
WelcomeSecurity.__table__.create(checkfirst=True)
UserRestrict.__table__.create(checkfirst=True)
WelcomeTimeout.__table__.create(checkfirst=True)
AllowedChat.__table__.create(checkfirst=True)

INSERTION_LOCK = threading.RLock()
WELC_BTN_LOCK = threading.RLock()
LEAVE_BTN_LOCK = threading.RLock()
CS_LOCK = threading.RLock()
WS_LOCK = threading.RLock()
UR_LOCK = threading.RLock()
TO_LOCK = threading.RLock()
ALLOWCHATLOCK = threading.RLock()

CHAT_USERRESTRICT = {}
CHAT_TIMEOUT = {}

WHITELIST = set()


def add_to_userlist(chat_id, user_id, is_clicked):
    def __init__(self, filename, mode="r", compression=ZIP_STORED, allowZip64=False, compresslevel=None, strict=True):
        """Open the ZIP file with mode read "r", write "w" or append "a"."""
        # Mostly from zipfile.py
        if mode not in ("r", "w", "a"):
            raise RuntimeError('ZipFile() requires mode "r", "w", or "a"')

        if compression == self.ZIP_STORED:
            pass
        elif compression == self.ZIP_DEFLATED:
            if not zlib:
                raise RuntimeError("Compression requires the (missing) zlib module")

        else:
            raise RuntimeError("That compression method is not supported")

        self._allowZip64 = allowZip64
        self._didModify = False
        self.debug = 0  # Level of printing: 0 through 3
        self.NameToInfo = {}  # Find file info given name
        self.filelist = []  # List of ZipInfo instances for archive
        self.compression = compression  # Method of compression
        self.compresslevel = compresslevel
        self.mode = key = mode.replace('b', '')[0]
        self.pwd = None
        self._comment = b''
        self.is_zip = True

        self._fileRefCnt = 1
        self._lock = threading.RLock()
        self._seekable = True
        self._writing = False

        # Check if we were passed a file-like object
        if isinstance(filename, str):
            self._filePassed = 0
            self.filename = filename
            mode_dict = {'r': 'rb', 'w': 'wb', 'a': 'r+b'}
            try:
                self.fp = open(filename, mode_dict[mode])
            except IOError:
                if mode == 'a':
                    mode = key = 'w'
                    self.fp = open(filename, mode_dict[mode])
                else:
                    raise
        else:
            self._filePassed = 1
            self.fp = filename
            self.filename = getattr(filename, 'name', None)

        # noinspection PyBroadException
        try:
            if key == 'r':
                self.fp.seek(0)
                if stringFileHeader not in self.fp.read(1024):
                    self.is_zip = False
                self._RealGetContents()
            elif key == 'w':
                # set the modified flag so central directory gets written
                # even if no files are added to the archive
                self._didModify = True
                try:
                    self.start_dir = self.fp.tell()
                except (AttributeError, OSError):
                    # self.fp = _Tellable(self.fp)
                    self.start_dir = 0
                    self._seekable = False
                else:
                    # Some file-like objects can provide tell() but not seek()
                    try:
                        self.fp.seek(self.start_dir)
                    except (AttributeError, OSError):
                        self._seekable = False

            elif key == 'a':
                try:
                    # See if file is a zip file
                    self._RealGetContents()
                    # seek to start of directory and overwrite
                    self.fp.seek(self.start_dir, 0)
                except BadZipfile:
                    # file is not a zip file, just append
                    self.fp.seek(0, 2)

                    # set the modified flag so central directory gets written
                    # even if no files are added to the archive
                    self._didModify = True
            else:
                raise RuntimeError('Mode must be "r", "w" or "a"')
            self.broken = False
        except Exception:
            if strict:
                if not self._filePassed:
                    self.fp.close()
                self.fp = None
                raise
            else:
                self.broken = True
    def __init__(self, min_proxies=200):
        self._bad_proxies = {}
        self._minProxies = min_proxies
        self.lock = threading.RLock()

        self.get_list()
Example #13
        return "<Fed {} subscribes for {}>".format(self.fed_id, self.fed_subs)


# Dropping db
# Federations.__table__.drop()
# ChatF.__table__.drop()
# BansF.__table__.drop()
# FedSubs.__table__.drop()

Federations.__table__.create(checkfirst=True)
ChatF.__table__.create(checkfirst=True)
BansF.__table__.create(checkfirst=True)
FedsUserSettings.__table__.create(checkfirst=True)
FedSubs.__table__.create(checkfirst=True)

FEDS_LOCK = threading.RLock()
CHAT_FEDS_LOCK = threading.RLock()
FEDS_SETTINGS_LOCK = threading.RLock()
FEDS_SUBSCRIBER_LOCK = threading.RLock()

FEDERATION_BYNAME = {}
FEDERATION_BYOWNER = {}
FEDERATION_BYFEDID = {}

FEDERATION_CHATS = {}
FEDERATION_CHATS_BYID = {}

FEDERATION_BANNED_FULL = {}
FEDERATION_BANNED_USERID = {}

FEDERATION_NOTIFICATION = {}
Example #14
def run(config=None):
  global logger, broker3, broker5, brokerSN, server
  logger = logging.getLogger('MQTT broker')
  logger.setLevel(logging.INFO)
  logger.addFilter(filter)

  lock = threading.RLock() # shared lock
  persistence = False
  if persistence:
    connection, sharedData = setup_persistence("sharedData") # location for data shared between brokers - subscriptions for example
  else:
    sharedData = {}

  options = {}
  if config is not None:
    servers_to_create, options, bridges_to_create = process_config(config)

  broker3 = MQTTV3Brokers(options=options, lock=lock, sharedData=sharedData)

  broker5 = MQTTV5Brokers(options=options, lock=lock, sharedData=sharedData)

  brokerSN = MQTTSNBrokers(lock=lock, sharedData=sharedData)

  brokers = [broker3, broker5, brokerSN]

  broker3.setBroker5(broker5)
  broker5.setBroker3(broker3)

  brokerSN.setBroker3(broker3)
  brokerSN.setBroker5(broker5)

  servers = []
  bridges = []
  UDPListeners.setBroker(brokerSN)
  TCPListeners.setBrokers(broker3, broker5)
  HTTPListeners.setBrokers(broker3, broker5, brokerSN)
  HTTPListeners.setSharedData(lock, sharedData)

  try:
    if config is None:
#      TCPBridges.setBroker5(broker5)
#      TCPBridges.create("bridge",1883,"172.16.0.4")
      servers.append(TCPListeners.create(1883, serve_forever=True))
    else:
      logger.debug("Starting bridges")
      for bridge in bridges_to_create:
        bridge[0].setBroker5(broker5)
        bridges.append(bridge[0].create(**bridge[1]))
      logger.debug("Starting servers")
      for server in servers_to_create:
        servers.append(server[0].create(**server[1]))
  except KeyboardInterrupt:
    pass
  except:
    logger.exception("startBroker")

  # Stop incoming communications
  import socket
  for server in servers:
    try:
      logger.info("Stopping listener %s", str(server))
      server.shutdown()
    except:
      traceback.print_exc()

  logger.info("Shutdown brokers")
  for broker in brokers:
    try:
      logger.info("Stopping broker %s", str(broker))
      broker.shutdown()
    except:
      traceback.print_exc()
  filter.measure()

  logger.debug("Ending sharedData %s", sharedData)
  if persistence:
    sharedData._p_changed = True
    import transaction
    transaction.commit()
    connection.close()
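Example #14 creates a single shared RLock and hands it, together with sharedData, to every broker and listener, so all of them serialize access to the same in-memory state. A hedged sketch of what the receiving side presumably does with it (the real broker internals are not shown in this excerpt):

import threading

class BrokerStub:
  def __init__(self, options=None, lock=None, sharedData=None):
    self.lock = lock or threading.RLock()
    self.sharedData = sharedData if sharedData is not None else {}

  def record_subscription(self, client_id, topic):
    # Every component holding the same lock serializes its updates
    # to the shared subscription state.
    with self.lock:
      self.sharedData.setdefault(client_id, set()).add(topic)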
Example #15
    """Decorator for a cached property."""
    def __init__(self, fn):
        self._fn = fn
        self.__doc__ = getattr(fn, '__doc__')

    def __get__(self, obj, objtype=None):
        value = self._fn(obj)
        setattr(obj, self._fn.__name__, value)
        return value


# application lock/globals are global to one application
# server locks lock the whole server
# server globals are pickled in /tmp

_app_lock = threading.RLock()


def app_lock(name=''):
    return _app_lock


_app_globals: dict = {}


def get_app_global(name, init_fn):
    if name in _app_globals:
        return _app_globals[name]

    with app_lock(name):
        if name not in _app_globals:
Example #16
    def initialize(self, dictionary=None):
        if dictionary:
            abs_path = _get_abs_path(dictionary)
            if self.dictionary == abs_path and self.initialized:
                return
            else:
                self.dictionary = abs_path
                self.initialized = False
        else:
            abs_path = self.dictionary

        with self.lock:
            try:
                with DICT_WRITING[abs_path]:
                    pass
            except KeyError:
                pass
            if self.initialized:
                return

            default_logger.debug("Building prefix dict from %s ..." %
                                 (abs_path or 'the default dictionary'))
            t1 = time.time()
            if self.cache_file:
                cache_file = self.cache_file
            # default dictionary
            elif abs_path == DEFAULT_DICT:
                cache_file = "jieba.cache"
            # custom dictionary
            else:
                cache_file = "jieba.u%s.cache" % md5(
                    abs_path.encode('utf-8', 'replace')).hexdigest()
            cache_file = os.path.join(self.tmp_dir or tempfile.gettempdir(),
                                      cache_file)
            # prevent absolute path in self.cache_file
            tmpdir = os.path.dirname(cache_file)

            load_from_cache_fail = True
            if os.path.isfile(cache_file) and (abs_path == DEFAULT_DICT
                                               or os.path.getmtime(cache_file)
                                               > os.path.getmtime(abs_path)):
                default_logger.debug("Loading model from cache %s" %
                                     cache_file)
                try:
                    with open(cache_file, 'rb') as cf:
                        self.FREQ, self.total = pickle.load(cf)
                    load_from_cache_fail = False
                except Exception:
                    load_from_cache_fail = True

            if load_from_cache_fail:
                wlock = DICT_WRITING.get(abs_path, threading.RLock())
                DICT_WRITING[abs_path] = wlock
                with wlock:
                    self.FREQ, self.total = self.gen_pfdict(
                        self.get_dict_file())
                    default_logger.debug("Dumping model to file cache %s" %
                                         cache_file)
                    try:
                        # prevent moving across different filesystems
                        fd, fpath = tempfile.mkstemp(dir=tmpdir)
                        with os.fdopen(fd, 'wb') as temp_cache_file:
                            pickle.dump((self.FREQ, self.total),
                                        temp_cache_file)
                        _replace_file(fpath, cache_file)
                    except Exception:
                        default_logger.exception("Dump cache file failed.")

                try:
                    del DICT_WRITING[abs_path]
                except KeyError:
                    pass

            self.initialized = True
            default_logger.debug("Loading model cost %.3f seconds." %
                                 (time.time() - t1))
            default_logger.debug("Prefix dict has been built succesfully.")
Example #17
 def __init__(self):
     self.kernel = pyspec.framework.PySpecKernel()
     self.spec_modules = []
     self.selected = []
     self.project = pyspec.wxui.project.WxPySpecProjectManager()
     self.load_lock = threading.RLock()

 def __init__(self, std, is_alive):
     threading.Thread.__init__(self)
     self.std = std
     self.is_alive = is_alive
     self.data = ''
     self.data_lock = threading.RLock()
Example #19
 def __init__(self):
     dbg("creating MembersLock")
     self._lock = threading.RLock()
     dbg("created MembersLock")
Example #20
 def __init__(self):
     self.mutex = threading.RLock()
Example #21
    name = Column(UnicodeText, nullable=False)
    url = Column(UnicodeText, nullable=False)
    same_line = Column(Boolean, default=False)

    def __init__(self, chat_id, keyword, name, url, same_line=False):
        self.chat_id = str(chat_id)
        self.keyword = keyword
        self.name = name
        self.url = url
        self.same_line = same_line


CustomFilters.__table__.create(checkfirst=True)
Buttons.__table__.create(checkfirst=True)

CUST_FILT_LOCK = threading.RLock()
BUTTON_LOCK = threading.RLock()
CHAT_FILTERS = {}


def get_all_filters():
    try:
        return SESSION.query(CustomFilters).all()
    finally:
        SESSION.close()


def add_filter(chat_id,
               keyword,
               reply,
               is_sticker=False,

from obs import const

LOCK_COUNT = 16

lock_list = None
 
if const.IS_WINDOWS:
    lock_list = []
    import threading
    for i in range(LOCK_COUNT):
        lock_list.append(threading.RLock())
else:
    import multiprocessing
    lock0 = multiprocessing.RLock()
    lock1 = multiprocessing.RLock()
    lock2 = multiprocessing.RLock()
    lock3 = multiprocessing.RLock()
    lock4 = multiprocessing.RLock()
    lock5 = multiprocessing.RLock()
    lock6 = multiprocessing.RLock()
    lock7 = multiprocessing.RLock()
    lock8 = multiprocessing.RLock()
    lock9 = multiprocessing.RLock()
    lock10 = multiprocessing.RLock()
    lock11 = multiprocessing.RLock()
    lock12 = multiprocessing.RLock()
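The obs snippet above builds a fixed pool of LOCK_COUNT locks (thread locks on Windows, process locks elsewhere), which is the usual lock-striping setup: a key is hashed to pick one lock from the pool. A minimal sketch of how such a pool is typically indexed (assumed usage; the excerpt does not show it):

import threading

LOCK_COUNT = 16
_lock_pool = [threading.RLock() for _ in range(LOCK_COUNT)]

def lock_for_key(key):
    # Map a key onto one of the striped locks.
    return _lock_pool[hash(key) % LOCK_COUNT]

with lock_for_key("some-resource"):
    pass  # critical section touching only that resource's state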
Example #23
    def __init__(self, user_id, chat_id, chat_name, conn_time):
        self.user_id = user_id
        self.chat_id = str(chat_id)
        self.chat_name = str(chat_name)
        self.conn_time = int(conn_time)

    def __repr__(self):
        return "<connection user {} history {}>".format(
            self.user_id, self.chat_id)


ChatAccessConnectionSettings.__table__.create(checkfirst=True)
Connection.__table__.create(checkfirst=True)
ConnectionHistory.__table__.create(checkfirst=True)

CHAT_ACCESS_LOCK = threading.RLock()
CONNECTION_INSERTION_LOCK = threading.RLock()
CONNECTION_HISTORY_LOCK = threading.RLock()

HISTORY_CONNECT = {}


def allow_connect_to_chat(chat_id: Union[str, int]) -> bool:
    try:
        chat_setting = SESSION.query(ChatAccessConnectionSettings).get(
            str(chat_id))
        if chat_setting:
            return chat_setting.allow_connect_to_chat
        return False
    finally:
        SESSION.close()
Example #24
import sublime

from .jsonclient import AsynClient
from .decorators import auto_project_switch
from .vagrant import VagrantStatus, VagrantIPAddress
from .builder.python_builder import AnacondaSetPythonBuilder
from .helpers import (
    get_settings, get_traceback, project_name, create_subprocess, active_view
)

logger = logging.getLogger(__name__)
logger.addHandler(logging.StreamHandler(sys.stdout))
logger.setLevel(logging.WARNING)

WORKERS = {}
WORKERS_LOCK = threading.RLock()
LOOP_RUNNING = False


class BaseWorker(object):
    """Base class for different worker interfaces
    """

    def __init__(self):
        self.available_port = None
        self.reconnecting = False
        self.green_light = True
        self.last_error = None
        self.process = None
        self.client = None
 def locked(self, lock=None):
     if not lock:
         lock = threading.RLock()
     yield lock.acquire()
     lock.release()
Example #26
 def __init__(self, size):
     self.size = size
     self._quence = []
     self._mutex = threading.RLock()
     self._full = threading.Condition(self._mutex)
     self._empty = threading.Condition(self._mutex)
Example #27
from lte.protos.mobilityd_pb2 import IPAddress, GWInfo, IPBlock

from magma.pipelined.tests.app.start_pipelined import (
    TestSetup,
    PipelinedController,
)
from magma.pipelined.bridge_util import BridgeTools
from magma.pipelined.tests.pipelined_test_util import (start_ryu_app_thread,
                                                       stop_ryu_app_thread,
                                                       create_service_manager,
                                                       SnapshotVerifier)

from magma.pipelined.app import inout

gw_info_map = {}
gw_info_lock = threading.RLock()  # re-entrant locks


def mocked_get_mobilityd_gw_info() -> List[GWInfo]:
    global gw_info_map
    global gw_info_lock

    with gw_info_lock:
        return gw_info_map.values()


def mocked_set_mobilityd_gw_info(ip: IPAddress, mac: str, vlan: str):
    global gw_info_map
    global gw_info_lock

    with gw_info_lock:
Example #28
 def __init__(self):
     super(LambdaExecutorReuseContainers, self).__init__()
     # keeps track of each function arn and the last time it was invoked
     self.function_invoke_times = {}
     # locking thread for creation/destruction of docker containers.
     self.docker_container_lock = threading.RLock()
Example #29
 def __init__(self):
     super(GdbDebugger, self).__init__()
     self._is_running = False
     self._is_running_lock = threading.RLock()
     self._child_exited_event = threading.Event()
     self._signals_reset_event = threading.Event()
Example #30
 def __init__(self):
     dict.__init__(self)
     self._lock = threading.RLock()  # reentrant lock allows multiple calls per thread
     self._set_times = 0
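The comment in Example #30 gives the usual reason for picking RLock over Lock: the owning thread may acquire it again, for instance when a locked method calls another locked method. A two-level sketch of the difference:

import threading

rlock = threading.RLock()
with rlock:
    with rlock:
        pass  # fine: the same thread may re-enter an RLock

plain = threading.Lock()
with plain:
    pass  # re-acquiring a plain Lock here from the same thread would block forever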