Example No. 1
  def WaitUntilEqual(self, target, condition_cb, *args):
    condition_value = None
    for _ in range(int(self.duration / self.sleep_time)):
      try:
        condition_value = condition_cb(*args)
        if condition_value == target:
          return True

      # Re-raise test-related errors (e.g. a failing assertion).
      except self.failureException:
        raise
      # The element might not exist yet and selenium could raise here. (Also,
      # Selenium raises Exception, not StandardError.)
      except Exception as e:  # pylint: disable=broad-except
        logging.warning("Selenium raised %s", utils.SmartUnicode(e))

      time.sleep(self.sleep_time)

    self.fail("condition %s(%s) not met (expected=%s, got_last_time=%s)" %
              (condition_cb, args, target, condition_value))
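A minimal, self-contained sketch of the same polling pattern (the names and defaults here are illustrative, not GRR's API): call a condition callback repeatedly until it returns the target value or the deadline expires.

import time

def wait_until_equal(target, condition_cb, duration=5.0, sleep_time=0.1):
  """Polls condition_cb until it returns target or the deadline expires."""
  last = None
  for _ in range(int(duration / sleep_time)):
    try:
      last = condition_cb()
      if last == target:
        return True
    except Exception:  # The probed state may not exist yet; retry.
      pass
    time.sleep(sleep_time)
  raise AssertionError("expected %r, last saw %r" % (target, last))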
Example No. 2
    def ParseLines(cls, lines):
        users = set()
        filter_regexes = [
            re.compile(x)
            for x in config.CONFIG["Artifacts.netgroup_filter_regexes"]
        ]
        username_regex = re.compile(cls.USERNAME_REGEX)
        blacklist = config.CONFIG["Artifacts.netgroup_user_blacklist"]
        for index, line in enumerate(lines):
            if line.startswith("#"):
                continue

            splitline = line.split(" ")
            group_name = splitline[0]

            if filter_regexes:
                filter_match = False
                for regex in filter_regexes:
                    if regex.search(group_name):
                        filter_match = True
                        break
                if not filter_match:
                    continue

            for member in splitline[1:]:
                if member.startswith("("):
                    try:
                        _, user, _ = member.split(",")
                        if user not in users and user not in blacklist:
                            if not username_regex.match(user):
                                yield rdf_anomaly.Anomaly(
                                    type="PARSER_ANOMALY",
                                    symptom="Invalid username: %s" % user)
                            else:
                                users.add(user)
                                yield rdf_client.User(
                                    username=utils.SmartUnicode(user))
                    except ValueError:
                        raise parser.ParseError(
                            "Invalid netgroup file at line %d: %s" %
                            (index + 1, line))
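For reference, a netgroup line has the form "admins (host1,alice,dom1) (host2,bob,dom2)": the group name first, then (host,user,domain) triples. A tiny sketch of the member extraction the parser performs, on a hypothetical input line:

line = "admins (-,alice,) (-,bob,)"
parts = line.split(" ")
group_name, members = parts[0], parts[1:]
users = [m.split(",")[1] for m in members if m.startswith("(")]
print(group_name, users)  # admins ['alice', 'bob']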
Example No. 3
    def ListClientsForKeywords(self, keywords, start_time=None, cursor=None):
        """Lists the clients associated with keywords."""
        keywords = set(keywords)
        keyword_mapping = {utils.SmartUnicode(kw): kw for kw in keywords}

        result = {}
        for kw in itervalues(keyword_mapping):
            result[kw] = []

        query = (
            "SELECT DISTINCT keyword, client_id FROM client_keywords WHERE "
            "keyword IN ({})".format(",".join(["%s"] * len(keyword_mapping))))
        args = list(iterkeys(keyword_mapping))
        if start_time:
            query += " AND timestamp >= %s"
            args.append(mysql_utils.RDFDatetimeToMysqlString(start_time))

        cursor.execute(query, args)
        for kw, cid in cursor.fetchall():
            result[keyword_mapping[kw]].append(mysql_utils.IntToClientID(cid))
        return result
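The IN clause above is built by repeating the "%s" placeholder once per keyword and letting the database driver handle quoting. The same pattern as a runnable sketch, with sqlite3 standing in for MySQL (sqlite3 uses "?" placeholders):

import sqlite3

keywords = ["linux", "windows"]
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE client_keywords (keyword TEXT, client_id INTEGER)")
conn.executemany("INSERT INTO client_keywords VALUES (?, ?)",
                 [("linux", 1), ("windows", 2), ("linux", 3)])
query = ("SELECT DISTINCT keyword, client_id FROM client_keywords "
         "WHERE keyword IN ({})".format(",".join(["?"] * len(keywords))))
for kw, cid in conn.execute(query, keywords):
  print(kw, cid)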
Example No. 4
  def Add(self, category, label, age):
    """Adds another instance of this category into the active_days counter.

    We automatically count the event towards all relevant active_days. For
    example, if the category "Windows" was seen 8 days ago, it is counted
    towards the 30-day and 14-day actives but not towards the 7- or 1-day
    actives.

    Args:
      category: The category name to account this instance against.
      label: Client label to which this should be applied.
      age: When this instance occurred.
    """
    now = rdfvalue.RDFDatetime.Now()
    category = utils.SmartUnicode(category)

    for active_time in self.active_days:
      self.categories[active_time].setdefault(label, {})
      if (now - age).seconds < active_time * 24 * 60 * 60:
        self.categories[active_time][label][
            category] = self.categories[active_time][label].get(category, 0) + 1
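A toy illustration of the bucketing rule from the docstring: an event seen 8 days ago falls inside the 14- and 30-day windows but outside the 1- and 7-day ones.

DAY = 24 * 60 * 60
active_days = [1, 7, 14, 30]
age_seconds = 8 * DAY
print([d for d in active_days if age_seconds < d * DAY])  # [14, 30]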
Example No. 5
  def testIgnoresFileNotMatchingPathGlobsBlacklist(self):
    handler = flow_plugin.ApiGetFlowFilesArchiveHandler(
        path_globs_whitelist=[rdf_paths.GlobExpression("/**/*/test.plist")],
        path_globs_blacklist=[rdf_paths.GlobExpression("**/*.plist")])
    result = handler.Handle(
        flow_plugin.ApiGetFlowFilesArchiveArgs(
            client_id=self.client_id,
            flow_id=self.flow_id,
            archive_format="ZIP"),
        token=self.token)
    manifest = self._GetZipManifest(result)
    self.assertEqual(manifest["archived_files"], 0)
    self.assertEqual(manifest["failed_files"], 0)
    self.assertEqual(manifest["processed_files"], 1)
    self.assertEqual(manifest["ignored_files"], 1)
    self.assertEqual(manifest["ignored_files_list"], [
        utils.SmartUnicode(
            self.client_id.Add("fs/os").Add(self.base_path).Add("test.plist"))
    ])
Example No. 6
    def DeleteAttributes(self,
                         subject,
                         attributes,
                         start=None,
                         end=None,
                         sync=True):
        """Remove some attributes from a subject."""
        _ = sync  # Unused
        if not attributes:
            return

        if isinstance(attributes, basestring):
            raise ValueError(
                "String passed to DeleteAttributes (non string iterable expected)."
            )

        for attribute in attributes:
            timestamp = self._MakeTimestamp(start, end)
            attribute = utils.SmartUnicode(attribute)
            queries = self._BuildDelete(subject, attribute, timestamp)
            self._ExecuteQueries(queries)
Example No. 7
  def testCorrectlyAccountsForFailedFiles(self):
    self._InitializeFiles(hashing=True)

    path2 = (u"aff4:/%s/fs/os/foo/bar/中国新闻网新闻中.txt" % self.client_id.Basename())
    with aff4.FACTORY.Create(path2, aff4.AFF4Image, token=self.token) as fd:
      fd.Write("hello2".encode("utf-8"))

    # Delete a single chunk
    aff4.FACTORY.Delete(
        (u"aff4:/%s/fs/os/foo/bar/中国新闻网新闻中.txt/0000000000" %
         self.client_id.Basename()),
        token=self.token)

    fd_path = self._GenerateArchive(
        self.stat_entries,
        archive_format=api_call_handler_utils.CollectionArchiveGenerator.ZIP)

    zip_fd = zipfile.ZipFile(fd_path)
    names = [utils.SmartUnicode(s) for s in sorted(zip_fd.namelist())]
    self.assertTrue(self.archive_paths[0] in names)
    self.assertTrue(self.archive_paths[1] not in names)

    manifest = yaml.safe_load(zip_fd.read("test_prefix/MANIFEST"))
    self.assertEqual(
        manifest, {
            "description": "Test description",
            "processed_files": 2,
            "archived_files": 1,
            "ignored_files": 0,
            "failed_files": 1,
            "failed_files_list": [
                u"aff4:/%s/fs/os/foo/bar/中国新闻网新闻中.txt" %
                self.client_id.Basename()
            ]
        })
Example No. 8
  def run(self):
    """Main thread for processing messages."""

    self.OnStartup()

    try:
      while True:
        message = self._in_queue.get()

        # A message of None is our terminal message.
        if message is None:
          break

        try:
          self.HandleMessage(message)
          # Catch any errors and keep going here
        except Exception as e:  # pylint: disable=broad-except
          logging.warning("%s", e)
          self.SendReply(
              rdf_flows.GrrStatus(
                  status=rdf_flows.GrrStatus.ReturnedStatus.GENERIC_ERROR,
                  error_message=utils.SmartUnicode(e)),
              request_id=message.request_id,
              response_id=1,
              session_id=message.session_id,
              task_id=message.task_id,
              message_type=rdf_flows.GrrMessage.Type.STATUS)
          if flags.FLAGS.pdb_post_mortem:
            pdb.post_mortem()

    except Exception as e:  # pylint: disable=broad-except
      logging.error("Exception outside of the processing loop: %r", e)
    finally:
      # There's no point in running the client if it's broken out of the
      # processing loop and it should be restarted shortly anyway.
      logging.fatal("The client has broken out of its processing loop.")

      # The binary (the Python threading library, perhaps) has proven in tests
      # to be very resistant to termination calls, so we kill it with fire.
      os.kill(os.getpid(), signal.SIGKILL)
Example No. 9
    def Handle(self, args, token=None):
        audit_description = ",".join([
            token.username + u"." + utils.SmartUnicode(name)
            for name in args.labels
        ])
        audit_events = []

        try:
            index = client_index.CreateClientIndex(token=token)
            client_objs = aff4.FACTORY.MultiOpen(
                [cid.ToClientURN() for cid in args.client_ids],
                aff4_type=aff4_grr.VFSGRRClient,
                mode="rw",
                token=token)
            for client_obj in client_objs:
                if data_store.RelationalDBWriteEnabled():
                    cid = client_obj.urn.Basename()
                    try:
                        data_store.REL_DB.AddClientLabels(
                            cid, token.username, args.labels)
                        idx = client_index.ClientIndex()
                        idx.AddClientLabels(cid, args.labels)
                    except db.UnknownClientError:
                        # TODO(amoser): Remove after data migration.
                        pass

                client_obj.AddLabels(args.labels)
                index.AddClient(client_obj)
                client_obj.Close()

                audit_events.append(
                    rdf_events.AuditEvent(
                        user=token.username,
                        action="CLIENT_ADD_LABEL",
                        flow_name="handler.ApiAddClientsLabelsHandler",
                        client=client_obj.urn,
                        description=audit_description))
        finally:
            events.Events.PublishMultipleEvents(
                {audit.AUDIT_EVENT: audit_events}, token=token)
Example No. 10
    def SendMail(self, responses):
        """Sends a mail when the client has responded."""
        if responses.success:
            client = aff4.FACTORY.Open(self.client_id, token=self.token)
            hostname = client.Get(client.Schema.HOSTNAME)

            subject = self.__class__.subject_template.render(hostname=hostname)
            body = self.__class__.template.render(
                client_id=self.client_id,
                admin_ui=config.CONFIG["AdminUI.url"],
                hostname=hostname,
                url="/clients/%s" % self.client_id.Basename(),
                creator=self.token.username,
                signature=utils.SmartUnicode(config.CONFIG["Email.signature"]))

            email_alerts.EMAIL_ALERTER.SendEmail(self.args.email,
                                                 "grr-noreply",
                                                 utils.SmartStr(subject),
                                                 utils.SmartStr(body),
                                                 is_html=True)
        else:
            raise flow.FlowError("Error while pinging client.")
Example No. 11
    def WaitUntil(self, condition_cb, *args):
        self.CheckBrowserErrors()

        for _ in range(int(self.duration / self.sleep_time)):
            try:
                res = condition_cb(*args)
                if res:
                    return res

            # Re-raise test-related errors (e.g. a failing assertion).
            except self.failureException:
                raise
            # The element might not exist yet and selenium could raise here.
            # (Also, Selenium raises Exception, not StandardError.)
            except Exception as e:  # pylint: disable=broad-except
                logging.warning("Selenium raised %s", utils.SmartUnicode(e))

            self.CheckBrowserErrors()
            time.sleep(self.sleep_time)

        self.fail("condition not met, body is: %s" %
                  self.driver.find_element_by_tag_name("body").text)
Example No. 12
    def SendMail(self, responses):
        """Sends a mail when the client has responded."""
        if not responses.success:
            raise flow_base.FlowError("Error while pinging client.")

        client = data_store.REL_DB.ReadClientSnapshot(self.client_id)
        hostname = client.knowledge_base.fqdn

        subject = self.__class__.subject_template.render(hostname=hostname)
        body = self.__class__.template.render(
            client_id=self.client_id,
            admin_ui=config.CONFIG["AdminUI.url"],
            hostname=hostname,
            url="/clients/%s" % self.client_id,
            creator=self.creator,
            signature=utils.SmartUnicode(config.CONFIG["Email.signature"]))

        email_alerts.EMAIL_ALERTER.SendEmail(self.args.email,
                                             "grr-noreply",
                                             subject,
                                             body,
                                             is_html=True)
Example No. 13
  def _Walk(self, depth, top_path, top_tsk_dir):
    if depth < 0:
      return

    dirs, files = [], []
    tsk_dir_map = {}

    for f in top_tsk_dir:
      name = f.info.name.name
      if name in [".", ".."] or name in self.BLACKLIST_FILES:
        continue

      name = utils.SmartUnicode(name)

      try:
        inode = f.info.meta.addr
        fd = self.fs.open_meta(inode)
        tsk_dir = fd.as_directory()

      except (IOError, AttributeError):  # Not a directory
        files.append(name)

      else:  # Is a directory
        dirs.append(name)
        tsk_dir_map[name] = tsk_dir

    dirs.sort()
    files.sort()

    yield top_path, dirs, files

    for d in dirs:
      path = "%s/%s" % (top_path if top_path != "/" else "", d)
      tsk_dir = tsk_dir_map[d]

      for item in self._Walk(depth - 1, path, tsk_dir):
        yield item
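_Walk mirrors os.walk over a TSK (The Sleuth Kit) filesystem image, with an explicit recursion-depth limit. For comparison, a depth-limited os.walk equivalent over a local directory tree:

import os

def walk_limited(top, depth):
  """Yields (path, dirs, files) tuples, pruning recursion below depth."""
  if depth < 0:
    return
  dirs, files = [], []
  for name in sorted(os.listdir(top)):
    (dirs if os.path.isdir(os.path.join(top, name)) else files).append(name)
  yield top, dirs, files
  for d in dirs:
    for item in walk_limited(os.path.join(top, d), depth - 1):
      yield item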
Example No. 14
    def testCreatesZipContainingFilesAndClientInfosAndManifest(self):
        self._InitializeFiles(hashing=True)

        fd_path = self._GenerateArchive(
            self.stat_entries,
            archive_format=api_call_handler_utils.CollectionArchiveGenerator.ZIP)

        zip_fd = zipfile.ZipFile(fd_path)
        names = [utils.SmartUnicode(s) for s in sorted(zip_fd.namelist())]

        client_info_name = (u"test_prefix/%s/client_info.yaml" %
                            self.client_id.Basename())
        manifest_name = u"test_prefix/MANIFEST"

        self.assertEqual(
            names,
            sorted(self.archive_paths + [client_info_name, manifest_name]))

        contents = zip_fd.read(self.archive_paths[0])
        self.assertEqual(contents, "hello1")

        contents = zip_fd.read(self.archive_paths[1])
        self.assertEqual(contents, "hello2")

        manifest = yaml.safe_load(zip_fd.read(manifest_name))
        self.assertEqual(
            manifest, {
                "description": "Test description",
                "processed_files": 2,
                "archived_files": 2,
                "ignored_files": 0,
                "failed_files": 0
            })

        client_info = yaml.safe_load(zip_fd.read(client_info_name))
        self.assertEqual(client_info["system_info"]["fqdn"],
                         "Host-0.example.com")
Example No. 15
    def ScanAttributes(self,
                       subject_prefix,
                       attributes,
                       after_urn="",
                       max_records=None,
                       relaxed_order=False):
        precondition.AssertType(subject_prefix, Text)
        precondition.AssertIterableType(attributes, Text)

        subject_prefix = utils.SmartStr(rdfvalue.RDFURN(subject_prefix))
        if subject_prefix[-1] != "/":
            subject_prefix += "/"
        if after_urn:
            after_urn = utils.SmartUnicode(after_urn)
        subjects = []
        for s in self.subjects:
            if s.startswith(subject_prefix) and (after_urn is None
                                                 or s > after_urn):
                subjects.append(s)
        subjects.sort()

        return_count = 0
        for s in subjects:
            if max_records and return_count >= max_records:
                break
            r = self.subjects[s]
            results = {}
            for attribute in attributes:
                attribute_list = r.get(attribute)
                if attribute_list:
                    encoded_value, timestamp = attribute_list[-1]
                    value = self._value_converter.Decode(
                        attribute, encoded_value)
                    results[attribute] = (timestamp, value)
            if results:
                return_count += 1
                yield (s, results)
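The scan above is essentially a sorted prefix query with an optional resume point. The selection step in miniature:

subjects = {u"aff4:/a/1": 1, u"aff4:/a/2": 2, u"aff4:/b/1": 3}
prefix, after_urn = u"aff4:/a/", u"aff4:/a/1"
hits = sorted(s for s in subjects if s.startswith(prefix) and s > after_urn)
print(hits)  # ['aff4:/a/2']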
Example No. 16
    def DeleteAttributes(self,
                         subject,
                         attributes,
                         start=None,
                         end=None,
                         sync=None):
        _ = sync  # Unimplemented.
        if isinstance(attributes, basestring):
            raise ValueError(
                "String passed to DeleteAttributes (non string iterable expected)."
            )

        subject = utils.SmartUnicode(subject)
        try:
            record = self.subjects[subject]
            keys_to_delete = []
            for name, values in iteritems(record):
                if name not in attributes:
                    continue

                start = start or 0
                if end is None:
                    end = (2**63) - 1  # sys.maxsize
                new_values = []
                for value, timestamp in values:
                    if not start <= timestamp <= end:
                        new_values.append((value, int(timestamp)))

                if new_values:
                    record[name] = new_values
                else:
                    keys_to_delete.append(name)

            for key in keys_to_delete:
                record.pop(key)
        except KeyError:
            pass
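The loop keeps only the values whose timestamps fall outside the [start, end] window, and drops an attribute entirely once no values survive. The filtering step on its own:

values = [("a", 10), ("b", 20), ("c", 30)]
start, end = 15, 25
print([(v, ts) for v, ts in values if not start <= ts <= end])
# [('a', 10), ('c', 30)]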
Example No. 17
    def ProcessRepliesWithOutputPlugins(self, replies):
        """Processes replies with output plugins."""
        for output_plugin_state in self.context.output_plugins_states:
            plugin_descriptor = output_plugin_state.plugin_descriptor
            output_plugin_cls = plugin_descriptor.GetPluginClass()
            output_plugin = output_plugin_cls(
                source_urn=self.flow_obj.urn,
                args=plugin_descriptor.plugin_args,
                token=self.token)

            # Extend our lease if needed.
            self.flow_obj.HeartBeat()
            try:
                output_plugin.ProcessResponses(
                    output_plugin_state.plugin_state, replies)
                output_plugin.Flush(output_plugin_state.plugin_state)
                output_plugin.UpdateState(output_plugin_state.plugin_state)

                log_item = output_plugin_lib.OutputPluginBatchProcessingStatus(
                    plugin_descriptor=plugin_descriptor,
                    status="SUCCESS",
                    batch_size=len(replies))
                output_plugin_state.Log(log_item)

                self.Log("Plugin %s successfully processed %d flow replies.",
                         plugin_descriptor, len(replies))
            except Exception as e:  # pylint: disable=broad-except
                error = output_plugin_lib.OutputPluginBatchProcessingStatus(
                    plugin_descriptor=plugin_descriptor,
                    status="ERROR",
                    summary=utils.SmartUnicode(e),
                    batch_size=len(replies))
                output_plugin_state.Error(error)

                self.Log("Plugin %s failed to process %d replies due to: %s",
                         plugin_descriptor, len(replies), e)
Example No. 18
  def ParseFile(
      self,
      knowledge_base: rdf_client.KnowledgeBase,
      pathspec: rdf_paths.PathSpec,
      filedesc: IO[bytes],
  ) -> Iterator[rdf_client.User]:
    del knowledge_base  # Unused.
    del pathspec  # Unused.

    users = {}
    wtmp = filedesc.read()
    while wtmp:
      try:
        record = UtmpStruct(wtmp)
      except utils.ParsingError:
        break

      wtmp = wtmp[record.size:]
      # Users only appear for USER_PROCESS events, others are system.
      if record.ut_type != 7:
        continue

      # Lose the null termination
      record.user = record.user.split(b"\x00", 1)[0]

      # Store the latest login time.
      # TODO(user): remove the 0 here once RDFDatetime can support times
      # pre-epoch properly.
      try:
        users[record.user] = max(users[record.user], record.sec, 0)
      except KeyError:
        users[record.user] = record.sec

    for user, last_login in users.items():
      yield rdf_client.User(
          username=utils.SmartUnicode(user), last_logon=last_login * 1000000)
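ut_type 7 is USER_PROCESS in utmp.h, i.e. a normal user login; the other record types describe system events. The NUL-stripping step above in isolation:

raw = b"alice\x00\x00\x00"  # fixed-width utmp field, NUL-padded
print(raw.split(b"\x00", 1)[0].decode("utf-8"))  # alice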
Example No. 19
    def MultiSet(self,
                 subject,
                 values,
                 timestamp=None,
                 replace=True,
                 sync=True,
                 to_delete=None):
        subject = utils.SmartUnicode(subject)
        if to_delete:
            self.DeleteAttributes(subject, to_delete, sync=sync)

        for k, seq in iteritems(values):
            for v in seq:
                if isinstance(v, (list, tuple)):
                    v, element_timestamp = v
                else:
                    element_timestamp = timestamp

                self.Set(subject,
                         k,
                         v,
                         timestamp=element_timestamp,
                         replace=replace,
                         sync=sync)
Example No. 20
  def __unicode__(self):
    return utils.SmartUnicode(u"aff4:%s" % self._string_urn)
Example No. 21
        def Decorated(this, token, *args, **kwargs):
            try:
                result = func(this, token, *args, **kwargs)
                if (self.access_type == "data_store_access" and token
                        and token.username in aff4_users.GRRUser.SYSTEM_USERS):
                    # Logging internal system database access is noisy and useless.
                    return result
                if logging.getLogger().isEnabledFor(logging.DEBUG):
                    logging.debug(
                        u"%s GRANTED by %s to %s%s (%s, %s) with reason: %s",
                        utils.SmartUnicode(self.access_type),
                        compatibility.GetName(this.__class__),
                        utils.SmartUnicode(token and token.username),
                        utils.SmartUnicode(
                            token and token.supervisor and " (supervisor)"
                            or ""), utils.SmartUnicode(args),
                        utils.SmartUnicode(kwargs),
                        utils.SmartUnicode(token and token.reason))

                return result
            except access_control.UnauthorizedAccess:
                if logging.getLogger().isEnabledFor(logging.DEBUG):
                    logging.debug(
                        u"%s REJECTED by %s to %s%s (%s, %s) with reason: %s",
                        utils.SmartUnicode(self.access_type),
                        compatibility.GetName(this.__class__),
                        utils.SmartUnicode(token and token.username),
                        utils.SmartUnicode(
                            token and token.supervisor and " (supervisor)"
                            or ""), utils.SmartUnicode(args),
                        utils.SmartUnicode(kwargs),
                        utils.SmartUnicode(token and token.reason))

                raise
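A stripped-down sketch of this audit-logging decorator pattern, with generic names and a local exception class standing in for GRR's access_control.UnauthorizedAccess:

import functools
import logging

class UnauthorizedAccess(Exception):
  pass

def log_access(access_type):
  """Logs whether the wrapped access check granted or rejected a call."""
  def wrapper(func):
    @functools.wraps(func)
    def decorated(*args, **kwargs):
      try:
        result = func(*args, **kwargs)
        logging.debug("%s GRANTED (%s, %s)", access_type, args, kwargs)
        return result
      except UnauthorizedAccess:
        logging.debug("%s REJECTED (%s, %s)", access_type, args, kwargs)
        raise
    return decorated
  return wrapper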
Example No. 22
    def Generate(self, items, token=None):
        """Generates archive from a given collection.

    Iterates the collection and generates an archive by yielding contents
    of every referenced AFF4Stream.

    Args:
      items: Iterable with items that point to aff4 paths.
      token: User's ACLToken.

    Yields:
      Binary chunks comprising the generated archive.
    """
        clients = set()
        for fd_urn_batch in collection.Batch(self._ItemsToUrns(items),
                                             self.BATCH_SIZE):

            fds_to_write = {}
            for fd in aff4.FACTORY.MultiOpen(fd_urn_batch, token=token):
                self.total_files += 1

                # Derive a ClientPath from AFF4 URN to make new and old
                # archive_generator predicate input consistent.
                # TODO(user): This code is clearly hacky and intended to be removed.
                urn_components = fd.urn.Split()
                if urn_components[1:3] != ["fs", "os"]:
                    raise AssertionError(
                        "URN components are expected to start with "
                        "client, 'fs', 'os'. Got %r" % (urn_components, ))

                client_path = db.ClientPath.OS(client_id=urn_components[0],
                                               components=urn_components[3:])

                if not self.predicate(client_path):
                    self.ignored_files.append(utils.SmartUnicode(fd.urn))
                    continue

                # Any file-like object with data in AFF4 should inherit AFF4Stream.
                if isinstance(fd, aff4.AFF4Stream):
                    urn_components = fd.urn.Split()
                    clients.add(rdf_client.ClientURN(urn_components[0]))

                    content_path = os.path.join(self.prefix, *urn_components)
                    self.archived_files += 1

                    # Make sure size of the original file is passed. It's required
                    # when output_writer is StreamingTarWriter.
                    st = os.stat_result(
                        (0o644, 0, 0, 0, 0, 0, fd.size, 0, 0, 0))
                    fds_to_write[fd] = (content_path, st)

            if fds_to_write:
                prev_fd = None
                for fd, chunk, exception in aff4.AFF4Stream.MultiStream(
                        fds_to_write):
                    if exception:
                        logging.exception(exception)

                        self.archived_files -= 1
                        self.failed_files.append(utils.SmartUnicode(fd.urn))
                        continue

                    if prev_fd != fd:
                        if prev_fd:
                            yield self.archive_generator.WriteFileFooter()
                        prev_fd = fd

                        content_path, st = fds_to_write[fd]
                        yield self.archive_generator.WriteFileHeader(
                            content_path, st=st)

                    yield self.archive_generator.WriteFileChunk(chunk)

                if self.archive_generator.is_file_write_in_progress:
                    yield self.archive_generator.WriteFileFooter()

        if clients:
            for client_urn_batch in collection.Batch(clients, self.BATCH_SIZE):
                for fd in aff4.FACTORY.MultiOpen(
                        client_urn_batch,
                        aff4_type=aff4_grr.VFSGRRClient,
                        token=token):
                    for chunk in self._GenerateClientInfo(fd):
                        yield chunk

        for chunk in self._GenerateDescription():
            yield chunk

        yield self.archive_generator.Close()
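collection.Batch is used here to group an iterable into fixed-size chunks so that each batch can be opened with a single MultiOpen call. A plausible sketch of such a helper (an assumption about its behavior, not GRR's implementation):

def batch(iterable, size):
  """Yields lists of up to size items from iterable."""
  chunk = []
  for item in iterable:
    chunk.append(item)
    if len(chunk) == size:
      yield chunk
      chunk = []
  if chunk:
    yield chunk

print(list(batch(range(5), 2)))  # [[0, 1], [2, 3], [4]]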
Example No. 23
    def ProcessEvents(self, msgs=None, publisher_username=None):
        """Processes this event."""
        nanny_msg = ""

        for crash_details in msgs:
            client_urn = crash_details.client_id
            client_id = client_urn.Basename()

            # The session id of the flow that crashed.
            session_id = crash_details.session_id

            # Log.
            logging.info("Client crash reported, client %s.", client_urn)

            # Export.
            GRR_CLIENT_CRASHES.Increment()

            # Write crash data.
            client = data_store.REL_DB.ReadClientSnapshot(client_id)
            if client:
                crash_details.client_info = client.startup_info.client_info
                hostname = client.knowledge_base.fqdn
            else:
                hostname = ""

            crash_details.crash_type = "Client Crash"

            if nanny_msg:
                termination_msg = "Client crashed, " + nanny_msg
            else:
                termination_msg = "Client crashed."

            # Terminate the flow.
            flow_id = session_id.Basename()
            flow_base.TerminateFlow(
                client_id,
                flow_id,
                reason=termination_msg,
                flow_state=rdf_flow_objects.Flow.FlowState.CRASHED)

            WriteAllCrashDetails(client_id,
                                 crash_details,
                                 flow_session_id=session_id)

            # Also send email.
            email_address = config.CONFIG["Monitoring.alert_email"]
            if not email_address:
                return

            if crash_details.nanny_status:
                nanny_msg = "Nanny status: %s" % crash_details.nanny_status

            body = self.__class__.mail_template.render(
                client_id=client_id,
                admin_ui=config.CONFIG["AdminUI.url"],
                hostname=utils.SmartUnicode(hostname),
                url="/clients/%s" % client_id,
                nanny_msg=utils.SmartUnicode(nanny_msg),
                signature=config.CONFIG["Email.signature"])

            try:
                email_alerts.EMAIL_ALERTER.SendEmail(
                    email_address,
                    "GRR server",
                    "Client %s reported a crash." % client_id,
                    body,
                    is_html=True)
            except email_alerts.EmailNotSentError as e:
                # We have already written the crash details to the DB, so failing
                # to send an email isn't super-critical.
                logging.warning(e)
Example No. 24
    def FromLegacyResponses(cls, request=None, responses=None):
        """Creates a Responses object from old style flow request and responses."""
        res = cls()
        res.request = request
        if request:
            res.request_data = rdf_protodict.Dict(request.data)
        dropped_responses = []
        # The iterator that was returned as part of these responses. This should
        # be passed back to actions that expect an iterator.
        res.iterator = None

        if not responses:
            return res

        # This may not be needed if we can assume that responses are
        # returned in lexical order from the data_store.
        responses.sort(key=operator.attrgetter("response_id"))

        if request.HasField("request"):
            client_action_name = request.request.name
            action_registry = server_stubs.ClientActionStub.classes
            if client_action_name not in action_registry:
                raise RuntimeError("Got unknown client action: %s." %
                                   client_action_name)
            expected_response_classes = action_registry[
                client_action_name].out_rdfvalues

        old_response_id = None

        # Filter the responses by authorized states
        for msg in responses:
            # Check if the message is authenticated correctly.
            if msg.auth_state != msg.AuthorizationState.AUTHENTICATED:
                logging.warning(
                    "%s: Messages must be authenticated (Auth state %s)",
                    msg.session_id, msg.auth_state)
                dropped_responses.append(msg)
                # Skip this message - it is invalid
                continue

            # Handle retransmissions
            if msg.response_id == old_response_id:
                continue

            old_response_id = msg.response_id

            # Check for iterators
            if msg.type == msg.Type.ITERATOR:
                if res.iterator:
                    raise ValueError(
                        "Received multiple iterator messages at once.")
                res.iterator = rdf_client_action.Iterator(msg.payload)
                continue

            # Look for a status message
            if msg.type == msg.Type.STATUS:
                # Our status is set to the first status message that we see in
                # the responses. We ignore all other messages after that.
                res.status = rdf_flows.GrrStatus(msg.payload)

                # Check this to see if the call succeeded
                res.success = res.status.status == res.status.ReturnedStatus.OK

                # Ignore all other messages
                break

            if msg.type == msg.Type.MESSAGE:
                if request.HasField("request"):
                    # Let's do some verification for requests that came from clients.
                    if not expected_response_classes:
                        raise RuntimeError(
                            "Client action %s does not specify out_rdfvalue." %
                            client_action_name)
                    else:
                        args_rdf_name = msg.args_rdf_name
                        if not args_rdf_name:
                            raise RuntimeError(
                                "Deprecated message format received: "
                                "args_rdf_name is None.")
                        elif args_rdf_name not in [
                                x.__name__ for x in expected_response_classes
                        ]:
                            raise RuntimeError(
                                "Response type was %s but expected %s for %s."
                                % (args_rdf_name, expected_response_classes,
                                   client_action_name))
            # Use this message
            res.responses.append(msg.payload)

        if res.status is None:
            # This is a special case of de-synchronized messages.
            if dropped_responses:
                logging.error(
                    "De-synchronized messages detected:\n %s", "\n".join(
                        [utils.SmartUnicode(x) for x in dropped_responses]))

            res.LogFlowState(responses)

            raise ValueError("No valid Status message.")

        return res
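Sorting by response_id and skipping repeats is how retransmitted messages get dropped above. The deduplication step on its own, with a stand-in message class:

import operator

class Msg(object):
  def __init__(self, response_id):
    self.response_id = response_id

msgs = sorted([Msg(2), Msg(1), Msg(2)],
              key=operator.attrgetter("response_id"))
seen, unique = None, []
for m in msgs:
  if m.response_id == seen:
    continue
  seen = m.response_id
  unique.append(m)
print([m.response_id for m in unique])  # [1, 2]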
Example No. 25
    def __init__(self,
                 base_fd,
                 handlers,
                 pathspec=None,
                 progress_callback=None):
        super(File, self).__init__(base_fd,
                                   handlers=handlers,
                                   pathspec=pathspec,
                                   progress_callback=progress_callback)
        if base_fd is None:
            self.pathspec.Append(pathspec)

        # We can stack on another directory, which means we concatenate their
        # directory with ours.
        elif base_fd.IsDirectory():
            self.pathspec.last.path = utils.JoinPath(self.pathspec.last.path,
                                                     pathspec.path)

        else:
            raise IOError(
                "File handler can not be stacked on another handler.")

        self.path = self.pathspec.last.path

        # We can optionally apply a global offset to the file.
        if self.pathspec[0].HasField("offset"):
            self.file_offset = self.pathspec[0].offset

        self.pathspec.last.path_options = rdf_paths.PathSpec.Options.CASE_LITERAL

        self.FileHacks()
        self.filename = client_utils.CanonicalPathToLocalPath(self.path)

        error = None
        # Pythonic way - duck typing. Is the handle a directory?
        try:
            if not self.files:
                # Note that the encoding of local path is system specific
                local_path = client_utils.CanonicalPathToLocalPath(self.path +
                                                                   "/")
                self.files = [
                    utils.SmartUnicode(entry)
                    for entry in os.listdir(local_path)
                ]
        # Some filesystems do not support unicode properly
        except UnicodeEncodeError as e:
            raise IOError(str(e))
        except (IOError, OSError) as e:
            self.files = []
            error = e

        # Ok, it's not. Is it a file then?
        try:
            with FileHandleManager(self.filename) as fd:

                if pathspec.last.HasField("file_size_override"):
                    self.size = pathspec.last.file_size_override - self.file_offset
                else:
                    # Work out how large the file is.
                    if self.size is None:
                        fd.Seek(0, 2)
                        end = fd.Tell()
                        if end == 0:
                            # This file is not seekable, we just use the default.
                            end = pathspec.last.file_size_override

                        self.size = end - self.file_offset

            error = None
        # Some filesystems do not support unicode properly
        except UnicodeEncodeError as e:
            raise IOError(str(e))

        except IOError as e:
            if error:
                error = e

        if error is not None:
            raise error  # pylint: disable=raising-bad-type
Example No. 26
  def CallFlow(self,
               flow_name=None,
               next_state=None,
               request_data=None,
               client_id=None,
               base_session_id=None,
               **kwargs):
    """Creates a new flow and send its responses to a state.

    This creates a new flow. The flow may send back many responses which will be
    queued by the framework until the flow terminates. The final status message
    will cause the entire transaction to be committed to the specified state.

    Args:
       flow_name: The name of the flow to invoke.
       next_state: The state in this flow, that responses to this message should
         go to.
       request_data: Any dict provided here will be available in the
         RequestState protobuf. The Responses object maintains a reference to
         this protobuf for use in the execution of the state method. (so you can
         access this data by responses.request). There is no format mandated on
         this data but it may be a serialized protobuf.
       client_id: If given, the flow is started for this client.
       base_session_id: A URN which will be used to build a URN.
       **kwargs: Arguments for the child flow.

    Raises:
       FlowRunnerError: If next_state is not one of the allowed next states.

    Returns:
       The URN of the child flow which was created.
    """
    client_id = client_id or self.runner_args.client_id

    # This looks very much like CallClient() above - we prepare a request state,
    # and add it to our queue - any responses from the child flow will return to
    # the request state and the stated next_state. Note however, that there is
    # no client_id or actual request message here because we directly invoke the
    # child flow rather than queue anything for it.
    state = rdf_flow_runner.RequestState(
        id=self.GetNextOutboundId(),
        session_id=utils.SmartUnicode(self.session_id),
        client_id=client_id,
        next_state=next_state,
        response_count=0)

    if request_data:
      state.data = rdf_protodict.Dict().FromDict(request_data)

    # If the urn is passed explicitly (e.g. from the hunt runner) use that,
    # otherwise use the urn from the flow_runner args. If both are None, create
    # a new collection and give the urn to the flow object.
    logs_urn = self._GetLogCollectionURN(
        kwargs.pop("logs_collection_urn", None) or
        self.runner_args.logs_collection_urn)

    # If we were called with write_intermediate_results, propagate down to
    # child flows.  This allows write_intermediate_results to be set to True
    # either at the top level parent, or somewhere in the middle of
    # the call chain.
    write_intermediate = (
        kwargs.pop("write_intermediate_results", False) or
        self.runner_args.write_intermediate_results)

    # Create the new child flow but do not notify the user about it.
    child_urn = self.flow_obj.StartAFF4Flow(
        client_id=client_id,
        flow_name=flow_name,
        base_session_id=base_session_id or self.session_id,
        request_state=state,
        token=self.token,
        notify_to_user=False,
        parent_flow=self.flow_obj,
        queue=self.runner_args.queue,
        write_intermediate_results=write_intermediate,
        logs_collection_urn=logs_urn,
        sync=True,
        **kwargs)

    self.QueueRequest(state)

    return child_urn
Example No. 27
  def CallClient(self,
                 action_cls,
                 request=None,
                 next_state=None,
                 request_data=None,
                 **kwargs):
    """Calls the client asynchronously.

    This sends a message to the client to invoke an Action. The run
    action may send back many responses. These will be queued by the
    framework until a status message is sent by the client. The status
    message will cause the entire transaction to be committed to the
    specified state.

    Args:
       action_cls: The function to call on the client.
       request: The request to send to the client. If not specified (Or None) we
         create a new RDFValue using the kwargs.
       next_state: The state in this flow, that responses to this message should
         go to.
       request_data: A dict which will be available in the RequestState
         protobuf. The Responses object maintains a reference to this protobuf
         for use in the execution of the state method. (so you can access this
         data by responses.request). Valid values are strings, unicode and
         protobufs.
       **kwargs: These args will be used to construct the client action semantic
         protobuf.

    Raises:
       FlowRunnerError: If called on a flow that doesn't run on a single client.
       ValueError: The request passed to the client does not have the correct
                     type.
    """
    client_id = self.runner_args.client_id
    if client_id is None:
      raise FlowRunnerError("CallClient() is used on a flow which was not "
                            "started with a client.")

    if not isinstance(client_id, rdf_client.ClientURN):
      # Try turning it into a ClientURN
      client_id = rdf_client.ClientURN(client_id)

    if action_cls.in_rdfvalue is None:
      if request:
        raise ValueError(
            "Client action %s does not expect args." % action_cls.__name__)
    else:
      if request is None:
        # Create a new rdf request.
        request = action_cls.in_rdfvalue(**kwargs)
      else:
        # Verify that the request type matches the client action requirements.
        if not isinstance(request, action_cls.in_rdfvalue):
          raise ValueError("Client action expected %s but got %s" %
                           (action_cls.in_rdfvalue, type(request)))

    outbound_id = self.GetNextOutboundId()

    # Create a new request state
    state = rdf_flow_runner.RequestState(
        id=outbound_id,
        session_id=self.session_id,
        next_state=next_state,
        client_id=client_id)

    if request_data is not None:
      state.data = rdf_protodict.Dict(request_data)

    # Send the message with the request state
    msg = rdf_flows.GrrMessage(
        session_id=utils.SmartUnicode(self.session_id),
        name=action_cls.__name__,
        request_id=outbound_id,
        require_fastpoll=self.runner_args.require_fastpoll,
        queue=client_id.Queue(),
        payload=request,
        generate_task_id=True)

    cpu_usage = self.context.client_resources.cpu_usage
    if self.runner_args.cpu_limit:
      msg.cpu_limit = max(
          self.runner_args.cpu_limit - cpu_usage.user_cpu_time -
          cpu_usage.system_cpu_time, 0)

      if msg.cpu_limit == 0:
        raise FlowRunnerError("CPU limit exceeded.")

    if self.runner_args.network_bytes_limit:
      msg.network_bytes_limit = max(
          self.runner_args.network_bytes_limit -
          self.context.network_bytes_sent, 0)
      if msg.network_bytes_limit == 0:
        raise FlowRunnerError("Network limit exceeded.")

    state.request = msg
    self.QueueRequest(state)
Example No. 28
  def __unicode__(self):
    return utils.SmartUnicode(str(self))
Example No. 29
  def AllTextsPresent(self, texts):
    body = self.driver.find_element_by_tag_name("body").text
    for text in texts:
      if utils.SmartUnicode(text) not in body:
        return False
    return True
Example No. 30
  def RemoveClientLabels(self, client_id, owner, labels):
    """Removes a list of user labels from a given client."""
    labelset = self.labels.setdefault(client_id, {}).setdefault(owner, set())
    for l in labels:
      labelset.discard(utils.SmartUnicode(l))
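The chained setdefault calls build the nested client -> owner -> labels structure lazily, so discarding a label that was never added leaves an empty set behind instead of raising KeyError (illustrative ids):

labels = {}
labelset = labels.setdefault("C.1000000000000000", {}).setdefault("alice", set())
labelset.discard(u"testlabel")  # no-op; the label was never added
print(labels)  # {'C.1000000000000000': {'alice': set()}}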