Example #1
  def ProcessMessage(self, message=None, event=None):
    """Processes this event."""
    _ = event
    nanny_msg = ""

    crash_details = message.payload
    client_id = crash_details.client_id

    # The session id of the flow that crashed.
    session_id = crash_details.session_id

    flow_obj = aff4.FACTORY.Open(session_id, token=self.token)

    # Log.
    logging.info("Client crash reported, client %s.", client_id)

    # Only kill the flow if it does not handle its own crashes. Some flows
    # restart the client and therefore expect to get a crash notification.
    if flow_obj.handles_crashes:
      return

    # Export.
    stats.STATS.IncrementCounter("grr_client_crashes")

    # Write crash data to AFF4.
    client = aff4.FACTORY.Open(client_id, token=self.token)
    client_info = client.Get(client.Schema.CLIENT_INFO)

    crash_details.client_info = client_info
    crash_details.crash_type = self.well_known_session_id

    self.WriteAllCrashDetails(
        client_id, crash_details, flow_session_id=session_id)

    # Also send email.
    to_send = []

    try:
      hunt_session_id = self._ExtractHuntId(session_id)
      if hunt_session_id and hunt_session_id != session_id:
        hunt_obj = aff4.FACTORY.Open(
            hunt_session_id, aff4_type=implementation.GRRHunt, token=self.token)
        email = hunt_obj.runner_args.crash_alert_email
        if email:
          to_send.append(email)
    except aff4.InstantiationError:
      logging.error("Failed to open hunt %s.", hunt_session_id)

    email = config.CONFIG["Monitoring.alert_email"]
    if email:
      to_send.append(email)

    for email_address in to_send:
      if crash_details.nanny_status:
        nanny_msg = "Nanny status: %s" % crash_details.nanny_status

      client = aff4.FACTORY.Open(client_id, token=self.token)
      hostname = client.Get(client.Schema.HOSTNAME)
      url = "/clients/%s" % client_id.Basename()

      body = self.__class__.mail_template.render(
          client_id=client_id,
          admin_ui=config.CONFIG["AdminUI.url"],
          hostname=utils.SmartUnicode(hostname),
          context=utils.SmartUnicode(flow_obj.context),
          state=utils.SmartUnicode(flow_obj.state),
          args=utils.SmartUnicode(flow_obj.args),
          runner_args=utils.SmartUnicode(flow_obj.runner_args),
          urn=url,
          nanny_msg=utils.SmartUnicode(nanny_msg),
          signature=config.CONFIG["Email.signature"])
      email_alerts.EMAIL_ALERTER.SendEmail(
          email_address,
          "GRR server",
          "Client %s reported a crash." % client_id,
          utils.SmartStr(body),
          is_html=True)

    if nanny_msg:
      msg = "Client crashed, " + nanny_msg
    else:
      msg = "Client crashed."

    # Now terminate the flow.
    flow.GRRFlow.TerminateFlow(
        session_id, reason=msg, token=self.token, force=True)
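Every example on this page leans on GRR's utils.SmartStr and utils.SmartUnicode coercion helpers. As a rough Python 2 sketch of the semantics these examples assume (the real implementations live in GRR's utils module and may differ in detail):

def SmartStr(string):
  """Forces any object into a UTF-8 encoded byte string (approximation)."""
  if type(string) == unicode:
    return string.encode("utf8", "ignore")
  return str(string)


def SmartUnicode(string):
  """Forces any object into a unicode string (approximation)."""
  if type(string) != unicode:
    try:
      return string.__unicode__()
    except (AttributeError, UnicodeError):
      return str(string).decode("utf8", "ignore")
  return string


assert SmartStr(u"caf\xe9") == "caf\xc3\xa9"
assert SmartUnicode("caf\xc3\xa9") == u"caf\xe9"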
Example #2
 def __eq__(self, other):
     return (self._value == utils.SmartStr(other)
             or self._value.encode("hex") == other)
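The comparison accepts either the raw byte value or its hex encoding. A self-contained Python 2 illustration (FakeHashValue is a hypothetical stand-in that drops the SmartStr coercion):

class FakeHashValue(object):
  def __init__(self, value):
    self._value = value  # raw digest bytes

  def __eq__(self, other):
    # Equal to the raw bytes or to their hex-encoded form.
    return (self._value == other or
            self._value.encode("hex") == other)  # Python 2 hex codec


digest = FakeHashValue("\xde\xad\xbe\xef")
assert digest == "\xde\xad\xbe\xef"
assert digest == "deadbeef"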
Example #3
    def HandleRequest(self, request):
        """Handles given HTTP request."""
        impersonated_username = config.CONFIG["AdminUI.debug_impersonate_user"]
        if impersonated_username:
            logging.info("Overriding user as %s", impersonated_username)
            request.user = impersonated_username

        if not aff4_users.GRRUser.IsValidUsername(request.user):
            return self._BuildResponse(
                403, dict(message="Invalid username: %s" % request.user))

        try:
            router, method_metadata, args = self._router_matcher.MatchRouter(
                request)
        except access_control.UnauthorizedAccess as e:
            logging.exception("Access denied to %s (%s): %s", request.path,
                              request.method, e)

            additional_headers = {
                "X-GRR-Unauthorized-Access-Reason":
                utils.SmartStr(e.message).replace("\n", ""),
                "X-GRR-Unauthorized-Access-Subject":
                utils.SmartStr(e.subject)
            }
            return self._BuildResponse(
                403,
                dict(message="Access denied by ACL: %s" %
                     utils.SmartStr(e.message),
                     subject=utils.SmartStr(e.subject)),
                headers=additional_headers)

        except ApiCallRouterNotFoundError as e:
            return self._BuildResponse(404, dict(message=e.message))
        except werkzeug_exceptions.MethodNotAllowed as e:
            return self._BuildResponse(405, dict(message=e.message))
        except Error as e:
            logging.exception("Can't match URL to router/method: %s", e)

            return self._BuildResponse(
                500, dict(message=str(e), traceBack=traceback.format_exc()))

        request.method_metadata = method_metadata
        request.parsed_args = args

        # SetUID() is called here so that ACL checks done by the router do not
        # clash with datastore ACL checks.
        # TODO(user): increase token expiry time.
        token = self.BuildToken(request, 60).SetUID()

        # We send a blind-write request to ensure that the user object is created
        # for a user specified by the username.
        user_urn = rdfvalue.RDFURN("aff4:/users/").Add(request.user)
        # We can't use conventional AFF4 interface, since aff4.FACTORY.Create will
        # create a new version of the object for every call.
        with data_store.DB.GetMutationPool() as pool:
            pool.MultiSet(user_urn, {
                aff4_users.GRRUser.SchemaCls.TYPE:
                [aff4_users.GRRUser.__name__],
                aff4_users.GRRUser.SchemaCls.LAST:
                [rdfvalue.RDFDatetime.Now().SerializeToDataStore()]
            },
                          replace=True)

        if data_store.RelationalDBWriteEnabled():
            data_store.REL_DB.WriteGRRUser(request.user)

        handler = None
        try:
            # ACL checks are done here by the router. If this method succeeds (i.e.
            # does not raise), then handlers run without further ACL checks (they're
            # free to do some in their own implementations, though).
            handler = getattr(router, method_metadata.name)(args, token=token)

            if handler.args_type != method_metadata.args_type:
                raise RuntimeError(
                    "Handler args type doesn't match "
                    "method args type: %s vs %s" %
                    (handler.args_type, method_metadata.args_type))

            binary_result_type = (
                api_call_router.RouterMethodMetadata.BINARY_STREAM_RESULT_TYPE)

            if (handler.result_type != method_metadata.result_type and
                    not (handler.result_type is None and
                         method_metadata.result_type == binary_result_type)):
                raise RuntimeError(
                    "Handler result type doesn't match "
                    "method result type: %s vs %s" %
                    (handler.result_type, method_metadata.result_type))

            # HEAD method is only used for checking the ACLs for particular API
            # methods.
            if request.method == "HEAD":
                # If the request would return a stream, we add the Content-Length
                # header to the response.
                if (method_metadata.result_type ==
                        method_metadata.BINARY_STREAM_RESULT_TYPE):
                    binary_stream = handler.Handle(args, token=token)
                    return self._BuildResponse(
                        200, {"status": "OK"},
                        method_name=method_metadata.name,
                        no_audit_log=method_metadata.no_audit_log_required,
                        content_length=binary_stream.content_length,
                        token=token)
                else:
                    return self._BuildResponse(
                        200, {"status": "OK"},
                        method_name=method_metadata.name,
                        no_audit_log=method_metadata.no_audit_log_required,
                        token=token)

            if (method_metadata.result_type ==
                    method_metadata.BINARY_STREAM_RESULT_TYPE):
                binary_stream = handler.Handle(args, token=token)
                return self._BuildStreamingResponse(
                    binary_stream, method_name=method_metadata.name)
            else:
                format_mode = GetRequestFormatMode(request, method_metadata)
                result = self.CallApiHandler(handler, args, token=token)
                rendered_data = self._FormatResultAsJson(
                    result, format_mode=format_mode)

                return self._BuildResponse(
                    200,
                    rendered_data,
                    method_name=method_metadata.name,
                    no_audit_log=method_metadata.no_audit_log_required,
                    token=token)
        except access_control.UnauthorizedAccess as e:
            logging.exception("Access denied to %s (%s) with %s: %s",
                              request.path, request.method,
                              method_metadata.name, e)

            additional_headers = {
                "X-GRR-Unauthorized-Access-Reason":
                utils.SmartStr(e.message).replace("\n", ""),
                "X-GRR-Unauthorized-Access-Subject":
                utils.SmartStr(e.subject)
            }
            return self._BuildResponse(
                403,
                dict(message="Access denied by ACL: %s" % e.message,
                     subject=utils.SmartStr(e.subject)),
                headers=additional_headers,
                method_name=method_metadata.name,
                no_audit_log=method_metadata.no_audit_log_required,
                token=token)
        except api_call_handler_base.ResourceNotFoundError as e:
            return self._BuildResponse(
                404,
                dict(message=e.message),
                method_name=method_metadata.name,
                no_audit_log=method_metadata.no_audit_log_required,
                token=token)
        except NotImplementedError as e:
            return self._BuildResponse(
                501,
                dict(message=e.message),
                method_name=method_metadata.name,
                no_audit_log=method_metadata.no_audit_log_required,
                token=token)
        except Exception as e:  # pylint: disable=broad-except
            logging.exception("Error while processing %s (%s) with %s: %s",
                              request.path, request.method,
                              handler.__class__.__name__, e)
            return self._BuildResponse(
                500,
                dict(message=str(e), traceBack=traceback.format_exc()),
                method_name=method_metadata.name,
                no_audit_log=method_metadata.no_audit_log_required,
                token=token)
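Stripped of routing and response building, HandleRequest is mostly a mapping from exception classes to HTTP status codes. A minimal sketch of the main cases, with stand-in exception types (the real ones come from access_control and api_call_handler_base):

class UnauthorizedAccess(Exception):
  pass  # stand-in for access_control.UnauthorizedAccess


class ResourceNotFoundError(Exception):
  pass  # stand-in for api_call_handler_base.ResourceNotFoundError


def StatusForException(e):
  if isinstance(e, UnauthorizedAccess):
    return 403  # denied by ACL
  if isinstance(e, ResourceNotFoundError):
    return 404
  if isinstance(e, NotImplementedError):
    return 501
  return 500  # anything unexpected is an internal error


assert StatusForException(UnauthorizedAccess()) == 403
assert StatusForException(ValueError()) == 500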
Example #4
 def ParseFromString(self, initializer=None):
     # Old clients sometimes send bare well known flow ids.
     if not utils.SmartStr(initializer).startswith("aff4"):
         initializer = "aff4:/flows/" + initializer
     super(FlowSessionID, self).ParseFromString(initializer)
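The normalization is easiest to see in isolation. A hypothetical standalone version (str stands in for utils.SmartStr, and "W:Stats" is just an illustrative well-known flow id):

def NormalizeFlowId(initializer):
  if not str(initializer).startswith("aff4"):
    initializer = "aff4:/flows/" + initializer
  return initializer


assert NormalizeFlowId("W:Stats") == "aff4:/flows/W:Stats"
assert NormalizeFlowId("aff4:/flows/W:Stats") == "aff4:/flows/W:Stats"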
Example #5
 def ParseFromString(self, string):
     # This handles the cases when we're initialized from Unicode strings.
     self._value = utils.SmartStr(string)
Example #6
 def ExportedClassNameForValue(self, value):
     return utils.SmartStr("AutoExported" + value.__class__.__name__)
Example #7
 def GetRegisteredArtifactNames(self):
     return [utils.SmartStr(x) for x in self._artifacts]
Example #8
 def _CleanSubjectPrefix(self, subject_prefix):
     subject_prefix = utils.SmartStr(rdfvalue.RDFURN(subject_prefix))
     if subject_prefix[-1] != "/":
         subject_prefix += "/"
     return subject_prefix
Example #9
    def ResolveRegex(self,
                     subject,
                     attribute_regex,
                     token=None,
                     timestamp=None,
                     limit=None):
        """Resolve all attributes for a subject matching a regex."""
        self.security_manager.CheckDataStoreAccess(
            token, [subject], self.GetRequiredResolveAccess(attribute_regex))

        # Does timestamp represent a range?
        if isinstance(timestamp, (list, tuple)):
            start, end = timestamp  # pylint: disable=unpacking-non-sequence
        else:
            start, end = 0, (2**63) - 1

        start = int(start)
        end = int(end)

        if isinstance(attribute_regex, str):
            attribute_regex = [attribute_regex]

        subject = utils.SmartUnicode(subject)
        try:
            record = self.subjects[subject]
        except KeyError:
            return []

        # Holds all the attributes which matched. Keys are attribute names, values
        # are lists of timestamped data.
        results = {}
        nr_results = 0
        for regex in attribute_regex:
            regex = re.compile(regex, re.DOTALL)

            for attribute, values in record.iteritems():
                if limit and nr_results >= limit:
                    break
                if regex.match(utils.SmartStr(attribute)):
                    for value, ts in values:
                        results_list = results.setdefault(attribute, [])
                        # If we are always after the latest ts we clear older ones.
                        if (results_list and timestamp == self.NEWEST_TIMESTAMP
                                and results_list[0][1] < ts):
                            results_list = []
                            results[attribute] = results_list

                        # Timestamp outside the range, drop it.
                        elif ts < start or ts > end:
                            continue

                        results_list.append((attribute, ts, value))
                        nr_results += 1
                        if limit and nr_results >= limit:
                            break

        result = []
        for k, values in sorted(results.items()):
            for v in sorted(values, key=lambda x: x[1], reverse=True):
                result.append((k, v[2], v[1]))
        return result
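Condensed to its core, the method filters attributes by regex and timestamps by range, newest first. A self-contained sketch under those assumptions (names are illustrative; the ACL check, limit handling, and NEWEST_TIMESTAMP special case are omitted):

import re


def ResolveRegexSketch(record, attribute_regex, start=0, end=(2**63) - 1):
  results = []
  pattern = re.compile(attribute_regex, re.DOTALL)
  for attribute, values in record.items():
    if not pattern.match(attribute):
      continue
    for value, ts in values:
      if start <= ts <= end:
        results.append((attribute, value, ts))
  # Newest first, mirroring the reverse timestamp sort above.
  return sorted(results, key=lambda x: x[2], reverse=True)


record = {"metadata:size": [(100, 1), (120, 5)], "fs:hash": [("abc", 3)]}
print(ResolveRegexSketch(record, "metadata:.*"))
# [('metadata:size', 120, 5), ('metadata:size', 100, 1)]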
Example #10
    def ReceiveFileHash(self, responses):
        """Add hash digest to tracker and check with filestore."""
        # Support old clients which may not have the new client action in place yet.
        # TODO(user): Deprecate once all clients have the HashFile action.

        if not responses.success and responses.request.request.name == "HashFile":
            logging.debug(
                "HashFile action not available, falling back to FingerprintFile."
            )
            self.CallClient("FingerprintFile",
                            responses.request.request.payload,
                            next_state="ReceiveFileHash",
                            request_data=responses.request_data)
            return

        index = responses.request_data["index"]
        if not responses.success:
            self.Log("Failed to hash file: %s", responses.status)
            self.state.pending_hashes.pop(index, None)
            self.FileFetchFailed(responses.request.request.payload.pathspec,
                                 responses.request.request.name,
                                 request_data=responses.request_data)
            return

        self.state.files_hashed += 1
        response = responses.First()
        if response.HasField("hash"):
            hash_obj = response.hash
        else:
            # Deprecate this method of returning hashes.
            hash_obj = rdf_crypto.Hash()

            if (len(response.results) < 1 or
                    response.results[0]["name"] != "generic"):
                self.Log("Failed to hash file: %s",
                         self.state.indexed_pathspecs[index])
                self.state.pending_hashes.pop(index, None)
                return

            result = response.results[0]

            try:
                for hash_type in ["md5", "sha1", "sha256"]:
                    value = result.GetItem(hash_type)
                    setattr(hash_obj, hash_type, value)
            except AttributeError:
                self.Log("Failed to hash file: %s",
                         self.state.indexed_pathspecs[index])
                self.state.pending_hashes.pop(index, None)
                return

        try:
            tracker = self.state.pending_hashes[index]
        except KeyError:
            # TODO(user): implement a test for this and handle the failure
            # gracefully: i.e. maybe we can continue with an empty StatEntry.
            self.Error(
                "Couldn't stat the file, but got the hash (%s): %s" %
                (utils.SmartStr(index), utils.SmartStr(response.pathspec)))
            return

        tracker.hash_obj = hash_obj
        tracker.bytes_read = response.bytes_read

        self.state.files_hashed_since_check += 1
        if self.state.files_hashed_since_check >= self.MIN_CALL_TO_FILE_STORE:
            self._CheckHashesWithFileStore()
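The legacy branch copies the generic fingerprint hashes onto an rdf_crypto.Hash object one attribute at a time. Roughly the same extraction with plain dicts standing in for the RDF types (a hypothetical sketch):

def ExtractGenericHashes(result):
  hash_obj = {}
  for hash_type in ["md5", "sha1", "sha256"]:
    if hash_type in result:
      hash_obj[hash_type] = result[hash_type]
  return hash_obj


legacy_result = {"name": "generic", "md5": "d41d8cd9", "sha1": "da39a3ee"}
print(ExtractGenericHashes(legacy_result))
# {'md5': 'd41d8cd9', 'sha1': 'da39a3ee'}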
Example #11
 def __init__(self, data=""):
     self.buffer = utils.SmartStr(data)
     self.state_stack = []
Example #12
 def ToString(self, value):
   return utils.SmartStr(value)
Example #13
 def Seek(self, offset, whence=0):
     if not self.fd:
         self.fd = StringIO.StringIO(utils.SmartStr(self.value))
     return self.fd.seek(offset, whence)
Example #14
 def Read(self, length):
     if not self.fd:
         self.fd = StringIO.StringIO(utils.SmartStr(self.value))
     return self.fd.read(length)
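Examples 13 and 14 are two halves of one lazy-initialization pattern: the StringIO backing the value is only created on first access. A self-contained Python 2 sketch (LazyValueFile is a hypothetical name):

import StringIO


class LazyValueFile(object):
  def __init__(self, value):
    self.value = value
    self.fd = None  # created on demand

  def _EnsureFd(self):
    if not self.fd:
      self.fd = StringIO.StringIO(str(self.value))
    return self.fd

  def Seek(self, offset, whence=0):
    return self._EnsureFd().seek(offset, whence)

  def Read(self, length):
    return self._EnsureFd().read(length)


fd = LazyValueFile("hello world")
fd.Seek(6)
assert fd.Read(5) == "world"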
Example #15
 def RenderValue(self, value):
   result = utils.SmartStr(value)
   return self._IncludeTypeInfo(result, value)
Example #16
File: user.py Project: tanner-g/grr
 def BuildApprovalObjUrn(self):
     return aff4.ROOT_URN.Add("ACL").Add(utils.SmartStr(
         self.client_id)).Add(self.username).Add(self.approval_id)
Example #17
class RekallResponseConverter(ExportConverter):
    """Export converter for RekallResponse objects."""

    input_rdf_type = "RekallResponse"

    OUTPUT_CLASSES = {}

    OBJECT_RENDERERS = {
        "_EPROCESS": RekallEProcessRenderer,
        "Address": lambda x: utils.FormatAsHexString(x["value"]),
        "AddressSpace": lambda x: x["name"],
        "BaseObject": lambda x: "@%s" % utils.FormatAsHexString(x["offset"]),
        "Enumeration": lambda x: "%s (%s)" % (x["enum"], x["value"]),
        "Instruction": lambda x: utils.SmartStr(x["value"]),
        "Literal": lambda x: utils.SmartStr(x["value"]),
        "NativeType": lambda x: utils.SmartStr(x["value"]),
        "NoneObject": lambda x: "-",
        "Pointer": lambda x: utils.FormatAsHexString(x["target"], 14),
        "PaddedAddress": lambda x: utils.FormatAsHexString(x["value"], 14),
        "str": RekallStringRenderer,
        "Struct": lambda x: utils.FormatAsHexString(x["offset"]),
        "UnixTimeStamp": lambda x: utils.FormatAsTimestamp(x["epoch"])
    }

    def _RenderObject(self, obj):
        """Renders a single object - i.e. a table cell."""

        if not hasattr(obj, "iteritems"):
            # We may have to deal with legacy strings, encoded as two-element
            # lists whose first element is "+" for base64 strings and "*" for
            # unicode strings - check for that.
            if isinstance(obj,
                          list) and len(obj) == 2 and obj[0] in ["*", "+"]:
                return utils.SmartStr(obj[1])

            return utils.SmartStr(obj)

        if "string_value" in obj:
            return obj["string_value"]

        if "mro" in obj:
            obj_mro = obj["mro"]
            if isinstance(obj_mro, basestring):
                obj_mro = obj_mro.split(":")

            for mro_type in obj_mro:
                if mro_type in self.OBJECT_RENDERERS:
                    return self.OBJECT_RENDERERS[mro_type](obj)

        return utils.SmartStr(obj)

    def _GenerateOutputClass(self, class_name, tables):
        """Generates output class with a given name for a given set of tables."""

        output_class = type(utils.SmartStr(class_name),
                            (rdf_structs.RDFProtoStruct, ), {})

        if not tables:
            raise RuntimeError(
                "Can't generate output class without Rekall table "
                "definition.")

        field_number = 1
        output_class.AddDescriptor(
            rdf_structs.ProtoEmbedded(name="metadata",
                                      field_number=field_number,
                                      nested=ExportedMetadata))

        field_number += 1
        output_class.AddDescriptor(
            rdf_structs.ProtoString(name="section_name",
                                    field_number=field_number))

        field_number += 1
        output_class.AddDescriptor(
            rdf_structs.ProtoString(name="text", field_number=field_number))

        # All the tables are merged into one. This is done so that if plugin
        # outputs multiple tables, we get all possible columns in the output
        # RDFValue.
        used_names = set()
        for table in tables:
            for column_header in table:
                column_name = None
                try:
                    column_name = column_header["cname"]
                except KeyError:
                    pass

                if not column_name:
                    column_name = column_header["name"]

                if not column_name:
                    raise RuntimeError(
                        "Can't determine column name in table header.")

                if column_name in used_names:
                    continue

                field_number += 1
                used_names.add(column_name)
                output_class.AddDescriptor(
                    rdf_structs.ProtoString(name=column_name,
                                            field_number=field_number))

        return output_class

    def _GetOutputClass(self, plugin_name, tables):
        output_class_name = "RekallExport_" + plugin_name

        try:
            return RekallResponseConverter.OUTPUT_CLASSES[output_class_name]
        except KeyError:
            output_class = self._GenerateOutputClass(output_class_name, tables)
            RekallResponseConverter.OUTPUT_CLASSES[
                output_class_name] = output_class
            return output_class

    def _HandleTableRow(self, metadata, context_dict, message, output_class):
        """Handles a single row in one of the tables in RekallResponse."""
        attrs = {}
        for key, value in message[1].iteritems():
            if hasattr(output_class, key):
                # ProtoString expects a unicode object, so let's convert
                # everything to unicode strings.
                attrs[key] = utils.SmartUnicode(self._RenderObject(value))

        result = output_class(**attrs)
        result.metadata = metadata

        try:
            result.section_name = self._RenderObject(context_dict["s"]["name"])
        except KeyError:
            pass

        return result

    def Convert(self, metadata, rekall_response, token=None):
        """Convert a single RekallResponse."""
        if rekall_response.HasField("json_context_messages"):
            parsed_context_messages = json.loads(
                rekall_response.json_context_messages)
        else:
            parsed_context_messages = []

        context_dict = dict(parsed_context_messages)
        if "t" in context_dict:
            tables = [context_dict["t"]]
        else:
            tables = []

        parsed_messages = json.loads(rekall_response.json_messages)

        # First scan all the messages and find all table definitions there.
        for message in parsed_messages:
            # We do not decode lexicon-based responses. If there's a non-empty
            # lexicon in the message, we ignore the whole response altogether.
            if message[0] == "l" and message[1]:
                logging.warn("Non-empty lexicon found. Client %s is too old.",
                             rekall_response.client_urn)
                break

            if message[0] == "t":
                tables.append(message[1])

        # Generate output class based on all table definitions.
        output_class = self._GetOutputClass(rekall_response.plugin, tables)

        # Fill generated output class instances with values from every row.
        for message in parsed_messages:
            if message[0] in ["s", "t"]:
                context_dict[message[0]] = message[1]

            if message[0] == "r":
                yield self._HandleTableRow(metadata, context_dict, message,
                                           output_class)

    def BatchConvert(self, metadata_value_pairs, token=None):
        """Convert batch of RekallResponses."""

        for metadata, rekall_response in metadata_value_pairs:
            for result in self.Convert(metadata, rekall_response):
                yield result
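_GetOutputClass builds RDF classes on the fly with type() and memoizes them in OUTPUT_CLASSES. Stripped of the protobuf descriptor machinery, the same pattern looks like this (plain attributes stand in for ProtoString fields; names are illustrative):

_CLASS_CACHE = {}


def GetOutputClass(plugin_name, column_names):
  class_name = "RekallExport_" + plugin_name
  try:
    return _CLASS_CACHE[class_name]
  except KeyError:
    # One attribute per unique column, mirroring the merged-table logic.
    attrs = dict.fromkeys(column_names)
    cls = type(str(class_name), (object,), attrs)
    _CLASS_CACHE[class_name] = cls
    return cls


PslistRow = GetOutputClass("pslist", ["offset", "name", "pid"])
assert GetOutputClass("pslist", []) is PslistRow  # served from the cache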
Example #18
    def Handle(self, args, token):
        stats_store = aff4.FACTORY.Create(
            stats_store_lib.StatsStore.DATA_STORE_ROOT,
            aff4_type=stats_store_lib.StatsStore,
            mode="rw",
            token=token)

        process_ids = stats_store.ListUsedProcessIds()
        filtered_ids = [
            pid for pid in process_ids
            if pid.startswith(args.component.name.lower())
        ]

        start_time = args.start
        end_time = args.end

        if not end_time:
            end_time = rdfvalue.RDFDatetime.Now()

        if not start_time:
            start_time = end_time - rdfvalue.Duration("1h")

        # Run for a little extra time at the start. This improves the quality of the
        # first data points of counter metrics which don't appear in every interval.
        base_start_time = start_time
        # pylint: disable=g-no-augmented-assignment
        start_time = start_time - rdfvalue.Duration("10m")
        # pylint: enable=g-no-augmented-assignment

        if end_time <= start_time:
            raise ValueError("End time can't be less than start time.")

        result = ApiStatsStoreMetric(start=base_start_time,
                                     end=end_time,
                                     metric_name=args.metric_name)

        data = stats_store.MultiReadStats(process_ids=filtered_ids,
                                          metric_name=utils.SmartStr(
                                              args.metric_name),
                                          timestamp=(start_time, end_time))

        if not data:
            return result

        pid = data.keys()[0]
        metadata = stats_store.ReadMetadata(process_id=pid)
        metric_metadata = metadata.AsDict()[args.metric_name]

        query = stats_store_lib.StatsStoreDataQuery(data)
        query.In(args.component.name.lower() + ".*").In(args.metric_name)
        if metric_metadata.fields_defs:
            query.InAll()

        requested_duration = end_time - start_time
        if requested_duration >= rdfvalue.Duration("1d"):
            sampling_duration = rdfvalue.Duration("5m")
        elif requested_duration >= rdfvalue.Duration("6h"):
            sampling_duration = rdfvalue.Duration("1m")
        else:
            sampling_duration = rdfvalue.Duration("30s")

        if metric_metadata.metric_type == metric_metadata.MetricType.COUNTER:
            query.TakeValue().MakeIncreasing().Normalize(
                sampling_duration,
                start_time,
                end_time,
                mode=timeseries.NORMALIZE_MODE_COUNTER)
        elif metric_metadata.metric_type == metric_metadata.MetricType.EVENT:
            if args.distribution_handling_mode == "DH_SUM":
                query.TakeDistributionSum()
            elif args.distribution_handling_mode == "DH_COUNT":
                query.TakeDistributionCount()
            else:
                raise ValueError(
                    "Unexpected request.distribution_handling_mode "
                    "value: %s." % args.distribution_handling_mode)
            query.MakeIncreasing()
            query.Normalize(sampling_duration,
                            start_time,
                            end_time,
                            mode=timeseries.NORMALIZE_MODE_COUNTER)

        elif metric_metadata.metric_type == metric_metadata.MetricType.GAUGE:
            query.TakeValue().Normalize(sampling_duration, start_time,
                                        end_time)
        else:
            raise RuntimeError("Unsupported metric type.")

        if args.aggregation_mode == "AGG_SUM":
            query.AggregateViaSum()
        elif args.aggregation_mode == "AGG_MEAN":
            query.AggregateViaMean()
        elif args.aggregation_mode == "AGG_NONE":
            pass
        else:
            raise ValueError("Unexpected request.aggregation value: %s." %
                             args.aggregation)

        if (args.rate and metric_metadata.metric_type !=
                metric_metadata.MetricType.GAUGE):
            query.Rate()

        query.InTimeRange(base_start_time, end_time)

        for value, timestamp in query.ts.data:
            if value is not None:
                result.data_points.append(
                    ApiStatsStoreMetricDataPoint(timestamp=timestamp,
                                                 value=value))

        return result
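The sampling interval scales with the width of the requested range. As a standalone rule with durations in seconds (mirroring the thresholds above):

def SamplingDurationSecs(requested_secs):
  if requested_secs >= 24 * 3600:   # a day or more: 5-minute buckets
    return 5 * 60
  elif requested_secs >= 6 * 3600:  # six hours or more: 1-minute buckets
    return 60
  return 30                         # otherwise: 30-second buckets


assert SamplingDurationSecs(2 * 24 * 3600) == 300
assert SamplingDurationSecs(7 * 3600) == 60
assert SamplingDurationSecs(3600) == 30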
Example #19
 def Render(self, rdf_data):
   if self.template:
     result = self.formatter(self.template, rdf_data)
   else:
     result = utils.SmartStr(rdf_data)
   return result
Example #20
 def __str__(self):
     result = self.plugin_name
     if self.plugin_args:
         result += " <%s>" % utils.SmartStr(self.plugin_args)
     return result
Example #21
 def TryAppend(prefix, keyword):
   if keyword:
     keyword_string = self._NormalizeKeyword(utils.SmartStr(keyword))
     keywords.append(keyword_string)
     if prefix:
       keywords.append(prefix + ":" + keyword_string)
Example #22
 def __str__(self):
     return utils.SmartStr(self._string_urn)
Example #23
 def __str__(self):
     return utils.SmartStr(self._value)
Example #24
def GetRekallObjectSummary(renderer, obj):
    """Returns summary string for a given encoded Rekall object."""
    object_renderer = json_renderer.JsonObjectRenderer.FromEncoded(
        obj, renderer)(renderer)
    return utils.SmartStr(object_renderer.Summary(obj))
Example #25
 def SerializeToString(self):
     return utils.SmartStr(self._value)
Example #26
    def Layout(self, request, response):
        if self.proxy:
            collection = self.proxy
        else:
            try:
                aff4_path = self.state.get("aff4_path") or request.REQ.get(
                    "aff4_path")
                collection = aff4.FACTORY.Open(
                    aff4_path,
                    aff4_type=aff4_rekall.RekallResponseCollection,
                    token=request.token)
            except IOError:
                return

        output_directories = set()
        renderer = GRRRekallViewerRenderer(session.Session())

        for rekall_response in collection:
            for statement in json.loads(rekall_response.json_messages):

                command = statement[0]

                # Metadata about currently running plugin.
                if command == "m":
                    # Flush any old tables.
                    self._flush_table()
                    self._flush_freetext()
                    self.elements.append(PluginHeader(statement[1]))

                # Start new Section.
                elif command == "s":
                    self._flush_table()
                    self._flush_freetext()
                    self.elements.append(SectionHeader(**statement[1]))

                # Free format statement.
                elif command == "f":
                    self._flush_table()
                    format_string = statement[1]
                    try:
                        args = statement[2:]
                    except IndexError:
                        args = []

                    def FormatCallback(match):
                        arg_pos = int(match.group(1))
                        # It's ok to reference args[arg_pos] as FormatCallback is only
                        # used in the next re.sub() call and nowhere else.
                        arg = args[arg_pos]  # pylint: disable=cell-var-from-loop
                        return GetRekallObjectSummary(renderer, arg)

                    rendered_free_text = re.sub(r"\{(\d+)(?:\:.+?\}|\})",
                                                FormatCallback, format_string)
                    self.free_text.append(rendered_free_text)

                # Errors reported from Rekall.
                elif command == "e":
                    self._flush_table()
                    self._flush_freetext()
                    self.elements.append(RekallErrorRenderer(statement[1]))

                # Start Table
                elif command == "t":
                    self._flush_table()
                    self._flush_freetext()

                    # Create a new table.
                    self.current_table = RekallTable(statement[1])

                # Add row to current table.
                elif command == "r":
                    self._flush_freetext()
                    if not self.current_table:
                        logging.warn(
                            "Rekall plugin %s tried to write a "
                            "table row but no table was defined.",
                            rekall_response.plugin)
                        # This is pretty bad but at least we can show the data somehow.
                        self.free_text.append(utils.SmartStr(statement[1]))
                        continue

                    self.current_table.AddRow(statement[1])

                # File that was output by rekall and extracted.
                elif command == "file":
                    # Currently, when we render a client URN the link leads the user to
                    # the directory in the virtual file system, not the particular
                    # file. So we just render one link for each output directory.
                    file_urn = aff4_grr.VFSGRRClient.PathspecToURN(
                        rdf_paths.PathSpec(**statement[1]),
                        rekall_response.client_urn)
                    output_directories.add(rdfvalue.RDFURN(file_urn.Dirname()))

                elif command == "p":
                    # "p" command indicates progress, we don't render it.
                    pass

        self._flush_table()
        self._flush_freetext()
        for directory in output_directories:
            self.elements.append(semantic.RDFURNRenderer(directory))

        return super(RekallResponseCollectionRenderer,
                     self).Layout(request, response)
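Rekall responses are JSON lists of (command, payload, ...) statements, and Layout is one big dispatch on the one-letter command. A minimal sketch of the same loop, with print handlers standing in for the real renderers:

import json


def RenderMessages(json_messages):
  for statement in json.loads(json_messages):
    command = statement[0]
    if command == "m":    # metadata about the running plugin
      print("plugin: %s" % statement[1])
    elif command == "s":  # new section
      print("section: %s" % statement[1])
    elif command == "t":  # table definition
      print("table: %s" % statement[1])
    elif command == "r":  # row for the current table
      print("row: %s" % statement[1])
    elif command == "p":  # progress - ignored, as in the real renderer
      pass


RenderMessages('[["m", {"plugin_name": "pslist"}], ["r", {"pid": 4}]]')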
Example #27
 def __str__(self):
     return utils.SmartStr("aff4:%s" % self._string_urn)
Example #28
 def RawHTML(self, item, **options):
     """Returns escaped object's summary."""
     return django_html.escape(
         utils.SmartStr(
             self._GetDelegateObjectRenderer(item).Summary(item,
                                                           **options)))
Example #29
def GetMemoryModules(client_id, token):
  """Given a host, returns a list of urns to appropriate memory modules.

  Args:
    client_id: The client_id of the host to use.
    token: Token to use for access.

  Returns:
    A list of URNs pointing to GRRSignedDriver objects.

  Raises:
    IOError: on inability to get any driver.

  The driver is retrieved from the AFF4 configuration space according to the
  client's known attributes. The exact layout of the driver's configuration
  space structure is determined by the configuration system.

  The driver we are sending should have a signature associated with it. This
  would get verified by the client (independently of OS driver signing).  Having
  this mechanism will allow for offline signing of drivers to reduce the risk of
  the system being used to deploy evil things.

  Since the client itself will verify the signature of the client, on the server
  we must retrieve the corresponding private keys to the public key that the
  client has. If the keys depend on the client's architecture, and operating
  system, the configuration system will give the client different keys depending
  on its operating system or architecture. In this case we need to match these
  keys, and retrieve the correct keys.

  For example, the configuration file can specify different keys for windows and
  OSX clients:

  Platform:Windows:
    PrivateKeys.driver_signing_private_key: |
      .... Key 1 .... (Private)

    Client.driver_signing_public_key:  |
      .... Key 1 .... (Public)

    Arch:amd64:
      MemoryDriver.aff4_paths:
        - aff4:/config/drivers/windows/pmem_amd64.sys

    Arch:i386:
      MemoryDriver.aff4_paths:
        - aff4:/config/drivers/windows/pmem_x86.sys
  """
  client_context = []
  client = aff4.FACTORY.Open(client_id, token=token)
  system = client.Get(client.Schema.SYSTEM)
  if system:
    client_context.append("Platform:%s" % system)

  release = client.Get(client.Schema.OS_RELEASE)
  if release:
    client_context.append(utils.SmartStr(release))

  arch = utils.SmartStr(client.Get(client.Schema.ARCH)).lower()
  # Support synonyms for i386.
  if arch == "x86":
    arch = "i386"

  if arch:
    client_context.append("Arch:%s" % arch)

  installer_urns = []
  for aff4_path in config_lib.CONFIG.Get("MemoryDriver.aff4_paths",
                                         context=client_context):
    logging.debug("Will fetch driver at %s for client %s",
                  aff4_path, client_id)
    if GetDriverFromURN(aff4_path, token):
      logging.debug("Driver at %s found.", aff4_path)
      installer_urns.append(aff4_path)
    else:
      logging.debug("Unable to load driver at %s.", aff4_path)

  if not installer_urns:
    raise IOError("Unable to find a driver for client.")
  return installer_urns
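The docstring describes context-dependent configuration: which MemoryDriver.aff4_paths value applies depends on the Platform:/Arch: labels in the client context. A toy illustration of that selection (a deliberate simplification, not config_lib's actual resolution algorithm):

_DRIVER_PATHS = {
    ("Platform:Windows", "Arch:amd64"):
        ["aff4:/config/drivers/windows/pmem_amd64.sys"],
    ("Platform:Windows", "Arch:i386"):
        ["aff4:/config/drivers/windows/pmem_x86.sys"],
}


def GetDriverPaths(client_context):
  # The most specific entry whose labels all appear in the context wins.
  best, best_specificity = [], -1
  for labels, paths in _DRIVER_PATHS.items():
    if all(label in client_context for label in labels):
      if len(labels) > best_specificity:
        best, best_specificity = paths, len(labels)
  return best


context = ["Platform:Windows", "7", "Arch:amd64"]
assert GetDriverPaths(context) == [
    "aff4:/config/drivers/windows/pmem_amd64.sys"]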
Example #30
def MakeDestinationKey(directory, filename):
    """Creates a name that identifies a database file."""
    return utils.SmartStr(utils.JoinPath(directory, filename)).lstrip("/")
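Assuming utils.JoinPath joins components with "/" separators, stripping the leading slash yields a key relative to the database root. A standalone equivalent using posixpath:

import posixpath


def MakeDestinationKeySketch(directory, filename):
  return posixpath.join(directory, filename).lstrip("/")


assert MakeDestinationKeySketch("/var/grr/db", "events.sqlite") == \
    "var/grr/db/events.sqlite"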