Example #1
    def RenderObject(self, aff4_object, args):
        """Renders given object as plain JSON-friendly data structure."""
        object_attributes = aff4_object.synced_attributes.copy()
        for key, value in aff4_object.new_attributes.items():
            object_attributes[key] = value

        attributes = {}
        for attribute, values in object_attributes.items():
            attributes[attribute.predicate] = []
            for value in values:
                # This value is really a LazyDecoder() instance. We need to get at the
                # real data here.
                if hasattr(value, "ToRDFValue"):
                    value = value.ToRDFValue()

                if aff4_object.age_policy != aff4.NEWEST_TIME:
                    attributes[attribute.predicate].append(
                        api_value_renderers.RenderValue(
                            value, limit_lists=args.limit_lists))
                else:
                    attributes[attribute.predicate] = (
                        api_value_renderers.RenderValue(
                            value, limit_lists=args.limit_lists))

        return dict(aff4_class=aff4_object.__class__.__name__,
                    urn=utils.SmartUnicode(aff4_object.urn),
                    attributes=attributes,
                    age_policy=aff4_object.age_policy)
Example #2
    def _FormatResultAsJson(self, result, format_mode=None):
        if result is None:
            return dict(status="OK")

        if format_mode == JsonMode.PROTO3_JSON_MODE:
            return json.loads(
                json_format.MessageToJson(result.AsPrimitiveProto()))
        elif format_mode == JsonMode.GRR_ROOT_TYPES_STRIPPED_JSON_MODE:
            result_dict = {}
            for field, value in result.ListSetFields():
                if isinstance(field, (rdf_structs.ProtoDynamicEmbedded,
                                      rdf_structs.ProtoEmbedded,
                                      rdf_structs.ProtoList)):
                    result_dict[field.name] = api_value_renderers.RenderValue(
                        value)
                else:
                    result_dict[field.name] = api_value_renderers.RenderValue(
                        value)["value"]
            return result_dict
        elif format_mode == JsonMode.GRR_TYPE_STRIPPED_JSON_MODE:
            rendered_data = api_value_renderers.RenderValue(result)
            return api_value_renderers.StripTypeInfo(rendered_data)
        elif format_mode == JsonMode.GRR_JSON_MODE:
            return api_value_renderers.RenderValue(result)
        else:
            raise ValueError("Invalid format_mode: %s" % format_mode)
Example #3
  def CallApiHandler(handler, args, token=None):
    """Handles API call to a given handler with given args and token."""

    try:
      result = handler.Handle(args, token=token)
    except NotImplementedError:
      # Fall back to legacy Render() method if Handle() is not implemented.
      return handler.Render(args, token=token)

    expected_type = handler.result_type
    if expected_type is None:
      expected_type = None.__class__

    if result.__class__ != expected_type:
      raise UnexpectedResultTypeError("Expected %s, but got %s." % (
          expected_type.__name__, result.__class__.__name__))

    if result is None:
      return dict(status="OK")
    else:
      if handler.strip_json_root_fields_types:
        result_dict = {}
        for field, value in result.ListSetFields():
          if isinstance(field, (rdf_structs.ProtoDynamicEmbedded,
                                rdf_structs.ProtoEmbedded,
                                rdf_structs.ProtoList)):
            result_dict[field.name] = api_value_renderers.RenderValue(value)
          else:
            result_dict[field.name] = api_value_renderers.RenderValue(
                value)["value"]
      else:
        result_dict = api_value_renderers.RenderValue(result)

      return result_dict
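The ["value"] stripping above (and again in Example #6 below) suggests that api_value_renderers.RenderValue() returns a JSON-friendly dict that wraps the payload under a "value" key alongside type metadata; compare the with_types/with_metadata arguments and StripTypeInfo() used elsewhere on this page. The sketch below only illustrates that inferred shape; the keys and sample values are assumptions, not code from the GRR sources.

    # Illustrative sketch: the approximate structure the call sites above appear
    # to expect from RenderValue(). Keys and values here are assumed.
    rendered = {
        "type": "RDFDatetime",      # assumed type annotation
        "value": 1423939200000000,  # plain, JSON-serializable payload
    }

    # What the strip_json_root_fields_types branch keeps for non-embedded fields:
    stripped = rendered["value"]
    assert stripped == 1423939200000000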
Example #4
    def Render(self, args, token=None):
        flow_id = flow.GRRFlow.StartFlow(client_id=args.client_id,
                                         flow_name=args.runner_args.flow_name,
                                         token=token,
                                         args=args.flow_args,
                                         runner_args=args.runner_args)

        return dict(flow_id=api_value_renderers.RenderValue(flow_id),
                    flow_args=api_value_renderers.RenderValue(args.flow_args),
                    runner_args=api_value_renderers.RenderValue(
                        args.runner_args))
Example #5
File: flow.py Project: lismore/grr
  def Render(self, args, token=None):
    flow_urn = args.client_id.Add("flows").Add(args.flow_id.Basename())
    flow_obj = aff4.FACTORY.Open(flow_urn, aff4_type="GRRFlow", mode="r",
                                 token=token)

    output_plugins_states = flow_obj.GetRunner().context.output_plugins_states

    result = {}
    for plugin_descriptor, plugin_state in output_plugins_states:
      result[plugin_descriptor.plugin_name] = (
          api_value_renderers.RenderValue(plugin_descriptor),
          api_value_renderers.RenderValue(plugin_state))

    return result
Example #6
def HandleApiCall(handler, args, token=None):
    """Handles API call to a given handlers with given args and token."""

    if not hasattr(handler, "Render"):
        handler = api_call_handler_base.ApiCallHandler.classes[handler]()

    # Privileged handlers bypass the approvals model to do things like check flow
    # status across multiple clients or add labels to clients. They provide
    # limited functionality and are responsible for their own checking.
    if handler.privileged:
        token = token.SetUID()

    # Raises on access denied
    API_AUTH_MGR.CheckAccess(handler, token.username)

    try:
        result = handler.Handle(args, token=token)
    except NotImplementedError:
        # Fall back to legacy Render() method if Handle() is not implemented.
        return handler.Render(args, token=token)

    expected_type = handler.result_type
    if expected_type is None:
        expected_type = None.__class__

    if result.__class__.__name__ != expected_type.__name__:
        raise UnexpectedResultTypeError(
            "Expected %s, but got %s." %
            (expected_type.__name__, result.__class__.__name__))

    if result is None:
        return dict(status="OK")
    else:
        if handler.strip_json_root_fields_types:
            result_dict = {}
            for field, value in result.ListSetFields():
                if isinstance(field,
                              (structs.ProtoDynamicEmbedded,
                               structs.ProtoEmbedded, structs.ProtoList)):
                    result_dict[field.name] = api_value_renderers.RenderValue(
                        value)
                else:
                    result_dict[field.name] = api_value_renderers.RenderValue(
                        value)["value"]
        else:
            result_dict = api_value_renderers.RenderValue(result)

        return result_dict
Example #7
    def RenderObject(self, hunt, args):
        runner = hunt.GetRunner()
        context = runner.context

        untyped_summary_part = dict(state=hunt.Get(hunt.Schema.STATE),
                                    hunt_name=context.args.hunt_name,
                                    create_time=context.create_time,
                                    expires=context.expires,
                                    client_limit=context.args.client_limit,
                                    client_rate=context.args.client_rate,
                                    creator=context.creator,
                                    description=context.args.description)
        typed_summary_part = {}

        if args.with_full_summary:
            all_clients_count, completed_clients_count, _ = (
                hunt.GetClientsCounts())

            untyped_summary_part.update(
                dict(stats=context.usage_stats,
                     all_clients_count=all_clients_count,
                     completed_clients_count=completed_clients_count,
                     outstanding_clients_count=(all_clients_count -
                                                completed_clients_count)))

            typed_summary_part = dict(
                regex_rules=runner.args.regex_rules or [],
                integer_rules=runner.args.integer_rules or [],
                args=hunt.state.args)

            try:
                typed_summary_part[
                    "client_rule_set"] = runner.args.client_rule_set
            except AttributeError:
                typed_summary_part["client_rule_set"] = []

        for k, v in untyped_summary_part.items():
            untyped_summary_part[k] = api_value_renderers.RenderValue(v)

        for k, v in typed_summary_part.items():
            typed_summary_part[k] = api_value_renderers.RenderValue(v)

        rendered_object = {
            "summary":
            dict(untyped_summary_part.items() + typed_summary_part.items())
        }
        return rendered_object
Example #8
    def RenderObject(self, aff4_object, args):
        """Renders RDFValueCollection as plain JSON-friendly data structure."""
        if args.filter:
            index = 0
            items = []
            for item in aff4_object.GenerateItems():
                serialized_item = item.SerializeToString()
                if re.search(re.escape(args.filter), serialized_item, re.I):
                    if index >= args.offset:
                        items.append(item)
                    index += 1

                    if args.count and len(items) >= args.count:
                        break
        else:
            items = list(
                itertools.islice(
                    aff4_object.GenerateItems(), args.offset,
                    args.count and (args.offset + args.count) or sys.maxint))

        result = {}
        result["offset"] = args.offset
        result["count"] = len(items)
        result["items"] = api_value_renderers.RenderValue(
            items, limit_lists=args.items_limit_lists)

        if args.with_total_count:
            if hasattr(aff4_object, "CalculateLength"):
                total_count = aff4_object.CalculateLength()
            else:
                total_count = len(aff4_object)
            result["total_count"] = total_count

        return result
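The unfiltered branch above relies on the old "count and stop or sys.maxint" idiom to turn a zero count into an effectively unbounded slice. Below is a minimal standalone sketch of the same offset/count windowing; the helper name is hypothetical, not from the GRR sources.

    import itertools

    def _Paginate(items, offset, count):
        # A count of 0 means "no limit", so the stop index becomes None.
        stop = offset + count if count else None
        return list(itertools.islice(items, offset, stop))

    print(_Paginate(iter(range(10)), offset=2, count=3))  # [2, 3, 4]
    print(_Paginate(iter(range(10)), offset=8, count=0))  # [8, 9]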
Example #9
 def RenderObject(self, client, unused_args):
     """Renders VFSGRRClient as plain JSON-friendly data structure."""
     return dict(disk_warnings=self._GetDiskWarnings(client),
                 summary=api_value_renderers.RenderValue(
                     client.GetSummary(),
                     with_types=True,
                     with_metadata=True))
Example #10
    def Render(self, args, token=None):
        args.flow_args.hunt_runner_args.hunt_name = "GenericHunt"

        # TODO(user): The following should be asserted in a more elegant way.
        # Also, it's not clear whether cron job scheduling UI is used often enough
        # to justify its existence. We should check with opensource users whether
        # they find this feature useful and if not, deprecate it altogether.
        if args.flow_name != "CreateAndRunGenericHuntFlow":
            raise ValueError(
                "Only CreateAndRunGenericHuntFlow flows are supported "
                "here (got: %s)." % args.flow_name)

        if not args.flow_runner_args.flow_name:
            args.flow_runner_args.flow_name = args.flow_name

        cron_args = aff4_cronjobs.CreateCronJobFlowArgs(
            description=args.description,
            periodicity=args.periodicity,
            flow_runner_args=args.flow_runner_args,
            flow_args=args.flow_args,
            allow_overruns=args.allow_overruns,
            lifetime=args.lifetime)
        urn = aff4_cronjobs.CRON_MANAGER.ScheduleFlow(cron_args=cron_args,
                                                      disabled=True,
                                                      token=token)

        fd = aff4.FACTORY.Open(urn,
                               aff4_type="CronJob",
                               token=token,
                               age=aff4.ALL_TIMES)
        api_cron_job = CronJobToApiCronJob(fd)

        return api_value_renderers.RenderValue(api_cron_job)
Example #11
File: artifact.py Project: lismore/grr
    def RenderArtifacts(self, artifacts, custom_artifacts=None):
        if custom_artifacts is None:
            custom_artifacts = set()

        result = []
        for artifact_val in artifacts:
            descriptor = artifact_registry.ArtifactDescriptor(
                artifact=artifact_val,
                artifact_source=artifact_val.ToPrettyJson(extended=True),
                dependencies=sorted(artifact_val.GetArtifactDependencies()),
                path_dependencies=sorted(
                    artifact_val.GetArtifactPathDependencies()),
                is_custom=artifact_val.name in custom_artifacts)

            for processor in parsers.Parser.GetClassesByArtifact(
                    artifact_val.name):
                descriptor.processors.append(
                    artifact_registry.ArtifactProcessorDescriptor(
                        name=processor.__name__,
                        output_types=processor.output_types,
                        description=processor.GetDescription()))

            result.append(api_value_renderers.RenderValue(descriptor))

        return result
Example #12
    def Render(self, unused_args, token=None):
        """Fetches and renders current user's settings."""

        user_settings = self.GetUserSettings(token)
        return api_value_renderers.RenderValue(user_settings,
                                               with_types=True,
                                               with_metadata=True)
Example #13
    def Render(self, args, token=None):
        flow_urn = args.client_id.Add("flows").Add(args.flow_id.Basename())
        flow_obj = aff4.FACTORY.Open(flow_urn,
                                     aff4_type="GRRFlow",
                                     mode="r",
                                     token=token)

        output_plugins_states = (
            flow_obj.GetRunner().context.output_plugins_states)

        type_indices = {}
        result = []
        for plugin_descriptor, plugin_state in output_plugins_states:
            type_index = type_indices.setdefault(plugin_descriptor.plugin_name,
                                                 0)
            type_indices[plugin_descriptor.plugin_name] += 1

            # Output plugins states are stored differently for hunts and for flows:
            # as a dictionary for hunts and as a simple list for flows.
            #
            # TODO(user): store output plugins states in the same way for flows
            # and hunts. Until this is done, we can emulate the same interface in
            # the HTTP API.
            api_plugin = api_output_plugin.ApiOutputPlugin(
                id=plugin_descriptor.plugin_name + "_%d" % type_index,
                plugin_descriptor=plugin_descriptor,
                state=plugin_state)
            result.append(api_plugin)

        return dict(offset=0,
                    count=len(result),
                    total_count=len(result),
                    items=api_value_renderers.RenderValue(result))
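This handler and Example #27 below rely on the same synthetic plugin id: the plugin name plus a per-name counter ("<plugin_name>_<index>"). A minimal standalone sketch of that bookkeeping follows; the helper name and plugin names are made up for illustration, not taken from the GRR sources.

    def _BuildPluginIds(plugin_names):
        # Mirrors the type_indices bookkeeping above: ids are counted per name.
        type_indices = {}
        ids = []
        for name in plugin_names:
            index = type_indices.setdefault(name, 0)
            type_indices[name] += 1
            ids.append("%s_%d" % (name, index))
        return ids

    print(_BuildPluginIds(["EmailPlugin", "EmailPlugin", "CSVPlugin"]))
    # ['EmailPlugin_0', 'EmailPlugin_1', 'CSVPlugin_0']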
Example #14
    def RenderObject(self, aff4_object, args):
        """Renders given object as plain JSON-friendly data structure."""
        render_value_args = dict(limit_lists=args.limit_lists)
        if args.type_info == args.TypeInformation.WITH_TYPES:
            render_value_args["with_types"] = True
        elif args.type_info == args.TypeInformation.WITH_TYPES_AND_METADATA:
            render_value_args["with_types"] = True
            render_value_args["with_metadata"] = True

        object_attributes = aff4_object.synced_attributes.copy()
        for key, value in aff4_object.new_attributes.items():
            object_attributes[key] = value

        attributes = {}
        for attribute, values in object_attributes.items():
            attributes[attribute.predicate] = []
            for value in values:
                # This value is really a LazyDecoder() instance. We need to get at the
                # real data here.
                if hasattr(value, "ToRDFValue"):
                    value = value.ToRDFValue()

                if aff4_object.age_policy != aff4.NEWEST_TIME:
                    attributes[attribute.predicate].append(
                        api_value_renderers.RenderValue(
                            value, **render_value_args))
                else:
                    attributes[attribute.predicate] = (
                        api_value_renderers.RenderValue(
                            value, **render_value_args))

        result = dict(aff4_class=aff4_object.__class__.__name__,
                      urn=utils.SmartUnicode(aff4_object.urn),
                      attributes=attributes,
                      age_policy=aff4_object.age_policy)

        if args.type_info == args.TypeInformation.WITH_TYPES_AND_METADATA:
            descriptors = {}
            for attribute, _ in aff4_object.synced_attributes.items():
                descriptors[attribute.predicate] = {
                    "description": attribute.description
                }

            result["metadata"] = descriptors

        return result
Example #15
  def Render(self, args, token=None):
    metadata = aff4.FACTORY.Create(
        HUNTS_ROOT_PATH.Add(args.hunt_id).Add("ResultsMetadata"), mode="r",
        aff4_type="HuntResultsMetadata", token=token)

    # We don't need rendered type information, so we return just the "value"
    # part of the result.
    return api_value_renderers.RenderValue(
        metadata.Get(metadata.Schema.OUTPUT_PLUGINS, {}))["value"]
Example #16
    def RenderRDFStruct(self, cls):
        fields = []
        for field_desc in cls.type_infos:
            repeated = isinstance(field_desc, type_info.ProtoList)
            if hasattr(field_desc, "delegate"):
                field_desc = field_desc.delegate

            field = {
                "name": field_desc.name,
                "index": field_desc.field_number,
                "repeated": repeated,
                "dynamic": isinstance(field_desc,
                                      type_info.ProtoDynamicEmbedded)
            }

            field_type = field_desc.type
            if field_type is not None:
                field["type"] = field_type.__name__

            if field_type == rdfvalue.EnumNamedValue:
                allowed_values = []
                for enum_label in sorted(field_desc.enum):
                    enum_value = field_desc.enum[enum_label]
                    allowed_values.append(
                        dict(name=enum_label,
                             value=int(enum_value),
                             doc=enum_value.description))
                field["allowed_values"] = allowed_values

            if field_desc.default is not None:
                if field_type:
                    field_default = field_type(field_desc.default)
                else:
                    field_default = field_desc.default

                field["default"] = api_value_renderers.RenderValue(
                    field_default, with_types=True)

            if field_desc.description:
                field["doc"] = field_desc.description

            if field_desc.friendly_name:
                field["friendly_name"] = field_desc.friendly_name

            if field_desc.labels:
                field["labels"] = [
                    rdfvalue.SemanticDescriptor.Labels.reverse_enum[x]
                    for x in field_desc.labels
                ]

            fields.append(field)

        return dict(name=cls.__name__,
                    doc=cls.__doc__ or "",
                    fields=fields,
                    kind="struct")
Example #17
  def Render(self, args, token=None):
    labels_index = aff4.FACTORY.Create(
        aff4.VFSGRRClient.labels_index_urn, "AFF4LabelsIndex",
        mode="rw", token=token)

    rendered_labels = []
    for label in labels_index.ListUsedLabels():
      rendered_labels.append(api_value_renderers.RenderValue(label))

    return dict(labels=sorted(rendered_labels))
Example #18
  def Render(self, args, token=None):
    stats_store = aff4.FACTORY.Create(
        None, aff4_type=stats_store_lib.StatsStore, mode="w", token=token)

    process_ids = [pid for pid in stats_store.ListUsedProcessIds()
                   if pid.startswith(args.component.name.lower())]
    if not process_ids:
      return {}
    else:
      metadata = stats_store.ReadMetadata(process_id=process_ids[0])
      return api_value_renderers.RenderValue(metadata)
Example #19
 def Render(self, args, token=None):
     labels_index = aff4.FACTORY.Create(standard.LabelSet.CLIENT_LABELS_URN,
                                        "LabelSet",
                                        mode="r",
                                        token=token)
     rendered_labels = []
     for label in labels_index.ListLabels():
         label_object = aff4_rdfvalues.AFF4ObjectLabel(name=label)
         rendered_labels.append(
             api_value_renderers.RenderValue(label_object))
     return dict(labels=rendered_labels)
Example #20
    def Render(self, args, token=None):
        client_urn = self.GetClientTarget(args, token=token)

        size_condition = file_finder.FileFinderCondition(
            condition_type=file_finder.FileFinderCondition.Type.SIZE,
            size=file_finder.FileFinderSizeCondition(
                max_file_size=args.max_file_size))

        file_finder_args = file_finder.FileFinderArgs(
            paths=args.paths,
            action=file_finder.FileFinderAction(action_type=args.action),
            conditions=[size_condition])

        # Check our flow throttling limits, will raise if there are problems.
        throttler = throttle.FlowThrottler()
        throttler.EnforceLimits(client_urn,
                                token.username,
                                "FileFinder",
                                file_finder_args,
                                token=token)

        # Limit the whole flow to 200MB so if a glob matches lots of small files we
        # still don't have too much impact.
        runner_args = flow_runner.FlowRunnerArgs(
            client_id=client_urn,
            flow_name="FileFinder",
            network_bytes_limit=200 * 1000 * 1000)

        flow_id = flow.GRRFlow.StartFlow(runner_args=runner_args,
                                         token=token,
                                         args=file_finder_args)

        # Provide a url where the caller can check on the flow status.
        status_url = urlparse.urljoin(
            config_lib.CONFIG["AdminUI.url"], "/api/flows/%s/%s/status" %
            (client_urn.Basename(), flow_id.Basename()))
        return dict(
            flow_id=api_value_renderers.RenderValue(flow_id),
            flow_args=api_value_renderers.RenderValue(file_finder_args),
            runner_args=api_value_renderers.RenderValue(runner_args),
            status_url=status_url)
Example #21
    def RenderPrimitiveRDFValue(self, cls):
        result = dict(name=cls.__name__,
                      doc=cls.__doc__ or "",
                      kind="primitive")
        try:
            default_value = api_value_renderers.RenderValue(cls(),
                                                            with_types=True)
            result["default"] = default_value
        except Exception as e:  # pylint: disable=broad-except
            logging.exception("Can't create default for primitive %s: %s",
                              cls.__name__, e)

        return result
Example #22
  def Render(self, args, token=None):
    encoded_urns = []

    aff4_path = rdfvalue.RDFURN(args.aff4_path)
    index_prefix = "index:dir/"
    for predicate, _, timestamp in data_store.DB.ResolveRegex(
        aff4_path, index_prefix + ".+", token=token,
        timestamp=data_store.DB.NEWEST_TIMESTAMP, limit=1000000):

      urn = aff4_path.Add(predicate[len(index_prefix):])
      encoded_urns.append([api_value_renderers.RenderValue(urn),
                           timestamp])

    return encoded_urns
Example #23
  def Render(self, args, token=None):
    """Creates a new hunt."""

    # We only create generic hunts with /hunts/create requests.
    args.hunt_runner_args.hunt_name = "GenericHunt"

    # Anyone can create the hunt but it will be created in the paused
    # state. Permissions are required to actually start it.
    with implementation.GRRHunt.StartHunt(
        runner_args=args.hunt_runner_args,
        args=args.hunt_args,
        token=token) as hunt:

      # Nothing really to do here - hunts are always created in the paused
      # state.
      logging.info("User %s created a new %s hunt (%s)",
                   token.username, hunt.state.args.flow_runner_args.flow_name,
                   hunt.urn)

      return dict(
          status="OK",
          hunt_id=api_value_renderers.RenderValue(hunt.urn),
          hunt_args=api_value_renderers.RenderValue(hunt.state.args),
          hunt_runner_args=api_value_renderers.RenderValue(hunt.runner.args))
Example #24
  def Render(self, args, token=None):
    metadata = aff4.FACTORY.Create(
        HUNTS_ROOT_PATH.Add(args.hunt_id).Add("ResultsMetadata"), mode="r",
        aff4_type="HuntResultsMetadata", token=token)

    plugins = metadata.Get(metadata.Schema.OUTPUT_PLUGINS, {})

    result = []
    for plugin_name, (plugin_descriptor, plugin_state) in plugins.items():
      api_plugin = api_output_plugin.ApiOutputPlugin(
          id=plugin_name, plugin_descriptor=plugin_descriptor,
          state=plugin_state)
      result.append(api_plugin)

    return dict(offset=0, count=len(result), total_count=len(result),
                items=api_value_renderers.RenderValue(result))
Example #25
  def Render(self, args, token=None):
    if not args.count:
      stop = None
    else:
      stop = args.offset + args.count

    cron_jobs_urns = list(itertools.islice(
        aff4_cronjobs.CRON_MANAGER.ListJobs(token=token), args.offset, stop))
    cron_jobs = aff4.FACTORY.MultiOpen(
        cron_jobs_urns, aff4_type="CronJob", token=token, age=aff4.ALL_TIMES)

    items = [CronJobToApiCronJob(cron_job) for cron_job in cron_jobs]
    items.sort(key=lambda item: item.urn)
    result = dict(offset=args.offset,
                  count=len(items),
                  items=api_value_renderers.RenderValue(items))
    return result
Example #26
    def Render(self, args, token=None):
        if not self.collection_name:
            raise ValueError("collection_name can't be None")

        metadata = aff4.FACTORY.Create(
            HUNTS_ROOT_PATH.Add(args.hunt_id).Add("ResultsMetadata"),
            mode="r",
            aff4_type="HuntResultsMetadata",
            token=token)
        plugins = metadata.Get(metadata.Schema.OUTPUT_PLUGINS, {})
        plugin_descriptor = plugins.get(args.plugin_id)[0]

        # Currently all logs and errors are written not in per-plugin
        # collections, but in per-hunt collections. This doesn't make
        # much sense, because to show errors of particular plugin, we have
        # to filter the collection. Still, things are not too bad, because
        # typically only one output plugin is used.
        #
        # TODO(user): Write errors/logs per-plugin, so that we don't
        # have to do the filtering.

        logs_collection = aff4.FACTORY.Open(
            HUNTS_ROOT_PATH.Add(args.hunt_id).Add(self.collection_name),
            mode="r",
            token=token)

        if len(plugins) == 1:
            total_count = len(logs_collection)
            logs = list(
                itertools.islice(
                    logs_collection.GenerateItems(offset=args.offset),
                    args.count or None))
        else:
            all_logs_for_plugin = [
                x for x in logs_collection
                if x.plugin_descriptor == plugin_descriptor
            ]
            total_count = len(all_logs_for_plugin)
            logs = all_logs_for_plugin[args.offset:]
            if args.count:
                logs = logs[:args.count]

        return dict(offset=args.offset,
                    count=len(logs),
                    total_count=total_count,
                    items=api_value_renderers.RenderValue(logs))
Example #27
    def Render(self, args, token=None):
        if not self.attribute_name:
            raise ValueError("attribute_name can't be None")

        flow_urn = args.client_id.Add("flows").Add(args.flow_id.Basename())
        flow_obj = aff4.FACTORY.Open(flow_urn,
                                     aff4_type="GRRFlow",
                                     mode="r",
                                     token=token)

        output_plugins_states = (
            flow_obj.GetRunner().context.output_plugins_states)

        # Flow output plugins don't use collections to store status/error
        # information. Instead, it's stored in plugin's state. Nevertheless,
        # we emulate collections API here. Having similar API interface allows
        # one to reuse the code when handling hunts and flows output plugins.
        # Good example is the UI code.
        type_indices = {}
        found_state = None
        for plugin_descriptor, plugin_state in output_plugins_states:
            type_index = type_indices.setdefault(plugin_descriptor.plugin_name,
                                                 0)
            type_indices[plugin_descriptor.plugin_name] += 1

            if args.plugin_id == plugin_descriptor.plugin_name + "_%d" % type_index:
                found_state = plugin_state
                break

        if not found_state:
            raise RuntimeError("Flow %s doesn't have output plugin %s" %
                               (flow_urn, args.plugin_id))

        stop = None
        if args.count:
            stop = args.offset + args.count

        logs_collection = found_state.get(self.attribute_name, [])
        sliced_collection = logs_collection[args.offset:stop]

        return dict(offset=args.offset,
                    count=len(sliced_collection),
                    total_count=len(logs_collection),
                    items=api_value_renderers.RenderValue(sliced_collection))
Example #28
  def Render(self, args, token=None):
    if args.flow_name and not args.flow_runner_args.flow_name:
      args.flow_runner_args.flow_name = args.flow_name

    cron_args = aff4_cronjobs.CreateCronJobFlowArgs(
        description=args.description,
        periodicity=args.periodicity,
        flow_runner_args=args.flow_runner_args,
        flow_args=args.flow_args,
        allow_overruns=args.allow_overruns,
        lifetime=args.lifetime)
    urn = aff4_cronjobs.CRON_MANAGER.ScheduleFlow(
        cron_args=cron_args, disabled=True, token=token)

    fd = aff4.FACTORY.Open(urn, aff4_type="CronJob", token=token,
                           age=aff4.ALL_TIMES)
    api_cron_job = CronJobToApiCronJob(fd)

    return api_value_renderers.RenderValue(api_cron_job)
Example #29
  def RenderArtifacts(self, artifacts):
    result = []
    for artifact_val in artifacts:
      descriptor = artifact_registry.ArtifactDescriptor(
          artifact=artifact_val,
          artifact_source=artifact_val.ToPrettyJson(extended=True),
          dependencies=sorted(artifact_val.GetArtifactDependencies()),
          path_dependencies=sorted(artifact_val.GetArtifactPathDependencies()),
          error_message=artifact_val.error_message,
          is_custom=artifact_val.loaded_from.startswith("datastore:"))

      for processor in parsers.Parser.GetClassesByArtifact(artifact_val.name):
        descriptor.processors.append(
            artifact_registry.ArtifactProcessorDescriptor(
                name=processor.__name__,
                output_types=processor.output_types,
                description=processor.GetDescription()))

      result.append(api_value_renderers.RenderValue(descriptor))

    return result
Example #30
  def _PrepareV1Request(self, method, args=None):
    """Prepares API v1 request for a given method and args."""

    args_proto = None
    if args:
      args_proto = args.AsPrimitiveProto()
    request = self.connector.BuildRequest(method, args_proto)
    request.url = request.url.replace("/api/v2/", "/api/")
    if args and request.data:
      body_proto = args.__class__().AsPrimitiveProto()
      json_format.Parse(request.data, body_proto)
      body_args = args.__class__()
      body_args.ParseFromString(body_proto.SerializeToString())
      request.data = json.dumps(
          api_value_renderers.StripTypeInfo(
              api_value_renderers.RenderValue(body_args)),
          cls=http_api.JSONEncoderWithRDFPrimitivesSupport)

    prepped_request = request.prepare()

    return request, prepped_request