def _ValidateSystemCronJobNames(job_names, kind):
  """Validates that each name refers to a SystemCronFlow subclass.

  Args:
    job_names: Iterable of flow names taken from the config.
    kind: "Enabled" or "Disabled" - used verbatim in the error message.

  Raises:
    KeyError: If a name does not match any registered flow.
    ValueError: If a name matches a flow that is not a SystemCronFlow.
  """
  for name in job_names:
    try:
      cls = flow.GRRFlow.classes[name]
    except KeyError:
      raise KeyError("No such flow: %s." % name)

    if not aff4.issubclass(cls, SystemCronFlow):
      raise ValueError("%s system cron job name doesn't correspond to "
                       "a flow inherited from SystemCronFlow: %s" %
                       (kind, name))


def ScheduleSystemCronFlows(names=None, token=None):
  """Schedule all the SystemCronFlows found.

  Args:
    names: Optional list of flow names to schedule; defaults to every
      registered flow (non-SystemCronFlows are skipped below).
    token: Security token used for datastore access.

  Raises:
    RuntimeError: If both Cron.enabled_system_jobs and
      Cron.disabled_system_jobs are set - they are mutually exclusive.
    KeyError: If a configured job name matches no registered flow.
    ValueError: If a configured job name is not a SystemCronFlow.
  """
  if (config.CONFIG["Cron.enabled_system_jobs"] and
      config.CONFIG["Cron.disabled_system_jobs"]):
    raise RuntimeError("Can't have both Cron.enabled_system_jobs and "
                       "Cron.disabled_system_jobs specified in the config.")

  # TODO(user): remove references to Cron.enabled_system_jobs by the end
  # of Q1 2016.
  # The two validation passes were copy-pasted duplicates; they now share
  # one helper and differ only in the word used in the error message.
  _ValidateSystemCronJobNames(
      config.CONFIG["Cron.enabled_system_jobs"], "Enabled")
  _ValidateSystemCronJobNames(
      config.CONFIG["Cron.disabled_system_jobs"], "Disabled")

  if names is None:
    names = flow.GRRFlow.classes.keys()

  for name in names:
    cls = flow.GRRFlow.classes[name]
    if aff4.issubclass(cls, SystemCronFlow):
      cron_args = CreateCronJobFlowArgs(periodicity=cls.frequency)
      cron_args.flow_runner_args.flow_name = name
      cron_args.lifetime = cls.lifetime
      cron_args.allow_overruns = cls.allow_overruns
      cron_args.start_time = GetStartTime(cls)

      # A job is disabled if the class says so, or if an enabled-jobs
      # whitelist exists and the job is not on it, or if it appears on the
      # disabled-jobs blacklist.
      if cls.disabled:
        disabled = True
      elif config.CONFIG["Cron.enabled_system_jobs"]:
        disabled = name not in config.CONFIG["Cron.enabled_system_jobs"]
      else:
        disabled = name in config.CONFIG["Cron.disabled_system_jobs"]

      CRON_MANAGER.ScheduleFlow(
          cron_args=cron_args, job_name=name, token=token, disabled=disabled)
def CallFallback(self, artifact_name, request_data):
  """Invokes a registered fallback collector for artifact_name, if any.

  Scans all ArtifactFallbackCollector subclasses for one that declares
  artifact_name. Each fallback is invoked at most once per flow run
  (tracked in self.state.called_fallbacks).

  Args:
    artifact_name: Name of the artifact whose collection failed.
    request_data: Request data to forward to the fallback flow.

  Returns:
    True if a fallback flow was started, False otherwise.
  """
  registry = artifact.ArtifactFallbackCollector.classes.items()
  for fallback_name, fallback_cls in registry:
    # The registry is shared across the class hierarchy; skip entries that
    # are not actual fallback collectors.
    if not aff4.issubclass(fallback_cls, artifact.ArtifactFallbackCollector):
      continue
    if artifact_name not in fallback_cls.artifacts:
      continue

    if artifact_name in self.state.called_fallbacks:
      self.Log("Already called fallback class %s for artifact: %s",
               fallback_name, artifact_name)
      continue

    self.Log("Calling fallback class %s for artifact: %s", fallback_name,
             artifact_name)
    self.CallFlow(fallback_name,
                  request_data=request_data.ToDict(),
                  artifact_name=artifact_name,
                  next_state="ProcessCollected")

    # Make sure we only try this once
    self.state.called_fallbacks.add(artifact_name)
    return True
  return False
def GetAllWellKnownFlows(cls, token=None):
  """Get instances of all well known flows.

  Args:
    token: Security token passed to each instantiated flow.

  Returns:
    Dict mapping each well-known session id's flow name to an instance of
    the corresponding WellKnownFlow, opened in "rw" mode.
  """
  well_known_flows = {}
  # NOTE: the original used `cls` as the loop variable, shadowing the
  # classmethod's own `cls` parameter; renamed for clarity.
  for flow_cls in GRRFlow.classes.values():
    if aff4.issubclass(flow_cls, WellKnownFlow) and (
        flow_cls.well_known_session_id):
      well_known_flow = flow_cls(flow_cls.well_known_session_id,
                                 mode="rw",
                                 token=token)
      well_known_flows[flow_cls.well_known_session_id.FlowName()] = (
          well_known_flow)

  return well_known_flows
def ScheduleSystemCronFlows(names=None, token=None):
  """Schedule all the SystemCronFlows found.

  Validation problems with Cron.disabled_system_jobs are accumulated and
  raised as a single ValueError after scheduling completes, so one bad
  config entry does not block the remaining jobs.

  Args:
    names: Optional list of flow names to consider; defaults to every
      registered flow (only SystemCronFlows are scheduled).
    token: Security token used for datastore access.

  Raises:
    ValueError: If Cron.disabled_system_jobs contains unknown flow names or
      names of flows that are not SystemCronFlows.
  """
  errors = []
  for job_name in config.CONFIG["Cron.disabled_system_jobs"]:
    job_cls = flow.GRRFlow.classes.get(job_name)
    if job_cls is None:
      errors.append("No such flow: %s." % job_name)
    elif not aff4.issubclass(job_cls, SystemCronFlow):
      errors.append("Disabled system cron job name doesn't correspond to "
                    "a flow inherited from SystemCronFlow: %s" % job_name)

  if names is None:
    names = flow.GRRFlow.classes.keys()

  for name in names:
    cls = flow.GRRFlow.classes[name]
    if not aff4.issubclass(cls, SystemCronFlow):
      continue

    cron_args = CreateCronJobFlowArgs(periodicity=cls.frequency)
    cron_args.flow_runner_args.flow_name = name
    cron_args.lifetime = cls.lifetime
    cron_args.allow_overruns = cls.allow_overruns
    cron_args.start_time = GetStartTime(cls)

    # Disabled either by the class itself or by the config blacklist.
    disabled = bool(cls.disabled) or (
        name in config.CONFIG["Cron.disabled_system_jobs"])

    CRON_MANAGER.ScheduleFlow(cron_args=cron_args,
                              job_name=name,
                              token=token,
                              disabled=disabled)

  if errors:
    raise ValueError(
        "Error(s) while parsing Cron.disabled_system_jobs: %s" % errors)
def _AddTest(self, test_name, system, client_version):
  """Adds test_name to the pending test set if it applies to this client.

  Args:
    test_name: Name of an entry in base.AutomatedTest.classes.
    system: Client platform string, compared against the test's platforms.
    client_version: Client version number, compared against the test's
      client_min_version (when the test declares one).
  """
  # We need to exclude classes that aren't in automatedtest, but .classes is
  # shared between all classes in the inheritance structure by design.
  test_cls = base.AutomatedTest.classes[test_name]
  if not aff4.issubclass(test_cls, base.AutomatedTest):
    return
  if system not in test_cls.platforms:
    return
  if test_cls.client_min_version and (
      client_version < test_cls.client_min_version):
    return
  # Abstract tests are base classes, never scheduled directly.
  if test_cls.__name__.startswith("Abstract"):
    return
  self.state.test_set.add(test_name)
def GetRendererForValueOrClass(cls, value, limit_lists=-1):
  """Returns renderer corresponding to a given value and rendering args.

  Args:
    value: Either an RDFValue instance or an RDFValue class.
    limit_lists: Limit on rendered list length; part of the cache key so
      different limits get distinct renderer instances.

  Returns:
    An ApiValueRenderer instance configured with limit_lists.

  Raises:
    RuntimeError: If no registered renderer matches the value.
  """
  # Hoisted out of the candidate loop - this test is loop-invariant (the
  # original recomputed inspect.isclass(value) for every candidate).
  is_class = inspect.isclass(value)
  value_cls = value if is_class else value.__class__

  cache_key = "%s_%d" % (value_cls.__name__, limit_lists)
  try:
    renderer_cls = cls._renderers_cache[cache_key]
  except KeyError:
    candidates = []
    for candidate in ApiValueRenderer.classes.values():
      candidate_class = candidate.value_class
      # Renderers with no declared value_class can never match.
      if not candidate_class:
        continue

      if is_class:
        if aff4.issubclass(value_cls, candidate_class):
          candidates.append((candidate, candidate_class))
      elif isinstance(value, candidate_class):
        candidates.append((candidate, candidate_class))

    if not candidates:
      raise RuntimeError("No renderer found for value %s." %
                         value.__class__.__name__)

    # Pick the most specific renderer: the one whose value_class sits
    # deepest in the inheritance hierarchy (longest MRO).
    candidates = sorted(candidates,
                        key=lambda candidate: len(candidate[1].mro()))
    renderer_cls = candidates[-1][0]
    cls._renderers_cache[cache_key] = renderer_cls

  return renderer_cls(limit_lists=limit_lists)
def BuildTypeDescriptor(self, value_cls):
  """Builds an ApiRDFValueDescriptor for an RDFStruct class.

  Args:
    value_cls: An RDFStruct subclass whose type_infos describe its fields.

  Returns:
    ApiRDFValueDescriptor with one ApiRDFValueFieldDescriptor per field,
    post-processed by self.descriptor_processors, plus (best-effort) a
    default instance of value_cls.
  """
  result = ApiRDFValueDescriptor(
      name=value_cls.__name__,
      parents=[klass.__name__ for klass in value_cls.__mro__],
      doc=value_cls.__doc__ or "",
      kind="STRUCT")

  for field_desc in value_cls.type_infos:
    # Capture repeated-ness before unwrapping: the delegate of a ProtoList
    # describes the element, not the list.
    repeated = isinstance(field_desc, rdf_structs.ProtoList)
    if hasattr(field_desc, "delegate"):
      field_desc = field_desc.delegate

    field = ApiRDFValueFieldDescriptor(
        name=field_desc.name,
        index=field_desc.field_number,
        repeated=repeated,
        dynamic=isinstance(field_desc, rdf_structs.ProtoDynamicEmbedded))

    field_type = field_desc.type
    if field_type is not None:
      field.type = field_type.__name__

      if field_type.context_help_url:
        # Class attribute context_help_url masks similarly named protobuf
        # attribute. Using the Set method to set the right attribute.
        field.Set("context_help_url", field_type.context_help_url)

    if field_type == rdf_structs.EnumNamedValue:
      # Emit allowed enum values sorted by their numeric value.
      for enum_label in sorted(field_desc.enum, key=field_desc.enum.get):
        enum_value = field_desc.enum[enum_label]
        labels = [
            rdf_structs.SemanticDescriptor.Labels.reverse_enum[x]
            for x in enum_value.labels or []
        ]

        field.allowed_values.append(
            ApiRDFAllowedEnumValueDescriptor(
                name=enum_label,
                value=int(enum_value),
                labels=labels,
                doc=enum_value.description))

    if (field_desc.default is not None and
        not aff4.issubclass(field_type, rdf_structs.RDFStruct) and
        hasattr(field_desc, "GetDefault")):
      field.default = field.GetDefaultValueClass()(field_desc.GetDefault())

    if field_desc.description:
      field.doc = field_desc.description

    if field_desc.friendly_name:
      field.friendly_name = field_desc.friendly_name

    if field_desc.labels:
      field.labels = [
          rdf_structs.SemanticDescriptor.Labels.reverse_enum[x]
          for x in field_desc.labels
      ]

    result.fields.append(field)

  for processor in self.descriptor_processors:
    result.fields = processor(self, result.fields)

  if getattr(value_cls, "union_field", None):
    result.union_field_name = value_cls.union_field

  try:
    result.default = value_cls()
  except Exception as e:  # pylint: disable=broad-except
    # TODO(user): Some RDFStruct classes can't be constructed using
    # default constructor (without arguments). Fix the code so that
    # we can either construct all the RDFStruct classes with default
    # constructors or know exactly which classes can't be constructed
    # with default constructors.
    # BUG FIX: the original logged field_type.__name__ here - that names
    # the *last field's* type, not the class we failed to construct, and
    # raises NameError when value_cls has no fields at all.
    logging.debug("Can't create default for struct %s: %s",
                  value_cls.__name__, e)

  return result
def CreateClientObject(self, vfs_fixture):
  """Make a new client object.

  Builds the AFF4 fixture objects described by vfs_fixture under
  self.client_id, interpolating %-style placeholders from self.args and
  writing them all at the fixed fake time self.age.

  Args:
    vfs_fixture: Iterable of (path_template, (aff4_type, attributes))
      pairs, where attributes maps attribute predicate names to text-form
      values.
  """
  # First remove the old fixture just in case its still there.
  aff4.FACTORY.Delete(self.client_id, token=self.token)

  # Create the fixture at a fixed time.
  with test_lib.FakeTime(self.age):
    for path, (aff4_type, attributes) in vfs_fixture:
      path %= self.args

      aff4_object = aff4.FACTORY.Create(self.client_id.Add(path),
                                        aff4_type,
                                        mode="rw",
                                        token=self.token)
      for attribute_name, value in attributes.items():
        attribute = aff4.Attribute.PREDICATES[attribute_name]
        if isinstance(value, (str, unicode)):
          # Interpolate the value
          value %= self.args

        # Is this supposed to be an RDFValue array?
        if aff4.issubclass(attribute.attribute_type,
                           rdf_protodict.RDFValueArray):
          rdfvalue_object = attribute()
          for item in value:
            new_object = rdfvalue_object.rdf_type.FromTextFormat(
                utils.SmartStr(item))
            rdfvalue_object.Append(new_object)

        # It is a text serialized protobuf.
        elif aff4.issubclass(attribute.attribute_type,
                             rdf_structs.RDFProtoStruct):
          # Use the alternate constructor - we always write protobufs in
          # textual form:
          rdfvalue_object = attribute.attribute_type.FromTextFormat(
              utils.SmartStr(value))

        elif aff4.issubclass(attribute.attribute_type, rdfvalue.RDFInteger):
          rdfvalue_object = attribute(int(value))
        else:
          rdfvalue_object = attribute(value)

        # If we don't already have a pathspec, try and get one from the stat.
        if aff4_object.Get(aff4_object.Schema.PATHSPEC) is None:
          # If the attribute was a stat, it has a pathspec nested in it.
          # We should add that pathspec as an attribute.
          if attribute.attribute_type == rdf_client.StatEntry:
            stat_object = attribute.attribute_type.FromTextFormat(
                utils.SmartStr(value))
            if stat_object.pathspec:
              pathspec_attribute = aff4.Attribute(
                  "aff4:pathspec", rdf_paths.PathSpec,
                  "The pathspec used to retrieve "
                  "this object from the client.", "pathspec")
              aff4_object.AddAttribute(pathspec_attribute,
                                       stat_object.pathspec)

        # BUG FIX: the original tested membership in
        # ["aff4:content", "aff4:content"] - the same element twice; a
        # single equality test is equivalent.
        if attribute == "aff4:content":
          # For AFF4MemoryStreams we need to call Write() instead of
          # directly setting the contents..
          aff4_object.Write(rdfvalue_object)
        else:
          aff4_object.AddAttribute(attribute, rdfvalue_object)

      # Populate the KB from the client attributes.
      if aff4_type == aff4_grr.VFSGRRClient:
        kb = rdf_client.KnowledgeBase()
        artifact.SetCoreGRRKnowledgeBaseValues(kb, aff4_object)
        aff4_object.Set(aff4_object.Schema.KNOWLEDGE_BASE, kb)

      # Make sure we do not actually close the object here - we only want to
      # sync back its attributes, not run any finalization code.
      aff4_object.Flush()
      if aff4_type == aff4_grr.VFSGRRClient:
        index = client_index.CreateClientIndex(token=self.token)
        index.AddClient(aff4_object)
def RunEndToEndTests():
  """Runs all applicable end-to-end tests against the target clients.

  Initializes the server config/test context, validates test platform
  declarations, opens the target clients, runs every matching
  ClientTestBase subclass through a unittest runner and prints a
  per-client pass/fail summary to stdout.
  """
  runner = unittest.TextTestRunner()

  # We are running a test so let the config system know that.
  config.CONFIG.AddContext(contexts.TEST_CONTEXT,
                           "Context applied when we run tests.")
  server_startup.Init()

  token = access_control.ACLToken(username="******",
                                  reason="Running end to end client tests.")

  # We need this for the launchbinary test
  with aff4.FACTORY.Create("aff4:/users/GRREndToEndTest", aff4_users.GRRUser,
                           mode="rw", token=token) as test_user:
    test_user.AddLabel("admin")

  # Targets come from flags, falling back to Test.end_to_end_client*
  # config options inside GetClientTestTargets.
  client_id_set = base.GetClientTestTargets(
      client_ids=flags.FLAGS.client_ids,
      hostnames=flags.FLAGS.hostnames,
      checkin_duration_threshold="1h",
      token=token)

  # Fail fast on misdeclared tests before touching any client.
  for cls in base.ClientTestBase.classes.values():
    for p in cls.platforms:
      if p not in set(["Linux", "Darwin", "Windows"]):
        raise ValueError("Unsupported platform: %s in class %s" %
                         (p, cls.__name__))

  # NOTE(review): execution continues after this warning; MultiOpen over an
  # empty set simply yields nothing, so the summary below is empty.
  if not client_id_set:
    print(
        "No clients to test on. Define Test.end_to_end_client* config "
        "options, or pass them as parameters.")

  results_by_client = {}
  for client in aff4.FACTORY.MultiOpen(client_id_set, token=token):
    client_summary = client.GetSummary()

    # system_info is only present once interrogate has completed.
    if hasattr(client_summary, "system_info"):
      sysinfo = client_summary.system_info
    else:
      raise RuntimeError("Unknown system type, likely waiting on interrogate"
                         " to complete.")

    results = {}
    results_by_client[client.urn] = results
    for cls in base.ClientTestBase.classes.values():
      # Optional testname filter from the command line.
      if flags.FLAGS.testnames and (cls.__name__ not in flags.FLAGS.testnames):
        continue

      # The class registry is shared; skip non-test entries.
      if not aff4.issubclass(cls, base.ClientTestBase):
        continue

      # Abstract tests are base classes, never run directly.
      if cls.__name__.startswith("Abstract"):
        continue

      if sysinfo.system in cls.platforms:
        print "Running %s on %s (%s: %s, %s, %s)" % (
            cls.__name__, client_summary.client_id, sysinfo.fqdn,
            sysinfo.system, sysinfo.version, sysinfo.machine)
        try:
          # Mixin the unittest framework so we can use the test runner to run
          # the test and get nice output. We don't want to depend on unittest
          # code in the tests themselves.
          testcase = cls(client_id=client_summary.client_id,
                         platform=sysinfo.system,
                         token=token,
                         local_client=flags.FLAGS.local_client,
                         local_worker=flags.FLAGS.local_worker)
          results[cls.__name__] = runner.run(testcase)
        except Exception:  # pylint: disable=broad-except
          # Best effort: one failing test must not abort the whole run.
          logging.exception("Failed to run test %s", cls)

  # Print a little summary.
  for client, results in results_by_client.iteritems():
    print "Results for %s:" % client
    for testcase, result in sorted(results.items()):
      res = "[ OK ]"
      if result.errors or result.failures:
        res = "[ FAIL ]"
      print "%45s: %s" % (testcase, res)