config_lib.DEFINE_integer( "Worker.task_limit", 2000, "Limits the number of tasks a worker retrieves " "every poll") config_lib.DEFINE_integer("Worker.flow_lease_time", 600, "Duration of flow lease time in seconds.") config_lib.DEFINE_integer( "Frontend.throttle_average_interval", 60, "Time interval over which average request rate is " "calculated when throttling is enabled.") config_lib.DEFINE_list( "Frontend.well_known_flows", ["aff4:/flows/W:TransferStore", "aff4:/flows/W:Stats"], "Allow these well known flows to run directly on the " "frontend. Other flows are scheduled as normal.") # Smtp settings. config_lib.DEFINE_string("Worker.smtp_server", "localhost", "The smpt server for sending email alerts.") config_lib.DEFINE_integer("Worker.smtp_port", 25, "The smtp server port.") # Server Cryptographic settings. config_lib.DEFINE_semantic( rdfvalue.PEMPrivateKey, "PrivateKeys.ca_key", description="CA private key. Used to sign for client enrollment.", )
"The main path to the locally cached documentation.") config_lib.DEFINE_string( "AdminUI.webauth_manager", "NullWebAuthManager", "The web auth manager for controlling access to the UI.") config_lib.DEFINE_bool("AdminUI.django_debug", True, "Turn on to add django debugging") config_lib.DEFINE_string( "AdminUI.django_secret_key", "CHANGE_ME", "This is a secret key that should be set in the server " "config. It is used in XSRF and session protection.") config_lib.DEFINE_list( "AdminUI.django_allowed_hosts", ["*"], "Set the django ALLOWED_HOSTS parameter. " "See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts") config_lib.DEFINE_bool("AdminUI.enable_ssl", False, "Turn on SSL. This needs AdminUI.ssl_cert to be set.") config_lib.DEFINE_string("AdminUI.ssl_cert_file", "", "The SSL certificate to use.") config_lib.DEFINE_string( "AdminUI.ssl_key_file", None, "The SSL key to use. The key may also be part of the cert file, in which " "case this can be omitted.") config_lib.DEFINE_string("AdminUI.url", "http://localhost:8000/", "The direct external URL for the user interface.")
#!/usr/bin/env python """Configuration parameters for the check subsystem.""" from grr.lib import config_lib config_lib.DEFINE_list("Checks.config_dir", ["%(grr/checks|resource)", "%(grr/checks/local|resource)"], "A list of directories to load checks from.") config_lib.DEFINE_list("Checks.config_files", [], "Paths of check configurations to load at start up.") config_lib.DEFINE_integer("Checks.max_results", 50, "Maximum items to include as check results.")
from grr.lib import config_lib
from grr.lib import rdfvalue
from grr.lib import type_info

# Windows Memory driver information.
config_lib.DEFINE_string("MemoryDriver.driver_service_name", "Pmem",
                         "The SCCM service name for the driver.")

config_lib.DEFINE_string("MemoryDriver.driver_display_name",
                         "%(Client.name) Pmem",
                         "The SCCM display name for the driver.")

config_lib.DEFINE_list("MemoryDriver.driver_files", [],
                       "The default drivers to use.")

config_lib.DEFINE_list("MemoryDriver.aff4_paths", [],
                       "The AFF4 paths to the driver objects.")

config_lib.DEFINE_string("MemoryDriver.device_path", r"\\\\.\\pmem",
                         "The device path which the client will open after "
                         "installing this driver.")

config_lib.DEFINE_string("MemoryDriver.service_name", "pmem",
                         "The name of the service created for "
                         "the driver (Windows).")

config_lib.DEFINE_string("MemoryDriver.display_name", "%(service_name)",
                         "The display name of the service created for "
                         "the driver (Windows).")
help="How long do we wait for a transaction lock.") DATASTORE_PATHING = [ r"%{(?P<path>files/hash/generic/sha256/...).*}", r"%{(?P<path>files/hash/generic/sha1/...).*}", r"%{(?P<path>files/hash/generic/md5/...).*}", r"%{(?P<path>files/hash/pecoff/md5/...).*}", r"%{(?P<path>files/hash/pecoff/sha1/...).*}", r"%{(?P<path>files/nsrl/...).*}", r"%{(?P<path>W/[^/]+).*}", r"%{(?P<path>CA/[^/]+).*}", r"%{(?P<path>C\..\{1,16\}?)($|/.*)}", r"%{(?P<path>hunts/[^/]+).*}", r"%{(?P<path>blobs/[^/]+).*}", r"%{(?P<path>[^/]+).*}" ] config_lib.DEFINE_list("Datastore.pathing", DATASTORE_PATHING, ("Path selection for subjects in the file-based data " "stores (by priority).")) config_lib.DEFINE_string("Datastore.location", default="/var/grr-datastore", help=("Location of the data store (usually a " "filesystem directory)")) # SQLite data store. config_lib.DEFINE_integer( "SqliteDatastore.vacuum_check", default=10, help=("Number of rows that need to be deleted before " "checking if the sqlite file may need to be " "vacuumed."))
from grr.lib import rdfvalue
from grr.lib.rdfvalues import crypto

# General Client options.
config_lib.DEFINE_string("Client.name", "GRR",
                         "The name of the client. This will be used as a base "
                         "name to generate many other default parameters such "
                         "as binary names and service names. Note that on "
                         "Linux we lowercase the name to conform with most "
                         "Linux naming conventions.")

config_lib.DEFINE_string("Client.binary_name", "%(Client.name)",
                         "The name of the client binary.")

config_lib.DEFINE_list("Client.labels", [], "Labels for this client.")

config_lib.DEFINE_string("Client.company_name", "GRR Project",
                         "The name of the company which made the client.")

config_lib.DEFINE_string("Client.description", "%(name) %(platform) %(arch)",
                         "A description of this specific client build.")

config_lib.DEFINE_string("Client.platform", "windows",
                         "The platform we are running on.")

config_lib.DEFINE_string("Client.arch", "amd64",
                         "The architecture we are running on.")

config_lib.DEFINE_string("Client.build_time", "Unknown",
                         "The time the client was built.")
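# Illustrative sketch (assumption, not GRR's config implementation): the
# %(option) markers above are interpolated against other options at read time,
# with bare names like %(name) resolved relative to the option's own section.
# A minimal standalone expander over a plain dict:
import re


def ExpandOption(name, options):
  """Expands %(...) references in options[name] against the options dict."""
  section = name.split(".")[0]

  def _Lookup(match):
    key = match.group(1)
    if "." not in key:  # Section-relative reference, e.g. %(name).
      key = "%s.%s" % (section, key)
    return ExpandOption(key, options)

  return re.sub(r"%\(([^)|]+)\)", _Lookup, options[name])


# Example: with the defaults above, expanding "Client.description" yields
# "GRR windows amd64".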
"could not complete after this many seconds.") # We write a journal entry for the flow when it's about to be processed. # If the journal entry is there after this time, the flow will get terminated. config_lib.DEFINE_integer( "Worker.stuck_flows_timeout", 60 * 60 * 6, "Flows who got stuck in the worker for more than this time (in seconds) " "are forcibly terminated") config_lib.DEFINE_integer( "Frontend.throttle_average_interval", 60, "Time interval over which average request rate is " "calculated when throttling is enabled.") config_lib.DEFINE_list( "Frontend.well_known_flows", ["TransferStore", "Stats"], "Allow these well known flows to run directly on the " "frontend. Other flows are scheduled as normal.") config_lib.DEFINE_string( "Frontend.static_aff4_prefix", "aff4:/web/static/", "The AFF4 URN prefix for all streams served publicly from the frontend.") config_lib.DEFINE_string( "Frontend.static_url_path_prefix", "/static/", "The URL prefix for all streams served publicly from the frontend.") # Smtp settings. config_lib.DEFINE_string("Worker.smtp_server", "localhost", "The smtp server for sending email alerts.") config_lib.DEFINE_integer("Worker.smtp_port", 25, "The smtp server port.")
#!/usr/bin/env python """Configuration parameters for the artifact subsystem.""" import os from grr.lib import config_lib config_lib.DEFINE_list("Artifacts.artifact_dirs", [ os.path.normpath(os.path.dirname(__file__) + "/../../grr/artifacts"), os.path.normpath( os.path.dirname(__file__) + "/../../grr/artifacts/" + "flow_templates"), os.path.normpath(os.path.dirname(__file__) + "/../../grr/artifacts/local") ], "A list directories to load artifacts from.") config_lib.DEFINE_list( "Artifacts.knowledge_base", [ "AllUsersAppDataEnvironmentVariable", "AllUsersProfileEnvironmentVariable", "CurrentControlSet", "ProgramFiles", "ProgramFilesx86", "SystemDriveEnvironmentVariable", "SystemRoot", "TempEnvironmentVariable", "UserShellFolders", "WinCodePage", "WinDirEnvironmentVariable", "WinDomainName", "WinPathEnvironmentVariable", "WinTimeZone", "WindowsRegistryProfiles", "WMIProfileUsersHomeDir", "WMIAccountUsersDomain", "OSXUsers", "LinuxUserProfiles", "LinuxRelease" ], "List of artifacts that are collected regularly by" " interrogate and used for interpolation of client-side" " variables. Includes artifacts for all supported OSes. " "Anything not in this list won't be downloaded by" " interrogate so be sure to include any necessary" " dependencies.")
"could not complete after this many seconds.") # We write a journal entry for the flow when it's about to be processed. # If the journal entry is there after this time, the flow will get terminated. config_lib.DEFINE_integer( "Worker.stuck_flows_timeout", 60 * 60 * 6, "Flows who got stuck in the worker for more than this time (in seconds) " "are forcibly terminated") config_lib.DEFINE_integer( "Frontend.throttle_average_interval", 60, "Time interval over which average request rate is " "calculated when throttling is enabled.") config_lib.DEFINE_list( "Frontend.well_known_flows", ["TransferStore", "Stats"], "Allow these well known flows to run directly on the " "frontend. Other flows are scheduled as normal.") # Smtp settings. config_lib.DEFINE_string("Worker.smtp_server", "localhost", "The smtp server for sending email alerts.") config_lib.DEFINE_integer("Worker.smtp_port", 25, "The smtp server port.") config_lib.DEFINE_bool("Worker.smtp_starttls", False, "Enable TLS for the smtp connection.") config_lib.DEFINE_string("Worker.smtp_user", None, "Username for the smtp connection.") config_lib.DEFINE_string("Worker.smtp_password", None,
#!/usr/bin/env python """API config options.""" from grr.lib import config_lib from grr.lib import rdfvalue config_lib.DEFINE_list("API.access_groups", [], "Groups allowed to call the AdminUI API. Empty list" " means anyone can call. Note you MUST replace the " "lib.authorizations.local.groups.GroupAccess class " "with a class that can retrieve group information to" " use this setting.") config_lib.DEFINE_string("API.access_groups_label", "api_access", "The access that API.access_groups will be granted. " "This config option is useful if you have multiple " "API servers which should be accessed by different " "API.access_group groups. You likely don't want to " "change this.") config_lib.DEFINE_integer("API.DailyFlowRequestLimit", "10", "Number of flows a user can run on a single client " "per day before being blocked by throttling. Set to " "0 to disable checking.") config_lib.DEFINE_semantic(rdfvalue.Duration, "API.FlowDuplicateInterval", default="1200s", description="Amount of time " "that needs to pass before the throttler will allow " "an identical flow to run on the same client. Set " "to 0s to disable checking.")
from grr.lib import aff4
from grr.lib import config_lib
from grr.lib import data_store
from grr.lib import flow
from grr.lib import master
from grr.lib import rdfvalue
from grr.lib import registry
from grr.lib import stats
from grr.lib import utils
from grr.proto import flows_pb2

config_lib.DEFINE_list("Cron.enabled_system_jobs", [],
                       "List of system cron jobs that will be "
                       "automatically scheduled on worker startup. "
                       "If cron jobs from this list were disabled "
                       "before, they will be enabled on worker "
                       "startup. Vice versa, if they were enabled "
                       "but are not specified in the list, they "
                       "will be disabled.")


class Error(Exception):
  pass


class CronSpec(rdfvalue.Duration):
  data_store_type = "string"

  def SerializeToDataStore(self):
    return self.SerializeToString()
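# Illustrative sketch (assumption, not the GRR cron manager): the
# reconciliation described for Cron.enabled_system_jobs, expressed over plain
# sets of job names. Jobs named in the config list end up enabled; all other
# known system jobs end up disabled.
def ReconcileSystemCronJobs(all_system_jobs, enabled_in_config):
  """Returns (jobs_to_enable, jobs_to_disable) as sets of job names."""
  enabled = set(enabled_in_config) & set(all_system_jobs)
  return enabled, set(all_system_jobs) - enabled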
config_lib.DEFINE_integer( "Source.version_numeric", "%(version_major)%(version_minor)" "%(version_revision)%(version_release)", "Version string of the client as an integer.") # Note: Each thread adds about 8mb for stack space. config_lib.DEFINE_integer("Threadpool.size", 50, "Number of threads in the shared thread pool.") config_lib.DEFINE_integer( "Worker.queue_shards", 5, "Queue notifications will be sharded across " "this number of datastore subjects.") config_lib.DEFINE_list( "Frontend.well_known_flows", ["TransferStore", "Stats"], "Allow these well known flows to run directly on the " "frontend. Other flows are scheduled as normal.") # Smtp settings. config_lib.DEFINE_string("Worker.smtp_server", "localhost", "The smtp server for sending email alerts.") config_lib.DEFINE_integer("Worker.smtp_port", 25, "The smtp server port.") config_lib.DEFINE_bool("Worker.smtp_starttls", False, "Enable TLS for the smtp connection.") config_lib.DEFINE_string("Worker.smtp_user", None, "Username for the smtp connection.") config_lib.DEFINE_string("Worker.smtp_password", None,
from grr.lib import config_lib
from grr.lib.rdfvalues import crypto

# General Client options.
config_lib.DEFINE_string("Client.name", "GRR",
                         "The name of the client. This will be used as a base "
                         "name to generate many other default parameters such "
                         "as binary names and service names. Note that on "
                         "Linux we lowercase the name to conform with most "
                         "Linux naming conventions.")

config_lib.DEFINE_string("Client.binary_name", "%(Client.name)",
                         "The name of the client binary.")

config_lib.DEFINE_list("Client.labels", [], "Labels for this client.")

config_lib.DEFINE_string("Client.company_name", "GRR Project",
                         "The name of the company which made the client.")

config_lib.DEFINE_string("Client.description", "%(name) %(platform) %(arch)",
                         "A description of this specific client build.")

config_lib.DEFINE_string("Client.platform", "windows",
                         "The platform we are running on.")

config_lib.DEFINE_string("Client.arch", "amd64",
                         "The architecture we are running on.")

config_lib.DEFINE_string("Client.build_time", "Unknown",
                         "The time the client was built.")
"%(grr/gui/templates|resource)", "The main path to the templates.") config_lib.DEFINE_string("AdminUI.help_root", "%(docs|resource)", "The main path to the locally cached documentation.") config_lib.DEFINE_string( "AdminUI.webauth_manager", "NullWebAuthManager", "The web auth manager for controlling access to the UI.") config_lib.DEFINE_string( "AdminUI.remote_user_header", "X-Remote-User", "Header containing authenticated user's username. " "Used by RemoteUserWebAuthManager.") config_lib.DEFINE_list( "AdminUI.remote_user_trusted_ips", ["127.0.0.1"], "Only requests coming from these IPs will be processed " "by RemoteUserWebAuthManager.") config_lib.DEFINE_string("AdminUI.firebase_api_key", None, "Firebase API key. Used by FirebaseWebAuthManager.") config_lib.DEFINE_string("AdminUI.firebase_auth_domain", None, "Firebase API key. Used by FirebaseWebAuthManager.") config_lib.DEFINE_string( "AdminUI.firebase_auth_provider", "GoogleAuthProvider", "Firebase auth provider (see " "https://firebase.google.com/docs/auth/web/start). Used by " "FirebaseWebAuthManager.") # TODO(amoser): Deprecated, remove at some point. config_lib.DEFINE_string("AdminUI.django_secret_key", "CHANGE_ME", "This is deprecated. Used csrf_secret_key instead!.")
from grr.lib import config_lib
from grr.lib import rdfvalue

# General Client options.
config_lib.DEFINE_string("Client.name", "GRR",
                         "The name of the client. This will be used as a base "
                         "name to generate many other default parameters such "
                         "as binary names and service names. Note that on "
                         "Linux we lowercase the name to conform with most "
                         "Linux naming conventions.")

config_lib.DEFINE_string("Client.binary_name", "%(Client.name)",
                         "The name of the client binary.")

config_lib.DEFINE_list("Client.labels", [], "Labels for this client.")

config_lib.DEFINE_string("Client.company_name", "GRR Project",
                         "The name of the company which made the client.")

config_lib.DEFINE_string("Client.description", "%(name) %(platform) %(arch)",
                         "A description of this specific client build.")

config_lib.DEFINE_string("Client.platform", "windows",
                         "The platform we are running on.")

config_lib.DEFINE_string("Client.arch", "amd64",
                         "The architecture we are running on.")

config_lib.DEFINE_string("Client.build_time", "Unknown",
                         "The time the client was built.")
#!/usr/bin/env python """Configuration parameters for the data servers.""" from grr.lib import config_lib # The Data Store server. config_lib.DEFINE_integer("Dataserver.stats_frequency", 60, ("Time interval in seconds for data server " "statistics updates")) config_lib.DEFINE_list("Dataserver.server_list", ["http://127.0.0.1:7000", "http://127.0.0.1:7001"], "List of allowed data servers (first is the master).") config_lib.DEFINE_integer("Dataserver.max_connections", 5, ("Maximum number of connections to the data server " "per process.")) config_lib.DEFINE_integer("Dataserver.port", 7000, "Port for a specific data server.") # Login information for clients of the data servers. config_lib.DEFINE_list( "Dataserver.client_credentials", ["user:pass:rw"], "List of data server client credentials, given as " "<username>:<password>:<mode> where mode is r or rw.") # Login information used by data servers when registering with the master. config_lib.DEFINE_string("Dataserver.server_username", "server", "Username for servers.")
#!/usr/bin/env python """Configuration parameters for the artifact subsystem.""" from grr.lib import config_lib config_lib.DEFINE_list("Artifacts.artifact_dirs", [ "%(grr/artifacts|resource)", "%(grr/artifacts/flow_templates|resource)", "%(grr/artifacts/local|resource)" ], "A list directories to load artifacts from.") config_lib.DEFINE_list( "Artifacts.knowledge_base", [ "LinuxRelease", "LinuxUserProfiles", "MacOSUsers", "WindowsCodePage", "WindowsDomainName", "WindowsEnvironmentVariableAllUsersAppData", "WindowsEnvironmentVariableAllUsersProfile", "WindowsEnvironmentVariablePath", "WindowsEnvironmentVariableProfilesDirectory", "WindowsEnvironmentVariableProgramFiles", "WindowsEnvironmentVariableProgramFilesX86", "WindowsEnvironmentVariableSystemDrive", "WindowsEnvironmentVariableSystemRoot", "WindowsEnvironmentVariableTemp", "WindowsEnvironmentVariableWinDir", "WindowsRegistryCurrentControlSet", "WindowsRegistryProfiles", "WindowsUserShellFolders", "WindowsTimezone",
config_lib.DEFINE_integer( "Source.version_numeric", "%(version_major)%(version_minor)" "%(version_revision)%(version_release)", "Version string of the client as an integer.") # Note: Each thread adds about 8mb for stack space. config_lib.DEFINE_integer("Threadpool.size", 50, "Number of threads in the shared thread pool.") config_lib.DEFINE_integer( "Worker.queue_shards", 5, "Queue notifications will be sharded across " "this number of datastore subjects.") config_lib.DEFINE_list( "Frontend.well_known_flows", ["TransferStore", "Stats"], "Allow these well known flows to run directly on the " "frontend. Other flows are scheduled as normal.") config_lib.DEFINE_list( "Frontend.DEBUG_well_known_flows_blacklist", [], "Drop these well known flows requests without " "processing. Useful as an emergency tool to reduce " "the load on the system.") # Smtp settings. config_lib.DEFINE_string("Worker.smtp_server", "localhost", "The smtp server for sending email alerts.") config_lib.DEFINE_integer("Worker.smtp_port", 25, "The smtp server port.") config_lib.DEFINE_bool("Worker.smtp_starttls", False,
"The queue manager retries to work on requests it " "could not complete after this many seconds.") # We write a journal entry for the flow when it's about to be processed. # If the journal entry is there after this time, the flow will get terminated. config_lib.DEFINE_integer( "Worker.stuck_flows_timeout", 60 * 60 * 6, "Flows who got stuck in the worker for more than this time (in seconds) " "are forcibly terminated") config_lib.DEFINE_integer("Frontend.throttle_average_interval", 60, "Time interval over which average request rate is " "calculated when throttling is enabled.") config_lib.DEFINE_list("Frontend.well_known_flows", ["TransferStore", "Stats"], "Allow these well known flows to run directly on the " "frontend. Other flows are scheduled as normal.") # Smtp settings. config_lib.DEFINE_string("Worker.smtp_server", "localhost", "The smtp server for sending email alerts.") config_lib.DEFINE_integer("Worker.smtp_port", 25, "The smtp server port.") config_lib.DEFINE_bool("Worker.smtp_starttls", False, "Enable TLS for the smtp connection.") config_lib.DEFINE_string("Worker.smtp_user", None, "Username for the smtp connection.") config_lib.DEFINE_string("Worker.smtp_password", None,
#!/usr/bin/env python """Configuration parameters for logging and error reporting subsystems.""" from grr.lib import config_lib from grr.lib import rdfvalue from grr.lib import type_info config_lib.DEFINE_string("Logging.domain", "localhost", "The email domain belonging to this installation. " "Leave blank to not restrict email to this domain") config_lib.DEFINE_list("Logging.engines", ["stderr"], "Enabled logging engines. Valid values are " "combinations of stderr,file,syslog,event_log.") config_lib.DEFINE_bool("Logging.verbose", False, help="If true log more verbosely.") config_lib.DEFINE_string("Logging.path", "%(TEMP|env)/tmp/", help="Path to log file directory.") config_lib.DEFINE_string("Logging.syslog_path", "localhost:514", help="Path to syslog socket. This can be a unix " "domain socket or in a UDP host:port notation.") config_lib.DEFINE_string("Logging.filename", "%(Logging.path)/GRRlog.txt", help="Filename of the grr log file.") config_lib.DEFINE_string( "Logging.format",
"after this many seconds.") config_lib.DEFINE_integer( "Worker.notification_retry_interval", 30, "The queue manager retries to work on requests it " "could not complete after this many seconds.") # We write a journal entry for the flow when it's about to be processed. # If the journal entry is there after this time, the flow will get terminated. config_lib.DEFINE_integer( "Worker.stuck_flows_timeout", 60 * 60 * 6, "Flows who got stuck in the worker for more than this time (in seconds) " "are forcibly terminated") config_lib.DEFINE_list( "Frontend.well_known_flows", ["TransferStore", "Stats"], "Allow these well known flows to run directly on the " "frontend. Other flows are scheduled as normal.") config_lib.DEFINE_list( "Frontend.DEBUG_well_known_flows_blacklist", [], "Drop these well known flows requests without " "processing. Useful as an emergency tool to reduce " "the load on the system.") config_lib.DEFINE_string( "Frontend.static_aff4_prefix", "aff4:/web/static/", "The AFF4 URN prefix for all streams served publicly from the frontend.") config_lib.DEFINE_string( "Frontend.static_url_path_prefix", "/static/", "The URL prefix for all streams served publicly from the frontend.")
#!/usr/bin/env python """Configuration parameters for the artifact subsystem.""" import os from grr.lib import config_lib config_lib.DEFINE_list( "Artifacts.artifact_dirs", [os.path.normpath(os.path.dirname(__file__) + "/../../grr/artifacts"), os.path.normpath(os.path.dirname(__file__) + "/../../grr/artifacts/local") ], "A list directories to load artifacts from.") config_lib.DEFINE_list("Artifacts.knowledge_base", ["AllUsersAppDataEnvironmentVariable", "AllUsersProfileEnvironmentVariable", "CurrentControlSet", "ProgramFiles", "ProgramFilesx86", "SystemDriveEnvironmentVariable", "SystemRoot", "TempEnvironmentVariable", "UserShellFolders", "WinCodePage", "WinDirEnvironmentVariable", "WinDomainName", "WinPathEnvironmentVariable", "WinTimeZone", "WindowsRegistryProfiles", "WindowsWMIProfileUsers",
default="%(Test.data_dir)/localtest.yaml", help="The path to a test config with local customizations.") config_lib.DEFINE_string("Test.tmpdir", "/tmp/", help="Somewhere to write temporary files.") config_lib.DEFINE_string("Test.data_store", "FakeDataStore", "The data store to run the tests against.") config_lib.DEFINE_integer("Test.remote_pdb_port", 2525, "Remote debugger port.") config_lib.DEFINE_list( "Test.end_to_end_client_ids", [], "List of client ids to perform regular end_to_end tests" " on. These clients should be always on and connected" " to the network.") config_lib.DEFINE_list( "Test.end_to_end_client_hostnames", [], "List of hostnames to perform regular end_to_end tests" " on. These clients should be always on and connected" " to the network.") config_lib.DEFINE_string( "Test.end_to_end_result_check_wait", "50m", "rdfvalue.Duration string that determines how long we " "wait after starting the endtoend test hunt before we " "check the results. Should be long enough that all " "clients will have picked up the hunt, but not so "
"URL of the 'Report a problem' link.") config_lib.DEFINE_string("AdminUI.help_url", "/help/index.html", "URL of the 'Help' link.") config_lib.DEFINE_string("AdminUI.github_docs_location", "https://github.com/google/grr-doc/blob/master", "Base path for GitHub-hosted GRR documentation. ") config_lib.DEFINE_string( "AdminUI.new_hunt_wizard.default_output_plugin", None, "Output plugin that will be added by default in the " "'New Hunt' wizard output plugins selection page.") # Temporary option that allows limiting access to legacy UI renderers. Useful # when giving access to GRR AdminUI to parties that have to use the HTTP API # only. # TODO(user): remove as soon as legacy rendering system is removed. config_lib.DEFINE_list( "AdminUI.legacy_renderers_allowed_groups", [], "Users belonging to these groups can access legacy GRR renderers, " "which are still used for some GRR features (manage binaries, legacy " "browse virtual filesystem pane, etc). If this option is not set, then " "no additional checks are performed when legacy renderers are used.") config_lib.DEFINE_string( "AdminUI.debug_impersonate_user", None, "NOTE: for debugging purposes only! If set, every request AdminUI gets " "will be attributed to the specified user. Useful for checking how AdminUI " "looks like for an access-restricted user.")
config_lib.DEFINE_string( name="ClientBuilder.config_filename", default="%(Client.binary_name).yaml", help=("The name of the configuration file which will be embedded in the " "deployable binary.")) config_lib.DEFINE_string( name="ClientBuilder.autorun_command_line", default=("%(Client.binary_name) --install " "--config %(ClientBuilder.config_filename)"), help=("The command that the installer will execute after " "unpacking the package.")) config_lib.DEFINE_list( name="ClientBuilder.installer_plugins", default=[], help="Plugins that will copied to the client installation file and run " "at install time.") config_lib.DEFINE_list( name="ClientBuilder.plugins", default=[], help="Plugins that will copied to the client installation file and run when" "the client is running.") config_lib.DEFINE_string( name="ClientBuilder.client_logging_filename", default="%(Logging.path)/%(Client.name)_log.txt", help="Filename for logging, to be copied to Client section in the client " "that gets built.")
#!/usr/bin/env python """Configuration parameters for the check subsystem.""" import os from grr.lib import config_lib config_lib.DEFINE_list("Checks.config_dir", [ os.path.normpath(os.path.dirname(__file__) + "/../../grr/checks"), os.path.normpath(os.path.dirname(__file__) + "/../../grr/checks/local") ], "A list of directories to load checks from.") config_lib.DEFINE_list("Checks.config_files", [], "Paths of check configurations to load at start up.") config_lib.DEFINE_integer("Checks.max_results", 50, "Maximum items to include as check results.")