#!/usr/bin/env python """Configuration parameters for the test subsystem.""" from grr.lib import config_lib # Default for running in the current directory config_lib.DEFINE_constant_string("Test.srcdir", "%(grr|module_path)/../", "The directory containing the source code.") config_lib.DEFINE_constant_string( "Test.data_dir", default="%(grr_response_test/test_data@grr-response-test|resource)", help="The directory where test data exist.") config_lib.DEFINE_constant_string( "Test.additional_test_config", default="%(Test.data_dir)/localtest.yaml", help="The path to a test config with local customizations.") config_lib.DEFINE_string("Test.tmpdir", "/tmp/", help="Somewhere to write temporary files.") config_lib.DEFINE_string("Test.data_store", "FakeDataStore", "The data store to run the tests against.") config_lib.DEFINE_integer("Test.remote_pdb_port", 2525, "Remote debugger port.") config_lib.DEFINE_string("PrivateKeys.ca_key_raw_data", "", "For testing purposes.")
#!/usr/bin/env python
"""API config options."""

from grr.lib import config_lib
from grr.lib import rdfvalue

config_lib.DEFINE_string("API.AuthorizationManager",
                         "SimpleAPIAuthorizationManager",
                         "API Authorization manager class to be used")

# Fixed: the default for an integer option must be an int, not the
# string "10".
config_lib.DEFINE_integer("API.DailyFlowRequestLimit", 10,
                          "Number of flows a user can run on a single client "
                          "per day before being blocked by throttling. Set to "
                          "0 to disable checking.")

config_lib.DEFINE_semantic(rdfvalue.Duration, "API.FlowDuplicateInterval",
                           default="1200s",
                           description="Amount of time "
                           "that needs to pass before the throttler will allow "
                           "an identical flow to run on the same client. Set "
                           "to 0s to disable checking.")

config_lib.DEFINE_string("API.RendererACLFile", "",
                         "The file containing API acls, see "
                         "grr/config/api_acls.yaml for an example.")
name="Config.python_hack_root", default="%(Config.aff4_root)/python_hacks", description=("The path where python hacks are stored in the aff4 " "namespace."))) # Executables must be signed and uploaded to their dedicated AFF4 namespace. config_lib.DEFINE_option( type_info.RDFValueType( rdfclass=rdfvalue.RDFURN, name="Executables.aff4_path", description="The aff4 path to signed executables.", default="%(Config.aff4_root)/executables/%(Client.platform)")) config_lib.DEFINE_string( name="Executables.installer", default=("%(Executables.aff4_path)/installers/" "%(ClientRepacker.output_basename)" "%(ClientBuilder.output_extension)"), help="The location of the generated installer in the config directory.") config_lib.DEFINE_string( name="ClientBuilder.output_extension", default=None, help="The file extension for the client (OS dependent).") config_lib.DEFINE_string( name="ClientBuilder.package_dir", default=None, help="OSX package name.") config_lib.DEFINE_string( "ClientBuilder.private_config_validator_class", default=None, help="Set this to a class name that sanity checks your client "
#!/usr/bin/env python """Configuration parameters for logging and error reporting subsystems.""" from grr.lib import config_lib from grr.lib import type_info from grr.lib.rdfvalues import standard config_lib.DEFINE_string( "Logging.domain", "localhost", "The email domain belonging to this installation. " "Leave blank to not restrict email to this domain") config_lib.DEFINE_string( "Logging.from_domain", "localhost", "The email domain emails will be sent from this install.") config_lib.DEFINE_list( "Logging.engines", ["stderr"], "Enabled logging engines. Valid values are " "combinations of stderr,file,syslog,event_log.") config_lib.DEFINE_bool("Logging.verbose", False, help="If true log more verbosely.") config_lib.DEFINE_string("Logging.path", "%(Config.prefix)/var/log/", help="Path to log file directory.") config_lib.DEFINE_string("Logging.syslog_path", "/dev/log", help="Path to syslog socket. This can be a unix "
# Default for running in the current directory config_lib.DEFINE_constant_string("Test.srcdir", "%(grr|module_path)/../", "The directory containing the source code.") config_lib.DEFINE_constant_string( "Test.data_dir", default="%(test_data@grr-response-test|resource)", help="The directory where test data exist.") config_lib.DEFINE_constant_string( "Test.additional_test_config", default="%(Test.data_dir)/localtest.yaml", help="The path to a test config with local customizations.") config_lib.DEFINE_string("Test.tmpdir", "/tmp/", help="Somewhere to write temporary files.") config_lib.DEFINE_string("Test.data_store", "FakeDataStore", "The data store to run the tests against.") config_lib.DEFINE_integer("Test.remote_pdb_port", 2525, "Remote debugger port.") config_lib.DEFINE_list( "Test.end_to_end_client_ids", [], "List of client ids to perform regular end_to_end tests" " on. These clients should be always on and connected" " to the network.") config_lib.DEFINE_list(
# Client binary version numbers, read once from the running source tree.
VERSION = grr.version()

config_lib.DEFINE_integer("Source.version_major", VERSION["major"],
                          "Major version number of client binary.")

config_lib.DEFINE_integer("Source.version_minor", VERSION["minor"],
                          "Minor version number of client binary.")

config_lib.DEFINE_integer("Source.version_revision", VERSION["revision"],
                          "Revision number of client binary.")

config_lib.DEFINE_integer("Source.version_release", VERSION["release"],
                          "Release number of client binary.")

config_lib.DEFINE_string(
    "Source.version_string", "%(version_major).%(version_minor)."
    "%(version_revision).%(version_release)",
    "Version string of the client.")

# NOTE(review): the default here is an interpolation string handed to an
# integer option — presumably the config system expands it to digits and
# coerces to int; confirm against config_lib's interpolation rules.
config_lib.DEFINE_integer(
    "Source.version_numeric", "%(version_major)%(version_minor)"
    "%(version_revision)%(version_release)",
    "Version string of the client as an integer.")

# Note: Each thread adds about 8mb for stack space.
config_lib.DEFINE_integer("Threadpool.size", 50,
                          "Number of threads in the shared thread pool.")

config_lib.DEFINE_integer(
    "Worker.queue_shards", 5, "Queue notifications will be sharded across "
    "this number of datastore subjects.")
#!/usr/bin/env python """Configuration parameters for the admin UI.""" from grr.lib import config_lib # The Admin UI web application. config_lib.DEFINE_integer("AdminUI.port", 8000, "port to listen on") config_lib.DEFINE_integer("AdminUI.port_max", None, "If set and AdminUI.port is in use, attempt to " "use ports between AdminUI.port and " "AdminUI.port_max.") # Override this if you want to access admin ui extenally. Make sure it is # secured (i.e. AdminUI.webauth_manager is not NullWebAuthManager)! config_lib.DEFINE_string("AdminUI.bind", "127.0.0.1", "interface to bind to.") config_lib.DEFINE_string("AdminUI.document_root", "%(grr/gui/static|resource)", "The main path to the static HTML pages.") config_lib.DEFINE_string("AdminUI.template_root", "%(grr/gui/templates|resource)", "The main path to the templates.") config_lib.DEFINE_string("AdminUI.local_document_root", "%(grr/gui/local/static|resource)", "The main path to the localized static HTML pages.") config_lib.DEFINE_string("AdminUI.help_root", "%(docs|resource)", "The main path to the locally cached documentation.")
#!/usr/bin/env python
"""API config options."""

from grr.lib import config_lib
from grr.lib import rdfvalue

# Fixed: the default for an integer option must be an int, not the
# string "10".
config_lib.DEFINE_integer("API.DailyFlowRequestLimit", 10,
                          "Number of flows a user can run on a single client "
                          "per day before being blocked by throttling. Set to "
                          "0 to disable checking.")

config_lib.DEFINE_semantic(rdfvalue.Duration, "API.FlowDuplicateInterval",
                           default="1200s",
                           description="Amount of time "
                           "that needs to pass before the throttler will allow "
                           "an identical flow to run on the same client. Set "
                           "to 0s to disable checking.")

config_lib.DEFINE_string("API.RouterACLConfigFile", "",
                         "The file containing API acls, see "
                         "grr/config/api_acls.yaml for an example.")

# The safe default: every API call is rejected unless an ACL rule
# explicitly routes the caller elsewhere.
config_lib.DEFINE_string("API.DefaultRouter", "DisabledApiCallRouter",
                         "The default router used by the API if there are no "
                         "rules defined in API.RouterACLConfigFile or if none "
                         "of these rules matches.")
import time import logging from grr.lib import access_control from grr.lib import config_lib from grr.lib import data_store from grr.lib import rdfvalue from grr.lib import registry from grr.lib import stats config_lib.DEFINE_string("StatsStore.process_id", default="", help="Id used to identify stats data of the current " "process. This should be different for different GRR " "processes. I.e. if you have 4 workers, for every " "worker the subject should be different. For example: " "worker_1, worker_2, worker_3, worker_4.") config_lib.DEFINE_integer("StatsStore.write_interval", default=60, help="Time in seconds between the dumps of stats " "data into the stats store.") config_lib.DEFINE_integer("StatsStore.ttl", default=60 * 60 * 24 * 7, help="Maximum lifetime (in seconds) of data in the " "stats store. Default is one week.") class StatsStore(object): """Implementation of the long-term storage of collected stats data.
config_lib.DEFINE_integer("Worker.flow_lease_time", 600, "Duration of flow lease time in seconds.") config_lib.DEFINE_integer( "Frontend.throttle_average_interval", 60, "Time interval over which average request rate is " "calculated when throttling is enabled.") config_lib.DEFINE_list( "Frontend.well_known_flows", ["aff4:/flows/W:TransferStore", "aff4:/flows/W:Stats"], "Allow these well known flows to run directly on the " "frontend. Other flows are scheduled as normal.") # Smtp settings. config_lib.DEFINE_string("Worker.smtp_server", "localhost", "The smpt server for sending email alerts.") config_lib.DEFINE_integer("Worker.smtp_port", 25, "The smtp server port.") # Server Cryptographic settings. config_lib.DEFINE_semantic( rdfvalue.PEMPrivateKey, "PrivateKeys.ca_key", description="CA private key. Used to sign for client enrollment.", ) config_lib.DEFINE_semantic(rdfvalue.PEMPrivateKey, "PrivateKeys.server_key", description="Private key for the front end server.") config_lib.DEFINE_semantic(
#!/usr/bin/env python
"""Configuration parameters for server output plugins."""

from grr.lib import config_lib
from grr.lib import rdfvalue

config_lib.DEFINE_string("BigQuery.service_acct_json", None,
                         "The json contents of the service account file.")

config_lib.DEFINE_string("BigQuery.project_id", None,
                         "The BigQuery project_id.")

# Fixed copy-paste error in the help text: this option is the dataset id,
# not the project id.
config_lib.DEFINE_string("BigQuery.dataset_id", "grr",
                         "The BigQuery dataset_id.")

config_lib.DEFINE_integer("BigQuery.max_file_post_size", 5 * 1000 * 1000,
                          "Max size of file to put in each POST "
                          "to bigquery. Note enforcement is not exact.")

config_lib.DEFINE_integer("BigQuery.retry_max_attempts", 2,
                          "Total number of times to retry an upload.")

config_lib.DEFINE_integer(
    "BigQuery.max_upload_failures", 100,
    "Total number of times to try uploading to BigQuery"
    " for a given hunt or flow.")

config_lib.DEFINE_semantic(rdfvalue.Duration, "BigQuery.retry_interval", "2s",
                           "Time to wait before first retry.")
#!/usr/bin/env python """Configuration parameters for the aff4 subsystem.""" from grr.lib import config_lib config_lib.DEFINE_integer( "AFF4.cache_age", 5, "The number of seconds AFF4 objects live in the cache.") config_lib.DEFINE_integer("AFF4.cache_max_size", 10000, "Maximum size of the AFF4 objects cache.") config_lib.DEFINE_integer( "AFF4.intermediate_cache_age", 600, "The number of seconds AFF4 urns live in index cache.") config_lib.DEFINE_integer("AFF4.intermediate_cache_max_size", 2000, "Maximum size of the AFF4 index cache.") config_lib.DEFINE_integer( "AFF4.notification_rules_cache_age", 60, "The number of seconds AFF4 notification rules are cached.") config_lib.DEFINE_string( "AFF4.change_email", None, "Email used by AFF4NotificationEmailListener to notify " "about AFF4 changes.")
#!/usr/bin/env python """Configuration parameters for logging and error reporting subsystems.""" from grr.lib import config_lib from grr.lib import rdfvalue from grr.lib import type_info config_lib.DEFINE_string( "Logging.domain", "localhost", "The email domain belonging to this installation. " "Leave blank to not restrict email to this domain") config_lib.DEFINE_list( "Logging.engines", ["stderr"], "Enabled logging engines. Valid values are " "combinations of stderr,file,syslog,event_log.") config_lib.DEFINE_bool("Logging.verbose", False, help="If true log more verbosely.") config_lib.DEFINE_string("Logging.path", "%(TEMP|env)/tmp/", help="Path to log file directory.") config_lib.DEFINE_string("Logging.syslog_path", "localhost:514", help="Path to syslog socket. This can be a unix " "domain socket or in a UDP host:port notation.") config_lib.DEFINE_string("Logging.filename", "%(Logging.path)/GRRlog.txt",
"Flows who got stuck in the worker for more than this time (in seconds) " "are forcibly terminated") config_lib.DEFINE_integer( "Frontend.throttle_average_interval", 60, "Time interval over which average request rate is " "calculated when throttling is enabled.") config_lib.DEFINE_list( "Frontend.well_known_flows", ["aff4:/flows/W:TransferStore", "aff4:/flows/W:Stats"], "Allow these well known flows to run directly on the " "frontend. Other flows are scheduled as normal.") # Smtp settings. config_lib.DEFINE_string("Worker.smtp_server", "localhost", "The smtp server for sending email alerts.") config_lib.DEFINE_integer("Worker.smtp_port", 25, "The smtp server port.") config_lib.DEFINE_bool("Worker.smtp_starttls", False, "Enable TLS for the smtp connection.") config_lib.DEFINE_string("Worker.smtp_user", None, "Username for the smtp connection.") config_lib.DEFINE_string("Worker.smtp_password", None, "Password for the smtp connection.") # Server Cryptographic settings. config_lib.DEFINE_semantic( rdfvalue.PEMPrivateKey,
#!/usr/bin/env python
"""Configuration parameters for the client."""

from grr.lib import config_lib
from grr.lib.rdfvalues import crypto

# General Client options.
# Fixed help-text typo: "confirm with" -> "conform to".
config_lib.DEFINE_string("Client.name", "GRR",
                         "The name of the client. This will be used as a base "
                         "name to generate many other default parameters such "
                         "as binary names and service names. Note that on "
                         "Linux we lowercase the name to conform to most "
                         "linux naming conventions.")

config_lib.DEFINE_string("Client.binary_name", "%(Client.name)",
                         "The name of the client binary.")

config_lib.DEFINE_list("Client.labels", [], "Labels for this client.")

config_lib.DEFINE_string("Client.company_name", "GRR Project",
                         "The name of the company which made the client.")

config_lib.DEFINE_string("Client.description", "%(name) %(platform) %(arch)",
                         "A description of this specific client build.")

config_lib.DEFINE_string("Client.platform", "windows",
                         "The platform we are running on.")

config_lib.DEFINE_string("Client.arch", "amd64",
                         "The architecture we are running on.")
#!/usr/bin/env python
"""Settings for ACLs/approvals system."""

from grr.lib import config_lib

config_lib.DEFINE_string("ACL.approvers_config_file",
                         "%(Config.directory)/approvers.yaml",
                         "File that defines who can approve access to "
                         "clients with certain labels.")

config_lib.DEFINE_integer("ACL.approvers_required", 2,
                          "The number of approvers required for access.")

config_lib.DEFINE_integer("ACL.cache_age", 600, "The number of seconds "
                          "approval objects live in the cache.")

# Fixed help text: the implicit string concatenation was missing a space
# and rendered as "corporate groupdirectories".
config_lib.DEFINE_string(
    "ACL.group_access_manager_class", "NoGroupAccess",
    "This class handles interfacing with corporate group "
    "directories for granting access. Override with a "
    "class that understands your LDAP/AD/whatever setup.")

config_lib.DEFINE_integer(
    "ACL.token_expiry", 7 * 24 * 60 * 60,
    "The duration in seconds of a valid approval token. "
    "Default of one week.")
#!/usr/bin/env python """Configuration parameters for the data stores.""" from grr.lib import config_lib config_lib.DEFINE_integer("Datastore.maximum_blob_size", 15 * 1024 * 1024, "Maximum blob size we may store in the datastore.") config_lib.DEFINE_string("Datastore.security_manager", "NullAccessControlManager", "The ACL manager for controlling access to data.") config_lib.DEFINE_string("Datastore.implementation", "FakeDataStore", "Storage subsystem to use.") config_lib.DEFINE_string("Blobstore.implementation", "MemoryStreamBlobstore", "Blob storage subsystem to use.") config_lib.DEFINE_integer("Datastore.transaction_timeout", default=600, help="How long do we wait for a transaction lock.") DATASTORE_PATHING = [r"%{(?P<path>files/hash/generic/sha256/...).*}", r"%{(?P<path>files/hash/generic/sha1/...).*}", r"%{(?P<path>files/hash/generic/md5/...).*}", r"%{(?P<path>files/hash/pecoff/md5/...).*}", r"%{(?P<path>files/hash/pecoff/sha1/...).*}", r"%{(?P<path>files/nsrl/...).*}", r"%{(?P<path>W/[^/]+).*}", r"%{(?P<path>CA/[^/]+).*}", r"%{(?P<path>C\..\{1,16\}?)($|/.*)}",
#!/usr/bin/env python """Configuration parameters for client builder and server packaging.""" import os import time from grr.lib import config_lib from grr.lib import rdfvalue from grr.lib import type_info # Windows Memory driver information. config_lib.DEFINE_string("MemoryDriver.driver_service_name", "Pmem", "The SCCM service name for the driver.") config_lib.DEFINE_string("MemoryDriver.driver_display_name", "%(Client.name) Pmem", "The SCCM display name for the driver.") config_lib.DEFINE_list("MemoryDriver.driver_files", [], "The default drivers to use.") config_lib.DEFINE_list("MemoryDriver.aff4_paths", [], "The AFF4 paths to the driver objects.") config_lib.DEFINE_string( "MemoryDriver.device_path", r"\\\\.\\pmem", "The device path which the client will open after " "installing this driver.") config_lib.DEFINE_string( "MemoryDriver.service_name", "pmem", "The name of the service created for " "the driver (Windows).")
#!/usr/bin/env python """API config options.""" from grr.lib import config_lib from grr.lib import rdfvalue config_lib.DEFINE_list("API.access_groups", [], "Groups allowed to call the AdminUI API. Empty list" " means anyone can call. Note you MUST replace the " "lib.authorizations.local.groups.GroupAccess class " "with a class that can retrieve group information to" " use this setting.") config_lib.DEFINE_string("API.access_groups_label", "api_access", "The access that API.access_groups will be granted. " "This config option is useful if you have multiple " "API servers which should be accessed by different " "API.access_group groups. You likely don't want to " "change this.") config_lib.DEFINE_integer("API.DailyFlowRequestLimit", "10", "Number of flows a user can run on a single client " "per day before being blocked by throttling. Set to " "0 to disable checking.") config_lib.DEFINE_semantic(rdfvalue.Duration, "API.FlowDuplicateInterval", default="1200s", description="Amount of time " "that needs to pass before the throttler will allow " "an identical flow to run on the same client. Set " "to 0s to disable checking.") config_lib.DEFINE_string("API.HandlerACLFile", "",
#!/usr/bin/env python
"""Configuration parameters for the client."""

from grr.lib import config_lib
from grr.lib.rdfvalues import crypto

# General Client options.
# Fixed help-text typo: "confirm with" -> "conform to".
config_lib.DEFINE_string(
    "Client.name", "GRR",
    "The name of the client. This will be used as a base "
    "name to generate many other default parameters such "
    "as binary names and service names. Note that on "
    "Linux we lowercase the name to conform to most "
    "linux naming conventions.")

config_lib.DEFINE_string("Client.binary_name", "%(Client.name)",
                         "The name of the client binary.")

config_lib.DEFINE_list("Client.labels", [], "Labels for this client.")

config_lib.DEFINE_string("Client.company_name", "GRR Project",
                         "The name of the company which made the client.")

config_lib.DEFINE_string("Client.description", "%(name) %(platform) %(arch)",
                         "A description of this specific client build.")

config_lib.DEFINE_string("Client.platform", "windows",
                         "The platform we are running on.")

config_lib.DEFINE_string("Client.arch", "amd64",
                         "The architecture we are running on.")
"Worker.stuck_flows_timeout", 60 * 60 * 6, "Flows who got stuck in the worker for more than this time (in seconds) " "are forcibly terminated") config_lib.DEFINE_integer( "Frontend.throttle_average_interval", 60, "Time interval over which average request rate is " "calculated when throttling is enabled.") config_lib.DEFINE_list( "Frontend.well_known_flows", ["TransferStore", "Stats"], "Allow these well known flows to run directly on the " "frontend. Other flows are scheduled as normal.") # Smtp settings. config_lib.DEFINE_string("Worker.smtp_server", "localhost", "The smtp server for sending email alerts.") config_lib.DEFINE_integer("Worker.smtp_port", 25, "The smtp server port.") config_lib.DEFINE_bool("Worker.smtp_starttls", False, "Enable TLS for the smtp connection.") config_lib.DEFINE_string("Worker.smtp_user", None, "Username for the smtp connection.") config_lib.DEFINE_string("Worker.smtp_password", None, "Password for the smtp connection.") # Server Cryptographic settings. config_lib.DEFINE_semantic( rdf_crypto.PEMPrivateKey,
#!/usr/bin/env python """Configuration parameters for the admin UI.""" from grr.lib import config_lib # The Admin UI web application. config_lib.DEFINE_integer("AdminUI.port", 8000, "port to listen on") config_lib.DEFINE_integer( "AdminUI.port_max", None, "If set and AdminUI.port is in use, attempt to " "use ports between AdminUI.port and " "AdminUI.port_max.") # Override this if you want to access admin ui extenally. Make sure it is # secured (i.e. AdminUI.webauth_manager is not NullWebAuthManager)! config_lib.DEFINE_string("AdminUI.bind", "127.0.0.1", "interface to bind to.") config_lib.DEFINE_string("AdminUI.document_root", "%(grr/gui/static|resource)", "The main path to the static HTML pages.") config_lib.DEFINE_string("AdminUI.local_document_root", "%(grr/gui/local/static|resource)", "The main path to the localized static HTML pages.") config_lib.DEFINE_string("AdminUI.help_root", "%(docs|resource)", "The main path to the locally cached documentation.") config_lib.DEFINE_string( "AdminUI.webauth_manager", "NullWebAuthManager", "The web auth manager for controlling access to the UI.")
#!/usr/bin/env python """Configuration parameters for the data stores.""" from grr.lib import config_lib from grr.lib import rdfvalue config_lib.DEFINE_integer("Datastore.maximum_blob_size", 512 * 1024, "Maximum blob size we may store in the datastore.") config_lib.DEFINE_string("Datastore.implementation", "FakeDataStore", "Storage subsystem to use.") config_lib.DEFINE_string("Blobstore.implementation", "MemoryStreamBlobstore", "Blob storage subsystem to use.") DATASTORE_PATHING = [ r"%{(?P<path>files/hash/generic/sha256/...).*}", r"%{(?P<path>files/hash/generic/sha1/...).*}", r"%{(?P<path>files/hash/generic/md5/...).*}", r"%{(?P<path>files/hash/pecoff/md5/...).*}", r"%{(?P<path>files/hash/pecoff/sha1/...).*}", r"%{(?P<path>files/nsrl/...).*}", r"%{(?P<path>W/[^/]+).*}", r"%{(?P<path>CA/[^/]+).*}", r"%{(?P<path>C\..\{1,16\}?)($|/.*)}", r"%{(?P<path>hunts/[^/]+).*}", r"%{(?P<path>blobs/[^/]+).*}", r"%{(?P<path>[^/]+).*}" ] config_lib.DEFINE_list("Datastore.pathing", DATASTORE_PATHING, ("Path selection for subjects in the file-based data " "stores (by priority)."))
#!/usr/bin/env python """Configuration parameters for server output plugins.""" from grr.lib import config_lib from grr.lib import rdfvalue config_lib.DEFINE_string("BigQuery.service_account", None, "The service account email address for BigQuery.") config_lib.DEFINE_string( "BigQuery.private_key", None, "The private key entry from the service account " "credential file.") config_lib.DEFINE_string("BigQuery.project_id", None, "The BigQuery project_id.") config_lib.DEFINE_string("BigQuery.dataset_id", "grr", "The BigQuery project_id.") config_lib.DEFINE_integer( "BigQuery.max_file_post_size", 5 * 1000 * 1000, "Max size of file to put in each POST " "to bigquery. Note enforcement is not exact.") config_lib.DEFINE_integer("BigQuery.retry_max_attempts", 2, "Total number of times to retry an upload.") config_lib.DEFINE_integer( "BigQuery.max_upload_failures", 100, "Total number of times to try uploading to BigQuery"