def verify_signature(signed_file_path, output_file_path):
    """Verifies the signed file's signature.

    Args:
        signed_file_path : string, path to the gpg-signed file.
        output_file_path : string, path where gpg writes the decrypted payload.

    Returns:
        True : If the signature is valid.
        False : If the signature is invalid.
    """
    # Copy the base command: "cmd += [...]" mutates the list IN PLACE, so
    # aliasing GPG_DECRYPT_BASE_CMD directly would append arguments to the
    # shared module-level constant on every call.
    cmd = list(GPG_DECRYPT_BASE_CMD)
    keyring_path = configuration.get_gpg_public_keyring_path()

    # if a keyring is specified in the conf, use it, else use the default one
    if keyring_path != "":
        cmd += [GPG_NO_DEFAULT_KEYRING_OPTION, GPG_KEYRING_ARG, keyring_path]
    cmd += ["--output", output_file_path, signed_file_path]

    proc = subprocessfactory.create_subprocess(cmd=cmd,
                                               stdout=subprocess.PIPE,
                                               stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()

    if proc.poll() == 0:
        tracer.log_debug_trace("Signature is valid.")
        return True

    tracer.log_debug_trace("Signature is invalid.[exception=" + str(stderr) + "]")
    return False
def start_runbook_subprocess(self):
    """Creates the runbook subprocess based on the script language and using
    properties set by the derived class.

    Requires self.base_cmd & self.runbook_file_path to be set by derived class.
    """
    cmd = self.base_cmd + [self.runbook.runbook_file_path]

    job_parameters = self.job_data.parameters
    if job_parameters is not None and len(job_parameters) > 0:
        for parameter in job_parameters:
            tracer.log_debug_trace("Parameter is: \n" + str(parameter))
            if self.runbook.definition_kind_str == "PowerShell" and parameter["Name"]:
                # Handle named parameters for PowerShell arriving out of order
                cmd += ["-%s" % parameter["Name"]]
            try:
                # parameter values are usually JSON-encoded strings
                cmd += [str(json.loads(parameter["Value"]))]
            except (ValueError, TypeError):
                # value is not valid JSON (or not a string): pass it through raw
                # instead of crashing the whole job start
                cmd += [str(parameter["Value"])]

    # copy the current process env vars and add the job-specific ones
    # (the runbook needs PYTHONPATH to resolve the worker sources)
    env = os.environ.copy()
    env.update({"AUTOMATION_JOB_ID": str(self.job_data.job_id),
                "AUTOMATION_ACTIVITY_ID": str(tracer.u_activity_id),
                "PYTHONPATH": str(configuration.get_source_directory_path())})  # windows env have to be str (not unicode)

    self.runbook_subprocess = subprocessfactory.create_subprocess(cmd=cmd,
                                                                  env=env,
                                                                  stdout=subprocess.PIPE,
                                                                  stderr=subprocess.PIPE)
def verify_signature(signed_file_path, output_file_path):
    """Verifies the signed file's signature.

    Returns:
        True : If the signature is valid.
        False : If the signature is invalid.
    """
    gpg_cmd = ["gpg", "-d"]
    keyring_path = configuration.get_gpg_public_keyring_path()

    # a keyring configured in the conf takes precedence over the default one
    if keyring_path != "":
        gpg_cmd.extend([GPG_NO_DEFAULT_KEYRING_OPTION, GPG_KEYRING_ARG, keyring_path])
    gpg_cmd.extend(["--output", output_file_path, signed_file_path])

    # temporary workaround for the omi/gpg bug causing gpg to create a .gpg
    # folder in the wrong home dir; only apply the workaround for oms installation
    env = None
    if "nxOMSAutomationWorkerResource" in os.path.abspath(__file__):
        env = os.environ.copy()
        env["HOME"] = "/var/opt/microsoft/omsagent/run"

    proc = subprocessfactory.create_subprocess(cmd=gpg_cmd,
                                               env=env,
                                               stdout=subprocess.PIPE,
                                               stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()

    if proc.poll() != 0:
        tracer.log_sandbox_job_runbook_signature_validation_failed(stderr)
        return False

    tracer.log_debug_trace("Signature is valid.")
    return True
def start_runbook_subprocess(self):
    """Creates the runbook subprocess based on the script language and using
    properties set by the derived class.

    Requires self.base_cmd & self.runbook_file_path to be set by derived class.
    """
    cmd = self.base_cmd + [self.runbook.runbook_file_path]

    job_parameters = self.job_data.parameters
    if job_parameters is not None and len(job_parameters) > 0:
        for parameter in job_parameters:
            tracer.log_debug_trace("Parameter is: \n" + str(parameter))
            if (self.runbook.definition_kind_str == "PowerShell" or
                    self.runbook.definition_kind_str == "PowerShell7") and parameter["Name"]:
                # Handle named parameters for PowerShell arriving out of order
                cmd += ["-%s" % parameter["Name"]]
            try:
                cmd += [str(json.loads(parameter["Value"]))]
            except (ValueError, TypeError):
                # value is not valid JSON (or not a string): pass it through raw.
                # Narrowed from a bare "except:" which would also swallow
                # KeyboardInterrupt/SystemExit.
                cmd += [str(parameter["Value"])]

    # copy the current process env vars and add the job-specific ones
    env = os.environ.copy()
    env.update({"AUTOMATION_JOB_ID": str(self.job_data.job_id),
                "AUTOMATION_ACTIVITY_ID": str(tracer.u_activity_id),
                "PYTHONPATH": str(configuration.get_source_directory_path()),
                "HOME": str(os.getcwd())})  # windows env have to be str (not unicode)

    self.runbook_subprocess = subprocessfactory.create_subprocess(cmd=cmd,
                                                                  env=env,
                                                                  stdout=subprocess.PIPE,
                                                                  stderr=subprocess.PIPE)
def routine(self):
    """Fetches pending sandbox actions and starts a sandbox subprocess for
    each one that is not already running."""
    self.stop_tracking_terminated_sandbox()

    sandbox_actions = self.jrds_client.get_sandbox_actions()
    tracer.log_debug_trace("Get sandbox action. Found " + str(len(sandbox_actions)) + " action(s).")

    for action in sandbox_actions:
        tracer.log_worker_sandbox_action_found(len(sandbox_actions))
        sandbox_id = str(action["SandboxId"])

        # prevent duplicate sandbox from running; skip to the next action
        # instead of returning, which would drop the rest of this batch
        if sandbox_id in self.running_sandboxes:
            continue

        # create sandboxes folder if needed
        sandboxes_base_path = "sandboxes"
        sandbox_working_dir = os.path.join(configuration.get_working_directory_path(),
                                           sandboxes_base_path, sandbox_id)
        try:
            iohelper.assert_or_create_path(sandbox_working_dir)
        except OSError:
            # best effort: the sandbox subprocess will fail loudly if the
            # working dir is truly unusable
            tracer.log_debug_trace("Failed to create sandbox folder.")

        cmd = ["python", os.path.join(configuration.get_source_directory_path(), "sandbox.py")]
        process_env_variables = {"sandbox_id": sandbox_id}
        sandbox_process = subprocessfactory.create_subprocess(cmd=cmd,
                                                              env=process_env_variables,
                                                              stdout=subprocess.PIPE,
                                                              stderr=subprocess.PIPE,
                                                              cwd=sandbox_working_dir)
        self.running_sandboxes[sandbox_id] = sandbox_process
        self.monitor_sandbox_process_outputs(sandbox_id, sandbox_process)
        tracer.log_worker_starting_sandbox(sandbox_id, str(sandbox_process.pid))
def verify_signature(signed_file_path, output_file_path):
    """Verifies the signed file's signature against every configured keyring.

    Returns:
        True : If the signature is valid (for at least one keyring).
        False : If the signature is invalid.

    Raises:
        GPGKeyringNotConfigured : when no usable (non-default) keyring is configured.
    """
    keyrings = configuration.get_gpg_public_keyrings_path()
    only_default = (len(keyrings) == 1 and
                    keyrings[0] == configuration.DEFAULT_GPG_PUBLIC_KEYRING_PATH)
    if len(keyrings) == 0 or only_default:
        raise GPGKeyringNotConfigured()

    for keyring_path in keyrings:
        # do not rely on default user keyring
        if keyring_path in ("", configuration.DEFAULT_GPG_PUBLIC_KEYRING_PATH):
            continue

        gpg_cmd = [GPG, GPG_BATCH_OPTION, GPG_YES_OPTION, GPG_DECRYPT_OPTION,
                   GPG_NO_DEFAULT_KEYRING_OPTION, GPG_KEYRING_OPTION, keyring_path,
                   GPG_OUTPUT_OPTION, output_file_path, signed_file_path]

        # temporary workaround for the omi/gpg bug causing gpg to create a .gpg
        # folder in the wrong home dir; only apply the workaround for oms installation
        env = None
        if os.name.lower() != "nt" and "nxOMSAutomationWorkerResource" in os.path.abspath(__file__):
            import pwd
            env = os.environ.copy()
            current_username = pwd.getpwuid(os.getuid()).pw_name
            if "omsagent" in current_username:
                env["HOME"] = "/var/opt/microsoft/omsagent/run"
            elif "nxautomation" in current_username:
                env["HOME"] = "/var/opt/microsoft/omsagent/run/automationworker"

        proc = subprocessfactory.create_subprocess(cmd=gpg_cmd,
                                                   env=env,
                                                   stdout=subprocess.PIPE,
                                                   stderr=subprocess.PIPE)
        stdout, stderr = proc.communicate()
        # normalize to str for the tracer when running under python 3
        if isinstance(stderr, bytes):
            stderr = stderr.decode()

        if proc.poll() == 0:
            tracer.log_sandbox_job_runbook_signature_validation_succeeded(keyring_path)
            return True
        tracer.log_sandbox_job_runbook_signature_validation_failed(keyring_path, stderr)

    tracer.log_sandbox_job_runbook_signature_invalid()
    return False
def issue_request(self, url, headers, method, data): data_file_path = None headers = self.merge_headers(self.default_headers, headers) # if a body is included, write it to a temporary file (prevent body from leaking in ps/top) if method != self.GET and data is not None: serialized_data = self.json.dumps(data) # write data to disk data_file_name = base64.standard_b64encode( str(time.time()) + str(random.randint(0, sys.maxint)) + str(random.randint(0, sys.maxint)) + str(random.randint(0, sys.maxint)) + str(random.randint(0, sys.maxint))) data_file_path = os.path.join( configuration.get_temporary_request_directory_path(), data_file_name) iohelper.write_to_file(data_file_path, serialized_data) # insert Content-Type header headers.update( {self.CONTENT_TYPE_HEADER_KEY: self.APP_JSON_HEADER_VALUE}) # ** nesting of try statement is required since try/except/finally isn't supported prior to 2.5 ** try: try: cmd = self.build_request_cmd(url, headers, method=method, data_file_path=data_file_path) env = os.environ.copy() if self.os_is_redhat(): env["NSS_SDB_USE_CACHE"] = "no" p = subprocessfactory.create_subprocess(cmd, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate() if p.returncode != EXIT_SUCCESS: raise Exception( "Http request failed due to curl error. [returncode=" + str(p.returncode) + "]" + "[stderr=" + str(err) + "]") return self.parse_raw_output(out) except Exception, e: raise Exception( "Unknown exception while issuing request. [exception=" + str(e) + "]" + "[stacktrace=" + str(traceback.format_exc()) + "]") finally: if data_file_path is not None: os.remove(data_file_path)
def get_default_python_interpreter_major_version():
    """Return the default "python" alias interpreter version.

    Returns:
        int, the interpreter major version
        None, if the default interpreter version cannot be detected
    """
    cmd = ["python", "-c", "import sys;print(sys.version[0])"]  # need to use print() for python3 compatibility
    p = subprocessfactory.create_subprocess(cmd=cmd,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
    default_interpreter_version, error = p.communicate()

    if p.returncode != 0:
        return None

    # under python 3, communicate() returns bytes and int(bytes) raises TypeError
    if isinstance(default_interpreter_version, bytes):
        default_interpreter_version = default_interpreter_version.decode()
    try:
        return int(default_interpreter_version.strip())
    except ValueError:
        # unexpected output (e.g. a wrapper script printing extra text)
        return None
def get_default_python_interpreter_major_version():
    """Return the default "python" alias interpreter version.

    Returns:
        int, the interpreter major version
        None, if the default interpreter version cannot be detected
    """
    # print() keeps the probe one-liner compatible with both python 2 and 3
    probe_cmd = ["python", "-c", "import sys;print(sys.version[0])"]
    probe = subprocessfactory.create_subprocess(cmd=probe_cmd,
                                                stdout=subprocess.PIPE,
                                                stderr=subprocess.PIPE)
    version_output, _ = probe.communicate()
    if probe.returncode != 0:
        return None
    return int(version_output.strip())
def routine(self): self.stop_tracking_terminated_sandbox() sandbox_actions = self.jrds_client.get_sandbox_actions() tracer.log_debug_trace("Get sandbox action. Found " + str(len(sandbox_actions)) + " action(s).") for action in sandbox_actions: tracer.log_worker_sandbox_action_found(len(sandbox_actions)) sandbox_id = str(action["SandboxId"]) # prevent duplicate sandbox from running if sandbox_id in self.running_sandboxes: return # create sandboxes folder if needed sandbox_working_dir = os.path.join( configuration.get_working_directory_path(), sandboxes_root_folder_name, sandbox_id) try: iohelper.assert_or_create_path(sandbox_working_dir) except OSError, exception: tracer.log_worker_failed_to_create_sandbox_root_folder( sandbox_id, exception) pass # copy current process env variable (contains configuration) and add the sanbox_id key process_env_variables = os.environ.copy() process_env_variables["sandbox_id"] = sandbox_id cmd = [ "python", os.path.join(configuration.get_source_directory_path(), "sandbox.py") ] tracer.log_worker_starting_sandbox(sandbox_id) sandbox_process = subprocessfactory.create_subprocess( cmd=cmd, env=process_env_variables, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=sandbox_working_dir) self.running_sandboxes[sandbox_id] = sandbox_process tracer.log_worker_started_tracking_sandbox(sandbox_id) self.monitor_sandbox_process_outputs(sandbox_id, sandbox_process) tracer.log_worker_sandbox_process_started(sandbox_id, str(sandbox_process.pid))
def verify_signature(signed_file_path, output_file_path):
    """Verifies the signed file's signature against every configured keyring.

    Returns:
        True : If the signature is valid (for at least one keyring).
        False : If the signature is invalid.

    Raises:
        GPGKeyringNotConfigured : when no usable (non-default) keyring is configured.
    """
    keyrings = configuration.get_gpg_public_keyrings_path()
    if (len(keyrings) == 0) or (len(keyrings) == 1 and keyrings[0] == configuration.DEFAULT_GPG_PUBLIC_KEYRING_PATH):
        raise GPGKeyringNotConfigured()

    for keyring_path in keyrings:
        # do not rely on default user keyring
        if keyring_path == "" or keyring_path == configuration.DEFAULT_GPG_PUBLIC_KEYRING_PATH:
            continue

        cmd = [GPG, GPG_BATCH_OPTION, GPG_YES_OPTION, GPG_DECRYPT_OPTION]
        # if a keyring is specified in the conf, use it, else use the default one
        if keyring_path != "":
            cmd += [GPG_NO_DEFAULT_KEYRING_OPTION, GPG_KEYRING_OPTION, keyring_path]
        cmd += [GPG_OUTPUT_OPTION, output_file_path, signed_file_path]

        # temporary workaround for the omi/gpg bug causing gpg to create a .gpg
        # folder in the wrong home dir; only apply the workaround for oms installation
        env = None
        if os.name.lower() != "nt" and "nxOMSAutomationWorkerResource" in os.path.abspath(__file__):
            import pwd
            env = os.environ.copy()
            current_username = pwd.getpwuid(os.getuid()).pw_name
            if "omsagent" in current_username:
                env["HOME"] = "/var/opt/microsoft/omsagent/run"
            elif "nxautomation" in current_username:
                env["HOME"] = "/var/opt/microsoft/omsagent/run/automationworker"

        proc = subprocessfactory.create_subprocess(cmd=cmd,
                                                   env=env,
                                                   stdout=subprocess.PIPE,
                                                   stderr=subprocess.PIPE)
        stdout, stderr = proc.communicate()
        # normalize to str for the tracer: communicate() returns bytes
        # when this module runs under python 3
        stderr = stderr.decode() if isinstance(stderr, bytes) else stderr

        if proc.poll() == 0:
            tracer.log_sandbox_job_runbook_signature_validation_succeeded(keyring_path)
            return True
        else:
            tracer.log_sandbox_job_runbook_signature_validation_failed(keyring_path, stderr)

    tracer.log_sandbox_job_runbook_signature_invalid()
    return False
def start_runbook_subprocess(self):
    """Creates the runbook subprocess based on the script language and using
    properties set by the derived class.

    Requires self.base_cmd & self.runbook_file_path to be set by derived class.
    """
    cmd = self.base_cmd + [self.runbook.runbook_file_path]

    job_parameters = self.job_data.parameters
    if job_parameters is not None and len(job_parameters) > 0:
        for parameter in job_parameters:
            # json.loads can return non-string values (numbers, lists, ...);
            # argv entries have to be strings for subprocess creation
            cmd += [str(json.loads(parameter["Value"]))]

    # copy the current process env vars and add the job-specific ones
    # (the runbook needs PYTHONPATH to resolve the worker sources)
    env = os.environ.copy()
    env.update({
        "AUTOMATION_JOB_ID": str(self.job_data.job_id),
        "PYTHONPATH": str(configuration.get_source_directory_path())
    })  # windows env have to be str (not unicode)

    self.runbook_subprocess = subprocessfactory.create_subprocess(
        cmd=cmd, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def routine(self):
    """Fetches pending sandbox actions and starts a sandbox subprocess
    for each one."""
    self.stop_tracking_terminated_sandbox()

    actions = self.jrds_client.get_sandbox_actions()
    tracer.log_debug_trace("Get sandbox action. Found " + str(len(actions)) + " action(s).")

    for action in actions:
        tracer.log_worker_sandbox_action_found(len(actions))
        sandbox_id = str(action["SandboxId"])

        # prevent duplicate sandbox from running
        if sandbox_id in self.running_sandboxes:
            return

        # create sandboxes folder if needed
        sandboxes_base_path = "sandboxes"
        working_dir = os.path.join(configuration.get_working_directory_path(),
                                   sandboxes_base_path,
                                   sandbox_id)
        try:
            iohelper.assert_or_create_path(working_dir)
        except OSError:
            tracer.log_debug_trace("Failed to create sandbox folder.")

        launch_cmd = ["python",
                      os.path.join(configuration.get_source_directory_path(), "sandbox.py")]
        child_env = {"sandbox_id": sandbox_id}
        child = subprocessfactory.create_subprocess(cmd=launch_cmd,
                                                    env=child_env,
                                                    stdout=subprocess.PIPE,
                                                    stderr=subprocess.PIPE,
                                                    cwd=working_dir)
        self.running_sandboxes[sandbox_id] = child
        self.monitor_sandbox_process_outputs(sandbox_id, child)
        tracer.log_worker_starting_sandbox(sandbox_id, str(child.pid))
def start_runbook_subprocess(self):
    """Creates the runbook subprocess based on the script language and using
    properties set by the derived class.

    Requires self.base_cmd & self.runbook_file_path to be set by derived class.
    """
    cmd = self.base_cmd + [self.runbook.runbook_file_path]

    job_parameters = self.job_data["parameters"]
    if job_parameters is not None and len(job_parameters) > 0:
        cmd += [json.loads(parameter["Value"]) for parameter in job_parameters]

    env = os.environ.copy()
    # TODO (dalbe) : review key name
    env["AUTOMATION_JOB_ID"] = str(self.job_data["jobId"])
    # TODO(dalbe): Apply only for Python
    # windows env have to be str (not unicode)
    env["PYTHONPATH"] = str(configuration.get_source_directory_path())

    self.runbook_subprocess = subprocessfactory.create_subprocess(cmd=cmd,
                                                                  env=env,
                                                                  stdout=subprocess.PIPE,
                                                                  stderr=subprocess.PIPE)
def start_runbook_subprocess(self):
    """Creates the runbook subprocess based on the script language and using
    properties set by the derived class.

    Requires self.base_cmd & self.runbook_file_path to be set by derived class.
    """
    runbook_cmd = self.base_cmd + [self.runbook.runbook_file_path]

    params = self.job_data["parameters"]
    if params is not None and len(params) > 0:
        for param in params:
            runbook_cmd.append(json.loads(param["Value"]))

    child_env = os.environ.copy()
    child_env["AUTOMATION_JOB_ID"] = str(self.job_data["jobId"])  # TODO (dalbe) : review key name
    # TODO(dalbe): Apply only for Python
    child_env["PYTHONPATH"] = str(configuration.get_source_directory_path())  # windows env have to be str (not unicode)

    self.runbook_subprocess = subprocessfactory.create_subprocess(cmd=runbook_cmd,
                                                                  env=child_env,
                                                                  stdout=subprocess.PIPE,
                                                                  stderr=subprocess.PIPE)
def issue_request(self, url, headers, method, data): data_file_path = None headers = self.merge_headers(self.default_headers, headers) # if a body is included, write it to a temporary file (prevent body from leaking in ps/top) if method != self.GET and data is not None: serialized_data = self.json.dumps(data) # write data to disk data_file_name = base64.standard_b64encode(str(time.time()) + str(random.randint(0, sys.maxint)) + str(random.randint(0, sys.maxint)) + str(random.randint(0, sys.maxint)) + str(random.randint(0, sys.maxint))) data_file_path = os.path.join("/tmp", data_file_name) f = open(data_file_path, "wb") f.write(serialized_data) f.close() # insert Content-Type header headers.update({self.CONTENT_TYPE_HEADER_KEY: self.APP_JSON_HEADER_VALUE}) # ** nesting of try statement is required since try/except/finally isn't supported prior to 2.5 ** try: try: cmd = self.build_request_cmd(url, headers, method=method, data_file_path=data_file_path) env = os.environ.copy() p = subprocessfactory.create_subprocess(cmd, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate() if p.returncode != EXIT_SUCCESS: raise Exception("Http request failed due to curl error. [returncode=" + str(p.returncode) + "]" + "[stderr=" + str(err) + "]") return self.parse_raw_output(out) except Exception, e: raise Exception("Unknown exception while issuing request. [exception=" + str(e) + "]" + "[stacktrace=" + str(traceback.format_exc()) + "]") finally: if data_file_path is not None: os.remove(data_file_path)
def verify_signature(signed_file_path, output_file_path):
    """Verifies the signed file's signature.

    Args:
        signed_file_path : string, path to the gpg-signed file.
        output_file_path : string, path where gpg writes the decrypted payload.

    Returns:
        True : If the signature is valid.
        False : If the signature is invalid.
    """
    # Take a copy of the base command. The in-place "+=" below would otherwise
    # append to the shared GPG_DECRYPT_BASE_CMD module constant, making every
    # subsequent call accumulate stale arguments.
    cmd = list(GPG_DECRYPT_BASE_CMD)
    keyring_path = configuration.get_gpg_public_keyring_path()

    # if a keyring is specified in the conf, use it, else use the default one
    if keyring_path != "":
        cmd += [GPG_NO_DEFAULT_KEYRING_OPTION, GPG_KEYRING_ARG, keyring_path]
    cmd += ["--output", output_file_path, signed_file_path]

    proc = subprocessfactory.create_subprocess(cmd=cmd,
                                               stdout=subprocess.PIPE,
                                               stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()

    if proc.poll() == 0:
        tracer.log_debug_trace("Signature is valid.")
        return True

    tracer.log_debug_trace("Signature is invalid.[exception=" + str(stderr) + "]")
    return False