def __init__(self, job_info):
    """Initialize the scan runner.

    :param job_info: dict describing the scan job; the ``namespace``
        key is read and forwarded as-is to downstream workers.
    """
    # initializing logger
    load_logger()
    self.logger = logging.getLogger('scan-worker')
    self.job_info = job_info

    docker_host = "127.0.0.1"
    docker_port = "4243"
    # Ensure the attribute always exists: if the connection below fails,
    # the original code logged and continued with self.conn undefined,
    # so any later use raised AttributeError instead of a clear error.
    self.conn = None
    try:
        # docker client connection to CentOS 7 system
        self.conn = docker.Client(
            base_url="tcp://{}:{}".format(docker_host, docker_port))
    except Exception as e:
        self.logger.fatal(
            "Error connecting to Docker daemon. Error {}".format(e),
            exc_info=True)

    # Receive and send `namespace` key-value as is
    self.job_namespace = job_info.get('namespace')

    # Map of scanner image names to the handler class that runs them
    self.scanners = {
        "registry.centos.org/pipeline-images/pipeline-scanner":
            PipelineScanner,
        "registry.centos.org/pipeline-images/scanner-rpm-verify":
            ScannerRPMVerify,
        "registry.centos.org/pipeline-images/misc-package-updates":
            MiscPackageUpdates,
        "registry.centos.org/pipeline-images/"
        "container-capabilities-scanner": ContainerCapabilities
    }
def __init__(self):
    """Initialize the RPM-verify scanner."""
    # Configure logging first so any later failure is captured
    load_logger()
    self.logger = logging.getLogger('scan-worker')

    # Short and fully-qualified names of the scanner image
    self.scanner_name = "scanner-rpm-verify"
    self.full_scanner_name = (
        "registry.centos.org/pipeline-images/scanner-rpm-verify")

    # Atomic helper used to drive the scan
    self.atomic_object = Atomic()
def __init__(self, job):
    """Initialize runner.

    :param job: job object this runner will process
    """
    # Set up logging before anything else
    load_logger()
    self.logger = logging.getLogger('scan-worker')

    # Docker daemon connection and the job under processing
    self.docker_conn = self.docker_client()
    self.job = job

    # Every scanner class that will be run against the image
    self.scanners = [
        PipelineScanner,
        ScannerRPMVerify,
        MiscPackageUpdates,
        ContainerCapabilities,
    ]
def __init__(self, job):
    """Initialize runner with a job and register all scanners."""
    # Logging setup
    load_logger()
    self.logger = logging.getLogger('scan-worker')

    # Keep a handle to the docker daemon and the incoming job
    self.docker_conn = self.docker_client()
    self.job = job

    # Registered scanner classes, executed in this order
    self.scanners = [
        PipelineScanner,
        ScannerRPMVerify,
        MiscPackageUpdates,
        ContainerCapabilities,
    ]
def __init__(self, image_under_test, scanner_name, full_scanner_name,
             to_process_output):
    """Scanner initialization.

    :param image_under_test: image name/reference being scanned
    :param scanner_name: short scanner name (provided by child class)
    :param full_scanner_name: fully qualified scanner image name
        (provided by child class)
    :param to_process_output: whether the scanner output needs
        post-processing
    """
    # Values supplied by the concrete scanner subclass
    self.scanner_name = scanner_name
    self.full_scanner_name = full_scanner_name
    self.image_under_test = image_under_test
    self.to_process_output = to_process_output

    # Resolve the image id of the image under test via atomic
    self.image_id = Atomic().get_input_id(self.image_under_test)

    # Add logger
    load_logger()
    self.logger = logging.getLogger('scan-worker')
def __init__(self):
    """Initialize the pipeline scanner and its docker connection."""
    self.scanner_name = "pipeline-scanner"
    self.full_scanner_name = \
        "registry.centos.org/pipeline-images/pipeline-scanner"
    # Atomic mount helper for the image under test
    self.mount_object = mount.Mount()

    # Add logger
    load_logger()
    self.logger = logging.getLogger('scan-worker')

    docker_host = "127.0.0.1"
    docker_port = "4243"
    # Ensure the attribute always exists: if the connection below fails,
    # the original code logged and continued with self.conn undefined,
    # so any later use raised AttributeError instead of a clear error.
    self.conn = None
    try:
        # docker client connection to CentOS 7 system
        self.conn = docker.Client(
            base_url="tcp://{}:{}".format(docker_host, docker_port))
    except Exception as e:
        self.logger.fatal(
            "Error connecting to Docker daemon. Error {}".format(e),
            exc_info=True)
def __init__(self, image, scanner, result_file):
    """Initialize scan metadata and the atomic mount helper.

    :param image: container/image under test
    :param scanner: scanner name as installed (not the full URL)
    :param result_file: name of the output result file produced by
        the scanner
    """
    self.image = image
    self.scanner = scanner
    self.result_file = result_file

    # Resolve the image id of the image under test via atomic
    self.image_id = Atomic().get_input_id(self.image)

    # set logger or set console
    load_logger()
    self.logger = logging.getLogger("scan-worker")

    # Tracks whether the image is currently mounted locally
    self.is_mounted = False
    # Mount location on the local filesystem: /<image_id>
    self.image_mountpath = os.path.join("/", self.image_id)

    # Configure the atomic mount object: which image, read/write
    # access, and where to mount it
    self.mount_obj = mount.Mount()
    self.mount_obj.image = self.image_id
    self.mount_obj.options = ["rw"]
    self.mount_obj.mountpoint = self.image_mountpath
def handle_delivery_failure(self):
    """
    Put the job back on the master tube for a later delivery attempt
    and request that the user be notified about the failure.
    """
    # Mark the build as failed and route the job to the notifier
    self.job["build_status"] = False
    self.job["action"] = "notify_user"
    self.queue.put(json.dumps(self.job), "master_tube")
    self.logger.warning(
        "Delivery is not successful. Notifying the user.")


if __name__ == "__main__":
    load_logger()
    logger = logging.getLogger('delivery-worker')
    worker = DeliveryWorker(
        logger, sub='start_delivery', pub='delivery_failed')
    worker.run()
Handler job pushed to master tube """ action = job.get('action') if action not in self.ACTIONS: self.logger.debug('Unknown action: {}'.format(action)) return # The name of tube and action are same self.queue.put(json.dumps(job), action) self.logger.info('Moved job to tube: {}'.format(action)) def run(self): """Run worker""" while True: job_obj = self.queue.get() job = json.loads(job_obj.body) self.logger.info('Got job: {}'.format(job)) try: self.handle_job(job) except Exception as e: self.logger.error( 'Error in handling job: {}\nJob details: {}'.format( e, job), extra={'locals': locals()}, exc_info=True) self.queue.delete(job_obj) if __name__ == '__main__': load_logger() logger = logging.getLogger('dispatcher') worker = DispatcherWorker(logger, sub='master_tube') worker.run()