Example #1
    def __init__(self, config, connections=None):
        connections = connections or {}
        self.config = config

        merge_root = get_default(self.config, 'merger', 'git_dir',
                                 '/var/lib/zuul/merger-git')
        merge_email = get_default(self.config, 'merger', 'git_user_email')
        merge_name = get_default(self.config, 'merger', 'git_user_name')
        speed_limit = get_default(
            config, 'merger', 'git_http_low_speed_limit', '1000')
        speed_time = get_default(
            config, 'merger', 'git_http_low_speed_time', '30')
        git_timeout = get_default(config, 'merger', 'git_timeout', 300)
        self.merger = merger.Merger(
            merge_root, connections, merge_email, merge_name, speed_limit,
            speed_time, git_timeout=git_timeout)
        self.command_map = dict(
            stop=self.stop)
        command_socket = get_default(
            self.config, 'merger', 'command_socket',
            '/var/lib/zuul/merger.socket')
        self.command_socket = commandsocket.CommandSocket(command_socket)

        self.jobs = {
            'merger:merge': self.merge,
            'merger:cat': self.cat,
            'merger:refstate': self.refstate,
            'merger:fileschanges': self.fileschanges,
        }
        self.gearworker = ZuulGearWorker(
            'Zuul Merger',
            'zuul.MergeServer',
            'merger-gearman-worker',
            self.config,
            self.jobs)
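
All of these examples follow the same construction pattern: build a dict mapping gearman function names to bound handler methods, then hand it to ZuulGearWorker together with a human-readable worker name, a logger name, and a thread name. A minimal sketch of that pattern, inferred from these call sites; the EchoWorker class and its handler are hypothetical and assume the same imports the surrounding examples use (json, logging, ZuulGearWorker):

class EchoWorker(object):
    log = logging.getLogger("zuul.EchoWorker")

    def __init__(self, config):
        self.config = config
        # Map gearman function names to handler methods.
        self.jobs = {
            'zuul:echo': self.handle_echo,
        }
        self.gearworker = ZuulGearWorker(
            'Zuul Echo Worker',     # worker name
            'zuul.EchoWorker',      # logger name
            'echo-gearman-worker',  # thread name
            self.config,
            self.jobs)

    def handle_echo(self, job):
        # Decode the JSON request and send it straight back.
        args = json.loads(job.arguments)
        job.sendWorkComplete(json.dumps(args))
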
Example #2
    def __init__(self, config, component, connections=None):
        self.connections = connections or {}
        self.merge_email = get_default(config, 'merger', 'git_user_email')
        self.merge_name = get_default(config, 'merger', 'git_user_name')
        self.merge_speed_limit = get_default(config, 'merger',
                                             'git_http_low_speed_limit',
                                             '1000')
        self.merge_speed_time = get_default(config, 'merger',
                                            'git_http_low_speed_time', '30')
        self.git_timeout = get_default(config, 'merger', 'git_timeout', 300)

        self.merge_root = get_default(config, component, 'git_dir',
                                      '/var/lib/zuul/{}-git'.format(component))

        # This merger and its git repos are used to maintain
        # up-to-date copies of all the repos that are used by jobs, as
        # well as to support the merger:cat function to supply
        # configuration information to Zuul when it starts.
        self.merger = self._getMerger(self.merge_root, None)

        self.config = config

        # Repo locking is needed on the executor
        self.repo_locks = self._repo_locks_class()

        self.merger_jobs = {
            'merger:merge': self.merge,
            'merger:cat': self.cat,
            'merger:refstate': self.refstate,
            'merger:fileschanges': self.fileschanges,
        }
        self.merger_gearworker = ZuulGearWorker('Zuul Merger',
                                                'zuul.BaseMergeServer',
                                                'merger-gearman-worker',
                                                self.config, self.merger_jobs)
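
Both merge servers read their settings through get_default, which takes the config, a section name, an option name, and an optional fallback. A sketch of the semantics these call sites imply, assuming a ConfigParser-style config object (Zuul's real helper may do more, such as user-path expansion):

def get_default(config, section, option, default=None):
    # Return the configured value if present, else the fallback.
    if config.has_option(section, option):
        return config.get(section, option)
    return default
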
Example #3
    def __init__(self, connection):
        self.config = connection.sched.config
        self.connection = connection
        handler = "gitlab:%s:payload" % self.connection.connection_name
        self.jobs = {
            handler: self.handle_payload,
        }
        self.gearworker = ZuulGearWorker('Zuul Gitlab Worker',
                                         'zuul.GitlabGearmanWorker', 'gitlab',
                                         self.config, self.jobs)
Example #4
class GitlabGearmanWorker(object):
    """A thread that answers gearman requests"""
    log = logging.getLogger("zuul.GitlabGearmanWorker")

    def __init__(self, connection):
        self.config = connection.sched.config
        self.connection = connection
        handler = "gitlab:%s:payload" % self.connection.connection_name
        self.jobs = {
            handler: self.handle_payload,
        }
        self.gearworker = ZuulGearWorker(
            'Zuul Gitlab Worker',
            'zuul.GitlabGearmanWorker',
            'gitlab',
            self.config,
            self.jobs)

    def handle_payload(self, job):
        args = json.loads(job.arguments)
        payload = args["payload"]

        self.log.info(
            "Gitlab Webhook Received event kind: %(object_kind)s" % payload)

        try:
            self.__dispatch_event(payload)
            output = {'return_code': 200}
        except Exception:
            output = {'return_code': 503}
            self.log.exception("Exception handling Gitlab event:")

        job.sendWorkComplete(json.dumps(output))

    def __dispatch_event(self, payload):
        self.log.info(payload)
        event = payload['object_kind']
        try:
            self.log.info("Dispatching event %s" % event)
            self.connection.addEvent(payload, event)
        except Exception as err:
            message = 'Exception dispatching event: %s' % str(err)
            self.log.exception(message)
            raise Exception(message)

    def start(self):
        self.gearworker.start()

    def stop(self):
        self.gearworker.stop()
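
handle_payload expects job.arguments to be a JSON object with a 'payload' key holding the raw GitLab webhook body, from which it reads 'object_kind' to dispatch the event. A hypothetical minimal argument, shown as the dict json.loads would produce (field values are illustrative only):

args = {
    'payload': {
        'object_kind': 'merge_request',
        'project': {'path_with_namespace': 'group/repo'},
    },
}
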
Example #5
    def __init__(self, config, sched):
        self.config = config
        self.sched = sched

        self.jobs = {}

        for func in self.functions:
            f = getattr(self, 'handle_%s' % func)
            self.jobs['zuul:%s' % func] = f
        self.gearworker = ZuulGearWorker(
            'Zuul RPC Listener',
            self.log.name,
            self.thread_name,
            self.config,
            self.jobs)
Example #6
class RPCListenerBase(metaclass=ABCMeta):
    log = logging.getLogger("zuul.RPCListenerBase")
    thread_name = 'zuul-rpc-gearman-worker'
    functions = []  # type: List[str]

    def __init__(self, config, sched):
        self.config = config
        self.sched = sched

        self.jobs = {}

        for func in self.functions:
            f = getattr(self, 'handle_%s' % func)
            self.jobs['zuul:%s' % func] = f
        self.gearworker = ZuulGearWorker(
            'Zuul RPC Listener',
            self.log.name,
            self.thread_name,
            self.config,
            self.jobs)

    def start(self):
        self.gearworker.start()

    def stop(self):
        self.log.debug("Stopping")
        self.gearworker.stop()
        self.log.debug("Stopped")

    def join(self):
        self.gearworker.join()
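
A subclass of RPCListenerBase only needs to list its function names and supply a matching handle_<name> method for each; the base __init__ wires them up with getattr. A hypothetical subclass illustrating that contract:

class PingRPCListener(RPCListenerBase):
    log = logging.getLogger("zuul.PingRPCListener")
    functions = ['ping']

    def handle_ping(self, job):
        # Registered as the gearman function 'zuul:ping'.
        job.sendWorkComplete(json.dumps('pong'))
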
Example #7
    def __init__(self, config, sched):
        self.config = config
        self.sched = sched

        self.jobs = {}
        functions = [
            'autohold',
            'autohold_delete',
            'autohold_info',
            'autohold_list',
            'allowed_labels_get',
            'dequeue',
            'enqueue',
            'enqueue_ref',
            'promote',
            'get_admin_tenants',
            'get_running_jobs',
            'get_job_log_stream_address',
            'tenant_list',
            'tenant_sql_connection',
            'status_get',
            'job_get',
            'job_list',
            'project_get',
            'project_list',
            'project_freeze_jobs',
            'pipeline_list',
            'key_get',
            'config_errors_list',
            'connection_list',
            'authorize_user',
        ]
        for func in functions:
            f = getattr(self, 'handle_%s' % func)
            self.jobs['zuul:%s' % func] = f
        self.gearworker = ZuulGearWorker('Zuul RPC Listener',
                                         'zuul.RPCListener',
                                         'zuul-rpc-gearman-worker',
                                         self.config, self.jobs)
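
Each listed function is registered under the gearman name 'zuul:<name>' and receives its arguments as a JSON string. A client-side sketch, assuming the OpenStack gear client API and a gearman server on localhost; the polling loop is illustrative, not how Zuul's own clients wait:

import json
import time

import gear

client = gear.Client()
client.addServer('localhost', 4730)
client.waitForServer()

job = gear.Job(b'zuul:tenant_list', b'{}')
client.submitJob(job)
while not job.complete:
    time.sleep(0.1)
print(json.loads(b''.join(job.data)))
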
Example #8
class RPCListener(object):
    log = logging.getLogger("zuul.RPCListener")

    def __init__(self, config, sched):
        self.config = config
        self.sched = sched

        self.jobs = {}
        functions = [
            'autohold',
            'autohold_delete',
            'autohold_info',
            'autohold_list',
            'allowed_labels_get',
            'dequeue',
            'enqueue',
            'enqueue_ref',
            'promote',
            'get_admin_tenants',
            'get_running_jobs',
            'get_job_log_stream_address',
            'tenant_list',
            'tenant_sql_connection',
            'status_get',
            'job_get',
            'job_list',
            'project_get',
            'project_list',
            'project_freeze_jobs',
            'pipeline_list',
            'key_get',
            'config_errors_list',
            'connection_list',
            'authorize_user',
        ]
        for func in functions:
            f = getattr(self, 'handle_%s' % func)
            self.jobs['zuul:%s' % func] = f
        self.gearworker = ZuulGearWorker('Zuul RPC Listener',
                                         'zuul.RPCListener',
                                         'zuul-rpc-gearman-worker',
                                         self.config, self.jobs)

    def start(self):
        self.gearworker.start()

    def stop(self):
        self.log.debug("Stopping")
        self.gearworker.stop()
        self.log.debug("Stopped")

    def join(self):
        self.gearworker.join()

    def handle_dequeue(self, job):
        args = json.loads(job.arguments)
        tenant_name = args['tenant']
        pipeline_name = args['pipeline']
        project_name = args['project']
        change = args['change']
        ref = args['ref']
        try:
            self.sched.dequeue(tenant_name, pipeline_name, project_name,
                               change, ref)
        except Exception as e:
            job.sendWorkException(str(e).encode('utf8'))
            return
        job.sendWorkComplete()

    def handle_autohold_info(self, job):
        args = json.loads(job.arguments)
        request_id = args['request_id']
        try:
            data = self.sched.autohold_info(request_id)
        except Exception as e:
            job.sendWorkException(str(e).encode('utf8'))
            return
        job.sendWorkComplete(json.dumps(data))

    def handle_autohold_delete(self, job):
        args = json.loads(job.arguments)
        request_id = args['request_id']
        try:
            self.sched.autohold_delete(request_id)
        except Exception as e:
            job.sendWorkException(str(e).encode('utf8'))
            return
        job.sendWorkComplete()

    def handle_autohold_list(self, job):
        data = self.sched.autohold_list()
        job.sendWorkComplete(json.dumps(data))

    def handle_autohold(self, job):
        args = json.loads(job.arguments)
        params = {}

        tenant = self.sched.abide.tenants.get(args['tenant'])
        if tenant:
            params['tenant_name'] = args['tenant']
        else:
            error = "Invalid tenant: %s" % args['tenant']
            job.sendWorkException(error.encode('utf8'))
            return

        (trusted, project) = tenant.getProject(args['project'])
        if project:
            params['project_name'] = project.canonical_name
        else:
            error = "Invalid project: %s" % args['project']
            job.sendWorkException(error.encode('utf8'))
            return

        if args['change'] and args['ref']:
            job.sendWorkException("Change and ref can't both be used "
                                  "for the same request".encode('utf8'))
            return

        if args['change']:
            # Convert change into ref based on zuul connection
            ref_filter = project.source.getRefForChange(args['change'])
        elif args['ref']:
            ref_filter = "%s" % args['ref']
        else:
            ref_filter = ".*"

        params['job_name'] = args['job']
        params['ref_filter'] = ref_filter
        params['reason'] = args['reason']

        if args['count'] < 0:
            error = "Invalid count: %d" % args['count']
            job.sendWorkException(error.encode('utf8'))
            return

        params['count'] = args['count']
        params['node_hold_expiration'] = args['node_hold_expiration']

        self.sched.autohold(**params)
        job.sendWorkComplete()

    def _common_enqueue(self, job):
        args = json.loads(job.arguments)
        event = model.TriggerEvent()
        event.timestamp = time.time()
        errors = ''
        tenant = None
        project = None
        pipeline = None

        tenant = self.sched.abide.tenants.get(args['tenant'])
        if tenant:
            event.tenant_name = args['tenant']

            (trusted, project) = tenant.getProject(args['project'])
            if project:
                event.project_hostname = project.canonical_hostname
                event.project_name = project.name
            else:
                errors += 'Invalid project: %s\n' % (args['project'], )

            pipeline = tenant.layout.pipelines.get(args['pipeline'])
            if pipeline:
                event.forced_pipeline = args['pipeline']
            else:
                errors += 'Invalid pipeline: %s\n' % (args['pipeline'], )
        else:
            errors += 'Invalid tenant: %s\n' % (args['tenant'], )

        return (args, event, errors, project)

    def handle_enqueue(self, job):
        (args, event, errors, project) = self._common_enqueue(job)

        if not errors:
            event.change_number, event.patch_number = args['change'].split(',')
            try:
                ch = project.source.getChange(event, refresh=True)
                if ch.project.name != project.name:
                    errors += ('Change %s does not belong to project "%s", ' %
                               (args['change'], project.name))
            except Exception:
                errors += 'Invalid change: %s\n' % (args['change'], )

        if errors:
            job.sendWorkException(errors.encode('utf8'))
        else:
            self.sched.enqueue(event)
            job.sendWorkComplete()

    def handle_enqueue_ref(self, job):
        (args, event, errors, project) = self._common_enqueue(job)

        if not errors:
            event.ref = args['ref']
            event.oldrev = args['oldrev']
            event.newrev = args['newrev']
            try:
                int(event.oldrev, 16)
                if len(event.oldrev) != 40:
                    errors += 'Old rev must be 40 character sha1: ' \
                              '%s\n' % event.oldrev
            except Exception:
                errors += 'Old rev must be base16 hash: ' \
                          '%s\n' % event.oldrev
            try:
                int(event.newrev, 16)
                if len(event.newrev) != 40:
                    errors += 'New rev must be 40 character sha1: ' \
                              '%s\n' % event.newrev
            except Exception:
                errors += 'New rev must be base16 hash: ' \
                          '%s\n' % event.newrev

        if errors:
            job.sendWorkException(errors.encode('utf8'))
        else:
            self.sched.enqueue(event)
            job.sendWorkComplete()

    def handle_promote(self, job):
        args = json.loads(job.arguments)
        tenant_name = args['tenant']
        pipeline_name = args['pipeline']
        change_ids = args['change_ids']
        self.sched.promote(tenant_name, pipeline_name, change_ids)
        job.sendWorkComplete()

    def handle_get_running_jobs(self, job):
        # args = json.loads(job.arguments)
        # TODO: use args to filter by pipeline etc
        running_items = []
        for tenant in self.sched.abide.tenants.values():
            for pipeline_name, pipeline in tenant.layout.pipelines.items():
                for queue in pipeline.queues:
                    for item in queue.queue:
                        running_items.append(item.formatJSON())

        job.sendWorkComplete(json.dumps(running_items))

    def handle_get_job_log_stream_address(self, job):
        # TODO: map log files to ports. Currently there is only one
        #       log stream for a given job. But many jobs produce many
        #       log files, so this is forwards compatible with a future
        #       where there are more logs to potentially request than
        #       "console.log"
        def find_build(uuid):
            for tenant in self.sched.abide.tenants.values():
                for pipeline_name, pipeline in tenant.layout.pipelines.items():
                    for queue in pipeline.queues:
                        for item in queue.queue:
                            for bld in item.current_build_set.getBuilds():
                                if bld.uuid == uuid:
                                    return bld
            return None

        args = json.loads(job.arguments)
        uuid = args['uuid']
        # TODO: logfile = args['logfile']
        job_log_stream_address = {}
        build = find_build(uuid)
        if build:
            job_log_stream_address['server'] = build.worker.hostname
            job_log_stream_address['port'] = build.worker.log_port
        job.sendWorkComplete(json.dumps(job_log_stream_address))

    def _is_authorized(self, tenant, claims):
        authorized = False
        if tenant:
            rules = tenant.authorization_rules
            for rule in rules:
                if rule not in self.sched.abide.admin_rules.keys():
                    self.log.error('Undefined rule "%s"' % rule)
                    continue
                debug_msg = ('Applying rule "%s" from tenant '
                             '"%s" to claims %s')
                self.log.debug(debug_msg % (rule, tenant, json.dumps(claims)))
                authorized = self.sched.abide.admin_rules[rule](claims, tenant)
                if authorized:
                    if '__zuul_uid_claim' in claims:
                        uid = claims['__zuul_uid_claim']
                    else:
                        uid = json.dumps(claims)
                    msg = '%s authorized on tenant "%s" by rule "%s"'
                    self.log.info(msg % (uid, tenant, rule))
                    break
        return authorized

    def handle_authorize_user(self, job):
        args = json.loads(job.arguments)
        tenant_name = args['tenant']
        claims = args['claims']
        tenant = self.sched.abide.tenants.get(tenant_name)
        authorized = self._is_authorized(tenant, claims)
        job.sendWorkComplete(json.dumps(authorized))

    def handle_get_admin_tenants(self, job):
        args = json.loads(job.arguments)
        claims = args['claims']
        admin_tenants = []
        for tenant_name, tenant in self.sched.abide.tenants.items():
            if self._is_authorized(tenant, claims):
                admin_tenants.append(tenant_name)
        job.sendWorkComplete(json.dumps(admin_tenants))

    def handle_tenant_list(self, job):
        output = []
        for tenant_name, tenant in self.sched.abide.tenants.items():
            queue_size = 0
            for pipeline_name, pipeline in tenant.layout.pipelines.items():
                for queue in pipeline.queues:
                    for item in queue.queue:
                        if item.live:
                            queue_size += 1

            output.append({
                'name': tenant_name,
                'projects': len(tenant.untrusted_projects),
                'queue': queue_size
            })
        job.sendWorkComplete(json.dumps(output))

    def handle_tenant_sql_connection(self, job):
        args = json.loads(job.arguments)
        sql_driver = self.sched.connections.drivers['sql']
        conn = sql_driver.tenant_connections.get(args['tenant'])
        if conn:
            name = conn.connection_name
        else:
            name = ''
        job.sendWorkComplete(json.dumps(name))

    def handle_status_get(self, job):
        args = json.loads(job.arguments)
        output = self.sched.formatStatusJSON(args.get("tenant"))
        job.sendWorkComplete(output)

    def handle_job_get(self, gear_job):
        args = json.loads(gear_job.arguments)
        tenant = self.sched.abide.tenants.get(args.get("tenant"))
        if not tenant:
            gear_job.sendWorkComplete(json.dumps(None))
            return
        jobs = tenant.layout.jobs.get(args.get("job"), [])
        output = []
        for job in jobs:
            output.append(job.toDict(tenant))
        gear_job.sendWorkComplete(json.dumps(output, cls=ZuulJSONEncoder))

    def handle_job_list(self, job):
        args = json.loads(job.arguments)
        tenant = self.sched.abide.tenants.get(args.get("tenant"))
        output = []
        if not tenant:
            job.sendWorkComplete(json.dumps(None))
            return
        for job_name in sorted(tenant.layout.jobs):
            desc = None
            tags = set()
            variants = []
            for variant in tenant.layout.jobs[job_name]:
                if not desc and variant.description:
                    desc = variant.description.split('\n')[0]
                if variant.tags:
                    tags.update(list(variant.tags))
                job_variant = {}
                if not variant.isBase():
                    if variant.parent:
                        job_variant['parent'] = str(variant.parent)
                    else:
                        job_variant['parent'] = tenant.default_base_job
                branches = variant.getBranches()
                if branches:
                    job_variant['branches'] = branches
                if job_variant:
                    variants.append(job_variant)

            job_output = {
                "name": job_name,
            }
            if desc:
                job_output["description"] = desc
            if variants:
                job_output["variants"] = variants
            if tags:
                job_output["tags"] = list(tags)
            output.append(job_output)
        job.sendWorkComplete(json.dumps(output))

    def handle_project_get(self, gear_job):
        args = json.loads(gear_job.arguments)
        tenant = self.sched.abide.tenants.get(args["tenant"])
        if not tenant:
            gear_job.sendWorkComplete(json.dumps(None))
            return
        trusted, project = tenant.getProject(args["project"])
        if not project:
            gear_job.sendWorkComplete(json.dumps({}))
            return
        result = project.toDict()
        result['configs'] = []
        configs = tenant.layout.getAllProjectConfigs(project.canonical_name)
        for config_obj in configs:
            config = config_obj.toDict()
            config['pipelines'] = []
            for pipeline_name, pipeline_config in sorted(
                    config_obj.pipelines.items()):
                pipeline = pipeline_config.toDict()
                pipeline['name'] = pipeline_name
                pipeline['jobs'] = []
                for jobs in pipeline_config.job_list.jobs.values():
                    job_list = []
                    for job in jobs:
                        job_list.append(job.toDict(tenant))
                    pipeline['jobs'].append(job_list)
                config['pipelines'].append(pipeline)
            result['configs'].append(config)

        gear_job.sendWorkComplete(json.dumps(result, cls=ZuulJSONEncoder))

    def handle_project_list(self, job):
        args = json.loads(job.arguments)
        tenant = self.sched.abide.tenants.get(args.get("tenant"))
        if not tenant:
            job.sendWorkComplete(json.dumps(None))
            return
        output = []
        for project in tenant.config_projects:
            pobj = project.toDict()
            pobj['type'] = "config"
            output.append(pobj)
        for project in tenant.untrusted_projects:
            pobj = project.toDict()
            pobj['type'] = "untrusted"
            output.append(pobj)
        job.sendWorkComplete(
            json.dumps(sorted(output, key=lambda project: project["name"])))

    def handle_project_freeze_jobs(self, gear_job):
        args = json.loads(gear_job.arguments)
        tenant = self.sched.abide.tenants.get(args.get("tenant"))
        project = None
        pipeline = None
        if tenant:
            (trusted, project) = tenant.getProject(args.get("project"))
            pipeline = tenant.layout.pipelines.get(args.get("pipeline"))
        if not project or not pipeline:
            gear_job.sendWorkComplete(json.dumps(None))
            return

        change = model.Branch(project)
        change.branch = args.get("branch", "master")
        queue = model.ChangeQueue(pipeline)
        item = model.QueueItem(queue, change, None)
        item.layout = tenant.layout
        item.freezeJobGraph(skip_file_matcher=True)

        output = []

        for job in item.job_graph.getJobs():
            job.setBase(tenant.layout)
            output.append({
                'name':
                job.name,
                'dependencies':
                list(map(lambda x: x.toDict(), job.dependencies)),
            })

        gear_job.sendWorkComplete(json.dumps(output))

    def handle_allowed_labels_get(self, job):
        args = json.loads(job.arguments)
        tenant = self.sched.abide.tenants.get(args.get("tenant"))
        if not tenant:
            job.sendWorkComplete(json.dumps(None))
            return
        ret = {}
        ret['allowed_labels'] = tenant.allowed_labels or []
        ret['disallowed_labels'] = tenant.disallowed_labels or []
        job.sendWorkComplete(json.dumps(ret))

    def handle_pipeline_list(self, job):
        args = json.loads(job.arguments)
        tenant = self.sched.abide.tenants.get(args.get("tenant"))
        if not tenant:
            job.sendWorkComplete(json.dumps(None))
            return
        output = []
        for pipeline, pipeline_config in tenant.layout.pipelines.items():
            triggers = []
            for trigger in pipeline_config.triggers:
                if isinstance(trigger.connection, BaseConnection):
                    name = trigger.connection.connection_name
                else:
                    # Triggers not based on a connection don't set
                    # this attribute
                    name = trigger.name
                triggers.append({
                    "name": name,
                    "driver": trigger.driver.name,
                })
            output.append({"name": pipeline, "triggers": triggers})
        job.sendWorkComplete(json.dumps(output))

    def handle_key_get(self, job):
        args = json.loads(job.arguments)
        tenant = self.sched.abide.tenants.get(args.get("tenant"))
        project = None
        if tenant:
            (trusted, project) = tenant.getProject(args.get("project"))
        if not project:
            job.sendWorkComplete("")
            return
        keytype = args.get('key', 'secrets')
        if keytype == 'secrets':
            job.sendWorkComplete(
                encryption.serialize_rsa_public_key(
                    project.public_secrets_key))
        elif keytype == 'ssh':
            job.sendWorkComplete(project.public_ssh_key)
        else:
            job.sendWorkComplete("")
            return

    def handle_config_errors_list(self, job):
        args = json.loads(job.arguments)
        tenant = self.sched.abide.tenants.get(args.get("tenant"))
        output = []
        if not tenant:
            job.sendWorkComplete(json.dumps(None))
            return
        for err in tenant.layout.loading_errors.errors:
            output.append({
                'source_context': err.key.context.toDict(),
                'error': err.error
            })
        job.sendWorkComplete(json.dumps(output))

    def handle_connection_list(self, job):
        output = []
        for source in self.sched.connections.getSources():
            output.append(source.connection.toDict())
        job.sendWorkComplete(json.dumps(output))
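
_is_authorized treats each admin rule as a callable taking (claims, tenant) and returning a boolean, looked up by name in self.sched.abide.admin_rules. A hypothetical rule showing that contract (in Zuul the rules are built from tenant authorization-rule configuration rather than written by hand):

def group_rule(claims, tenant):
    # Authorize any user whose token claims include the
    # 'zuul-admins' group.
    return 'zuul-admins' in claims.get('groups', [])
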
Example #9
class MergeServer(object):
    log = logging.getLogger("zuul.MergeServer")

    def __init__(self, config, connections=None):
        connections = connections or {}
        self.config = config

        merge_root = get_default(self.config, 'merger', 'git_dir',
                                 '/var/lib/zuul/merger-git')
        merge_email = get_default(self.config, 'merger', 'git_user_email')
        merge_name = get_default(self.config, 'merger', 'git_user_name')
        speed_limit = get_default(
            config, 'merger', 'git_http_low_speed_limit', '1000')
        speed_time = get_default(
            config, 'merger', 'git_http_low_speed_time', '30')
        git_timeout = get_default(config, 'merger', 'git_timeout', 300)
        self.merger = merger.Merger(
            merge_root, connections, merge_email, merge_name, speed_limit,
            speed_time, git_timeout=git_timeout)
        self.command_map = dict(
            stop=self.stop)
        command_socket = get_default(
            self.config, 'merger', 'command_socket',
            '/var/lib/zuul/merger.socket')
        self.command_socket = commandsocket.CommandSocket(command_socket)

        self.jobs = {
            'merger:merge': self.merge,
            'merger:cat': self.cat,
            'merger:refstate': self.refstate,
            'merger:fileschanges': self.fileschanges,
        }
        self.gearworker = ZuulGearWorker(
            'Zuul Merger',
            'zuul.MergeServer',
            'merger-gearman-worker',
            self.config,
            self.jobs)

    def start(self):
        self.log.debug("Starting worker")
        self.gearworker.start()

        self._command_running = True
        self.log.debug("Starting command processor")
        self.command_socket.start()
        self.command_thread = threading.Thread(
            target=self.runCommand, name='command')
        self.command_thread.daemon = True
        self.command_thread.start()

    def stop(self):
        self.log.debug("Stopping")
        self.gearworker.stop()
        self._command_running = False
        self.command_socket.stop()
        self.log.debug("Stopped")

    def join(self):
        self.gearworker.join()

    def runCommand(self):
        while self._command_running:
            try:
                command = self.command_socket.get().decode('utf8')
                if command != '_stop':
                    self.command_map[command]()
            except Exception:
                self.log.exception("Exception while processing command")

    def merge(self, job):
        self.log.debug("Got merge job: %s" % job.unique)
        args = json.loads(job.arguments)
        zuul_event_id = args.get('zuul_event_id')
        ret = self.merger.mergeChanges(
            args['items'], args.get('files'),
            args.get('dirs'), args.get('repo_state'),
            branches=args.get('branches'),
            zuul_event_id=zuul_event_id)
        result = dict(merged=(ret is not None))
        if ret is None:
            result['commit'] = result['files'] = result['repo_state'] = None
        else:
            (result['commit'], result['files'], result['repo_state'],
             recent, orig_commit) = ret
        result['zuul_event_id'] = zuul_event_id
        job.sendWorkComplete(json.dumps(result))

    def refstate(self, job):
        self.log.debug("Got refstate job: %s" % job.unique)
        args = json.loads(job.arguments)
        zuul_event_id = args.get('zuul_event_id')
        success, repo_state = self.merger.getRepoState(
            args['items'], branches=args.get('branches'))
        result = dict(updated=success,
                      repo_state=repo_state)
        result['zuul_event_id'] = zuul_event_id
        job.sendWorkComplete(json.dumps(result))

    def cat(self, job):
        self.log.debug("Got cat job: %s" % job.unique)
        args = json.loads(job.arguments)
        self.merger.updateRepo(args['connection'], args['project'])
        files = self.merger.getFiles(args['connection'], args['project'],
                                     args['branch'], args['files'],
                                     args.get('dirs'))
        result = dict(updated=True,
                      files=files)
        job.sendWorkComplete(json.dumps(result))

    def fileschanges(self, job):
        self.log.debug("Got fileschanges job: %s" % job.unique)
        args = json.loads(job.arguments)
        zuul_event_id = args.get('zuul_event_id')
        self.merger.updateRepo(args['connection'], args['project'],
                               zuul_event_id=zuul_event_id)
        files = self.merger.getFilesChanges(
            args['connection'], args['project'], args['branch'], args['tosha'],
            zuul_event_id=zuul_event_id)
        result = dict(updated=True,
                      files=files)
        result['zuul_event_id'] = zuul_event_id
        job.sendWorkComplete(json.dumps(result))
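
Besides its gearman jobs, MergeServer listens on a command socket and dispatches received commands through command_map (here only 'stop'). A sketch of driving it over the unix socket, assuming commandsocket reads newline-terminated commands from a stream socket; the framing is an assumption:

import socket

s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
s.connect('/var/lib/zuul/merger.socket')
s.sendall(b'stop\n')  # dispatched via command_map['stop']
s.close()
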
Example #10
class BaseMergeServer(metaclass=ABCMeta):
    log = logging.getLogger("zuul.BaseMergeServer")

    _repo_locks_class = BaseRepoLocks

    def __init__(self, config, component, connections=None):
        self.connections = connections or {}
        self.merge_email = get_default(config, 'merger', 'git_user_email',
                                       '*****@*****.**')
        self.merge_name = get_default(config, 'merger', 'git_user_name',
                                      'Zuul Merger Default')
        self.merge_speed_limit = get_default(
            config, 'merger', 'git_http_low_speed_limit', '1000')
        self.merge_speed_time = get_default(
            config, 'merger', 'git_http_low_speed_time', '30')
        self.git_timeout = get_default(config, 'merger', 'git_timeout', 300)

        self.merge_root = get_default(config, component, 'git_dir',
                                      '/var/lib/zuul/{}-git'.format(component))

        # This merger and its git repos are used to maintain
        # up-to-date copies of all the repos that are used by jobs, as
        # well as to support the merger:cat function to supply
        # configuration information to Zuul when it starts.
        self.merger = self._getMerger(self.merge_root, None)

        self.config = config

        # Repo locking is needed on the executor
        self.repo_locks = self._repo_locks_class()

        self.merger_jobs = {
            'merger:merge': self.merge,
            'merger:cat': self.cat,
            'merger:refstate': self.refstate,
            'merger:fileschanges': self.fileschanges,
        }
        self.merger_gearworker = ZuulGearWorker(
            'Zuul Merger',
            'zuul.BaseMergeServer',
            'merger-gearman-worker',
            self.config,
            self.merger_jobs)

    def _getMerger(self, root, cache_root, logger=None):
        return merger.Merger(
            root, self.connections, self.merge_email, self.merge_name,
            self.merge_speed_limit, self.merge_speed_time, cache_root, logger,
            execution_context=True, git_timeout=self.git_timeout)

    def _repoLock(self, connection_name, project_name):
        # The merger does not need locking so return a null lock.
        return nullcontext()

    def _update(self, connection_name, project_name, zuul_event_id=None):
        self.merger.updateRepo(connection_name, project_name,
                               zuul_event_id=zuul_event_id)

    def start(self):
        self.log.debug('Starting merger worker')
        self.merger_gearworker.start()

    def stop(self):
        self.log.debug('Stopping merger worker')
        self.merger_gearworker.stop()

    def join(self):
        self.merger_gearworker.join()

    def pause(self):
        self.log.debug('Pausing merger worker')
        self.merger_gearworker.unregister()

    def unpause(self):
        self.log.debug('Resuming merger worker')
        self.merger_gearworker.register()

    def cat(self, job):
        self.log.debug("Got cat job: %s" % job.unique)
        args = json.loads(job.arguments)

        connection_name = args['connection']
        project_name = args['project']

        lock = self.repo_locks.getRepoLock(connection_name, project_name)
        try:
            self._update(connection_name, project_name)
            with lock:
                files = self.merger.getFiles(connection_name, project_name,
                                             args['branch'], args['files'],
                                             args.get('dirs'))
        except Exception:
            result = dict(updated=False)
        else:
            result = dict(updated=True, files=files)

        job.sendWorkComplete(json.dumps(result))

    def merge(self, job):
        self.log.debug("Got merge job: %s" % job.unique)
        args = json.loads(job.arguments)
        zuul_event_id = args.get('zuul_event_id')

        ret = self.merger.mergeChanges(
            args['items'], args.get('files'),
            args.get('dirs', []),
            args.get('repo_state'),
            branches=args.get('branches'),
            repo_locks=self.repo_locks,
            zuul_event_id=zuul_event_id)

        result = dict(merged=(ret is not None))
        if ret is None:
            result['commit'] = result['files'] = result['repo_state'] = None
        else:
            (result['commit'], result['files'], result['repo_state'],
             recent, orig_commit) = ret
        result['zuul_event_id'] = zuul_event_id
        job.sendWorkComplete(json.dumps(result))

    def refstate(self, job):
        self.log.debug("Got refstate job: %s" % job.unique)
        args = json.loads(job.arguments)
        zuul_event_id = args.get('zuul_event_id')
        success, repo_state, item_in_branches = \
            self.merger.getRepoState(
                args['items'], branches=args.get('branches'),
                repo_locks=self.repo_locks)
        result = dict(updated=success,
                      repo_state=repo_state,
                      item_in_branches=item_in_branches)
        result['zuul_event_id'] = zuul_event_id
        job.sendWorkComplete(json.dumps(result))

    def fileschanges(self, job):
        self.log.debug("Got fileschanges job: %s" % job.unique)
        args = json.loads(job.arguments)
        zuul_event_id = args.get('zuul_event_id')

        connection_name = args['connection']
        project_name = args['project']

        lock = self.repo_locks.getRepoLock(connection_name, project_name)
        try:
            self._update(connection_name, project_name,
                         zuul_event_id=zuul_event_id)
            with lock:
                files = self.merger.getFilesChanges(
                    connection_name, project_name,
                    args['branch'], args['tosha'],
                    zuul_event_id=zuul_event_id)
        except Exception:
            result = dict(updated=False)
        else:
            result = dict(updated=True, files=files)

        result['zuul_event_id'] = zuul_event_id
        job.sendWorkComplete(json.dumps(result))
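
BaseMergeServer defaults _repo_locks_class to BaseRepoLocks, whose locks are effectively no-ops here; the executor substitutes a real implementation. A hypothetical locking class consistent with the getRepoLock call sites above, keeping one lock per connection/project pair:

import threading

class LockingRepoLocks(BaseRepoLocks):
    def __init__(self):
        super().__init__()
        self._locks = {}
        self._guard = threading.Lock()

    def getRepoLock(self, connection_name, project_name):
        # threading.Lock is a context manager, so callers can use
        # the returned lock in a 'with' statement.
        key = (connection_name, project_name)
        with self._guard:
            return self._locks.setdefault(key, threading.Lock())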