Example #1
    def handle(self, *args, **options):
       
        worker_pools = options.get('queue')
        sleep_time = options.get('sleep')
        force = options.get('force')

        LOG.info("started...")

        self.plugin_loader = PluginLoader()
        self.planner_plugins = self.plugin_loader.get_autoscaling_planner_plugins()
        self.executor_plugins = self.plugin_loader.get_autoscaling_executor_plugins()

        while True:

            for pool in worker_pools:
                worker_pool = self.get_worker_pool(pool)
                if worker_pool is None:
                    continue
                planner = self.planner_plugins[worker_pool.planner]
                executor = self.executor_plugins[worker_pool.executor]
                self.handle_pool(worker_pool, planner, executor, force)
                time.sleep(1)

            if force:
                break
            else:
                time.sleep(sleep_time)

        LOG.info("exited...")
Example #2
    def __init__(self, builder, build):
        self.builder = builder
        self.build = build
        self.project = self.build.project
        self.working_dir = self.build.working_dir
        self.isolation = self.project.worker_pool.isolation_method
        self.plugin_loader = PluginLoader()
        self.provider = self.get_provider()
Example #3
    def __init__(self, builder, build):
        self.builder = builder
        self.build = build
        self.project = self.build.project
        self.repo = self.project.repo_url
        self.scm_type = self.project.scm_type

        self.plugin_loader = PluginLoader()
        self.provider = self.get_provider()
Example #4
    def __init__(self, builder, build):
        """
        Constructor takes a build reference.
        """
        self.builder = builder
        self.build = build

        self.plugin_loader = PluginLoader()
        self.pre_trigger_plugins = self.plugin_loader.get_pre_trigger_plugins()
        self.success_trigger_plugins = self.plugin_loader.get_success_trigger_plugins()
        self.failure_trigger_plugins = self.plugin_loader.get_failure_trigger_plugins()
Example #5
class SecretsManager(object):

    HEADER = "[VESPENE-CLOAK]"

    def __init__(self):
        self.plugin_loader = PluginLoader()
        self.plugins = self.plugin_loader.get_secrets_plugins()

    def is_cloaked(self, msg):
        if not msg:
            return False
        return msg.startswith(self.HEADER)

    def decloak(self, msg):
        if not self.is_cloaked(msg):
            return msg
        else:
            for plugin in self.plugins:
                if plugin.recognizes(msg):
                    return plugin.decloak(msg)
            raise Exception("no plugin found to decloak value")

    def cloak(self, msg):
        if not msg or self.is_cloaked(msg) or len(self.plugins) == 0:
            return msg
        return self.plugins[0].cloak(msg)
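A minimal usage sketch for the manager above, assuming at least one secrets plugin is configured and that its cloak() prepends the "[VESPENE-CLOAK]" header; the values are only illustrative.

secrets = SecretsManager()
cloaked = secrets.cloak("my database password")   # returned unchanged if no plugins are loaded
print(secrets.is_cloaked(cloaked))                # True once a plugin has prepended the header
print(secrets.decloak(cloaked))                   # round-trips back to the original value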
Example #6
class PermissionsManager(object):
    def __init__(self):
        self.plugin_loader = PluginLoader()
        self.plugins = self.plugin_loader.get_authorization_plugins()

    def _all_true(self, method, subject, request, *args, **kwargs):
        for plugin in self.plugins:
            fn = getattr(plugin, method)
            if not fn(subject, request, *args, **kwargs):
                return False
        return True

    def filter_queryset_for_list(self, qs, request, *args, **kwargs):
        result_qs = qs
        for plugin in self.plugins:
            result_qs = plugin.filter_queryset_for_list(
                result_qs, request, *args, **kwargs)
        return result_qs

    def filter_queryset_for_view(self, qs, request, *args, **kwargs):
        result_qs = qs
        for plugin in self.plugins:
            result_qs = plugin.filter_queryset_for_view(
                result_qs, request, *args, **kwargs)
        return result_qs

    def filter_queryset_for_delete(self, qs, request, *args, **kwargs):
        result_qs = qs
        for plugin in self.plugins:
            result_qs = plugin.filter_queryset_for_delete(
                result_qs, request, *args, **kwargs)
        return result_qs

    def filter_queryset_for_edit(self, qs, request, *args, **kwargs):
        result_qs = qs
        for plugin in self.plugins:
            result_qs = plugin.filter_queryset_for_edit(
                result_qs, request, *args, **kwargs)
        return result_qs

    def check_can_view(self, obj, request, *args, **kwargs):
        return self._all_true('check_can_view', obj, request, *args, **kwargs)

    def check_can_edit(self, obj, request, *args, **kwargs):
        return self._all_true('check_can_edit', obj, request, *args, **kwargs)

    def check_can_delete(self, obj, request, *args, **kwargs):
        return self._all_true('check_can_delete', obj, request, *args,
                              **kwargs)

    def check_can_create(self, cls, request, *args, **kwargs):
        return self._all_true('check_can_create', cls, request, *args,
                              **kwargs)

    def check_can_start(self, obj, request, *args, **kwargs):
        return self._all_true('check_can_start', obj, request, *args, **kwargs)

    def check_can_stop(self, obj, request, *args, **kwargs):
        return self._all_true('check_can_stop', obj, request, *args, **kwargs)
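The authorization plugins this manager drives are not shown above; the skeleton below is a hypothetical sketch inferred from the methods PermissionsManager calls (the real Vespene plugins may inherit from a base class and take extra configuration).

class AllowAllAuthorizationPlugin(object):

    # queryset filters narrow what a user can see; returning qs unchanged allows everything
    def filter_queryset_for_list(self, qs, request, *args, **kwargs):
        return qs

    filter_queryset_for_view = filter_queryset_for_list
    filter_queryset_for_delete = filter_queryset_for_list
    filter_queryset_for_edit = filter_queryset_for_list

    # object-level checks return True to allow the action; check_can_create receives a class rather than an instance
    def check_can_view(self, subject, request, *args, **kwargs):
        return True

    check_can_edit = check_can_view
    check_can_delete = check_can_view
    check_can_create = check_can_view
    check_can_start = check_can_view
    check_can_stop = check_can_view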
Example #7
class OutputManager(object):
    def __init__(self):
        self.plugin_loader = PluginLoader()
        self.plugins = self.plugin_loader.get_output_plugins()

    def get_msg(self, build, msg):
        for p in self.plugins:
            msg = p.filter(build, msg)
        return msg
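OutputManager only requires each output plugin to expose a filter(build, msg) method and return the (possibly rewritten) text. The plugin below is a hypothetical sketch, not one of Vespene's bundled output plugins.

class RedactingOutputPlugin(object):

    def filter(self, build, msg):
        # plugins are chained, so always return a string for the next plugin to process
        return msg.replace("hunter2", "********")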
Example #8
class VariableManager(object):
    def __init__(self, project):
        self.project = project
        self.plugin_loader = PluginLoader()

    def compute(self):
        results = dict()
        variable_plugins = self.plugin_loader.get_variable_plugins()
        for plugin in variable_plugins:
            computed = plugin.compute(self.project, results)
            assert computed is not None
            results.update(computed)
        return results
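The compute() contract can be read off the loop above: each variable plugin receives the project plus the results accumulated so far and must return a dict (the assert rejects None). A hypothetical plugin might look like this:

class ProjectNameVariablePlugin(object):

    def compute(self, project, results):
        # contribute variables; keys returned here overwrite earlier plugins' keys via results.update()
        return dict(project_name=project.name)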
Example #9
class ScmManager(object):
    def __init__(self, builder, build):
        self.builder = builder
        self.build = build
        self.project = self.build.project
        self.repo = self.project.repo_url
        self.scm_type = self.project.scm_type

        self.plugin_loader = PluginLoader()
        self.provider = self.get_provider()

    # -------------------------------------------------------------------------

    def get_provider(self):
        """
        Return the management object for the given repo type.
        """
        plugins = self.plugin_loader.get_scm_plugins()
        plugin = plugins.get(self.scm_type)
        if plugin is None:
            raise Exception(
                "no scm plugin configurated for project scm type: %s" %
                self.scm_type)
        plugin.setup(self.build)
        return plugin

    # -------------------------------------------------------------------------

    def checkout(self):
        """
        Perform a checkout in the already configured build dir
        """
        self.provider.checkout()

    # -------------------------------------------------------------------------

    def get_revision(self):
        """
        Find out what the source control revision is.
        """
        return self.provider.get_revision()

    # -------------------------------------------------------------------------

    def get_last_commit_user(self):
        """
        Find out what user made the last commit on this branch
        """
        return self.provider.get_last_commit_user()
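get_scm_plugins() returns a dict keyed by scm_type, and the manager above only calls setup(), checkout(), get_revision() and get_last_commit_user() on the selected plugin. The skeleton below is a hypothetical sketch of that interface; a real plugin would shell out to git or another SCM instead of returning placeholders.

class ExampleScmPlugin(object):

    def setup(self, build):
        # remember the build so later calls know the working directory and credentials
        self.build = build

    def checkout(self):
        # clone/checkout into the already configured build directory
        pass

    def get_revision(self):
        return "0000000"

    def get_last_commit_user(self):
        return "unknown"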
Example #10
class IsolationManager(object):
    def __init__(self, builder, build):
        self.builder = builder
        self.build = build
        self.project = self.build.project
        self.working_dir = self.build.working_dir
        self.isolation = self.project.worker_pool.isolation_method
        self.plugin_loader = PluginLoader()
        self.provider = self.get_provider()

    # -------------------------------------------------------------------------

    def get_provider(self):
        """
        Return the management object for the given repo type.
        """
        plugins = self.plugin_loader.get_isolation_plugins()
        plugin = plugins.get(self.isolation)
        if plugin is None:
            raise Exception(
                "no isolation plugin configurated for worker pool isolation type: %s"
                % self.isolation)
        plugin.setup(self.build)
        return plugin

    # -------------------------------------------------------------------------

    def begin(self):
        """
        Begin isolation (chroot, container, sudo,  etc)
        """
        self.provider.begin()

    # -------------------------------------------------------------------------

    def execute(self):
        """
        Code that launches the build
        """
        return self.provider.execute()

    # -------------------------------------------------------------------------

    def end(self):
        """
        End isolation
        """
        return self.provider.end()
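Likewise, get_isolation_plugins() returns a dict keyed by isolation_method, and the manager only needs setup(), begin(), execute() and end(). A hypothetical no-op plugin showing that interface:

class NoIsolationPlugin(object):

    def setup(self, build):
        self.build = build

    def begin(self):
        # enter the sandbox (chroot, container, sudo, ...); nothing to do in the no-op case
        pass

    def execute(self):
        # launch the build itself and return its result
        pass

    def end(self):
        # tear the sandbox back down
        pass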
Example #11
class SecretsManager(object):

    def __init__(self):
        self.plugin_loader = PluginLoader()
        self.plugins = self.plugin_loader.get_secrets_plugins()

    def cloak(self, msg):
        if len(self.plugins) == 0:
            return msg
        if not self.is_cloaked(msg):
            return self.plugins[0].cloak(msg)
        else:
            # already cloaked, will not re-cloak
            return msg

    def is_cloaked(self, msg):
        # HEADER is assumed to be the module-level "[VESPENE-CLOAK]" marker defined elsewhere in this file
        return msg.startswith(HEADER)

    def decloak(self, msg):
        remainder = msg.replace(HEADER, "", 1)
        for plugin in self.plugins:
            if plugin.recognizes(remainder):
                return plugin.decloak(remainder)
        return remainder
Example #12
class Command(BaseCommand):
    help = 'Runs autoscaling logic for one or more configured worker pools'

    def add_arguments(self, parser):
        parser.add_argument('--queue', action='append', type=str, help='name of the queue, use \'general\' for the unassigned queue')
        parser.add_argument('--sleep', type=int, help='how long to sleep between checks (in seconds)', default=20)
        parser.add_argument('--force', action='store_true', help='ignore timers and run the detector, then exit')

    def get_worker_pool(self, pool_name):
        
        worker_pools = WorkerPool.objects.filter(name=pool_name)
        if not worker_pools.exists():
            LOG.info("worker pool does not exist: %s" % pool_name)
            return None
        return worker_pools.first()

    def handle_pool(self, worker_pool, planner, executor, force):

        if worker_pool is None:
            LOG.warning("there is no worker pool named %s yet" % worker_pool)
            # probably a provisioning order issue, this will degrade performance but should not be fatal
            # just avoid hammering the system until it exists
            time.sleep(60)
            return

        if not worker_pool.autoscaling_enabled:
            return

        now = datetime.now(tz=timezone.utc)
        autoscale_status = 0
        last_autoscaled = worker_pool.last_autoscaled

        try:
            if not (force or planner.is_time_to_adjust(worker_pool)):
                return

            parameters = planner.get_parameters(worker_pool)
            LOG.debug("autoscaling parameters: %s for %s" % (parameters, worker_pool.name))
            
            result = executor.scale_worker_pool(worker_pool, parameters)
            
            LOG.info("autoscaling success for %s" % worker_pool.name)
            last_autoscaled = datetime.now(tz=timezone.utc)

        except subprocess.CalledProcessError as cpe:
            
            LOG.error("autoscaling failed, return code: %s" % cpe.returncode)
            autoscale_status = cpe.returncode

        except:
            
            traceback.print_exc()
            LOG.error("autoscaling failed for %s" % worker_pool.name)
            autoscale_status = 1

        finally:
            WorkerPool.objects.filter(
                pk=worker_pool.pk
            ).update(last_autoscaled=last_autoscaled, autoscaling_status=autoscale_status)

    def handle(self, *args, **options):
       
        worker_pools = options.get('queue')
        sleep_time = options.get('sleep')
        force = options.get('force')

        LOG.info("started...")

        self.plugin_loader = PluginLoader()
        self.planner_plugins = self.plugin_loader.get_autoscaling_planner_plugins()
        self.executor_plugins = self.plugin_loader.get_autoscaling_executor_plugins()

        while True:

            for pool in worker_pools:
                worker_pool = self.get_worker_pool(pool)
                if worker_pool is None:
                    continue
                planner = self.planner_plugins[worker_pool.planner]
                executor = self.executor_plugins[worker_pool.executor]
                self.handle_pool(worker_pool, planner, executor, force)
                time.sleep(1)

            if force:
                break
            else:
                time.sleep(sleep_time)

        LOG.info("exited...")
Example #13
    def __init__(self, project):
        self.project = project
        self.plugin_loader = PluginLoader()
Example #14
    def __init__(self):
        self.plugin_loader = PluginLoader()
        self.plugins = self.plugin_loader.get_authorization_plugins()
Example #15
from vespene.models.snippet import Snippet
from vespene.models.variable_set import VariableSet
from vespene.models.worker import Worker
from vespene.models.worker_pool import WorkerPool
from vespene.models.organization import Organization
from vespene.common.plugin_loader import PluginLoader
from vespene.manager import Shared

from django import forms
from django.contrib.auth.models import User, Group
from django.db.models import Q
from crispy_forms.bootstrap import TabHolder, Tab
from crispy_forms.layout import Layout
from crispy_forms.helper import FormHelper

PLUGIN_LOADER = PluginLoader()
ISOLATION_CHOICES = PLUGIN_LOADER.get_isolation_choices()
SCM_CHOICES = PLUGIN_LOADER.get_scm_choices()
ORGANIZATION_CHOICES = [['github', 'github']]
PLANNER_CHOICES = PLUGIN_LOADER.get_autoscaling_planner_choices()
EXECUTOR_CHOICES = PLUGIN_LOADER.get_autoscaling_executor_choices()


class BaseForm(forms.ModelForm):
    def __init__(self, *args, **kwargs):
        super(BaseForm, self).__init__(*args, **kwargs)

    def make_read_only(self):
        template_names = [
            'django/forms/widgets/text.html',
            'django/forms/widgets/textarea.html'
Example #16
from vespene.models.snippet import Snippet
from vespene.models.variable_set import VariableSet
from vespene.models.worker import Worker
from vespene.models.worker_pool import WorkerPool
from vespene.models.organization import Organization
from vespene.common.plugin_loader import PluginLoader
from vespene.manager import Shared

from django import forms
from django.contrib.auth.models import User, Group
from django.db.models import Q
from crispy_forms.bootstrap import TabHolder, Tab
from crispy_forms.layout import Layout
from crispy_forms.helper import FormHelper

PLUGIN_LOADER = PluginLoader()
ISOLATION_CHOICES = PLUGIN_LOADER.get_isolation_choices()
SCM_CHOICES = PLUGIN_LOADER.get_scm_choices()
ORGANIZATION_CHOICES = [['github', 'github']]


class BaseForm(forms.ModelForm):
    def __init__(self, *args, **kwargs):
        super(BaseForm, self).__init__(*args, **kwargs)

    def make_read_only(self):
        for x in self.Meta.fields:
            self.fields[x].widget.attrs['disabled'] = True
        return self

Example #17
class TriggerManager(object):
    """
    Trigger manager handles activation of pre and post build
    triggers.
    """

    # -----------------------------------------------------

    def __init__(self, builder, build):
        """
        Constructor takes a build reference.
        """
        self.builder = builder
        self.build = build

        self.plugin_loader = PluginLoader()
        self.pre_trigger_plugins = self.plugin_loader.get_pre_trigger_plugins()
        self.success_trigger_plugins = self.plugin_loader.get_success_trigger_plugins()
        self.failure_trigger_plugins = self.plugin_loader.get_failure_trigger_plugins()

    # -----------------------------------------------------

    def run_all_pre(self):
        """
        Run all pre hooks - which can be scripts
        that simply take command line flags or receive
        more detail on standard input. See docs for details.
        """
        self.build.append_message("----------\nPre hooks...")
        context = self.pre_context()
        for plugin in self.pre_trigger_plugins:
            plugin.execute_hook(self.build, context)

    # -----------------------------------------------------

    def run_all_post(self):
        """
        Run all post hooks. Unlike pre hooks, post hooks can be
        set to run only on success or only on failure.
        """
        self.build.append_message("----------\nPost hooks...")
        context = self.post_context()

        if self.build.status == SUCCESS:
            for plugin in self.success_trigger_plugins:
                plugin.execute_hook(self.build, context)

        else:
            for plugin in self.failure_trigger_plugins:
                plugin.execute_hook(self.build, context)

    # -----------------------------------------------------

    def pre_context(self):
        """
        This dictionary is passed as JSON on standard
        input to pre hooks.
        """
        return dict(hook='pre', build=self.build, project=self.build.project)

    # -----------------------------------------------------

    def post_context(self):
        """
        This dictionary is passed as JSON on standard
        input to post hooks.
        """
        return dict(hook='post', build=self.build, project=self.build.project)
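Trigger plugins only need an execute_hook(build, context) method; context is the dict produced by pre_context()/post_context() above. A hypothetical example:

class AnnounceTriggerPlugin(object):

    def execute_hook(self, build, context):
        # context carries hook ('pre' or 'post'), the build, and the project
        build.append_message("%s hook ran for %s" % (context['hook'], context['project'].name))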
Example #18
    def __init__(self):
        self.plugin_loader = PluginLoader()
        self.plugins = self.plugin_loader.get_output_plugins()
Example #19
class ImportManager(object):
    def __init__(self, organization):
        self.organization = organization
        self.plugin_loader = PluginLoader()
        self.provider = self.get_provider()
        self.now = datetime.now(tz=timezone.utc)

    # -------------------------------------------------------------------------

    def get_provider(self):
        """
        Return the management object for the given repo type.
        """
        plugins = self.plugin_loader.get_organization_plugins()
        org_type = self.organization.organization_type
        plugin = plugins.get(org_type)
        if plugin is None:
            raise Exception("no organization plugin found: %s" % org_type)
        return plugin

    # -------------------------------------------------------------------------

    def needs_import(self):
        """
        An organization needs import if it is turned on, is configured to use THIS
        worker pool, and hasn't been imported too recently. The worker pool
        filtering was done in daemon.py already.
        """
        interval = self.organization.refresh_minutes

        if self.organization.active_build:
            return False
        if self.organization.last_build is None:
            return True
        if self.organization.force_rescan:
            return True

        end_time = self.organization.last_build.end_time
        delta = self.now - end_time
        minutes = delta.total_seconds() / 60.0
        return minutes > interval

    def make_working_dir(self, build):
        # FIXME: this is copied from builder.py and really should be moved into common code in workers/common.py
        # or something similar.
        #
        # TODO: this isn't cross platform yet but is seemingly more reliable than 'os' functions on OS X
        # will need to detect the platform and make the appropriate changes for Windows.
        path = os.path.join(settings.BUILD_ROOT, str(build.id))
        commands.execute_command(build, "mkdir -p %s" % path)
        commands.execute_command(build, "chmod 770 %s" % path)
        build.working_dir = path
        build.save()
        return path

    def make_stub_build(self):
        """
        Make a new empty build object to track the progress.
        We'll be logging some basic things to this object.
        It represents the import, not the projects about to be created
        or modified.
        """
        build = Build(project=None,
                      organization=self.organization,
                      status=RUNNING,
                      worker_pool=self.organization.default_worker_pool,
                      start_time=self.now)
        build.save()
        return build

    def finalize_build(self, build, failures):
        """
        Flag the build is done and update the organization
        to reference this.
        """
        now = datetime.now(tz=timezone.utc)

        if failures > 0:
            build.status = FAILURE
        else:
            self.organization.last_successful_build = build
            build.status = SUCCESS
        build.return_code = failures
        build.end_time = now
        build.save()
        self.organization.active_build = None
        self.organization.last_build = build
        self.organization.force_rescan = False
        self.organization.save()

    def find_all_repos(self, build):
        return self.provider.find_all_repos(self.organization, build)

    def clone_repo(self, build, repo, count):
        return self.provider.clone_repo(self.organization, build, repo, count)

    def read_vespene_file(self, build, path):
        """
        Return the data that would be in a .vespene, if it so exists
        """
        path = os.path.join(path, ".vespene")
        if os.path.exists(path):
            with open(path) as fh:
                data = fh.read()
            return yaml.safe_load(data)
        return None

    def get_defaults(self, build, repo):
        """
        There is only a very minimal set of .vespene info for projects that don't
        specify anything. There's good reason for this - we want to use the ORM
        defaults.  This makes some fields required because the ORM will choke
        if they are not set - like the worker_pool!
        """
        repo_ending = repo.split("/")[-1]
        defaults = dict(name=repo_ending, script="", webhook_enabled=True)
        return defaults

    def find_project(self, build, repo):
        """
        See if we can find the project based on the repository address. The
        project name specified in the .vespene file is given ZERO authority to prevent
        manipulation of other repo configurations.
        """
        projects = Project.objects.filter(repo_url=repo)
        if projects.exists():
            return projects.first()
        else:
            return None

    def adjust_database_config(self, build, project, repo, config, path):
        """
        We have determined what the .vespene file says, mixed
        in with any defaults. Now look at the policy of the organization
        and decide what attributes we can set, and manipulate
        the ORM model to match.
        """

        project_name = config['name']

        if project is None:

            qs = Project.objects.filter(name=project_name).exclude(
                repo_url=repo)
            if qs.exists():

                # ok so we found a project matching the repo, but the name is wrong.  Attempting to set the name *WILL* fail due to DB constraints
                # while we could generate a name, it would add database clutter and this corner case shouldn't be too common
                # so just abort the import process
                self.log(
                    build,
                    "another project already exists with the name chosen but a different repo address, please rename them to match or remove the name the .vespene file"
                )
                return

            project = Project(
                repo_url=repo,
                name=project_name,
                scm_type='git',
                worker_pool=self.organization.default_worker_pool)

        org = self.organization

        if project.scm_login is None:
            project.scm_login = self.organization.scm_login

        # -----------------------------------------------
        # various helper functions to allow loading from the .vespene
        # config structure and then saving results on the ORM models

        def attr_manage(key):
            value = config.get(key, None)
            if value is not None:
                setattr(project, key, value)

        def m2m_manage(attribute, model):
            relation = getattr(project, attribute)
            original_names = [x.name for x in relation.all()]
            set_names = config.get(attribute, None)
            if set_names is None:
                return

            for name in set_names:
                if name not in original_names:
                    obj = model.objects.filter(name=name)
                    if obj.exists():
                        relation.add(obj.first())

            for name in original_names:
                if name not in set_names:
                    obj = model.objects.filter(name=name)
                    if obj.exists():
                        relation.remove(obj.first())

        def fk_manage(attribute, model):
            if attribute not in config:
                return
            value = config.get(attribute)

            objs = model.objects.filter(name=value)
            if not objs.exists():
                self.log(
                    build,
                    "object of type %s not found: %s " % (type(model), value))
                return
            obj = objs.first()
            setattr(project, attribute, obj)

        def stage_manage(pipeline, pipeline_stage_names, stage_number):
            """
            Given a pipeline object and a list of stage names, configure
            the pipeline at a given stage slot to point to that stage, 
            creating that stage if it does not yet exist. If less than a full
            list of stages are supplied, the remaining slots will be set
            to None.
            """
            index = stage_number - 1
            if len(pipeline_stage_names) < stage_number:
                stage = None
            else:
                name = pipeline_stage_names[index]
                stages = Stage.objects.filter(name=name)
                if stages.exists():
                    stage = stages.first()
                else:
                    stage = Stage(name=name)
                    stage.save()
            attribute = "stage%s" % stage_number
            setattr(pipeline, attribute, stage)

        def pipeline_manage(pipeline_name, pipeline_stages):
            """
            Given a pipeline name and a list of stages, create the pipeline if it does
            not exist, and then assign each stage to match
            """
            pipelines = Pipeline.objects.filter(name=pipeline_name)
            pipeline = None
            if pipelines.exists():
                pipeline = pipelines.first()
            else:
                pipeline = Pipeline(name=pipeline_name, enabled=True)
                for stage_number in range(1, 7):
                    stage_manage(pipeline, pipeline_stages, stage_number)
                pipeline.save()

        # --------------------------------------------------
        # apply the config file settings, using defaults if needed

        if org.overwrite_project_name and config['name']:
            project.name = config['name']

        if org.overwrite_project_script and config['script']:
            script = config['script']
            script_path = os.path.join(path, script)
            if os.path.exists(script_path):
                with open(script_path) as fh:
                    project.script = fh.read()
            else:
                self.log(
                    build,
                    "build script as referenced in .vespene is missing: %s" %
                    script)

        if org.allow_worker_pool_assignment:
            fk_manage('worker_pool', WorkerPool)

        attributes = [
            'timeout', 'container_base_image', 'repo_branch',
            'webhook_enabled', 'launch_questions', 'variables'
        ]
        if org.overwrite_configurations:
            for attribute in attributes:
                attr_manage(attribute)

        project.save()

        if org.allow_pipeline_definition:
            value = config.get('pipeline_definition', None)
            pipeline = config.get('pipeline', None)
            if value and pipeline:
                pipeline_manage(pipeline, value)

        if org.overwrite_configurations:
            # the permissions controls in the Organization object for these managers could be split
            # up in future versions
            fk_manage('pipeline', Pipeline)
            fk_manage('stage', Stage)
            m2m_manage('owner_groups', Group)
            m2m_manage('launch_groups', Group)

        project.save()
        return project

    # -------------------------------------------------------------------------

    def log(self, build, msg):
        build.append_message(msg)

    # -------------------------------------------------------------------------

    def import_single_repo(self, build, repo, count):

        # clone the repo into the build root and return the path
        # chosen, which will append on the build number just like
        # the other regular builds

        self.log(build, "begin repo import: %s" % repo)

        path = self.clone_repo(build, repo, count)

        # find the project by looking up the repo address
        # if not found, this will create the project and try to give it
        # a decent name based on the repo, or using the .vespene
        # file if it exists (this is sorted out later)

        project = self.find_project(build, repo)

        # see if there is a .vespene (YAML) config in the repo (consult web docs
        # on import features)

        self.log(build, "project id: %s" % project)
        config = self.read_vespene_file(build, path)

        has_dotfile = True
        if config is None:
            if not self.organization.import_without_dotfile:
                self.log(build, ".vespene file is missing, skipping")
                return
            has_dotfile = False
            config = dict()

        # have some defaults for certain parameters, but not others.
        # why? mostly we want to use the ORM defaults. Here we just enable
        # the webhook and have a default name in case it isn't set.

        defaults = self.get_defaults(build, repo)

        # the calculated config is the merging of the actual config with the defaults
        # the .vespene file will win out.
        defaults.update(config)
        config = defaults
        self.log(build, "applying config: %s" % config)

        # now we modify the project object, whether created or loaded

        project = self.adjust_database_config(build, project, repo, config,
                                              path)

        # the import of this particular repo is done, but we have more to do in the
        # organization.  Clean up the buildroot to save space, and then keep trucking

        shutil.rmtree(path)
        self.log(build, "complete\n")

    # -------------------------------------------------------------------------

    def do_import(self):
        """
        main logic of the .vespene github organization import code
        """

        # FIXME: disabled for development only!

        if not self.needs_import():
            # skip organizations that are already up to date or are disabled
            return

        # create a dummy build object so we can track the progress
        # this is a "special" build in that it tracks an organization and
        # not a project (it does MODIFY other projects, don't get confused
        # about that.. it's not building any projects)

        build = self.make_stub_build()
        build.working_dir = self.make_working_dir(build)
        self.log(
            build, "begin organizational import, scratch space: %s" %
            build.working_dir)

        # find all the repos in the organization, using the organization plugin
        # for instance, vespene.plugins.organizations.github

        repos = self.find_all_repos(build)

        failures = 0
        count = 0
        for repo in repos:
            try:
                self.import_single_repo(build, repo, count)
                count = count + 1
            except DatabaseError as dbe:
                traceback.print_exc()
                self.log(build, str(dbe))
                transaction.rollback()
                failures += 1
            except Exception as e:
                build.append_output(
                    "ow: repo processing failure, moving on with the next one..."
                )
                traceback.print_exc()
                build.append_output(str(e))
                # FIXME: DEVELOPMENT ONLY!
                self.log(build, "repo import failed")
                failures += 1

        # mark the build object as completed.  Ordinarily we should flag if there
        # are any errors, but we're mostly trying to be fault tolerant.

        self.finalize_build(build, failures)

        # since the build root itself won't have anything meaningful in it, we can
        # also throw away the (should be empty) directory at this time.

        shutil.rmtree(build.working_dir)
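For reference, this is the shape of the merged config dict that adjust_database_config() receives (yaml.safe_load of the .vespene file layered over get_defaults()). Every key shown is one the code above reads; the values are only illustrative.

config = dict(
    name="my-project",                       # defaults to the last segment of the repo URL
    script="ci/build.sh",                    # path inside the repo; read into the project when org.overwrite_project_script is set
    webhook_enabled=True,
    worker_pool="general",                   # applied only when org.allow_worker_pool_assignment is set
    repo_branch="master",
    timeout=60,
    pipeline="my-pipeline",
    pipeline_definition=["build", "test"],   # creates the pipeline and its stages when org.allow_pipeline_definition is set
    owner_groups=["developers"],             # m2m fields are matched to existing Group objects by name
    launch_groups=["developers"],
)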
Example #20
    def __init__(self):
        self.plugin_loader = PluginLoader()
        self.plugins = self.plugin_loader.get_secrets_plugins()
Example #21
    def __init__(self, organization):
        self.organization = organization
        self.plugin_loader = PluginLoader()
        self.provider = self.get_provider()
        self.now = datetime.now(tz=timezone.utc)