Beispiel #1
0
    def __init__(self,
                 project,
                 force=False,
                 endpoint_name='foo',
                 service_name=None):
        """
        Normalize the endpoint name and prepare scaffold paths.

        The endpoint name is lowercased, spaces become dashes, and every
        dash-separated piece must be alpha-numeric; pieces are joined to
        build the directory name, the flat endpoint name and the CamelCase
        class name.
        """

        # custom init
        self.custom_project = project
        self.force_yes = force
        self.original_name = endpoint_name
        self.service_name = service_name

        # strip leading slash, normalize case/spaces, split into pieces
        pieces = endpoint_name.lstrip('/').lower().replace(' ', '-').split('-')

        for piece in pieces:
            if not piece.isalnum():
                # fatal: presumably log.exit aborts the program
                log.exit("Only alpha-numeric chars are allowed: {}", piece)

        self.class_name = ''.join(piece.capitalize() for piece in pieces)
        self.endpoint_dir = '_'.join(pieces)
        self.endpoint_name = self.endpoint_dir.replace('_', '')
        self.specs_file = 'specs.yaml'

        # setting the base dir for all scaffold things inside the project
        self.backend_dir = Path(PROJECT_DIR, self.custom_project, BACKEND_DIR)

        self.base_backend_dir = Path(SUBMODULES_DIR, BACKEND_DIR,
                                     BACKEND_PACKAGE)
Beispiel #2
0
def clone(online_url,
          path,
          branch='master',
          do=False,
          check=True,
          expand_path=True):
    """
    Open (or clone) a git repository and optionally verify it.

    :param online_url: remote URL of the repository
    :param path: local path (expanded under ./SUBMODULES_DIR when
        expand_path is True)
    :param branch: branch to switch to / verify
    :param do: when True, clone the repo if missing and switch branch
    :param check: when True, verify remote URL and branch via
        compare_repository
    :param expand_path: resolve path under the submodules directory
    :return: the GitPython Repo object
    """

    if expand_path:
        local_path = os.path.join(os.curdir, SUBMODULES_DIR, path)
    else:
        local_path = path
    local_path_exists = os.path.exists(local_path)

    if local_path_exists:
        log.debug("Path {} already exists", local_path)
        gitobj = Repo(local_path)
    elif do:
        gitobj = Repo.clone_from(url=online_url, to_path=local_path)
        log.info("Cloned repo {}@{} as {}", online_url, branch, path)
    else:
        # FIX: use the logger's lazy {}-placeholder style instead of
        # pre-formatting with str.format, consistent with every other
        # log call in this module
        log.exit("Repo {} missing as {}. You should init your project",
                 online_url, local_path)

    if do:
        switch_branch(gitobj, branch)

    if check:
        compare_repository(gitobj, branch, online_url=online_url, path=path)

    return gitobj
Beispiel #3
0
 def exec_command(self,
                  service,
                  user=None,
                  command=None,
                  disable_tty=False,
                  nofailure=False):
     """
         Execute a command on a running container
         (docker-compose exec).
     """
     cmd, cmd_args = self.split_command(command)
     exec_options = {
         'SERVICE': service,
         'COMMAND': cmd,
         'ARGS': cmd_args,
         '--index': '1',
         '--user': user,
         '-T': disable_tty,
         '--env': None,
         '--workdir': None,
         '--detach': False,
         '--privileged': False,
     }
     if cmd is not None:
         log.debug("Command: {}({}+{})", service.lower(), cmd, cmd_args)
     try:
         # delegate to the generic compose command dispatcher
         return self.command('exec_command', exec_options, nofailure=nofailure)
     except NoSuchService:
         if nofailure:
             raise AttributeError("Cannot find service: {}".format(service))
         else:
             log.exit("Cannot find a running container called {}", service)
Beispiel #4
0
 def config(self):
     """Return the list of services parsed from the compose options."""
     try:
         parsed = get_config_from_options('.', self.options)
     except conferrors.ConfigurationError as e:
         log.exit("Wrong compose configuration:\n{}", e)
     else:
         # NOTE: for compatibility with docker-compose > 1.13
         # services is always the second element of the tuple
         return parsed[1]
Beispiel #5
0
def fetch(path, gitobj, fetch_remote='origin'):
    """Fetch the requested remote (default 'origin') of a repository."""

    for rem in gitobj.remotes:
        if rem.name == fetch_remote:
            log.verbose("Fetching {} on {}", rem, path)
            try:
                rem.fetch()
            except GitCommandError as e:
                log.exit(str(e))
        else:
            log.verbose("Skipping fetch of remote {} on {}", rem, path)
Beispiel #6
0
    def info(self):
        """
        Print information about the current endpoint: URI, swagger path,
        labels, python file and python class, searching first the extended
        swagger definition and then the base rapydo one.
        """

        infos = '\n'
        base_endpoint = False
        endpoint = self.endpoint_name

        # look inside extended swagger definition
        backend = self.backend_dir
        needle = self.find_swagger(endpoint, backend)

        # or look inside base swagger definition of rapydo
        if needle is None:
            backend = self.base_backend_dir
            needle = self.find_swagger(endpoint, backend)
            base_endpoint = True
            python_file_dir = Path(backend, 'resources')
        else:
            python_file_dir = Path(backend, ENDPOINTS_CODE_DIR)

        if needle is None:
            log.exit('No endpoint "{}" found in current swagger definition',
                     endpoint)

        current_dir = Path.cwd()

        uri = Path(needle.get('baseuri', '/api'), endpoint)
        infos += 'Endpoint path:\t{}\n'.format(uri)

        swagger_dir = Path(current_dir, backend, SWAGGER_DIR,
                           needle.get('swagger'))
        infos += 'Swagger path:\t{}/\n'.format(swagger_dir)

        infos += 'Labels:\t\t{}\n'.format(", ".join(needle.get('labels')))

        python_file_path = Path(current_dir, python_file_dir,
                                needle.get('file') + '.py')
        infos += 'Python file:\t{}\n'.format(python_file_path)

        python_class = needle.get('class')
        infos += 'Python class:\t{}\n'.format(python_class)

        # FIX: "Informations" -> "Information" (grammar in user-facing log)
        log.info("Information about '{}':\n{}", endpoint, infos)

        if base_endpoint:
            # FIX: the original message contained a double negative
            # ("unless your are not a RAPyDo developer") that inverted the
            # intended meaning.
            log.warning(
                "This is a BASE endpoint of the RAPyDo framework.\n" +
                "Do not modify it unless you are a RAPyDo developer.")

        # sanity check: verify the declared class is actually defined
        with open(str(python_file_path)) as fh:
            content = fh.read()
            clstest = 'class {}('.format(python_class)
            if clstest not in content:
                log.critical("Class '{}' definition not found in python file",
                             python_class)
Beispiel #7
0
def check_file_younger_than(gitobj, filename, timestamp):
    """
    Compare the newest commit date of a file against a timestamp.

    :return: a tuple (is_younger, timestamp, newest_commit_date)
    """

    try:
        blame_entries = gitobj.blame(rev='HEAD', file=filename)
    except GitCommandError as e:
        log.exit("Failed 'blame' operation on {}.\n{}", filename, e)

    # collect the committed date of every blamed commit
    commit_dates = [
        gitobj.commit(rev=str(entry[0])).committed_datetime
        for entry in blame_entries
    ]

    newest = max(commit_dates)
    return timestamp_from_string(timestamp) < newest, timestamp, newest
Beispiel #8
0
def compare_repository(gitobj,
                       branch,
                       online_url,
                       check_only=False,
                       path=None):
    """
    Verify that a local repository matches the expected remote and branch.

    :param gitobj: GitPython Repo object (remotes.origin.url is read)
    :param branch: expected active branch; None skips the branch check
    :param online_url: expected remote URL
    :param check_only: when True, return False on mismatch instead of
        terminating via log.exit
    :param path: repository path, used only in error messages
    :return: True when URL (and branch, if given) match; False on mismatch
        when check_only is True
    """

    url = gitobj.remotes.origin.url

    if online_url != url:

        local_url = urlparse(url)
        expected_url = urlparse(online_url)

        # Remove username in the URL, if any
        # i.e. [email protected] became github.com
        local_netloc = local_url.netloc.split("@").pop()
        # FIX: was derived from local_url.netloc (copy-paste bug), so a
        # host mismatch between local and expected URL was never detected
        expected_netloc = expected_url.netloc.split("@").pop()

        if local_url.scheme != expected_url.scheme:
            url_match = False
        elif local_netloc != expected_netloc:
            url_match = False
        elif local_url.path != expected_url.path:
            url_match = False
        else:
            url_match = True

        if not url_match:
            if check_only:
                return False
            log.exit("""Unmatched local remote
Found: {}\nExpected: {}
Suggestion: remove {} and execute the init command
            """.format(url, online_url, gitobj.working_dir))

    if branch is None:
        # No more checks, we are ok
        return True

    active_branch = get_active_branch(gitobj)

    if active_branch is not None:
        if branch != active_branch:
            if check_only:
                return False
            log.exit("""{p}: wrong branch {ab}, expected {b}.
Suggestion:\n\ncd {wdir}; git fetch; git checkout {b}; cd -;\n""".format(
                p=path, ab=active_branch, b=branch, wdir=gitobj.working_dir))
    return True
Beispiel #9
0
def read_composer_yamls(composers):
    """
    Load every enabled composer YAML and collect its resolved file path.

    :param composers: mapping name -> composer dict; 'if' enables the
        composer, 'mandatory' makes its file required, 'base' marks it as
        a base compose file
    :return: a tuple (all_files, base_files) of resolved YAML paths
    """

    base_files = []
    all_files = []

    # YAML CHECK UP
    for name, composer in composers.items():

        # composers disabled via their 'if' flag are skipped entirely
        if not composer.pop('if', False):
            continue

        log.verbose("Composer {}", name)

        mandatory = composer.pop('mandatory', False)
        base = composer.pop('base', False)

        f = composer.get('file')
        p = composer.get('path')
        # FIX: resolve the path before attempting the load, so 'filepath'
        # is always bound when the except clause below formats its message
        # (it was previously assigned only after a successful load, causing
        # a NameError on failure instead of the intended log.exit)
        filepath = get_yaml_path(file=f, path=p)

        try:
            compose = load_yaml_file(file=f, path=p, is_optional=not mandatory)

            # skip composers that define no service at all
            if compose.get('services') is None or len(
                    compose.get('services', {})) < 1:
                log.verbose("No service defined in {}, skipping", name)
                continue

            all_files.append(filepath)

            if base:
                base_files.append(filepath)

        except KeyError as e:
            log.exit("Error loading {}: {}", filepath, e)

    return all_files, base_files
Beispiel #10
0
def load_yaml_file(file, path, keep_order=False, is_optional=False):
    """
    Import any data from a YAML file.

    Reads path/file and returns its FIRST document only.

    :param file: file name of the YAML file
    :param path: directory containing the file
    :param keep_order: when True, load mappings preserving key order
    :param is_optional: when True a missing file is only logged (info);
        otherwise it is fatal via log.exit
    :return: the first YAML document, or {} when the file is missing
        (optional case) or cannot be parsed
    """

    filepath = get_yaml_path(file, path=path)

    if filepath is None:
        if is_optional:
            log.info(
                "Failed to read YAML file {}/{}: File does not exist",
                path,
                file,
            )
        else:
            log.exit(
                "Failed to read YAML file {}/{}: File does not exist",
                path,
                file,
            )
        # NOTE(review): presumably reached only when is_optional is True,
        # assuming log.exit terminates the process — confirm
        return {}

    with open(filepath) as fh:
        try:
            if keep_order:

                # install an order-preserving constructor on OrderedLoader
                # (NOTE: mutates the loader class globally on every call)
                OrderedLoader.add_constructor(
                    yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                    construct_mapping)
                loader = yaml.load_all(fh, OrderedLoader)
            else:
                # NOTE(review): yaml.loader.Loader is the full (unsafe)
                # loader — acceptable for trusted project files, never for
                # untrusted input
                loader = yaml.load_all(fh, yaml.loader.Loader)

            docs = list(loader)

            if len(docs) == 0:
                log.exit("YAML file is empty: {}", filepath)

            # only the first document is returned; extra docs are ignored
            return docs[0]

        except Exception as e:
            # # IF dealing with a strange exception string (escaped)
            # import codecs
            # error, _ = codecs.getdecoder("unicode_escape")(str(error))

            # best-effort: any parse failure degrades to an empty config
            log.warning("Failed to read YAML file [{}]: {}", filepath, e)
            return {}
Beispiel #11
0
def find_templates_override(services, templates):
    """
    Detect vanilla services whose Dockerfile builds on a rapydo template.

    :param services: list of compose service dicts
    :param templates: mapping of known template images
    :return: a tuple (tbuilds, vbuilds): template builds involved in an
        override, and vanilla image -> template image mapping
    """

    # Template and vanilla builds involved in override
    tbuilds = {}
    vbuilds = {}

    for service in services:

        builder = service.get('build')
        if builder is None:
            continue

        dpath = builder.get('context')
        dockerfile = os.path.join(os.curdir, CONTAINERS_YAML_DIRNAME, dpath)
        dfp = DockerfileParser(dockerfile)

        try:
            if dfp.content is None:
                log.warning("Dockerfile is empty?")
            else:
                log.verbose("Parsed dockerfile {}", dpath)
        except FileNotFoundError as e:
            log.exit(e)

        if dfp.baseimage is None:
            dfp.baseimage = 'unknown_build'
        elif dfp.baseimage.startswith('rapydo/'):
            if dfp.baseimage not in templates:
                log.exit("""Unable to find {} in this project
\nPlease inspect the FROM image in {}/Dockerfile
                        """.format(dfp.baseimage, dockerfile))
            else:
                vanilla_img = service.get('image')
                template_img = dfp.baseimage
                log.verbose("{} overrides {}", vanilla_img, template_img)
                tbuilds[template_img] = templates.get(template_img)
                vbuilds[vanilla_img] = template_img

    return tbuilds, vbuilds
Beispiel #12
0
def find_templates_build(base_services):
    """
    Collect build information for every template image in the base services.

    :param base_services: list of compose service dicts from base files
    :return: mapping image name -> {'services', 'path', 'timestamp',
        'service'} where 'service' is the highest-priority service name
    """

    templates = {}
    from controller.dockerizing import Dock
    docker = Dock()

    for base_service in base_services:

        template_build = base_service.get('build')

        if template_build is not None:

            template_name = base_service.get('name')
            template_image = base_service.get('image')

            if template_image is None:
                # FIX: message said "must have a name" but the failing
                # check is on the missing *image*
                log.exit(
                    "Template builds must have an image, missing for {}".format(
                        template_name))
            else:

                if template_image not in templates:
                    templates[template_image] = {}
                    templates[template_image]['services'] = []
                    templates[template_image]['path'] = template_build.get(
                        'context')
                    templates[template_image][
                        'timestamp'] = docker.image_attribute(template_image)

                # keep the highest-priority service as representative
                if 'service' not in templates[template_image]:
                    templates[template_image]['service'] = template_name
                else:
                    templates[template_image]['service'] = name_priority(
                        templates[template_image]['service'],
                        template_name,
                    )
                templates[template_image]['services'].append(template_name)

    return templates
Beispiel #13
0
    def command(self, command, options=None, nofailure=False):
        """
        Invoke a docker-compose command by name on the compose handler.

        :param command: name of the compose method to call (e.g. 'up')
        :param options: docopt-style option dict forwarded to compose;
            defaults to {} and 'SERVICE' is normalized to an empty list
        :param nofailure: when True, command failures raise AttributeError
            instead of terminating via log.exit
        :return: whatever the compose method returns (None on error paths)
        """

        # NOTE: debug defaults
        # tmp = self.get_defaults(command)
        # print("TEST", tmp, type(tmp))
        # # exit(1)

        compose_handler = self.get_handle()
        # compose commands are looked up dynamically by name
        method = getattr(compose_handler, command)

        if options is None:
            options = {}

        # compose always expects a SERVICE entry, even if empty
        if options.get('SERVICE', None) is None:
            options['SERVICE'] = []

        log.debug("{}'{}'", compose_log, command)

        out = None
        # sometimes this import stucks... importing here to avoid unnecessary waits
        from docker.errors import APIError
        try:
            out = method(options=options)
        except SystemExit as e:
            # NOTE: we check the status here.
            # System exit is received also when a normal command finished.
            if e.code < 0:
                log.warning("Invalid code returned: {}", e.code)
            elif e.code > 0:
                log.exit("Compose received: system.exit({})",
                         e.code,
                         error_code=e.code)
            else:
                # exit code 0: compose finished normally
                log.verbose("Executed compose {} w/{}", command, options)
        except (clierrors.UserError, cerrors.OperationFailedError,
                BuildError) as e:
            msg = "Failed command execution:\n{}".format(e)
            if nofailure:
                raise AttributeError(msg)
            else:
                log.exit(msg)
        except APIError as e:
            log.exit("Failed docker container:\n{}", e)
        except (ProjectError, NoSuchService) as e:
            log.exit(str(e))
        else:
            log.verbose("Executed compose {} w/{}", command, options)

        return out
Beispiel #14
0
 def command_defaults(command):
     """Return the default compose options for the given command name."""
     if command in ('run',):
         return Compose.set_defaults(
             variables=[
                 'COMMAND',
                 'T',
                 'e',
                 'entrypoint',
                 'user',
                 'label',
                 'publish',
                 'service-ports',
                 'name',
                 'workdir',
                 'volume',
                 'no-deps',
                 'use-aliases',
             ],
             merge={'--rm': True},
         )
     # any other command has no defaults defined yet
     log.exit("No default implemented for: {}", command)
Beispiel #15
0
    def start_containers(
        self,
        services,
        detach=True,
        scale=None,
        skip_dependencies=False,
        abort_on_container_exit=False,
        no_recreate=False,
    ):
        """
            Start containers (docker-compose up)
        """

        up_options = {
            'SERVICE': services,
            '--no-deps': skip_dependencies,
            '--detach': detach,
            '--build': None,
            '--no-color': False,
            '--remove-orphans': False,
            '--abort-on-container-exit': abort_on_container_exit,
            '--no-recreate': no_recreate,
            '--force-recreate': False,
            '--always-recreate-deps': False,
            '--no-build': False,
            # avoid a shared mutable default for the scale mapping
            '--scale': scale if scale is not None else {},
        }

        try:
            return self.command('up', up_options)
        except NetworkConfigChangedError as e:
            log.exit("{}.\n{} ({})", e,
                     "Remove previously created networks and try again",
                     "you can use rapydo clean or docker system prune")
Beispiel #16
0
def update(path, gitobj):
    """Pull the 'origin' remote of a repo, refusing to act on a dirty tree."""

    unstaged = get_unstaged_files(gitobj)

    # refuse to pull over local modifications
    if unstaged['changed'] or unstaged['untracked']:
        log.critical("Unable to update {} repo, you have unstaged files", path)
        print_diff(gitobj, unstaged)
        sys.exit(1)

    for remote in gitobj.remotes:
        if remote.name != 'origin':
            continue
        try:
            branch = gitobj.active_branch
            log.info("Updating {} {} (branch {})", remote, path, branch)
            remote.pull(branch)
        except GitCommandError as e:
            log.error("Unable to update {} repo\n{}", path, e)
        except TypeError as e:
            # detached HEAD raises TypeError on active_branch
            if TESTING:
                log.warning("Unable to update {} repo, {}", path, e)
            else:
                log.exit("Unable to update {} repo, {}", path, e)
Beispiel #17
0
def read_configuration(default_file_path,
                       base_project_path,
                       projects_path,
                       submodules_path,
                       read_extended=True,
                       production=False):
    """
    Read default configuration

    Loads the custom project configuration, mixes it with the framework
    defaults (plus production defaults when production=True) and, when the
    project declares an 'extends' entry and read_extended is True, also
    with the extended project's configuration.

    :return: a tuple (configuration, extended_project, extend_path);
        the last two are None when no extension is involved
    :raises AttributeError: when the 'project' section is missing
    """

    custom_configuration = load_yaml_file(file=PROJECT_CONF_FILENAME,
                                          path=base_project_path,
                                          keep_order=True)

    # Verify custom project configuration
    project = custom_configuration.get('project')
    if project is None:
        raise AttributeError("Missing project configuration")

    # these keys are mandatory in every project configuration
    variables = ['title', 'description', 'version', 'rapydo']

    for key in variables:
        if project.get(key) is None:

            log.exit(
                "Project not configured, missing key '{}' in file {}/{}",
                key,
                base_project_path,
                PROJECT_CONF_FILENAME,
            )

    if default_file_path is None:
        base_configuration = {}
    else:
        base_configuration = load_yaml_file(file=PROJECTS_DEFAULTS_FILE,
                                            path=default_file_path,
                                            keep_order=True)

        if production:
            # production defaults are layered on top of the base defaults
            base_prod_conf = load_yaml_file(file=PROJECTS_PROD_DEFAULTS_FILE,
                                            path=default_file_path,
                                            keep_order=True)
            base_configuration = mix_configuration(base_configuration,
                                                   base_prod_conf)

    if read_extended:
        extended_project = project.get('extends')
    else:
        extended_project = None
    if extended_project is None:
        # Mix default and custom configuration
        return mix_configuration(base_configuration,
                                 custom_configuration), None, None

    # the extended project may live in projects/ or in a submodule
    extends_from = project.get('extends-from', 'projects')

    if extends_from == "projects":
        extend_path = projects_path
    elif extends_from.startswith("submodules/"):
        repository_name = (extends_from.split("/")[1]).strip()
        if repository_name == '':
            log.exit('Invalid repository name in extends-from, name is empty')

        extend_path = os.path.join(submodules_path, repository_name,
                                   projects_path)
    else:
        suggest = "Expected values: 'projects' or 'submodules/${REPOSITORY_NAME}'"
        log.exit("Invalid extends-from parameter: {}.\n{}", extends_from,
                 suggest)

    extend_path = os.path.join(extend_path, extended_project)

    if not os.path.exists(extend_path):
        log.exit("From project not found: {}", extend_path)

    extended_configuration = load_yaml_file(file=PROJECT_CONF_FILENAME,
                                            path=extend_path,
                                            keep_order=True)

    # precedence (lowest to highest): base, extended, custom
    m1 = mix_configuration(base_configuration, extended_configuration)
    return mix_configuration(
        m1, custom_configuration), extended_project, extend_path
Beispiel #18
0
    def __init__(self):
        """Initialize the docker wrapper; abort if the daemon is unreachable."""
        super(Dock, self).__init__()

        if self.is_daemon_alive():
            return
        log.exit("Docker daemon not reachable")