def _do_command(self):
    options = self.options
    gist_url = options.build_changelog_gist_url
    index = gist_url.rfind('/')
    if index < 0:
      index = gist_url.rfind(':')  # ssh gist
    gist_id = gist_url[index + 1:]

    git_dir = os.path.join(self.get_input_dir(), gist_id)
    if not os.path.exists(git_dir):
      logging.debug('Cloning gist from %s', gist_url)
      ensure_dir_exists(os.path.dirname(git_dir))
      self.__git.check_run(os.path.dirname(git_dir), 'clone ' + gist_url)
    else:
      logging.debug('Updating gist in "%s"', git_dir)
      self.__git.check_run(git_dir, 'fetch origin master')
      self.__git.check_run(git_dir, 'checkout master')

    dest_path = os.path.join(
        git_dir, '%s-raw-changelog.md' % options.git_branch)
    logging.debug('Copying "%s" to "%s"', options.changelog_path, dest_path)
    shutil.copyfile(options.changelog_path, dest_path)

    self.__git.check_run(git_dir, 'add ' + os.path.basename(dest_path))
    self.__git.check_commit_or_no_changes(
        git_dir, '-a -m "Updated %s"' % os.path.basename(dest_path))

    logging.debug('Pushing back gist')
    self.__git.check_run(git_dir, 'push -f origin master')
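
The URL parsing at the top of _do_command above handles both HTTPS and SSH gist URLs. A minimal standalone sketch of that logic; the URLs are hypothetical examples, not from the project:

def extract_gist_id(gist_url):
    # Take everything after the last '/' (https) or ':' (ssh).
    index = gist_url.rfind('/')
    if index < 0:
        index = gist_url.rfind(':')  # ssh gist
    return gist_url[index + 1:]

print(extract_gist_id('https://gist.github.com/someuser/abc123'))  # abc123
print(extract_gist_id('git@gist.github.com:abc123'))               # abc123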
    def _do_command(self):
        options = self.options
        gist_url = options.build_changelog_gist_url
        index = gist_url.rfind('/')
        if index < 0:
            index = gist_url.rfind(':')  # ssh gist
        gist_id = gist_url[index + 1:]

        git_dir = os.path.join(self.get_input_dir(), gist_id)
        if not os.path.exists(git_dir):
            logging.debug('Cloning gist from %s', gist_url)
            ensure_dir_exists(os.path.dirname(git_dir))
            self.git_run_with_retries(os.path.dirname(git_dir),
                                      'clone ' + gist_url)
        else:
            logging.debug('Updating gist in "%s"', git_dir)
            self.git_run_with_retries(git_dir, 'fetch origin master')
            self.git_run_with_retries(git_dir, 'checkout master')

        dest_path = os.path.join(git_dir,
                                 '%s-raw-changelog.md' % options.git_branch)
        logging.debug('Copying "%s" to "%s"', options.changelog_path,
                      dest_path)
        shutil.copyfile(options.changelog_path, dest_path)

        self.git_run_with_retries(git_dir,
                                  'add ' + os.path.basename(dest_path))
        self.__git.check_commit_or_no_changes(
            git_dir, '-a -m "Updated %s"' % os.path.basename(dest_path))

        logging.debug('Pushing back gist')
        self.git_run_with_retries(git_dir, 'push -f origin master')
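
The git_run_with_retries calls in this variant suggest a retry wrapper around the git runner. The project's actual helper is not shown on this page; a minimal sketch of what such a wrapper could look like, assuming the check_run interface used above:

import logging
import time

def git_run_with_retries(git, git_dir, command, max_retries=3, wait_secs=2):
    # Retry transient git failures (e.g. network flakes); sketch only.
    for attempt in range(1, max_retries + 1):
        try:
            return git.check_run(git_dir, command)
        except Exception:
            if attempt == max_retries:
                raise
            logging.warning('git %s failed (attempt %d of %d); retrying',
                            command, attempt, max_retries)
            time.sleep(wait_secs)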
  def __generate_json_from_url(
      self, repository, server_url, output_path):
    """Build the swagger json from the swagger endpoint."""
    ensure_dir_exists(os.path.dirname(output_path))
    logging.info('Generating swagger docs for %s', repository.name)
    check_subprocess('curl -s {url} -o {output_path}'
                     .format(url=server_url, output_path=output_path))
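
The helper above shells out to curl. If curl may be absent, the same fetch can be done with Python's standard library; a hedged alternative sketch, not the project's code:

import urllib.request

def fetch_to_file(url, output_path):
    # Download url and write the response body to output_path (binary-safe).
    with urllib.request.urlopen(url) as response:
        with open(output_path, 'wb') as out:
            out.write(response.read())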
Example #4
    def _do_repository(self, repository):
        generate_path = self.options.generate_swagger_path
        if not generate_path:
            self.__build_from_live_server(repository)
            return

        swagger_dir = os.path.dirname(generate_path)
        generate_logfile = self.get_logfile_path(repository.name + "-generate_swagger")
        check_subprocesses_to_logfile(
            "Extracting API to JSON",
            generate_logfile,
            [generate_path],
            cwd=repository.git_dir,
        )

        docs_dir = self.get_output_dir()
        ensure_dir_exists(docs_dir)
        json_path = os.path.join(docs_dir, "docs.json")
        swagger_output_path = os.path.join(
            repository.git_dir, swagger_dir, "swagger.json"
        )
        with open(swagger_output_path, "r") as stream:
            content = stream.read()
        with open(json_path, "w") as stream:
            stream.write(content)
        self.build_swagger_docs(repository, json_path)
Example #5
  def clone_repository_to_path(
      self, repository, commit=None, branch=None, default_branch=None):
    """Clone the remote repository at the given commit or branch.

    If requesting a branch and it is not found, then settle for the default
    branch, if one was explicitly specified.
    """
    # pylint: disable=too-many-arguments

    if (commit is not None) and (branch is not None):
      raise_and_log_error(
          ConfigError('At most one of commit or branch can be specified.'))

    origin = repository.origin
    parts = self.normalize_repo_url(repository.origin)
    if len(parts) == 3:
      pull_url = (self.make_ssh_url(*parts) if self.__options.github_pull_ssh
                  else self.make_https_url(*parts))
    else:
      pull_url = origin

    git_dir = repository.git_dir
    logging.debug('Begin cloning %s', pull_url)
    parent_dir = os.path.dirname(git_dir)
    ensure_dir_exists(parent_dir)

    clone_command = 'clone ' + pull_url
    if branch:
      branches = [branch]
      if default_branch:
        branches.append(default_branch)
      self.__check_clone_branch(pull_url, parent_dir, clone_command, branches)
    else:
      self.check_run(parent_dir, clone_command)
    logging.info('Cloned %s into %s', pull_url, parent_dir)

    if commit:
      self.check_run(git_dir, 'checkout -q ' + commit, echo=True)

    upstream = repository.upstream_or_none()
    if upstream and not self.is_same_repo(upstream, origin):
      logging.debug('Adding upstream %s with disabled push', upstream)
      self.check_run(git_dir, 'remote add upstream ' + upstream)

    which = ('upstream'
             if upstream and not self.is_same_repo(upstream, origin)
             else 'origin')
    if self.__options.github_disable_upstream_push:
      self.check_run(
          git_dir, 'remote set-url --push {which} disabled'.format(which=which))
    if which != 'origin' or not self.__options.github_disable_upstream_push:
      parts = self.normalize_repo_url(repository.origin)
      if len(parts) == 3:
        # Origin is not a local path
        logging.debug('Fixing origin push url')
        push_url = (self.make_ssh_url(*parts) if self.__options.github_push_ssh
                    else self.make_https_url(*parts))
        self.check_run(git_dir, 'remote set-url --push origin ' + push_url)

    logging.debug('Finished cloning %s', pull_url)
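
A hedged usage sketch for clone_repository_to_path; the scm object, repository, and branch names are illustrative stand-ins:

# Clone at a release branch, settling for master if the branch is missing.
scm.clone_repository_to_path(repository,
                             branch='release-1.20.x',
                             default_branch='master')

# Or clone and pin to a specific commit instead.
scm.clone_repository_to_path(repository, commit='abc1234')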
def check_subprocesses_to_logfile(what, logfile, cmds, append=False, **kwargs):
    """Wrapper around check_subprocess that logs output to a logfile.

  Args:
    what: [string] For logging purposes, what is the command for.
    logfile: [path] The logfile to write to.
    cmds: [list of string] A list of commands to run.
    append: [boolean] Open the log file as append if true, write new default.
    kwargs: [kwargs] Additional keyword arguments to pass to check_subprocess.
  """
    mode = 'a' if append else 'w'
    how = 'Appending' if append else 'Logging'
    logging.info('%s %s to %s', how, what, logfile)
    ensure_dir_exists(os.path.dirname(logfile))
    with open(logfile, mode) as stream:
        try:
            check_subprocess_sequence(cmds,
                                      stream=stream,
                                      embed_errors=False,
                                      **kwargs)
        except Exception:
            logging.error('%s failed. Log file [%s] follows:', what, logfile)
            with open(logfile, 'r') as readagain:
                output = readagain.read()
                log_embedded_output(logging.ERROR, logfile, output)
            logging.error('%s failed. See embedded logfile above', what)

            ensure_dir_exists(ERROR_LOGFILE_DIR)
            error_path = os.path.join(ERROR_LOGFILE_DIR,
                                      os.path.basename(logfile))
            logging.info('Copying error log file to %s', error_path)
            with open(error_path, 'w') as f:
                f.write(output)
            raise
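
A hedged usage sketch for check_subprocesses_to_logfile; the commands and paths are hypothetical:

check_subprocesses_to_logfile(
    'building gate', '/tmp/logs/gate-build.log',
    ['./gradlew assemble'],
    cwd='/tmp/source/gate')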
Example #7
    def clone_repository_to_path(
        self, repository, commit=None, branch=None, default_branch=None
    ):
        """Clone the remote repository at the given commit or branch.

        If requesting a branch and it is not found, then settle for the default
        branch, if one was explicitly specified.
        """
        # pylint: disable=too-many-arguments

        if (commit is not None) and (branch is not None):
            raise_and_log_error(
                ConfigError("At most one of commit or branch can be specified.")
            )

        pull_url = self.determine_pull_url(repository.origin)
        git_dir = repository.git_dir
        logging.debug("Begin cloning %s", pull_url)
        parent_dir = os.path.dirname(git_dir)
        ensure_dir_exists(parent_dir)

        clone_command = "clone " + pull_url
        if branch:
            branches = [branch]
            if default_branch:
                branches.append(default_branch)
            self.__check_clone_branch(pull_url, parent_dir, clone_command, branches)
        else:
            self.check_run(parent_dir, clone_command)
        logging.info("Cloned %s into %s", pull_url, parent_dir)

        if commit:
            self.checkout(repository, commit)

        upstream = repository.upstream_or_none()
        origin = repository.origin
        if upstream and not self.is_same_repo(upstream, origin):
            logging.debug("Adding upstream %s with disabled push", upstream)
            self.check_run(git_dir, "remote add upstream " + upstream)

        which = (
            "upstream"
            if upstream and not self.is_same_repo(upstream, origin)
            else "origin"
        )
        if self.__options.github_disable_upstream_push:
            self.check_run(
                git_dir, "remote set-url --push {which} disabled".format(which=which)
            )
        if which != "origin" or not self.__options.github_disable_upstream_push:
            parts = self.normalize_repo_url(repository.origin)
            if len(parts) == 3:
                # Origin is not a local path
                logging.debug("Fixing origin push url")
                push_url = self.determine_push_url(repository.origin)
                self.check_run(git_dir, "remote set-url --push origin " + push_url)

        logging.debug("Finished cloning %s", pull_url)
Example #8
    def test_ensure_dir(self):
        want = os.path.join(self.base_temp_dir, "ensure", "a", "b", "c")
        self.assertFalse(os.path.exists(want))
        ensure_dir_exists(want)
        self.assertTrue(os.path.exists(want))

        # Ok if already exists
        ensure_dir_exists(want)
        self.assertTrue(os.path.exists(want))
Example #9
    def __generate_json_from_url(self, repository, server_url, output_path):
        """Build the swagger json from the swagger endpoint."""
        ensure_dir_exists(os.path.dirname(output_path))
        logging.info("Generating swagger docs for %s", repository.name)
        check_subprocess(
            "curl -s {url} -o {output_path}".format(
                url=server_url, output_path=output_path
            )
        )
Example #10
  def test_ensure_dir(self):
    want = os.path.join(self.base_temp_dir, 'ensure', 'a', 'b', 'c')
    self.assertFalse(os.path.exists(want))
    ensure_dir_exists(want)
    self.assertTrue(os.path.exists(want))

    # Ok if already exists
    ensure_dir_exists(want)
    self.assertTrue(os.path.exists(want))
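
ensure_dir_exists itself is not shown on this page. A minimal sketch consistent with the behavior the test above checks (creates missing parents, tolerates an existing dir); this is an assumption, not the project's implementation:

import errno
import os

def ensure_dir_exists(path):
    # Create path and any missing parents; a pre-existing dir is fine.
    try:
        os.makedirs(path)
    except OSError as err:
        if err.errno != errno.EEXIST:
            raise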
  def get_output_dir(self, command=None):
    """Return the output dir for persistent build output from this command."""
    command = command or self.__options.command
    output_command_path = os.path.join(self.__options.output_dir, command)
    # FIXME: We manually ensure the output dir exists here if it is missing;
    # it should be created before the command is run.
    if not os.path.isdir(output_command_path):
      logging.debug('making dir %s', output_command_path)
      ensure_dir_exists(output_command_path)
    return output_command_path
Example #12
  def clone_repository_to_path(
      self, repository, commit=None, branch=None, default_branch=None):
    """Clone the remote repository at the given commit or branch.

    If requesting a branch and it is not found, then settle for the default
    branch, if one was explicitly specified.
    """
    # pylint: disable=too-many-arguments

    if (commit is not None) and (branch is not None):
      raise_and_log_error(
          ConfigError('At most one of commit or branch can be specified.'))

    pull_url = self.determine_pull_url(repository)
    git_dir = repository.git_dir
    logging.debug('Begin cloning %s', pull_url)
    parent_dir = os.path.dirname(git_dir)
    ensure_dir_exists(parent_dir)

    clone_command = 'clone ' + pull_url
    if branch:
      branches = [branch]
      if default_branch:
        branches.append(default_branch)
      self.__check_clone_branch(pull_url, parent_dir, clone_command, branches)
    else:
      self.check_run(parent_dir, clone_command)
    logging.info('Cloned %s into %s', pull_url, parent_dir)

    if commit:
      self.check_run(git_dir, 'checkout -q ' + commit, echo=True)

    upstream = repository.upstream_or_none()
    origin = repository.origin
    if upstream and not self.is_same_repo(upstream, origin):
      logging.debug('Adding upstream %s with disabled push', upstream)
      self.check_run(git_dir, 'remote add upstream ' + upstream)

    which = ('upstream'
             if upstream and not self.is_same_repo(upstream, origin)
             else 'origin')
    if self.__options.github_disable_upstream_push:
      self.check_run(
          git_dir, 'remote set-url --push {which} disabled'.format(which=which))
    if which != 'origin' or not self.__options.github_disable_upstream_push:
      parts = self.normalize_repo_url(repository.origin)
      if len(parts) == 3:
        # Origin is not a local path
        logging.debug('Fixing origin push url')
        push_url = self.determine_push_url(repository)
        self.check_run(git_dir, 'remote set-url --push origin ' + push_url)

    logging.debug('Finished cloning %s', pull_url)
Example #13
    def __build_from_live_server(self, repository):
        """Implements CommandProcessor interface."""
        docs_url_path = SWAGGER_URL_PATHS[repository.name]
        env = dict(os.environ)
        port = unused_port()
        env['SERVER_PORT'] = str(port)
        base_url = 'http://localhost:' + str(port)

        gate_logfile = self.get_logfile_path(repository.name +
                                             '-apidocs-server')
        logging.info(
            'Starting up prototype %s so we can extract docs from it.'
            ' We will log this instance to %s', repository.name, gate_logfile)
        boot_run_cmd = './gradlew'  # default will run
        ensure_dir_exists(os.path.dirname(gate_logfile))
        gate_logstream = open(gate_logfile, 'w')
        process = start_subprocess(boot_run_cmd,
                                   stream=gate_logstream,
                                   stdout=gate_logstream,
                                   cwd=repository.git_dir,
                                   env=env)

        max_wait_secs = self.options.max_wait_secs_startup
        # pylint: disable=broad-except
        try:
            logging.info('Waiting up to %s secs for %s to be ready on port %d',
                         max_wait_secs, repository.name, port)
            self.wait_for_url(base_url + '/health', max_wait_secs)
            json_path = os.path.join(self.get_output_dir(), 'docs.json')
            self.__generate_json_from_url(repository,
                                          base_url + '/' + docs_url_path,
                                          json_path)
            self.build_swagger_docs(repository, json_path)
        finally:
            try:
                gate_logstream.flush()
                gate_logstream.write(
                    '\n' + log_timestring() +
                    ' ***** buildtool is killing subprocess *****\n')
                logging.info(
                    'Killing %s subprocess %s now that we are done with it',
                    repository.name, process.pid)
                process.kill()
                wait_subprocess(process)
                gate_logstream.close()
            except Exception as ex:
                maybe_log_exception(
                    self.name, ex,
                    'Ignoring exception while stopping {name} subprocess {pid}.'
                    .format(name=repository.name, pid=process.pid))
Example #14
    def __collect_halconfig_files(self, repository):
        """Gets the component config files and writes them into the output_dir."""
        name = repository.name
        if (name not in SPINNAKER_BOM_REPOSITORY_NAMES
                or name in ['spinnaker']):
            logging.debug('%s does not use config files -- skipping', name)
            return

        if name == 'spinnaker-monitoring':
            config_root = os.path.join(repository.git_dir,
                                       'spinnaker-monitoring-daemon')
        else:
            config_root = repository.git_dir

        service_name = self.scm.repository_name_to_service_name(
            repository.name)
        target_dir = os.path.join(self.get_output_dir(), 'halconfig',
                                  service_name)
        ensure_dir_exists(target_dir)

        config_path = os.path.join(config_root, 'halconfig')
        logging.info('Copying configs from %s...', config_path)
        if not os.path.exists(config_path) and repository.name == 'kayenta':
            logging.warning('Kayenta does not yet have a halconfig.')
            return
        for profile in os.listdir(config_path):
            profile_path = os.path.join(config_path, profile)
            if os.path.isfile(profile_path):
                shutil.copyfile(profile_path,
                                os.path.join(target_dir, profile))
                logging.debug('Copied profile to %s', profile_path)
            elif not os.path.isdir(profile_path):
                logging.warning('%s is neither file nor directory -- ignoring',
                                profile_path)
                continue
            else:
                tar_path = os.path.join(
                    target_dir, '{profile}.tar.gz'.format(profile=profile))
                file_list = ' '.join(os.listdir(profile_path))

                # NOTE: For historic reasons this is not actually compressed
                # even though the tar_path says ".tar.gz"
                check_subprocess(
                    'tar cf {path} -C {profile} {file_list}'.format(
                        path=tar_path,
                        profile=profile_path,
                        file_list=file_list))
                logging.debug('Copied profile to %s', tar_path)
def ensure_gist_repo(git, input_dir, gist_url):
    index = gist_url.rfind("/")
    if index < 0:
        index = gist_url.rfind(":")  # ssh gist
    gist_id = gist_url[index + 1 :]

    git_dir = os.path.join(input_dir, gist_id)
    if not os.path.exists(git_dir):
        logging.debug("Cloning gist from %s", gist_url)
        ensure_dir_exists(os.path.dirname(git_dir))
        git_run_with_retries(git, os.path.dirname(git_dir), "clone " + gist_url)
    else:
        logging.debug('Updating gist in "%s"', git_dir)
        git_run_with_retries(git, git_dir, "fetch origin main")
        git_run_with_retries(git, git_dir, "checkout main")
        git_run_with_retries(git, git_dir, "reset --hard origin/main")
    return git_dir
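
A hedged usage sketch for ensure_gist_repo; the git wrapper and URL are hypothetical:

git_dir = ensure_gist_repo(
    git, '/tmp/input', 'https://gist.github.com/someuser/abc123')
# git_dir is now a fresh clone, or an existing checkout hard-reset
# to origin/main.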
  def __build_from_live_server(self, repository):
    """Implements CommandProcessor interface."""
    docs_url_path = SWAGGER_URL_PATHS[repository.name]
    env = dict(os.environ)
    port = unused_port()
    env['SERVER_PORT'] = str(port)
    base_url = 'http://localhost:' + str(port)

    gate_logfile = self.get_logfile_path(repository.name + '-apidocs-server')
    logging.info('Starting up prototype %s so we can extract docs from it.'
                 ' We will log this instance to %s',
                 repository.name, gate_logfile)
    boot_run_cmd = './gradlew'  # default will run
    ensure_dir_exists(os.path.dirname(gate_logfile))
    gate_logstream = open(gate_logfile, 'w')
    process = start_subprocess(
        boot_run_cmd, stream=gate_logstream, stdout=gate_logstream,
        cwd=repository.git_dir, env=env)

    max_wait_secs = self.options.max_wait_secs_startup
    # pylint: disable=broad-except
    try:
      logging.info('Waiting up to %s secs for %s to be ready on port %d',
                   max_wait_secs, repository.name, port)
      self.wait_for_url(base_url + '/health', max_wait_secs)
      json_path = os.path.join(self.get_output_dir(), 'docs.json')
      self.__generate_json_from_url(
          repository, base_url + '/' + docs_url_path, json_path)
      self.build_swagger_docs(repository, json_path)
    finally:
      try:
        gate_logstream.flush()
        gate_logstream.write(
            '\n' + log_timestring()
            + ' ***** buildtool is killing subprocess *****\n')
        logging.info('Killing %s subprocess %s now that we are done with it',
                     repository.name, process.pid)
        process.kill()
        wait_subprocess(process)
        gate_logstream.close()
      except Exception as ex:
        maybe_log_exception(
            self.name, ex,
            'Ignoring exception while stopping {name} subprocess {pid}.'
            .format(name=repository.name, pid=process.pid))
def ensure_gist_repo(git, input_dir, gist_url):
    index = gist_url.rfind('/')
    if index < 0:
        index = gist_url.rfind(':')  # ssh gist
    gist_id = gist_url[index + 1:]

    git_dir = os.path.join(input_dir, gist_id)
    if not os.path.exists(git_dir):
        logging.debug('Cloning gist from %s', gist_url)
        ensure_dir_exists(os.path.dirname(git_dir))
        git_run_with_retries(git, os.path.dirname(git_dir),
                             'clone ' + gist_url)
    else:
        logging.debug('Updating gist in "%s"', git_dir)
        git_run_with_retries(git, git_dir, 'fetch origin master')
        git_run_with_retries(git, git_dir, 'checkout master')
        git_run_with_retries(git, git_dir, 'reset --hard origin/master')
    return git_dir
  def build_swagger_docs(self, repository, docs_url):
    """Build the API from the swagger endpoint."""
    if repository.name != 'gate':
      raise_and_log_error(
          UnexpectedError('Repo "%s" != "gate"' % repository.name))

    docs_dir = self.get_output_dir()
    ensure_dir_exists(docs_dir)
    docs_path = os.path.join(docs_dir, 'docs.json')

    logging.info('Generating swagger docs for %s', repository.name)
    check_subprocess('curl -s {url} -o {docs_path}'
                     .format(url=docs_url, docs_path=docs_path))
    check_subprocess(
        'java -jar {jar_path} generate -i {docs_path} -l html2'
        ' -o {output_dir} -t {templates_directory}'
        .format(jar_path=self.options.swagger_codegen_cli_jar_path,
                docs_path=docs_path, output_dir=docs_dir,
                templates_directory=self.__templates_directory))
    logging.info('Writing docs to directory %s', docs_dir)
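
For concreteness, a hedged illustration of the swagger-codegen invocation the format string above produces; every path here is hypothetical:

cmd = ('java -jar {jar_path} generate -i {docs_path} -l html2'
       ' -o {output_dir} -t {templates_directory}'
       .format(jar_path='swagger-codegen-cli.jar',
               docs_path='output/docs.json',
               output_dir='output/docs',
               templates_directory='buildtool/templates'))
# -> java -jar swagger-codegen-cli.jar generate -i output/docs.json -l html2
#    -o output/docs -t buildtool/templates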
Example #19
  def __collect_halconfig_files(self, repository):
    """Gets the component config files and writes them into the output_dir."""
    name = repository.name
    if (name not in SPINNAKER_BOM_REPOSITORY_NAMES
        or name in ['spin']):
      logging.debug('%s does not use config files -- skipping', name)
      return

    if name == 'spinnaker-monitoring':
      config_root = os.path.join(
          repository.git_dir, 'spinnaker-monitoring-daemon')
    else:
      config_root = repository.git_dir

    service_name = self.scm.repository_name_to_service_name(repository.name)
    target_dir = os.path.join(self.get_output_dir(), 'halconfig', service_name)
    ensure_dir_exists(target_dir)

    config_path = os.path.join(config_root, 'halconfig')
    logging.info('Copying configs from %s...', config_path)
    for profile in os.listdir(config_path):
      profile_path = os.path.join(config_path, profile)
      if os.path.isfile(profile_path):
        shutil.copyfile(profile_path, os.path.join(target_dir, profile))
        logging.debug('Copied profile to %s', profile_path)
      elif not os.path.isdir(profile_path):
        logging.warning('%s is neither file nor directory -- ignoring',
                        profile_path)
        continue
      else:
        tar_path = os.path.join(
            target_dir, '{profile}.tar.gz'.format(profile=profile))
        file_list = ' '.join(os.listdir(profile_path))

        # NOTE: For historic reasons this is not actually compressed
        # even though the tar_path says ".tar.gz"
        check_subprocess(
            'tar cf {path} -C {profile} {file_list}'.format(
                path=tar_path, profile=profile_path, file_list=file_list))
        logging.debug('Copied profile to %s', tar_path)
def check_subprocesses_to_logfile(what, logfile, cmds, append=False, **kwargs):
    """Wrapper around check_subprocess that logs output to a logfile.

    Args:
      what: [string] For logging purposes, what the command is for.
      logfile: [path] The logfile to write to.
      cmds: [list of string] A list of commands to run.
      append: [boolean] Open the log file for append if true, else write anew.
      kwargs: [kwargs] Additional keyword arguments to pass to check_subprocess.
    """
    mode = "a" if append else "w"
    how = "Appending" if append else "Logging"
    logging.info("%s %s to %s", how, what, logfile)
    ensure_dir_exists(os.path.dirname(logfile))
    with io.open(logfile, mode, encoding="utf-8") as stream:
        try:
            check_subprocess_sequence(cmds, stream=stream, embed_errors=False, **kwargs)
        except Exception as ex:
            logging.error("%s failed. Log file [%s] follows:", what, logfile)
            import traceback

            traceback.print_exc()

            with io.open(logfile, "r", encoding="utf-8") as readagain:
                output = readagain.read()
                log_embedded_output(logging.ERROR, logfile, output)
            logging.error(
                "Caught exception %s\n%s failed. See embedded logfile above", ex, what
            )

            ensure_dir_exists(ERROR_LOGFILE_DIR)
            error_path = os.path.join("errors", os.path.basename(logfile))
            logging.info("Copying error log file to %s", error_path)
            with io.open(error_path, "w", encoding="utf-8") as f:
                f.write(output)
                f.write("\n--------\n")
                f.write("Exeception caught in parent process:\n%s" % ex)

            raise
  def _do_repository(self, repository):
    generate_path = self.options.generate_swagger_path
    if not generate_path:
      self.__build_from_live_server(repository)
      return

    swagger_dir = os.path.dirname(generate_path)
    generate_logfile = self.get_logfile_path(
        repository.name + '-generate_swagger')
    check_subprocesses_to_logfile(
        'Extracting API to JSON', generate_logfile, [generate_path],
        cwd=repository.git_dir)

    docs_dir = self.get_output_dir()
    ensure_dir_exists(docs_dir)
    json_path = os.path.join(docs_dir, 'docs.json')
    swagger_output_path = os.path.join(
        repository.git_dir, swagger_dir, 'swagger.json')
    with open(swagger_output_path, 'r') as stream:
      content = stream.read()
    with open(json_path, 'w') as stream:
      stream.write(content)
    self.build_swagger_docs(repository, json_path)
Example #22
    def _do_repository(self, repository):
        generate_path = self.options.generate_swagger_path
        if not generate_path:
            self.__build_from_live_server(repository)
            return

        docs_url_path = SWAGGER_URL_PATHS[repository.name]
        swagger_dir = os.path.dirname(generate_path)
        generate_logfile = self.get_logfile_path(repository.name +
                                                 '-generate_swagger')
        check_subprocesses_to_logfile('Extracting API to JSON',
                                      generate_logfile, [generate_path],
                                      cwd=repository.git_dir)

        docs_dir = self.get_output_dir()
        ensure_dir_exists(docs_dir)
        json_path = os.path.join(docs_dir, 'docs.json')
        swagger_output_path = os.path.join(repository.git_dir, swagger_dir,
                                           'swagger.json')
        with open(swagger_output_path, 'r') as stream:
            content = stream.read()
        with open(json_path, 'w') as stream:
            stream.write(content)
        self.build_swagger_docs(repository, json_path)