Example 1
 def write_archive(self, filename, items):
   p = self.file_path(filename)
   if path.exists(p):
     raise IOError('already exists: {}'.format(filename))
   extension = file_util.extension(filename)
   tmp_archive = temp_archive.make_temp_archive(items, extension)
   file_util.rename(tmp_archive, p)
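Every example on this page follows the same pattern: build the result somewhere temporary, then move it into its final location with file_util.rename. Below is a minimal standard-library sketch of that pattern; the rename helper is only an assumption about the behavior (roughly shutil.move plus parent-directory creation), not the library's actual implementation.

import os
import shutil
import tempfile

def rename(src, dst):
    # Assumed behavior: create the destination's parent directory, then move.
    os.makedirs(os.path.dirname(dst) or '.', exist_ok=True)
    shutil.move(src, dst)

def write_file(dst, text):
    # Refuse to overwrite, write to a temp file, then move it into place.
    if os.path.exists(dst):
        raise IOError('already exists: {}'.format(dst))
    fd, tmp = tempfile.mkstemp()
    with os.fdopen(fd, 'w') as f:
        f.write(text)
    rename(tmp, dst)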
Example 2
    def download(self, full_version, output_filename):
        check.check_string(full_version)
        check.check_string(output_filename, allow_none=True)

        tmp_package = self.installer.download(full_version)
        output_filename = output_filename or path.basename(tmp_package)
        file_util.rename(tmp_package, output_filename)
        return 0
Example 3
 def extract_member_to_file(self, member, filename):
     tmp_dir = temp_file.make_temp_dir()
     tmp_member = path.join(tmp_dir, member)
     self.extract(tmp_dir, include=[member])
     if not path.exists(tmp_member):
         raise RuntimeError('Failed to extract member from {}: {}'.format(
             self.filename, member))
     if not path.isfile(tmp_member):
         raise RuntimeError('Member is not a file {}: {}'.format(
             self.filename, member))
     file_util.rename(tmp_member, filename)
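Example 3 extracts a single member into a throwaway directory, validates it, and only then moves it to the requested filename. A hedged, self-contained variant using only tarfile from the standard library (the archive format and helper name are illustrative, not the bes API):

import os
import shutil
import tarfile
import tempfile

def extract_member_to_file(archive, member, filename):
    # Extract just the one member into a temp dir, check it, then move it.
    tmp_dir = tempfile.mkdtemp()
    with tarfile.open(archive) as f:
        f.extract(member, path=tmp_dir)
    tmp_member = os.path.join(tmp_dir, member)
    if not os.path.isfile(tmp_member):
        raise RuntimeError('Failed to extract member from {}: {}'.format(archive, member))
    shutil.move(tmp_member, filename)
    shutil.rmtree(tmp_dir)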
Example 4
 def _apply_operation_rename(self, try_git):
     git_worked = False
     if try_git:
         root_dir = git.find_root_dir(start_dir=path.dirname(self.src))
         should_ignore = git.check_ignore(root_dir, self.src)
         if not should_ignore:
             try:
                 git.move(root_dir, self.src, self.dst)
                 git_worked = True
             except git_error as ex:
                 print(f'caught: {ex}')
     if not git_worked:
         file_util.rename(self.src, self.dst)
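Example 4 is a git-aware rename: when the source sits inside a git work tree and is not ignored, the move goes through git so the rename stays tracked; otherwise it falls back to a plain filesystem move. A sketch of the same idea driving the git CLI directly (rev-parse, check-ignore and mv are real subcommands; the rest is illustrative glue):

import os
import shutil
import subprocess

def git_aware_rename(src, dst):
    src = os.path.abspath(src)
    dst = os.path.abspath(dst)
    try:
        root = subprocess.check_output(
            ['git', 'rev-parse', '--show-toplevel'],
            cwd=os.path.dirname(src), stderr=subprocess.DEVNULL, text=True).strip()
        # check-ignore exits 0 when the path is ignored, 1 when it is not.
        ignored = subprocess.run(['git', 'check-ignore', '-q', src], cwd=root).returncode == 0
        if not ignored:
            subprocess.check_call(['git', 'mv', src, dst], cwd=root)
            return
    except subprocess.CalledProcessError:
        pass  # not a git repo or git mv failed; fall back to a plain move
    shutil.move(src, dst)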
Example 5
 def transform(clazz, archive, operations):
     'Transform an archive with one or more operations.'
     check.check_string(archive)
     operations = object_util.listify(operations)
     check.check_archive_operation_seq(operations)
     tmp_dir = clazz.extract_all_temp_dir(archive)
     for operation in operations:
         if not check.is_archive_operation(operation):
             raise TypeError(
                 'Operation should be a subclass of archive_operation_base: {}'
                 .format(operation))
         operation.execute(tmp_dir)
     tmp_new_archive = clazz.create_temp_file(
         archive_extension.extension_for_filename(archive), tmp_dir)
     file_util.rename(tmp_new_archive, archive)
Example 6
 def _fetch_framework(self, revision):
   assert revision != 'latest'
   
   tmp_dir = temp_file.make_temp_dir(prefix = path.basename(self.framework_dir),
                                     dir = path.normpath(path.join(self.framework_dir, path.pardir)),
                                     delete = False)
   options = git_clone_options(depth = 1,
                               num_tries = 3,
                               retry_wait_seconds = 5,
                               branch = revision)
   repo = git_repo(tmp_dir, address = self._options.address)
   repo.clone(options = options)
   file_util.remove(self.framework_dir)
   src_dir = path.join(tmp_dir, 'bash', 'bes_bash_one_file')
   file_util.rename(src_dir, self.framework_dir)
   file_util.remove(tmp_dir)
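Example 6 refreshes a vendored directory with a shallow clone into a temporary directory next to the target, then swaps the directories, so a failed clone never leaves the target half updated. A simplified sketch (it moves the whole clone rather than the bash/bes_bash_one_file subdirectory, and every name here is illustrative):

import os
import shutil
import subprocess
import tempfile

def fetch_framework(address, revision, framework_dir):
    # Clone next to the target so the final move stays on the same filesystem.
    parent = os.path.dirname(os.path.abspath(framework_dir))
    tmp_dir = tempfile.mkdtemp(dir=parent)
    subprocess.check_call(
        ['git', 'clone', '--depth', '1', '--branch', revision, address, tmp_dir])
    if os.path.exists(framework_dir):
        shutil.rmtree(framework_dir)
    shutil.move(tmp_dir, framework_dir)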
Example 7
 def get_tarball(self, address, revision):
     'Return the local filesystem path to the tarball with address and revision.'
     local_address_path = self.path_for_address(address)
     tarball_filename = '%s.tar.gz' % (revision)
     tarball_path = path.join(local_address_path, tarball_filename)
     if path.exists(tarball_path):
         return tarball_path
     tmp_dir = temp_file.make_temp_dir()
     if path.isdir(address):
         name = path.basename(address)
     else:
         name = git_address_util.name(address)
     tmp_full_path = path.join(tmp_dir, tarball_filename)
     git.archive(address, revision, name, tmp_full_path)
     file_util.rename(tmp_full_path, tarball_path)
     return tarball_path
Example 8
  def cached_archive_get(self, revision, cache_dir = None):
    if not self.address:
      raise git_error('cached_archive only works for repos cloned from a remote address.')
    if not self._cached_archive_revision_is_valid(revision):
      raise git_error('revision should be a valid tag or commit hash: "{}"'.format(revision))
      
    cache_dir = self._cached_archive_resolve_cache_dir(cache_dir = cache_dir)
    local_address_path = self._cached_archive_path_for_address(cache_dir, self.address)
    tarball_filename = '{}.tar.gz'.format(revision)
    tarball_path = path.join(local_address_path, tarball_filename)
    if path.exists(tarball_path):
      return tarball_path

    tmp_dir = temp_file.make_temp_dir()
    name = git_address_util.name(self.address)
    tmp_full_path = path.join(tmp_dir, tarball_filename)

    prefix = '{}-{}'.format(name, revision)
    self.archive_to_file(prefix, revision, tmp_full_path, archive_format = 'tar.gz', short_hash = True)
    file_util.rename(tmp_full_path, tarball_path)
    return tarball_path
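Examples 7 and 8 implement the same tarball cache: return the cached path when it already exists, otherwise build the tarball in a temp directory and rename it into the cache so readers never see a partially written file. A hedged sketch using git archive directly (the prefix and path layout are illustrative):

import os
import shutil
import subprocess
import tempfile

def cached_git_tarball(repo_dir, revision, cache_dir):
    tarball = os.path.join(cache_dir, '{}.tar.gz'.format(revision))
    if os.path.exists(tarball):
        return tarball
    tmp_dir = tempfile.mkdtemp()
    tmp_tarball = os.path.join(tmp_dir, os.path.basename(tarball))
    prefix = '{}-{}/'.format(os.path.basename(os.path.abspath(repo_dir)), revision)
    with open(tmp_tarball, 'wb') as f:
        subprocess.check_call(
            ['git', 'archive', '--format=tar.gz', '--prefix={}'.format(prefix), revision],
            cwd=repo_dir, stdout=f)
    os.makedirs(cache_dir, exist_ok=True)
    shutil.move(tmp_tarball, tarball)
    shutil.rmtree(tmp_dir)
    return tarball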
Example 9
    def copy_file(clazz, image_id, src_filename, dst_filename):
        if not path.isabs(src_filename):
            raise docker_error(
                'src_filename should be an absolute path: {}'.format(
                    src_filename))
        tmp = temp_file.make_temp_file()
        file_util.remove(tmp)
        container_id = None
        success = False
        try:
            container_id = clazz.create(image_id)
            args = ['cp', '{}:{}'.format(container_id, src_filename), tmp]
            docker_exe.call_docker(args, non_blocking=False)
            if path.isfile(tmp):
                success = True
                file_util.rename(tmp, dst_filename)
        finally:
            if container_id:
                docker_container.remove_container(container_id)

        if not success:
            raise docker_error('failed to copy {} from {}'.format(
                src_filename, image_id))
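Example 9 copies a file out of a docker image by creating a throwaway container, running docker cp, and removing the container in a finally block whether or not the copy worked. The sketch below uses the real docker create, docker cp and docker rm subcommands; the error handling is intentionally simplified:

import os
import shutil
import subprocess
import tempfile

def copy_file_from_image(image_id, src_filename, dst_filename):
    # Create (but do not start) a container so its filesystem can be read.
    container_id = subprocess.check_output(
        ['docker', 'create', image_id], text=True).strip()
    try:
        tmp = os.path.join(tempfile.mkdtemp(), os.path.basename(src_filename))
        subprocess.check_call(
            ['docker', 'cp', '{}:{}'.format(container_id, src_filename), tmp])
        shutil.move(tmp, dst_filename)
    finally:
        subprocess.check_call(['docker', 'rm', container_id])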
Example 10
 def get_url(self,
             url,
             checksum=None,
             cookies=None,
             debug=False,
             auth=None,
             uncompress=True):
      'Download url to the local cache if needed and return the local filesystem path.'
     self.log.log_d('get_url: url=%s; checksum=%s; cookies=%s' %
                    (url, checksum, cookies))
     local_cached_path = self._local_path_for_url(url)
     local_cached_path_rel = path.relpath(local_cached_path)
     self.log.log_d('get_url: local_cached_path=%s' %
                    (local_cached_path_rel))
     if checksum:
         if path.exists(local_cached_path):
             if self._local_checksum(local_cached_path) == checksum:
                 self.log.log_d(
                     'get_url: found in cache with good checksum. using: %s'
                     % (local_cached_path_rel))
                 result = self._uncompress_if_needed(
                     local_cached_path, uncompress)
                 self.log.log_d('get_url: 1 result={}'.format(result))
                 return result
             else:
                 self.log.log_w(
                     'get_url: found in cache with BAD checksum. removing: %s'
                     % (local_cached_path_rel))
                 file_util.remove(local_cached_path)
     else:
         if path.exists(local_cached_path):
             self.log.log_d('get_url: found in cache. using: %s' %
                            (local_cached_path_rel))
             result = self._uncompress_if_needed(local_cached_path,
                                                 uncompress)
             self.log.log_d('get_url: 2 result={}'.format(result))
             return result
     tmp = self._download_to_tmp_file(url,
                                      cookies=cookies,
                                      debug=debug,
                                      auth=auth)
     self.download_count += 1
     self.log.log_d('get_url: downloaded url to %s' % (tmp))
     if not tmp:
         self.log.log_d('get_url: failed to download: %s' % (url))
         self.log.log_d('get_url: 3 result={}'.format(None))
         return None
     if not checksum:
         if self.compressed:
             compressed_file.compress(tmp, local_cached_path)
             if uncompress:
                 result = tmp
             else:
                 result = local_cached_path
             self.log.log_d('get_url: 4 result={}'.format(result))
             return result
         else:
             file_util.rename(tmp, local_cached_path)
             self.log.log_d(
                 'get_url: 5 result={}'.format(local_cached_path))
             return local_cached_path
     actual_checksum = file_util.checksum('sha256', tmp)
     if actual_checksum == checksum:
         self.log.log_d(
              'get_url: download successful and checksum is good. using: %s'
             % (local_cached_path_rel))
         if self.compressed:
             compressed_file.compress(tmp, local_cached_path)
             if uncompress:
                 result = tmp
             else:
                 result = local_cached_path
             self.log.log_d('get_url: 6 result={}'.format(result))
             return result
         else:
             file_util.rename(tmp, local_cached_path)
             self.log.log_d(
                 'get_url: 7 result={}'.format(local_cached_path))
             return local_cached_path
     else:
         self.log.log_e(
             'get_url: download worked but checksum was WRONG: {}'.format(
                 url))
         self.log.log_e('get_url:  cookies: %s' % (cookies))
         self.log.log_e('get_url: expected: %s' % (checksum))
         self.log.log_e('get_url:   actual: %s' % (actual_checksum))
         #self.log.log_e('content:\n{}\n'.format(file_util.read(tmp, codec = 'utf8')))
         self.log.log_d('get_url: 8 result={}'.format(None))
         return None
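Example 10 is a checksum-verified download cache: a cached copy is reused only when its sha256 matches, a fresh download lands in a temp file first, and only a verified file is renamed into the cache. A stripped-down sketch without the logging and optional compression (urllib and hashlib are standard library; the function shape is illustrative):

import hashlib
import os
import shutil
import tempfile
import urllib.request

def _sha256(filename):
    with open(filename, 'rb') as f:
        return hashlib.sha256(f.read()).hexdigest()

def get_url(url, local_cached_path, checksum=None):
    # Reuse the cached copy only if it exists and, when given, matches the checksum.
    if os.path.exists(local_cached_path):
        if checksum is None or _sha256(local_cached_path) == checksum:
            return local_cached_path
        os.remove(local_cached_path)
    fd, tmp = tempfile.mkstemp()
    os.close(fd)
    urllib.request.urlretrieve(url, tmp)
    if checksum is not None and _sha256(tmp) != checksum:
        os.remove(tmp)
        return None
    os.makedirs(os.path.dirname(local_cached_path) or '.', exist_ok=True)
    shutil.move(tmp, local_cached_path)
    return local_cached_path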
Example 11
 def recreate(clazz, archive, output_archive, base_dir):
     'Recreate the archive with a new base_dir. output_archive can be the same as archive.'
     tmp_archive = clazz.recreate_temp_file(archive, base_dir)
     file_util.rename(tmp_archive, output_archive)