Example #1
def delete_storage_object(object_name):
    """Delete an object on Google Storage."""
    gsutil = slave_utils.GSUtilSetup()
    command = [gsutil]
    command.extend(['rm', '-R', object_name])
    print('Running command: %s' % command)
    shell_utils.run(command)
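A minimal usage sketch (the GS path is hypothetical); note that the '-R' flag makes the deletion recursive:

# Recursively delete everything under this prefix (hypothetical path).
delete_storage_object('gs://example-bucket/old-builds')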
Example #2
def copy_dir_contents(remote_src_dir,
                      remote_dest_dir,
                      gs_acl='private',
                      http_header_lines=None):
    """Copy contents of one Google Storage directory to another.

  params:
    remote_src_dir: source GS URL (gs://BUCKETNAME/PATH)
    remote_dest_dir: dest GS URL (gs://BUCKETNAME/PATH)
    gs_acl: which predefined ACL to apply to the new files; see
        https://developers.google.com/storage/docs/accesscontrol#extension
    http_header_lines: a list of HTTP header strings to add, if any

  The copy operates as a "merge with overwrite": any files in src_dir will be
  "overlaid" on top of the existing content in dest_dir.  Existing files with
  the same names will be overwritten.

  Performs the copy in multithreaded mode, in case there are a large number of
  files.
  """
    gsutil = slave_utils.GSUtilSetup()
    command = [gsutil, '-m']
    if http_header_lines:
        for http_header_line in http_header_lines:
            command.extend(['-h', http_header_line])
    command.extend(['cp', '-a', gs_acl, '-R', remote_src_dir, remote_dest_dir])
    print('Running command: %s' % command)
    shell_utils.run(command)
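A usage sketch with hypothetical bucket names, applying a public-read ACL and one extra HTTP header:

copy_dir_contents(
    remote_src_dir='gs://example-bucket/skp-sources',
    remote_dest_dir='gs://example-bucket/skp-backup',
    gs_acl='public-read',
    http_header_lines=['Cache-Control:public,max-age=3600'])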
Example #3
def move_storage_directory(src_dir, dest_dir):
    """Move a directory on Google Storage."""
    gsutil = slave_utils.GSUtilSetup()
    command = [gsutil]
    command.extend(['mv', '-p', src_dir, dest_dir])
    print('Running command: %s' % command)
    shell_utils.run(command)
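Usage is analogous; the '-p' flag preserves the ACLs of the moved objects (paths hypothetical):

move_storage_directory('gs://example-bucket/staging',
                       'gs://example-bucket/final')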
Example #4
def upload_dir_contents(local_src_dir,
                        remote_dest_dir,
                        gs_acl='private',
                        http_header_lines=None):
    """Upload contents of a local directory to Google Storage.

  params:
    local_src_dir: directory on local disk to upload contents of
    remote_dest_dir: GS URL (gs://BUCKETNAME/PATH)
    gs_acl: which predefined ACL to apply to the files on Google Storage; see
        https://developers.google.com/storage/docs/accesscontrol#extension
    http_header_lines: a list of HTTP header strings to add, if any

  The copy operates as a "merge with overwrite": any files in src_dir will be
  "overlaid" on top of the existing content in dest_dir.  Existing files with
  the same names will be overwritten.

  We upload each file as a separate call to gsutil.  This takes longer than
  calling "gsutil -m cp -R <source> <dest>", which can perform the uploads in
  parallel... but in http://skbug.com/2618 ('The Case of the Missing
  Mandrills') we figured out that was silently failing in some cases!

  TODO(epoger): Use the google-api-python-client API, like we do in
  https://skia.googlesource.com/skia/+/master/tools/pyutils/gs_utils.py ,
  rather than calling out to the gsutil tool.  See http://skbug.com/2618

  TODO(epoger): Upload multiple files simultaneously to reduce latency.

  TODO(epoger): Add a "noclobber" mode that will not upload any files that
  would overwrite existing files in Google Storage.

  TODO(epoger): Consider adding a do_compress parameter that would compress
  the file using gzip before upload, and add a "Content-Encoding:gzip" header
  so that HTTP downloads of the file would be unzipped automatically.
  See https://developers.google.com/storage/docs/gsutil/addlhelp/WorkingWithObjectMetadata#content-encoding
  """
    gsutil = slave_utils.GSUtilSetup()
    command = [gsutil]
    if http_header_lines:
        for http_header_line in http_header_lines:
            command.extend(['-h', http_header_line])
    command.extend(['cp', '-a', gs_acl])

    abs_local_src_dir = os.path.abspath(local_src_dir)
    for (abs_src_dirpath, _, filenames) in os.walk(abs_local_src_dir):
        if abs_src_dirpath == abs_local_src_dir:
            # This file is within local_src_dir; no need to add subdirs to
            # abs_dest_dirpath.
            abs_dest_dirpath = remote_dest_dir
        else:
            # This file is within a subdir, so add subdirs to abs_dest_dirpath.
            abs_dest_dirpath = posixpath.join(
                remote_dest_dir,
                _convert_to_posixpath(
                    os.path.relpath(abs_src_dirpath, abs_local_src_dir)))
        for filename in sorted(filenames):
            abs_src_filepath = os.path.join(abs_src_dirpath, filename)
            abs_dest_filepath = posixpath.join(abs_dest_dirpath, filename)
            shell_utils.run(command + [abs_src_filepath, abs_dest_filepath])
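upload_dir_contents calls a _convert_to_posixpath helper that this snippet does not include; a minimal sketch, assuming it only rewrites path separators:

def _convert_to_posixpath(localpath):
    # Assumed helper: convert an OS-specific relative path to forward slashes
    # so it can be joined onto a GS URL with posixpath.join.
    return '/'.join(localpath.split(os.sep))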
Example #5
def does_storage_object_exist(object_name):
    """Checks if an object exists on Google Storage.

  Returns True if the object exists, else False.
  """
    gsutil = slave_utils.GSUtilSetup()
    command = [gsutil]
    command.extend(['ls', object_name])
    print('Running command: %s' % command)
    try:
        shell_utils.run(command)
        return True
    except shell_utils.CommandFailedException:
        return False
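A usage sketch (the GS path is hypothetical), guarding work behind an existence check:

if does_storage_object_exist('gs://example-bucket/results.json'):
    print('results.json already uploaded; skipping')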
Example #6
def download_dir_contents(remote_src_dir, local_dest_dir, multi=True):
    """Download contents of a Google Storage directory to local disk.

  params:
    remote_src_dir: GS URL (gs://BUCKETNAME/PATH)
    local_dest_dir: directory on local disk to write the contents into
    multi: boolean; whether to perform the copy in multithreaded mode.

  The copy operates as a "merge with overwrite": any files in src_dir will be
  "overlaid" on top of the existing content in dest_dir.  Existing files with
  the same names will be overwritten.
  """
    gsutil = slave_utils.GSUtilSetup()
    command = [gsutil]
    if multi:
        command.append('-m')
    command.extend(['cp', '-R', remote_src_dir, local_dest_dir])
    print('Running command: %s' % command)
    shell_utils.run(command)
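A usage sketch (paths hypothetical); pass multi=False to disable the '-m' multithreaded mode:

download_dir_contents('gs://example-bucket/perf-data', '/tmp/perf-data')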
Example #7
    def Compile(self, target):
        """ Compile the Skia executables. """
        # Add gsutil to PATH
        gsutil = slave_utils.GSUtilSetup()
        os.environ['PATH'] += os.pathsep + os.path.dirname(gsutil)

        # Run the chromeos_make script.
        make_cmd = os.path.join('platform_tools', 'chromeos', 'bin',
                                'chromeos_make')
        cmd = [
            make_cmd,
            '-d',
            self._step.args['board'],
            target,
            'BUILDTYPE=%s' % self._step.configuration,
        ]

        cmd.extend(self._step.default_make_flags)
        cmd.extend(self._step.make_flags)
        shell_utils.run(cmd)
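Note that the PATH append in Compile is unconditional, so repeated calls within one process can add duplicate entries; a guarded variant (as in AddGsutilToPath in Example #10) looks like:

gsutil_dir = os.path.dirname(slave_utils.GSUtilSetup())
if gsutil_dir not in os.environ['PATH'].split(os.pathsep):
    os.environ['PATH'] += os.pathsep + gsutil_dir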
Example #8
def upload_file(local_src_path,
                remote_dest_path,
                gs_acl='private',
                http_header_lines=None,
                only_if_modified=False):
    """Upload contents of a local file to Google Storage.

  params:
    local_src_path: path to file on local disk
    remote_dest_path: GS URL (gs://BUCKETNAME/PATH)
    gs_acl: which predefined ACL to apply to the file on Google Storage; see
        https://developers.google.com/storage/docs/accesscontrol#extension
    http_header_lines: a list of HTTP header strings to add, if any
    only_if_modified: if True, only upload the file if it would actually change
        the content on Google Storage (uploads the file if remote_dest_path
        does not exist, or if it exists but has different contents than
        local_src_path).  Note that this may take longer than just uploading the
        file without checking first, due to extra round-trips!

  TODO(epoger): Consider adding a do_compress parameter that would compress
  the file using gzip before upload, and add a "Content-Encoding:gzip" header
  so that HTTP downloads of the file would be unzipped automatically.
  See https://developers.google.com/storage/docs/gsutil/addlhelp/WorkingWithObjectMetadata#content-encoding
  """
    gsutil = slave_utils.GSUtilSetup()

    if only_if_modified:
        # Return early if we don't need to do the upload.
        command = [gsutil, 'ls', '-L', remote_dest_path]
        try:
            ls_output = shell_utils.run(command)
            matches = ETAG_REGEX.search(ls_output)
            if matches:
                # TODO(epoger): In my testing, this has always returned an MD5 hash
                # that is comparable to local_md5 below.  But from my reading of
                # https://developers.google.com/storage/docs/hashes-etags , this is
                # not something we can always rely on ("composite objects don't support
                # MD5 hashes"; I'm not sure if we ever encounter composite objects,
                # though).  It would be good for us to find a more reliable hash, but
                # I haven't found a way to get one out of gsutil yet.
                #
                # For now: if the remote_md5 is not found, or is computed in
                # such a way that is different from local_md5, then we will re-upload
                # the file even if it did not change.
                remote_md5 = matches.group(1)
                hasher = hashlib.md5()
                with open(local_src_path, 'rb') as filereader:
                    while True:
                        data = filereader.read(BUFSIZE)
                        if not data:
                            break
                        hasher.update(data)
                local_md5 = hasher.hexdigest()
                if local_md5 == remote_md5:
                    print(
                        'local_src_path %s and remote_dest_path %s have same hash %s'
                        % (local_src_path, remote_dest_path, local_md5))
                    return
        except shell_utils.CommandFailedException:
            # remote_dest_path probably does not exist. Go ahead and do the upload.
            pass

    command = [gsutil]
    if http_header_lines:
        for http_header_line in http_header_lines:
            command.extend(['-h', http_header_line])
    command.extend(['cp', '-a', gs_acl, local_src_path, remote_dest_path])
    print('Running command: %s' % command)
    shell_utils.run(command)
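upload_file depends on the module-level constants ETAG_REGEX and BUFSIZE, which this snippet does not include; a plausible sketch, assuming gsutil's 'ls -L' output contains an 'ETag: <md5hex>' line:

import hashlib  # used by upload_file's local MD5 computation
import re

ETAG_REGEX = re.compile(r'ETag:\s+(\S+)')  # assumed pattern for 'gsutil ls -L' output
BUFSIZE = 64 * 1024  # assumed chunk size for incremental hashing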
Example #9
def upload_directory_contents_if_changed(gs_base,
                                         gs_relative_dir,
                                         gs_acl,
                                         local_dir,
                                         force_upload=False,
                                         upload_chunks=False,
                                         files_to_upload=None):
    """Compares the TIMESTAMP_LAST_UPLOAD_COMPLETED and uploads if different.

  Args:
    gs_base: str - The Google Storage base. Eg: gs://rmistry.
    gs_relative_dir: str - Relative directory to the Google Storage base.
    gs_acl: str - ACL to use when uploading to Google Storage.
    local_dir: str - The local directory to upload.
    force_upload: bool - Whether upload should be done regardless of timestamps
        matching or not.
    upload_chunks: bool - Whether upload should be done in chunks or in a single
        command.
    files_to_upload: str seq - Specific files that should be uploaded, if not
        specified then all files in local_dir are uploaded. If upload_chunks is
        True then files will be uploaded in chunks else they will be uploaded
        one at a time. The Google Storage directory is not cleaned before upload
        if files_to_upload is specified.

  The goal of download_directory_contents_if_changed and
  upload_directory_contents_if_changed is to approximate directory-level rsync
  behavior for the Google Storage directories we care about.

  Returns True if contents were uploaded, else returns False.
  """
    if not force_upload and _are_timestamps_equal(gs_base, gs_relative_dir,
                                                  local_dir):
        print('\n\n=======Local directory is current=======\n\n')
        return False
    else:
        local_src = os.path.join(local_dir, '*')
        gs_dest = posixpath.join(gs_base, gs_relative_dir)
        timestamp_value = time.time()

        if not files_to_upload:
            print('\n\n=======Delete Storage directory before uploading=======\n\n')
            delete_storage_object(gs_dest)

        print('\n\n=======Writing new TIMESTAMP_LAST_UPLOAD_STARTED=======\n\n')
        write_timestamp_file(timestamp_file_name=TIMESTAMP_STARTED_FILENAME,
                             timestamp_value=timestamp_value,
                             gs_base=gs_base,
                             gs_relative_dir=gs_relative_dir,
                             local_dir=local_dir,
                             gs_acl=gs_acl)

        if upload_chunks:
            if files_to_upload:
                local_files = [
                    os.path.join(local_dir, local_file)
                    for local_file in files_to_upload
                ]
            else:
                local_files = [
                    os.path.join(local_dir, local_file)
                    for local_file in os.listdir(local_dir)
                ]
            for files_chunk in _get_chunks(local_files, FILES_CHUNK):
                gsutil = slave_utils.GSUtilSetup()
                command = [gsutil, 'cp'] + files_chunk + [gs_dest]
                try:
                    shell_utils.run(command)
                except shell_utils.CommandFailedException:
                    raise Exception(
                        'Could not upload chunk %s to Google Storage!' %
                        files_chunk)
        else:
            if files_to_upload:
                for file_to_upload in files_to_upload:
                    # Note: despite its name, slave_utils.GSUtilDownloadFile
                    # appears to wrap a plain 'gsutil cp', with the direction
                    # set by src/dst, so it is used here for the upload.
                    if slave_utils.GSUtilDownloadFile(src=os.path.join(
                            local_dir, file_to_upload),
                                                      dst=gs_dest) != 0:
                        raise Exception(
                            'Could not upload %s to Google Storage!' %
                            file_to_upload)
            else:
                if slave_utils.GSUtilDownloadFile(src=local_src,
                                                  dst=gs_dest) != 0:
                    raise Exception('Could not upload %s to Google Storage!' %
                                    local_src)

        print('\n\n=======Writing new TIMESTAMP_LAST_UPLOAD_COMPLETED=======\n\n')
        write_timestamp_file(timestamp_file_name=TIMESTAMP_COMPLETED_FILENAME,
                             timestamp_value=timestamp_value,
                             gs_base=gs_base,
                             gs_relative_dir=gs_relative_dir,
                             local_dir=local_dir,
                             gs_acl=gs_acl)
        return True
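_get_chunks and FILES_CHUNK are referenced above but not shown; a minimal sketch, assuming the helper yields consecutive fixed-size slices:

FILES_CHUNK = 50  # assumed number of files per gsutil invocation


def _get_chunks(items, chunk_size):
    """Yield consecutive slices of at most chunk_size items."""
    for i in range(0, len(items), chunk_size):
        yield items[i:i + chunk_size]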
Example #10
import os
import sys
import tempfile

sys.path.append(
    os.path.join(BUILDBOT_PATH, 'third_party', 'chromium_buildbot', 'scripts',
                 'common'))
sys.path.append(
    os.path.join(BUILDBOT_PATH, 'third_party', 'chromium_buildbot',
                 'site_config'))
sys.path.append(
    os.path.join(BUILDBOT_PATH, 'third_party', 'chromium_buildbot',
                 'third_party', 'twisted_10_2'))

from py.utils import shell_utils
from slave import slave_utils
import old_gs_utils as gs_utils
import unittest

GSUTIL_LOCATION = slave_utils.GSUtilSetup()

TEST_TIMESTAMP = '1354128965'
TEST_TIMESTAMP_2 = '1354128985'


class TestGSUtils(unittest.TestCase):
    def setUp(self):
        self._expected_commands = []
        self._test_temp_file = None
        self._test_gs_base = None
        self._test_destdir = None
        self._test_gs_acl = None
        self._local_tempdir = tempfile.mkdtemp()

        def _MockCommand(command):
            # Assumed body (the original snippet is truncated here): record
            # each command so the test can assert on it later.
            self._expected_commands.append(command)

    def AddGsutilToPath(self):
        # Add gsutil's directory to PATH if it is not already there.
        gsutil_dir = os.path.dirname(slave_utils.GSUtilSetup())
        if gsutil_dir not in os.environ['PATH'].split(os.pathsep):
            os.environ['PATH'] += os.pathsep + gsutil_dir