def FetchFromCloudStorage(bucket_name, source_path, destination_dir):
  """Fetches file(s) from the Google Cloud Storage.

  As a side-effect, this prints messages to stdout about what's happening.

  Args:
    bucket_name: Google Storage bucket name.
    source_path: Source file path.
    destination_dir: Destination file path.

  Returns:
    Local file path of downloaded file if it was downloaded. If the file
    does not exist in the given bucket, or if there was an error while
    downloading, None is returned.
  """
  target_file = os.path.join(destination_dir, os.path.basename(source_path))
  gs_url = 'gs://%s/%s' % (bucket_name, source_path)
  try:
    if cloud_storage.Exists(bucket_name, source_path):
      logging.info('Fetching file from %s...', gs_url)
      cloud_storage.Get(bucket_name, source_path, target_file)
      if os.path.exists(target_file):
        return target_file
    else:
      logging.info('File %s not found in cloud storage.', gs_url)
      return None
  except Exception as e:
    # Deliberately broad: any failure while downloading is reported to the
    # caller as None (per the docstring), and a partial download is removed.
    # Fix: logging.warn is a deprecated alias; use logging.warning.
    logging.warning('Exception while fetching from cloud storage: %s', e)
    if os.path.exists(target_file):
      os.remove(target_file)
  return None
def _CheckWprShaFiles(input_api, output_api):
  """Check whether the wpr sha files have matching URLs."""
  # Temporarily extend sys.path so catapult_base's cloud_storage module can
  # be imported; the original path is always restored afterwards.
  original_sys_path = sys.path
  try:
    presubmit_dir = input_api.PresubmitLocalPath()
    catapult_base_dir = os.path.abspath(os.path.join(
        presubmit_dir, '..', '..', 'third_party', 'catapult', 'catapult_base'))
    sys.path.insert(1, catapult_base_dir)
    from catapult_base import cloud_storage  # pylint: disable=import-error
  finally:
    sys.path = original_sys_path

  errors = []
  for changed_file in input_api.AffectedFiles(include_deletes=False):
    path = changed_file.AbsoluteLocalPath()
    if not path.endswith('wpr.sha1'):
      continue
    file_hash = cloud_storage.ReadHash(path)
    # The recording is fine if its hash exists in any known bucket.
    uploaded = any(
        cloud_storage.Exists(bucket, file_hash)
        for bucket in cloud_storage.BUCKET_ALIASES.itervalues())
    if uploaded:
      continue
    wpr_path = path[:-5]
    errors.append(output_api.PresubmitError(
        'The file matching %s is not in Cloud Storage yet.\n'
        'You can upload your new WPR archive file with the command:\n'
        'depot_tools/upload_to_google_storage.py --bucket '
        '<Your pageset\'s bucket> %s.\nFor more info: see '
        'http://www.chromium.org/developers/telemetry/'
        'record_a_page_set#TOC-Upload-the-recording-to-Cloud-Storage'
        % (path, wpr_path)))
  return errors
def testExistsReturnsFalse(self, subprocess_mock):
  """Exists() is False when gsutil reports that no URLs matched."""
  popen_mock = mock.Mock()
  subprocess_mock.Popen.return_value = popen_mock
  # Simulate gsutil's "not found" stderr output.
  popen_mock.communicate.return_value = (
      '', 'CommandException: One or more URLs matched no objects.\n')
  # NOTE(review): on a plain mock.Mock, assigning returncode_result just
  # creates an attribute; confirm this matches the stubbing convention the
  # code under test actually reads.
  popen_mock.returncode_result = 1
  self.assertFalse(
      cloud_storage.Exists('fake bucket', 'fake remote path'))
def BuildIsAvailable(bucket_name, remote_path):
  """Checks whether a build is currently archived at some place.

  Args:
    bucket_name: Google Storage bucket name.
    remote_path: Path of the build within the bucket.

  Returns:
    True if the build exists in cloud storage, False if it does not or if
    the existence check itself fails with a CloudStorageError.
  """
  # Fix: pass lazy %-style args to logging instead of pre-formatting with %,
  # so formatting only happens when the message is actually emitted.
  logging.info('Checking existence: gs://%s/%s', bucket_name, remote_path)
  try:
    exists = cloud_storage.Exists(bucket_name, remote_path)
    logging.info('Exists? %s', exists)
    return exists
  except cloud_storage.CloudStorageError:
    return False
def testExistsReturnsFalse(self):
  """Exists() is False when gsutil reports that no URLs matched."""
  override = system_stub.Override(cloud_storage, ['subprocess'])
  try:
    # Simulate gsutil's "not found" stderr output and failure exit code.
    override.subprocess.Popen.communicate_result = (
        '', 'CommandException: One or more URLs matched no objects.\n')
    override.subprocess.Popen.returncode_result = 1
    result = cloud_storage.Exists('fake bucket', 'fake remote path')
    self.assertFalse(result)
  finally:
    override.Restore()
def _ConditionallyUploadToCloudStorage(self, img_name, page, tab, screenshot):
  """Uploads the screenshot to cloud storage as the reference image
  for this test, unless it already exists. Returns True if the upload
  was actually performed."""
  bucket = self.options.refimg_cloud_storage_bucket
  if not bucket:
    raise Exception(
        '--refimg-cloud-storage-bucket argument is required')
  cloud_name = self._FormatReferenceImageName(img_name, page, tab)
  # An existing reference image wins; never overwrite it here.
  if cloud_storage.Exists(bucket, cloud_name):
    return False
  self._UploadBitmapToCloudStorage(bucket, cloud_name, screenshot)
  return True
def Upload(self, force=False):
  """Upload all pending files and then write the updated config to disk.

  Will attempt to copy files existing in the upload location to a backup
  location in the same bucket in cloud storage if |force| is True.

  Args:
    force: True if files should be uploaded to cloud storage even if a
        file already exists in the upload location.

  Raises:
    CloudStorageUploadConflictError: If |force| is False and the potential
        upload location of a file already exists.
    CloudStorageError: If copying an existing file to the backup location
        or uploading the new file fails.
  """
  if cloud_storage.Exists(self._cs_bucket, self._cs_remote_path):
    if not force:
      #pylint: disable=nonstandard-exception
      raise exceptions.CloudStorageUploadConflictError(
          self._cs_bucket, self._cs_remote_path)
      #pylint: enable=nonstandard-exception
    # Fix: the message previously read "... in self.cs_bucket %s", leaking
    # the attribute name into the log line.
    logging.debug(
        'A file already exists at upload path %s in bucket %s',
        self._cs_remote_path, self._cs_bucket)
    # Back up the existing remote file before clobbering it below.
    try:
      cloud_storage.Copy(self._cs_bucket, self._cs_bucket,
                         self._cs_remote_path, self._cs_backup_path)
      self._backed_up = True
    except cloud_storage.CloudStorageError:
      logging.error(
          'Failed to copy existing file %s in cloud storage bucket '
          '%s to backup location %s', self._cs_remote_path,
          self._cs_bucket, self._cs_backup_path)
      raise
  try:
    cloud_storage.Insert(
        self._cs_bucket, self._cs_remote_path, self._local_path)
  except cloud_storage.CloudStorageError:
    logging.error(
        'Failed to upload %s to %s in cloud_storage bucket %s',
        self._local_path, self._cs_remote_path, self._cs_bucket)
    raise
  self._updated = True
def _CheckWprShaFiles(input_api, output_api):
  """Check whether the wpr sha files have matching URLs.

  Args:
    input_api: Presubmit input API; supplies the affected files.
    output_api: Presubmit output API; used to build error results.

  Returns:
    A list of PresubmitError results, one per wpr.sha1 file whose hash is
    not present in any known cloud storage bucket.
  """
  from catapult_base import cloud_storage
  results = []
  for affected_file in input_api.AffectedFiles(include_deletes=False):
    filename = affected_file.AbsoluteLocalPath()
    if not filename.endswith('wpr.sha1'):
      continue
    expected_hash = cloud_storage.ReadHash(filename)
    is_wpr_file_uploaded = any(
        cloud_storage.Exists(bucket, expected_hash)
        for bucket in cloud_storage.BUCKET_ALIASES.itervalues())
    if not is_wpr_file_uploaded:
      wpr_filename = filename[:-5]
      # Fix: the old message ("There is no URLs matched for wpr sha
      # file...") was ungrammatical; use the same wording as the sibling
      # presubmit check for consistency.
      results.append(output_api.PresubmitError(
          'The file matching %s is not in Cloud Storage yet.\n'
          'You can upload your new WPR archive file with the command:\n'
          'depot_tools/upload_to_google_storage.py --bucket '
          '<Your pageset\'s bucket> %s.\nFor more info: see '
          'http://www.chromium.org/developers/telemetry/'
          'record_a_page_set#TOC-Upload-the-recording-to-Cloud-Storage'
          % (filename, wpr_filename)))
  return results
def DependencyExistsInCloudStorage(self):
  """Returns True if this dependency's file exists in cloud storage."""
  bucket = self._cs_bucket
  remote_path = self._cs_remote_path
  return cloud_storage.Exists(bucket, remote_path)