def UploadToCloudStorage(self, bucket, target_path):
  """Uploads video file to cloud storage.

  Args:
    bucket: Name of the cloud storage bucket to upload into.
    target_path: Path indicating where to store the file in cloud storage.
  """
  # The local file to upload is whatever self._video_file_path points at.
  cloud_storage.Insert(bucket, target_path, self._video_file_path)
def _UploadBitmapToCloudStorage(self, bucket, name, bitmap, public=False):
  """Writes |bitmap| to a temporary PNG file and uploads it to cloud storage.

  Args:
    bucket: Name of the cloud storage bucket to upload into.
    name: Remote object name to store the PNG under.
    bitmap: Object exposing WritePngFile(path).
    public: If True, the uploaded object is made publicly readable.
  """
  import os
  # This sequence of steps works on all platforms to write a temporary
  # PNG to disk, following the pattern in bitmap_unittest.py. The key to
  # avoiding PermissionErrors seems to be to not actually try to write to
  # the temporary file object, but to re-open its name for all operations.
  # delete=False makes the "use only the name" intent explicit instead of
  # relying on the unreferenced NamedTemporaryFile being GC'd immediately.
  temp_file = tempfile.NamedTemporaryFile(delete=False).name
  try:
    bitmap.WritePngFile(temp_file)
    cloud_storage.Insert(bucket, name, temp_file, publicly_readable=public)
  finally:
    # BUG FIX: the file re-created by WritePngFile was previously never
    # removed, leaking one temp file per upload.
    if os.path.exists(temp_file):
      os.remove(temp_file)
def _SyncFilesToCloud(input_api, output_api): """Searches for .sha1 files and uploads them to Cloud Storage. It validates all the hashes and skips upload if not necessary. """ cloud_storage = LoadSupport(input_api) # Look in both buckets, in case the user uploaded the file manually. But this # script focuses on WPR archives, so it only uploads to the internal bucket. hashes_in_cloud_storage = cloud_storage.List(cloud_storage.INTERNAL_BUCKET) hashes_in_cloud_storage += cloud_storage.List(cloud_storage.PUBLIC_BUCKET) results = [] for affected_file in input_api.AffectedFiles(include_deletes=False): hash_path = affected_file.AbsoluteLocalPath() file_path, extension = os.path.splitext(hash_path) if extension != '.sha1': continue with open(hash_path, 'rb') as f: file_hash = f.read(1024).rstrip() if file_hash in hashes_in_cloud_storage: results.append( output_api.PresubmitNotifyResult( 'File already in Cloud Storage, skipping upload: %s' % hash_path)) continue if not re.match('^([A-Za-z0-9]{40})$', file_hash): results.append( output_api.PresubmitError( 'Hash file does not contain a valid SHA-1 hash: %s' % hash_path)) continue if not os.path.exists(file_path): results.append( output_api.PresubmitError( 'Hash file exists, but file not found: %s' % hash_path)) continue if cloud_storage.GetHash(file_path) != file_hash: results.append( output_api.PresubmitError( 'Hash file does not match file\'s actual hash: %s' % hash_path)) continue try: cloud_storage.Insert(cloud_storage.INTERNAL_BUCKET, file_hash, file_path) results.append( output_api.PresubmitNotifyResult( 'Uploaded file to Cloud Storage: %s' % hash_path)) except cloud_storage.CloudStorageError, e: results.append( output_api.PresubmitError( 'Unable to upload to Cloud Storage: %s\n\n%s' % (hash_path, e)))
def _SyncFilesToCloud(input_api, output_api):
  """Searches for .sha1 files and uploads them to Cloud Storage.

  It validates all the hashes and skips upload if not necessary.
  """
  # Because this script will be called from a magic PRESUBMIT demon,
  # avoid angering it; don't pollute its sys.path.
  saved_sys_path = sys.path
  try:
    sys.path = [os.path.join(os.pardir, os.pardir, 'telemetry')] + sys.path
    from telemetry.page import cloud_storage
  finally:
    sys.path = saved_sys_path

  known_hashes = cloud_storage.List(cloud_storage.DEFAULT_BUCKET)

  results = []
  for hash_path in input_api.AbsoluteLocalPaths():
    data_path, ext = os.path.splitext(hash_path)
    if ext != '.sha1':
      continue

    with open(hash_path, 'rb') as hash_file:
      expected_hash = hash_file.read(1024).rstrip()

    # Skip files already uploaded, then validate before uploading.
    if expected_hash in known_hashes:
      results.append(output_api.PresubmitNotifyResult(
          'File already in Cloud Storage, skipping upload: %s' % hash_path))
      continue
    if not re.match('^([A-Za-z0-9]{40})$', expected_hash):
      results.append(output_api.PresubmitError(
          'Hash file does not contain a valid SHA-1 hash: %s' % hash_path))
      continue
    if not os.path.exists(data_path):
      results.append(output_api.PresubmitError(
          'Hash file exists, but file not found: %s' % hash_path))
      continue
    if cloud_storage.GetHash(data_path) != expected_hash:
      results.append(output_api.PresubmitError(
          'Hash file does not match file\'s actual hash: %s' % hash_path))
      continue

    try:
      cloud_storage.Insert(cloud_storage.DEFAULT_BUCKET, expected_hash,
                           data_path)
    except cloud_storage.CloudStorageError:
      results.append(output_api.PresubmitError(
          'Unable to upload to Cloud Storage: %s' % hash_path))

  return results
def _SyncFilesToCloud(input_api, output_api): """Searches for .sha1 files and uploads them to Cloud Storage. It validates all the hashes and skips upload if not necessary. """ cloud_storage = LoadSupport(input_api) results = [] for hash_path, file_hash in _GetFilesNotInCloud(input_api): file_path, _ = os.path.splitext(hash_path) if not re.match('^([A-Za-z0-9]{40})$', file_hash): results.append( output_api.PresubmitError( 'Hash file does not contain a valid SHA-1 hash: %s' % hash_path)) continue if not os.path.exists(file_path): results.append( output_api.PresubmitError( 'Hash file exists, but file not found: %s' % hash_path)) continue if cloud_storage.CalculateHash(file_path) != file_hash: results.append( output_api.PresubmitError( 'Hash file does not match file\'s actual hash: %s' % hash_path)) continue try: bucket_input = raw_input( 'Uploading to Cloud Storage: %s\n' 'Is this file [P]ublic or Google-[i]nternal?' % file_path).lower() if 'public'.startswith(bucket_input): bucket = cloud_storage.PUBLIC_BUCKET elif ('internal'.startswith(bucket_input) or 'google-internal'.startswith(bucket_input)): bucket = cloud_storage.INTERNAL_BUCKET else: results.append( output_api.PresubmitError( 'Response was neither "public" nor "internal": %s' % bucket_input)) return results cloud_storage.Insert(bucket, file_hash, file_path) results.append( output_api.PresubmitNotifyResult( 'Uploaded file to Cloud Storage: %s' % file_path)) except cloud_storage.CloudStorageError, e: results.append( output_api.PresubmitError( 'Unable to upload to Cloud Storage: %s\n\n%s' % (file_path, e)))
def PrintSummary(self): super(HtmlPageMeasurementResults, self).PrintSummary() json_results = self._GetResultsJson() json_results.append(self._result_json) html = self._GetHtmlTemplate() html = html.replace('%json_results%', json.dumps(json_results)) html = html.replace('%json_units%', self._GetUnitJson()) html = html.replace('%plugins%', self._GetPlugins()) self._SaveResults(html) if self._upload_results: file_path = os.path.abspath(self._output_stream.name) file_name = 'html-results/results-%s' % datetime.datetime.now().strftime( '%Y-%m-%d_%H-%M-%S') cloud_storage.Insert(cloud_storage.PUBLIC_BUCKET, file_name, file_path) print print ('View online at ' 'http://storage.googleapis.com/chromium-telemetry/%s' % file_name) print print 'View result at file://%s' % os.path.abspath(self._output_stream.name)