def UploadArtifactsToCloud(self, bucket):
  """Uploads all artifacts of the test to cloud storage.

  Local artifact paths are changed to their respective cloud URLs.
  """
  for name, local_path in self.IterArtifacts():
    abs_artifact_path = os.path.abspath(
        os.path.join(self._output_dir, local_path))
    remote_path = str(uuid.uuid1())
    cloud_url = cloud_storage.Insert(bucket, remote_path, abs_artifact_path)
    self._artifacts[name] = cloud_url
    logging.warning('Uploading %s of page %s to %s\n',
                    name, self._story.name, cloud_url)
def _UploadScreenShotToCloudStorage(fh):
  """Uploads the given screenshot image to cloud storage.

  Returns the cloud storage url if the upload succeeds.
  """
  try:
    return cloud_storage.Insert(cloud_storage.TELEMETRY_OUTPUT,
                                _GenerateRemotePath(fh), fh.GetAbsPath())
  except cloud_storage.CloudStorageError as err:
    logging.error('Cloud storage error while trying to upload screenshot: %s',
                  repr(err))
    return '<Missing link>'
  finally:
    # Must clean up the screenshot file if it exists.
    if os.path.exists(fh.GetAbsPath()):
      os.remove(fh.GetAbsPath())
def _UploadMinidumpToCloudStorage(self, minidump_path):
  """Uploads minidump_path to cloud storage and returns the cloud storage url."""
  remote_path = ('minidump-%s-%i.dmp' %
                 (datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'),
                  random.randint(0, 1000000)))
  try:
    return cloud_storage.Insert(cloud_storage.TELEMETRY_OUTPUT,
                                remote_path, minidump_path)
  except cloud_storage.CloudStorageError as err:
    logging.error('Cloud storage error while trying to upload dump: %s',
                  repr(err))
    return '<Missing link>'
def Format(self, page_test_results):
  histograms = page_test_results.AsHistogramDicts()
  vulcanize_histograms_viewer.VulcanizeAndRenderHistogramsViewer(
      histograms, self._output_stream, self._reset_results)
  if self._upload_bucket:
    file_path = os.path.abspath(self._output_stream.name)
    remote_path = ('html-results/results-%s' %
                   datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'))
    try:
      url = cloud_storage.Insert(self._upload_bucket, remote_path, file_path)
      print 'View HTML results online at %s' % url
    except cloud_storage.PermissionError as e:
      logging.error('Cannot upload profiling files to cloud storage due to '
                    'permission error: %s', e.message)
def UploadArtifactsToCloud(results):
  """Uploads all artifacts of the test to cloud storage.

  Sets the 'url' attribute of each artifact to its cloud URL.
  """
  for run in results.IterStoryRuns():
    for artifact in run.IterArtifacts():
      if artifact.url is None:
        remote_name = str(uuid.uuid1())
        cloud_url = cloud_storage.Insert(results.upload_bucket, remote_name,
                                         artifact.local_path)
        logging.info('Uploading %s of page %s to %s\n',
                     artifact.name, run.story.name, cloud_url)
        artifact.SetUrl(cloud_url)
def _UploadSerialLogToCloudStorage(self):
  """Uploads the BattOr serial log to cloud storage."""
  if not self._serial_log_file or not cloud_storage.IsNetworkIOEnabled():
    return
  remote_path = ('battor-serial-log-%s-%d.txt' % (
      datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'),
      random.randint(1, 100000)))
  try:
    cloud_url = cloud_storage.Insert(
        self._serial_log_bucket, remote_path, self._serial_log_file.name)
    sys.stderr.write('View BattOr serial log at %s\n' % cloud_url)
  except cloud_storage.PermissionError as e:
    logging.error('Cannot upload BattOr serial log file to cloud storage '
                  'due to permission error: %s', e.message)
def Format(self, page_test_results):
  results_renderer.RenderHTMLView(page_test_results.value_set,
                                  self._output_stream, self._reset_results)
  file_path = os.path.abspath(self._output_stream.name)
  if self._upload_results:
    remote_path = ('html-results/results-%s' %
                   datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'))
    try:
      cloud_storage.Insert(
          cloud_storage.PUBLIC_BUCKET, remote_path, file_path)
      print 'View online at',
      print 'http://storage.googleapis.com/chromium-telemetry/' + remote_path
    except cloud_storage.PermissionError as e:
      logging.error('Cannot upload profiling files to cloud storage due '
                    'to permission error: %s', e.message)
  print 'View result at file://' + file_path
def UploadArtifactsToCloud(self):
  bucket = self.telemetry_info.upload_bucket
  for test_name, artifacts in self._artifact_results.IterTestAndArtifacts():
    for artifact_type in artifacts:
      total_num_artifacts = len(artifacts[artifact_type])
      for i, artifact_path in enumerate(artifacts[artifact_type]):
        abs_artifact_path = os.path.abspath(os.path.join(
            self._artifact_results.artifact_dir, '..', artifact_path))
        remote_path = str(uuid.uuid1())
        cloud_url = cloud_storage.Insert(
            bucket, remote_path, abs_artifact_path)
        sys.stderr.write(
            'Uploading %s of page %s to %s (%d out of %d)\n' %
            (artifact_type, test_name, cloud_url, i + 1,
             total_num_artifacts))
def UploadToCloud(self):
  if self._temp_file is None:
    raise ValueError('Tried to upload nonexistent trace to Cloud Storage.')
  try:
    if self._serialized_file_handle:
      fh = self._serialized_file_handle
    else:
      fh = self._temp_file
    # Record the resulting URL so it can be reported and returned below.
    self._cloud_url = cloud_storage.Insert(
        self._upload_bucket, self._remote_path, fh.GetAbsPath())
    sys.stderr.write(
        'View generated trace files online at %s for story %s\n' %
        (self._cloud_url, self.page.name if self.page else 'unknown'))
    return self._cloud_url
  except cloud_storage.PermissionError as e:
    logging.error('Cannot upload trace files to cloud storage due to '
                  'permission error: %s', e.message)
def Format(self, page_test_results):
  if page_test_results.value_set:
    html2_formatter = html2_output_formatter.Html2OutputFormatter(
        self._output_stream, self._reset_results, self._upload_results)
    html2_formatter.Format(page_test_results)
    return

  chart_json_dict = chart_json_output_formatter.ResultsAsChartDict(
      self._metadata,
      page_test_results.all_page_specific_values,
      page_test_results.all_summary_values)
  self._TranslateChartJson(chart_json_dict)
  self._PrintPerfResult('telemetry_page_measurement_results', 'num_failed',
                        [len(page_test_results.failures)], 'count',
                        'unimportant')

  self._combined_results = self._ReadExistingResults(self._output_stream)
  self._combined_results.append(self._result)

  html = self._GetHtmlTemplate()
  html = html.replace('%json_results%', json.dumps(self.GetCombinedResults()))
  html = html.replace('%json_units%', self._GetUnitJson())
  html = html.replace('%plugins%', self._GetPlugins())
  self._SaveResults(html)

  if self._upload_results:
    file_path = os.path.abspath(self._output_stream.name)
    file_name = 'html-results/results-%s' % datetime.datetime.now().strftime(
        '%Y-%m-%d_%H-%M-%S')
    try:
      cloud_storage.Insert(cloud_storage.PUBLIC_BUCKET, file_name, file_path)
      print
      print ('View online at '
             'http://storage.googleapis.com/chromium-telemetry/%s' %
             file_name)
    except cloud_storage.PermissionError as e:
      logging.error('Cannot upload profiling files to cloud storage due to '
                    'permission error: %s', e.message)
  print
  print 'View result at file://%s' % os.path.abspath(self._output_stream.name)
def Upload(self, force=False):
  """Uploads all pending files and then writes the updated config to disk.

  Will attempt to copy files existing in the upload location to a backup
  location in the same bucket in cloud storage if |force| is True.

  Args:
    force: True if files should be uploaded to cloud storage even if a
        file already exists in the upload location.

  Raises:
    CloudStorageUploadConflictError: If |force| is False and the potential
        upload location of a file already exists.
    CloudStorageError: If copying an existing file to the backup location
        or uploading the new file fails.
  """
  if cloud_storage.Exists(self._cs_bucket, self._cs_remote_path):
    if not force:
      #pylint: disable=nonstandard-exception
      raise exceptions.CloudStorageUploadConflictError(self._cs_bucket,
                                                       self._cs_remote_path)
      #pylint: enable=nonstandard-exception
    logging.debug('A file already exists at upload path %s in bucket %s',
                  self._cs_remote_path, self._cs_bucket)
    try:
      cloud_storage.Copy(self._cs_bucket, self._cs_bucket,
                         self._cs_remote_path, self._cs_backup_path)
      self._backed_up = True
    except cloud_storage.CloudStorageError:
      logging.error('Failed to copy existing file %s in cloud storage bucket '
                    '%s to backup location %s', self._cs_remote_path,
                    self._cs_bucket, self._cs_backup_path)
      raise
  try:
    cloud_storage.Insert(self._cs_bucket, self._cs_remote_path,
                         self._local_path)
  except cloud_storage.CloudStorageError:
    logging.error('Failed to upload %s to %s in cloud_storage bucket %s',
                  self._local_path, self._cs_remote_path, self._cs_bucket)
    raise
  self._updated = True
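# A minimal usage sketch for Upload() above, assuming a hypothetical
# `dep_info` object whose _cs_bucket, _cs_remote_path, _cs_backup_path and
# _local_path fields are already populated. A first attempt surfaces
# conflicts; retrying with force=True backs up the existing remote file
# before overwriting it.
try:
  dep_info.Upload(force=False)
except exceptions.CloudStorageUploadConflictError:
  dep_info.Upload(force=True)  # Keeps a backup copy, then overwrites.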
def _SerializeAndUploadHtmlTrace(run, label, bucket):
  html_trace = run.GetArtifact(HTML_TRACE_NAME)
  if html_trace is None:
    trace_files = [art.local_path for art in run.IterArtifacts('trace')]
    with run.CaptureArtifact(HTML_TRACE_NAME) as html_path:
      trace_data.SerializeAsHtml(trace_files, html_path)
    html_trace = run.GetArtifact(HTML_TRACE_NAME)

  if bucket is not None and html_trace.url is None:
    remote_name = _TraceCanonicalName(run, label)
    cloud_url = cloud_storage.Insert(bucket, remote_name,
                                     html_trace.local_path)
    sys.stderr.write(
        'View generated trace files online at %s for story %s\n' %
        (cloud_url, run.story.name))
    html_trace.SetUrl(cloud_url)

  return html_trace
def UploadAndSwitchDataFile(data_file_name, data_file_path, version):
  """Uploads the data file to the cloud and updates the config to use it."""
  filename = os.path.basename(data_file_path)
  remote_path = posixpath.join(DATA_CS_FOLDER, data_file_name, version,
                               filename)
  if not cloud_storage.Exists(DATA_BUCKET, remote_path):
    cloud_storage.Insert(DATA_BUCKET, remote_path, data_file_path,
                         publicly_readable=False)

  with open(CONFIG_PATH) as f:
    config = json.load(f)
  config[data_file_name]['remote_path'] = remote_path
  config[data_file_name]['hash'] = cloud_storage.CalculateHash(data_file_path)
  with open(CONFIG_PATH, 'w') as f:
    json.dump(config, f, indent=4, separators=(',', ': '))
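# Hypothetical invocation of UploadAndSwitchDataFile() above. The version
# string becomes a path component under DATA_CS_FOLDER/<data_file_name>/,
# so bumping it uploads a new object instead of overwriting the old one,
# and the rewritten config then pins consumers to that version via
# 'remote_path' and 'hash'.
UploadAndSwitchDataFile('my_data_file', '/tmp/my_data_file.json', version='2')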
def UploadHostBinary(binary_name, binary_path, version):
  """Uploads the binary to the cloud.

  This function uploads the host binary (e.g. trace_processor_shell) to the
  cloud and updates the 'latest' file for the host platform to point to the
  newly uploaded file. Note that it doesn't modify the config and so doesn't
  affect which binaries will be downloaded by FetchHostBinary.
  """
  filename = os.path.basename(binary_path)
  platform = _GetHostPlatform()
  remote_path = posixpath.join(BINARY_CS_FOLDER, binary_name, platform,
                               version, filename)
  if not cloud_storage.Exists(BINARY_BUCKET, remote_path):
    cloud_storage.Insert(BINARY_BUCKET, remote_path, binary_path,
                         publicly_readable=True)
  _SetLatestPathForBinary(binary_name, platform, remote_path)
def UploadProfilingFilesToCloud(self, bucket):
  for page, file_handle_list in self._pages_to_profiling_files.iteritems():
    for file_handle in file_handle_list:
      remote_path = ('profiler-file-id_%s-%s-%d%s' % (
          file_handle.id,
          datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'),
          random.randint(1, 100000),
          file_handle.extension))
      try:
        cloud_url = cloud_storage.Insert(bucket, remote_path,
                                         file_handle.GetAbsPath())
        sys.stderr.write(
            'View generated profiler files online at %s for page %s\n' %
            (cloud_url, page.name))
        self._pages_to_profiling_files_cloud_url[page].append(cloud_url)
      except cloud_storage.PermissionError as e:
        logging.error('Cannot upload profiling files to cloud storage due to '
                      'permission error: %s', e.message)
def AddRecordedStories(self, stories, upload_to_cloud_storage=False,
                       target_platform=_DEFAULT_PLATFORM):
  if not stories:
    os.remove(self.temp_target_wpr_file_path)
    return

  target_wpr_file_hash = cloud_storage.CalculateHash(
      self.temp_target_wpr_file_path)
  (target_wpr_file, target_wpr_file_path) = self._NextWprFileName(
      target_wpr_file_hash)
  for story in stories:
    # Check to see if the platform has been manually overridden.
    if not story.platform_specific:
      current_target_platform = _DEFAULT_PLATFORM
    else:
      current_target_platform = target_platform
    self._SetWprFileForStory(story.name, target_wpr_file,
                             current_target_platform)
  shutil.move(self.temp_target_wpr_file_path, target_wpr_file_path)

  # Update the hash file.
  with open(target_wpr_file_path + '.sha1', 'wb') as f:
    f.write(target_wpr_file_hash)
    f.flush()

  self._WriteToFile()

  # Upload to cloud storage.
  if upload_to_cloud_storage:
    if not self._bucket:
      logging.warning('StorySet must have bucket specified to upload '
                      'stories to cloud storage.')
      return
    try:
      cloud_storage.Insert(self._bucket, target_wpr_file_hash,
                           target_wpr_file_path)
    except cloud_storage.CloudStorageError as e:
      logging.warning('Failed to upload wpr file %s to cloud storage. '
                      'Error: %s', target_wpr_file_path, e)
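# Companion sketch: the '<wpr file>.sha1' written above is the hash that
# cloud_storage.GetIfChanged() consults on the consuming side, so archives
# are only downloaded when the recorded hash differs from the local copy.
# The file and bucket names here are hypothetical.
cloud_storage.GetIfChanged('data/story_set_000.wprgo', 'chromium-telemetry')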
def Format(self, page_test_results):
  histograms = page_test_results.histograms
  if not histograms:
    histograms = self._ConvertChartJson(page_test_results)
  if isinstance(histograms, histogram_set.HistogramSet):
    histograms = histograms.AsDicts()

  results_renderer.RenderHTMLView(histograms, self._output_stream,
                                  self._reset_results)
  file_path = os.path.abspath(self._output_stream.name)
  if self._upload_bucket:
    remote_path = ('html-results/results-%s' %
                   datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'))
    try:
      url = cloud_storage.Insert(self._upload_bucket, remote_path, file_path)
      print 'View HTML results online at %s' % url
    except cloud_storage.PermissionError as e:
      logging.error('Cannot upload profiling files to cloud storage due to '
                    'permission error: %s', e.message)
def UploadArtifacts(test_result, upload_bucket, run_identifier):
  """Uploads all artifacts to cloud.

  For a test run, uploads all its artifacts to cloud and sets remoteUrl
  fields in intermediate_results.
  """
  artifacts = test_result.get('outputArtifacts', {})
  for name, artifact in artifacts.iteritems():
    if 'remoteUrl' in artifact:
      continue
    # TODO(crbug.com/981349): Think of a more general way to
    # specify which artifacts deserve uploading.
    if name in [DIAGNOSTICS_NAME, MEASUREMENTS_NAME]:
      continue
    remote_name = '/'.join([run_identifier, test_result['testPath'], name])
    urlsafe_remote_name = re.sub(r'[^A-Za-z0-9/.-]+', '_', remote_name)
    artifact['remoteUrl'] = cloud_storage.Insert(
        upload_bucket, urlsafe_remote_name, artifact['filePath'])
    logging.info('Uploaded %s of %s to %s',
                 name, test_result['testPath'], artifact['remoteUrl'])
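# Illustration of the remote-name sanitization in UploadArtifacts() above:
# any run of characters outside [A-Za-z0-9/.-] collapses into a single
# underscore, so story names with spaces or parentheses still yield valid
# object paths. The sample name is hypothetical.
import re

remote_name = 'run_1/benchmark/story (cold) @2x/trace.html'
urlsafe_remote_name = re.sub(r'[^A-Za-z0-9/.-]+', '_', remote_name)
assert urlsafe_remote_name == 'run_1/benchmark/story_cold_2x/trace.html'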
def UploadArtifacts(test_result, upload_bucket, run_identifier):
  """Uploads all artifacts to cloud.

  For a test run, uploads all its artifacts to cloud and sets remoteUrl
  fields in intermediate_results.
  """
  artifacts = test_result.get('outputArtifacts', {})
  for name, artifact in artifacts.iteritems():
    if 'remoteUrl' in artifact:
      continue
    # TODO(crbug.com/981349): Remove check for HISTOGRAM_DICTS_FILE
    # after Telemetry does not save histograms as an artifact anymore.
    # Another TODO(crbug.com/981349): Think of a more general way to
    # specify which artifacts deserve uploading.
    if name in [compute_metrics.HISTOGRAM_DICTS_FILE, MEASUREMENTS_NAME]:
      continue
    remote_name = '/'.join([run_identifier, test_result['testPath'], name])
    artifact['remoteUrl'] = cloud_storage.Insert(
        upload_bucket, remote_name, artifact['filePath'])
    logging.info('Uploaded %s of %s to %s',
                 name, test_result['testPath'], artifact['remoteUrl'])
def UploadToCloud(self, bucket):
  if self._temp_file is None:
    raise ValueError('Tried to upload nonexistent trace to Cloud Storage.')
  try:
    if self._serialized_file_handle:
      fh = self._serialized_file_handle
    else:
      fh = self._temp_file
    remote_path = ('trace-file-id_%s-%s-%d%s' % (
        fh.id,
        datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'),
        random.randint(1, 100000),
        fh.extension))
    self._cloud_url = cloud_storage.Insert(bucket, remote_path,
                                           fh.GetAbsPath())
    sys.stderr.write(
        'View generated trace files online at %s for page %s\n' %
        (self._cloud_url, self.page.url if self.page else 'unknown'))
    return self._cloud_url
  except cloud_storage.PermissionError as e:
    logging.error('Cannot upload trace files to cloud storage due to '
                  'permission error: %s', e.message)
def testDisableCloudStorageIo(self, unused_lock_mock):
  os.environ['DISABLE_CLOUD_STORAGE_IO'] = '1'
  dir_path = 'real_dir_path'
  self.fs.CreateDirectory(dir_path)
  file_path = os.path.join(dir_path, 'file1')
  file_path_sha = file_path + '.sha1'
  self.CreateFiles([file_path, file_path_sha])
  with open(file_path_sha, 'w') as f:
    f.write('hash1234')
  with self.assertRaises(cloud_storage.CloudStorageIODisabled):
    cloud_storage.Copy('bucket1', 'bucket2', 'remote_path1', 'remote_path2')
  with self.assertRaises(cloud_storage.CloudStorageIODisabled):
    cloud_storage.Get('bucket', 'foo', file_path)
  with self.assertRaises(cloud_storage.CloudStorageIODisabled):
    cloud_storage.GetIfChanged(file_path, 'foo')
  with self.assertRaises(cloud_storage.CloudStorageIODisabled):
    cloud_storage.GetIfHashChanged('bar', file_path, 'bucket', 'hash1234')
  with self.assertRaises(cloud_storage.CloudStorageIODisabled):
    cloud_storage.Insert('bucket', 'foo', file_path)
  with self.assertRaises(cloud_storage.CloudStorageIODisabled):
    cloud_storage.GetFilesInDirectoryIfChanged(dir_path, 'bucket')
def Format(self, page_test_results):
  histograms = page_test_results.value_set
  if not histograms:
    histograms = self.ConvertChartJson_(page_test_results)

  results_renderer.RenderHTMLView(histograms, self._output_stream,
                                  self._reset_results)
  file_path = os.path.abspath(self._output_stream.name)
  if self._upload_results and self._upload_bucket:
    remote_path = ('html-results/results-%s' %
                   datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'))
    try:
      cloud_storage.Insert(self._upload_bucket, remote_path, file_path)
      print 'View online at',
      print 'http://storage.googleapis.com/{bucket}/{path}'.format(
          bucket=self._upload_bucket, path=remote_path)
    except cloud_storage.PermissionError as e:
      logging.error('Cannot upload profiling files to cloud storage due '
                    'to permission error: %s', e.message)
  print 'View result at file://' + file_path
def upload_build(zip_file, context):
  """Uploads a single build in zip_file to the repackage_gs_url in context."""
  cloud_storage.Insert(context.repackage_gs_bucket,
                       context.repackage_remote_path, zip_file)
def _UpdateExtensionsInCloud(local_extensions_dir, extensions_csv, remote_dir):
  """Updates the set of extensions in Cloud Storage from a CSV of extension ids.

  From a well-formatted CSV file containing some set of extensions
  (extensions_csv), download them, compress them into an archive, and update
  the remote extension archive under REMOTE_DIR in the
  CHROME-PARTNER-TELEMETRY bucket. This script expects the 2nd column of the
  CSV file to contain extension ids.

  Args:
    local_extensions_dir: directory to download CRX files into.
    extensions_csv: CSV to pull extension ids from.
    remote_dir: remote directory to put the extension archive in cloud
        storage.

  Raises:
    Exception if a CRX download fails.
  """
  # Download CRXs to temp files and compress them into an archive.
  zip_path = os.path.join(local_extensions_dir, ZIP_NAME)
  extension_zip = zipfile.ZipFile(zip_path, 'w')
  update_csv = False
  extensions_info = []
  with open(extensions_csv, 'rb') as csv_file:
    reader = csv.reader(csv_file)
    # Stores comments (in case the CSV needs to be updated/rewritten)
    # and skips the header line.
    comments = []
    line = ','.join(reader.next())
    while line.startswith('#'):
      comments.append(line)
      line = ','.join(reader.next())

    # Extract info from the CSV.
    for row in reader:
      extension_info = {
          'extension_name': row[0],
          'id': row[1],
          'hash': row[2],
          'version': row[3]
      }

      print 'Fetching extension %s...' % extension_info['id']
      crx_path = _DownloadCrxFromCws(extension_info['id'],
                                     local_extensions_dir)
      if crx_path is None:
        raise exceptions.Error('\tCould not fetch %s.\n\n'
                               'If this extension dl consistently fails, '
                               'remove this entry from %s.' %
                               (extension_info['id'], extensions_csv))
      (new_hash, new_version) = _CrxHashIfChanged(crx_path, extension_info)
      if new_hash is not None:
        update_csv = True
        extension_info['hash'] = new_hash
        extension_info['version'] = new_version
      extensions_info.append(extension_info)
      extension_zip.write(crx_path, arcname='%s.crx' % extension_info['id'])
  extension_zip.close()

  if update_csv:
    print 'Updating CSV...'
    _UpdateCsv(comments, extensions_csv, extensions_info)

  print 'Uploading extensions to cloud...'
  remote_zip_path = os.path.join(remote_dir, ZIP_NAME)
  cloud_storage.Insert(cloud_storage.PARTNER_BUCKET, remote_zip_path, zip_path)
def PoolUploader(work_item):
  artifact, remote_name = work_item
  artifact['remoteUrl'] = cloud_storage.Insert(upload_bucket, remote_name,
                                               artifact['filePath'])
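# A hedged sketch of how PoolUploader() above might be driven; it assumes
# `upload_bucket` is a module-level name visible to the workers, and the
# work items below (artifact dict paired with its remote name) are
# hypothetical.
from multiprocessing.dummy import Pool  # Thread-backed pool; no pickling.

work_items = [
    ({'filePath': '/tmp/run_1/trace.html'}, 'run_1/story/trace.html'),
    ({'filePath': '/tmp/run_1/screenshot.png'}, 'run_1/story/screenshot.png'),
]
pool = Pool(4)
try:
  pool.map(PoolUploader, work_items)
finally:
  pool.close()
  pool.join()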