def Format(self, page_test_results):
  """Writes all collected values into the standalone HTML results file.

  Extends the accumulated values with this run's results, builds the
  vulcanized results2 template, injects the values as JSON, rewrites the
  output stream in place, and optionally uploads the file to cloud storage.
  """
  self._values.extend(page_test_results.value_set)
  vulcanizer = tracing_project.TracingProject().CreateVulcanizer()
  load_sequence = vulcanizer.CalcLoadSequenceForModuleNames(
      ['tracing.results2_template'])
  html = generate.GenerateStandaloneHTMLAsString(load_sequence)
  # Replace the empty JSON placeholder tag in the template with the
  # serialized values (compact separators keep the file small).
  html = html.replace(self._JSON_TAG % '', self._JSON_TAG % json.dumps(
      self._values, separators=(',', ':')))
  # Rewrite the existing output file from the start and truncate so no
  # bytes from a previous, longer result are left behind.
  self._output_stream.seek(0)
  self._output_stream.write(html)
  self._output_stream.truncate()
  file_path = os.path.abspath(self._output_stream.name)
  if self._upload_results:
    remote_path = ('html-results/results-%s' %
                   datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'))
    try:
      cloud_storage.Insert(
          cloud_storage.PUBLIC_BUCKET, remote_path, file_path)
      print 'View online at',
      print 'http://storage.googleapis.com/chromium-telemetry/' + remote_path
    except cloud_storage.PermissionError as e:
      logging.error('Cannot upload profiling files to cloud storage due ' +
                    'to permission error: ' + e.message)
  print 'View result at file://' + file_path
def AddRecordedStories(self, stories, upload_to_cloud_storage=False):
  """Associates a freshly recorded WPR archive with |stories|.

  Moves the temporary WPR file to its final location, points every story
  at it, writes the archive's .sha1 hash file, persists the updated
  archive info, and deletes WPR files that are no longer referenced.

  Args:
    stories: Iterable of stories recorded into the temporary WPR file. If
        empty, the temporary file is discarded and nothing else happens.
    upload_to_cloud_storage: If True, also upload the new archive to the
        story set's cloud storage bucket (requires self._bucket).
  """
  if not stories:
    os.remove(self.temp_target_wpr_file_path)
    return
  (target_wpr_file, target_wpr_file_path) = self._NextWprFileName()
  for story in stories:
    self._SetWprFileForStory(story.display_name, target_wpr_file)
  shutil.move(self.temp_target_wpr_file_path, target_wpr_file_path)

  # Update the hash file.
  target_wpr_file_hash = cloud_storage.CalculateHash(target_wpr_file_path)
  with open(target_wpr_file_path + '.sha1', 'wb') as f:
    f.write(target_wpr_file_hash)
    f.flush()

  self._WriteToFile()
  self._DeleteAbandonedWprFiles()

  # Upload to cloud storage
  if upload_to_cloud_storage:
    if not self._bucket:
      logging.warning('StorySet must have bucket specified to upload '
                      'stories to cloud storage.')
      return
    try:
      cloud_storage.Insert(self._bucket, target_wpr_file_hash,
                           target_wpr_file_path)
    except cloud_storage.CloudStorageError as e:
      # BUG FIX: the original applied '%' with a single argument to a
      # format string containing two '%s' placeholders, which raised
      # TypeError instead of logging. Pass both values via logging's
      # lazy-formatting arguments.
      logging.warning(
          'Failed to upload wpr file %s to cloud storage. Error:%s',
          target_wpr_file_path, e)
def UploadToCloudStorage(self, bucket, target_path):
  """Uploads video file to cloud storage.

  Args:
    bucket: The cloud storage bucket to upload the video file into.
    target_path: Path indicating where to store the file in cloud storage.
  """
  cloud_storage.Insert(bucket, target_path, self._video_file_obj.name)
def Format(self, page_test_results):
  """Renders the results as the legacy HTML results page.

  Converts the results to chart-json form, merges results/units/plugins
  into the HTML template, saves the page, and optionally uploads it to
  cloud storage.
  """
  chart_json_dict = chart_json_output_formatter.ResultsAsChartDict(
      self._metadata,
      page_test_results.all_page_specific_values,
      page_test_results.all_summary_values)
  self._TranslateChartJson(chart_json_dict)
  self._PrintPerfResult('telemetry_page_measurement_results', 'num_failed',
                        [len(page_test_results.failures)], 'count',
                        'unimportant')
  # Substitute the JSON payloads and plugin scripts into the template.
  html = self._GetHtmlTemplate()
  html = html.replace('%json_results%', json.dumps(self.GetCombinedResults()))
  html = html.replace('%json_units%', self._GetUnitJson())
  html = html.replace('%plugins%', self._GetPlugins())
  self._SaveResults(html)
  if self._upload_results:
    file_path = os.path.abspath(self._output_stream.name)
    file_name = 'html-results/results-%s' % datetime.datetime.now().strftime(
        '%Y-%m-%d_%H-%M-%S')
    try:
      cloud_storage.Insert(cloud_storage.PUBLIC_BUCKET, file_name, file_path)
      print
      print ('View online at '
             'http://storage.googleapis.com/chromium-telemetry/%s'
             % file_name)
    except cloud_storage.PermissionError as e:
      logging.error('Cannot upload profiling files to cloud storage due to '
                    ' permission error: %s' % e.message)
  print
  print 'View result at file://%s' % os.path.abspath(
      self._output_stream.name)
def _UploadBitmapToCloudStorage(self, bucket, name, bitmap, public=False):
  """Writes |bitmap| to a temporary PNG file and uploads it to cloud storage.

  Args:
    bucket: Cloud storage bucket to upload into.
    name: Remote name for the uploaded file.
    bitmap: Bitmap to encode as a PNG.
    public: If True, make the uploaded file publicly readable.
  """
  # Local import keeps this fix self-contained even if the module does
  # not already import os at the top of the file.
  import os
  # This sequence of steps works on all platforms to write a temporary
  # PNG to disk, following the pattern in bitmap_unittest.py. The key to
  # avoiding PermissionErrors seems to be to not actually try to write to
  # the temporary file object, but to re-open its name for all operations.
  temp_file = tempfile.NamedTemporaryFile(suffix='.png').name
  try:
    image_util.WritePngFile(bitmap, temp_file)
    cloud_storage.Insert(bucket, name, temp_file, publicly_readable=public)
  finally:
    # The NamedTemporaryFile object is collected (and its file removed) as
    # soon as .name is taken above; WritePngFile then recreates the file.
    # The original leaked that file — delete it once the upload is done.
    if os.path.exists(temp_file):
      os.remove(temp_file)
def UploadLogsToCloudStorage(self):
  """Uploads the log files produced by this browser instance to cloud storage.

  Callers must check supports_uploading_logs before invoking this method.
  """
  assert self.supports_uploading_logs
  options = self.browser_options
  cloud_url = cloud_storage.Insert(
      bucket=options.logs_cloud_bucket,
      remote_path=options.logs_cloud_remote_path,
      local_path=self.log_file_path)
  sys.stderr.write('Uploading browser log to %s\n' % cloud_url)
def testInsertCreatesValidCloudUrl(self):
  """Insert() should return the console URL for the uploaded object."""
  saved_run_command = cloud_storage._RunCommand
  try:
    # Stub out the gsutil invocation so no real upload happens.
    cloud_storage._RunCommand = self._FakeRunCommand
    cloud_url = cloud_storage.Insert(cloud_storage.PUBLIC_BUCKET,
                                     'test-remote-path.html',
                                     'test-local-path.html')
    self.assertEqual('https://console.developers.google.com/m/cloudstorage'
                     '/b/chromium-telemetry/o/test-remote-path.html',
                     cloud_url)
  finally:
    cloud_storage._RunCommand = saved_run_command
def _UploadMinidumpToCloudStorage(self, minidump_path):
  """Uploads minidump_path to cloud storage and returns the cloud storage url.

  Returns the literal string '<Missing link>' if the upload fails.
  """
  timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
  suffix = random.randint(0, 1000000)
  remote_path = 'minidump-%s-%i.dmp' % (timestamp, suffix)
  try:
    return cloud_storage.Insert(
        cloud_storage.TELEMETRY_OUTPUT, remote_path, minidump_path)
  except cloud_storage.CloudStorageError as err:
    logging.error('Cloud storage error while trying to upload dump: %s' %
                  repr(err))
    return '<Missing link>'
def _UploadScreenShotToCloudStorage(fh):
  """Uploads the given screenshot image to cloud storage.

  Returns the cloud storage url if successful, '<Missing link>' otherwise.
  The local screenshot file is deleted afterwards in either case.
  """
  try:
    remote_path = _GenerateRemotePath(fh)
    return cloud_storage.Insert(cloud_storage.TELEMETRY_OUTPUT, remote_path,
                                fh.GetAbsPath())
  except cloud_storage.CloudStorageError as err:
    logging.error(
        'Cloud storage error while trying to upload screenshot: %s'
        % repr(err))
    return '<Missing link>'
  finally:
    # Must clean up screenshot file if exists.
    os.remove(fh.GetAbsPath())
def _UploadSerialLogToCloudStorage(self):
  """Uploads the BattOr serial log to cloud storage."""
  if not self._serial_log_file or not cloud_storage.IsNetworkIOEnabled():
    return
  # BUG FIX: the strftime format previously contained a stray '.txt'
  # ('%Y-%m-%d_%H-%M.txt'), producing remote names such as
  # 'battor-serial-log-2016-01-01_12-30.txt-12345.txt'. The extension
  # belongs only at the end of the template.
  remote_path = ('battor-serial-log-%s-%d.txt' % (
      datetime.datetime.now().strftime('%Y-%m-%d_%H-%M'),
      random.randint(1, 100000)))
  try:
    cloud_url = cloud_storage.Insert(
        self._serial_log_bucket, remote_path, self._serial_log_file.name)
    sys.stderr.write('View BattOr serial log at %s\n' % cloud_url)
  except cloud_storage.PermissionError as e:
    logging.error('Cannot upload BattOr serial log file to cloud storage due '
                  'to permission error: %s' % e.message)
def Upload(self, force=False):
  """Upload all pending files and then write the updated config to disk.

  Will attempt to copy files existing in the upload location to a backup
  location in the same bucket in cloud storage if |force| is True.

  Args:
    force: True if files should be uploaded to cloud storage even if a
        file already exists in the upload location.

  Raises:
    CloudStorageUploadConflictError: If |force| is False and the potential
        upload location of a file already exists.
    CloudStorageError: If copying an existing file to the backup location
        or uploading the new file fails.
  """
  if cloud_storage.Exists(self._cs_bucket, self._cs_remote_path):
    if not force:
      #pylint: disable=nonstandard-exception
      raise exceptions.CloudStorageUploadConflictError(
          self._cs_bucket, self._cs_remote_path)
      #pylint: enable=nonstandard-exception
    logging.debug(
        'A file already exists at upload path %s in self.cs_bucket'
        ' %s', self._cs_remote_path, self._cs_bucket)
    try:
      # Preserve the existing remote file by copying it aside before the
      # upload below overwrites it.
      cloud_storage.Copy(self._cs_bucket, self._cs_bucket,
                         self._cs_remote_path, self._cs_backup_path)
      self._backed_up = True
    except cloud_storage.CloudStorageError:
      logging.error(
          'Failed to copy existing file %s in cloud storage bucket '
          '%s to backup location %s', self._cs_remote_path,
          self._cs_bucket, self._cs_backup_path)
      raise
  try:
    cloud_storage.Insert(self._cs_bucket, self._cs_remote_path,
                         self._local_path)
  except cloud_storage.CloudStorageError:
    logging.error(
        'Failed to upload %s to %s in cloud_storage bucket %s',
        self._local_path, self._cs_remote_path, self._cs_bucket)
    raise
  self._updated = True
def UploadProfilingFilesToCloud(self, bucket):
  """Uploads all collected per-page profiling files to cloud storage.

  Each successful upload's cloud url is appended to
  self._pages_to_profiling_files_cloud_url under its page.

  Args:
    bucket: Cloud storage bucket to upload into.
  """
  for page, file_handle_list in self._pages_to_profiling_files.iteritems():
    for file_handle in file_handle_list:
      # BUG FIX: the template previously used '%s%-d' — a left-justify
      # flag on the random int — instead of the intended '-%d' separator
      # between the timestamp and the random suffix, as used by the other
      # remote-path templates (e.g. trace uploads).
      remote_path = ('profiler-file-id_%s-%s-%d%s' % (
          file_handle.id,
          datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'),
          random.randint(1, 100000),
          file_handle.extension))
      try:
        cloud_url = cloud_storage.Insert(
            bucket, remote_path, file_handle.GetAbsPath())
        sys.stderr.write(
            'View generated profiler files online at %s for page %s\n' %
            (cloud_url, page.display_name))
        self._pages_to_profiling_files_cloud_url[page].append(cloud_url)
      except cloud_storage.PermissionError as e:
        logging.error('Cannot upload profiling files to cloud storage due to '
                      ' permission error: %s' % e.message)
def Format(self, page_test_results):
  """Renders the collected values as an HTML view into the output stream.

  Optionally uploads the resulting file to cloud storage and prints where
  the results can be viewed.
  """
  results_renderer.RenderHTMLView(page_test_results.value_set,
                                  self._output_stream, self._reset_results)
  file_path = os.path.abspath(self._output_stream.name)
  if self._upload_results:
    remote_path = (
        'html-results/results-%s' %
        datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'))
    try:
      cloud_storage.Insert(cloud_storage.PUBLIC_BUCKET, remote_path, file_path)
      print 'View online at',
      print 'http://storage.googleapis.com/chromium-telemetry/' + remote_path
    except cloud_storage.PermissionError as e:
      logging.error(
          'Cannot upload profiling files to cloud storage due ' +
          'to permission error: ' + e.message)
  print 'View result at file://' + file_path
def UploadToCloud(self, bucket):
  """Uploads the trace to cloud storage and returns its cloud url.

  Returns None if the upload fails with a permission error.

  Raises:
    ValueError: If there is no trace file to upload.
  """
  if self._temp_file is None:
    raise ValueError(
        'Tried to upload nonexistent trace to Cloud Storage.')
  try:
    # Prefer the serialized file handle when one exists.
    fh = self._serialized_file_handle or self._temp_file
    timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    remote_path = ('trace-file-id_%s-%s-%d%s' % (
        fh.id, timestamp, random.randint(1, 100000), fh.extension))
    self._cloud_url = cloud_storage.Insert(bucket, remote_path,
                                           fh.GetAbsPath())
    page_name = self.page.url if self.page else 'unknown'
    sys.stderr.write(
        'View generated trace files online at %s for page %s\n' %
        (self._cloud_url, page_name))
    return self._cloud_url
  except cloud_storage.PermissionError as e:
    logging.error('Cannot upload trace files to cloud storage due to '
                  ' permission error: %s' % e.message)
def testDisableCloudStorageIo(self, unused_lock_mock):
  """Every cloud storage I/O entry point must raise once I/O is disabled."""
  os.environ['DISABLE_CLOUD_STORAGE_IO'] = '1'
  dir_path = 'real_dir_path'
  self.fs.CreateDirectory(dir_path)
  file_path = os.path.join(dir_path, 'file1')
  file_path_sha = file_path + '.sha1'
  self.CreateFiles([file_path, file_path_sha])
  with open(file_path_sha, 'w') as f:
    f.write('hash1234')
  # Table of operations; each one must raise CloudStorageIODisabled.
  operations = [
      lambda: cloud_storage.Copy('bucket1', 'bucket2', 'remote_path1',
                                 'remote_path2'),
      lambda: cloud_storage.Get('bucket', 'foo', file_path),
      lambda: cloud_storage.GetIfChanged(file_path, 'foo'),
      lambda: cloud_storage.GetIfHashChanged('bar', file_path, 'bucket',
                                             'hash1234'),
      lambda: cloud_storage.Insert('bucket', 'foo', file_path),
      lambda: cloud_storage.GetFilesInDirectoryIfChanged(dir_path, 'bucket'),
  ]
  for operation in operations:
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      operation()
def _UpdateExtensionsInCloud(local_extensions_dir, extensions_csv, remote_dir): """Updates set of extensions in Cloud Storage from a CSV of extension ids. From well-formatted CSV file containing some set of extensions (extensions_csv), download them, compress into archive, and update the remote extension archive under REMOTE_DIR in CHROME-PARTNER-TELEMETRY bucket. This script expects 2nd column of CSV file to contain extension ids. Args: local_extensions_dir: directory to download CRX files into. extension_csv: CSV to pull extension_ids from. remote_dir: remote directory to put extension archive in cloud storage. Raises: Exception if a CRX download fails. """ # Download CRX to temp files and compress into archive zip_path = os.path.join(local_extensions_dir, ZIP_NAME) extension_zip = zipfile.ZipFile(zip_path, 'w') update_csv = False extensions_info = [] with open(extensions_csv, 'rb') as csv_file: reader = csv.reader(csv_file) # Stores comments (in case CSV needs to be updated/rewritten) # and skips header line. comments = [] line = ','.join(reader.next()) while line.startswith('#'): comments.append(line) line = ','.join(reader.next()) # Extract info from CSV. for row in reader: extension_info = { 'extension_name': row[0], 'id': row[1], 'hash': row[2], 'version': row[3] } print 'Fetching extension %s...' % extension_info['id'] crx_path = _DownloadCrxFromCws(extension_info['id'], local_extensions_dir) if crx_path is None: raise exceptions.Error( '\tCould not fetch %s.\n\n' 'If this extension dl consistently fails, ' 'remove this entry from %s.' % (extension_info['id'], extensions_csv)) (new_hash, new_version) = _CrxHashIfChanged(crx_path, extension_info) if new_hash is not None: update_csv = True extension_info['hash'] = new_hash extension_info['version'] = new_version extensions_info.append(extension_info) extension_zip.write(crx_path, arcname='%s.crx' % extension_info['id']) extension_zip.close() if update_csv: print 'Updating CSV...' 
_UpdateCsv(comments, extensions_csv, extensions_info) print 'Uploading extensions to cloud...' remote_zip_path = os.path.join(remote_dir, ZIP_NAME) cloud_storage.Insert(cloud_storage.PARTNER_BUCKET, remote_zip_path, zip_path)