def main(argv):
  """Entry point: stage and deploy Chrome to a device, reporting stats."""
  options = _ParseCommandLine(argv)
  _PostParseCheck(options)

  # Set cros_build_lib debug level to hide RunCommand spew.
  logging.getLogger().setLevel(
      logging.DEBUG if options.verbose else logging.INFO)

  with stats.UploadContext() as queue:
    cmd_stats = stats.Stats.SafeInit(cmd_line=argv, cmd_base='deploy_chrome')
    if cmd_stats:
      queue.put([cmd_stats, stats.StatsUploader.URL, 1])

    with osutils.TempDir(set_global=True) as tempdir:
      # Stage into the user-supplied dir when given, else a scratch dir.
      staging_dir = options.staging_dir or os.path.join(tempdir, 'chrome')
      deploy = DeployChrome(options, tempdir, staging_dir)
      try:
        deploy.Perform()
      except failures_lib.StepFailure as ex:
        # Surface step failures as a clean exit message, not a traceback.
        raise SystemExit(str(ex).strip())
      deploy.Cleanup()
def CopyImageToDevice(self, image, device):
  """Copies |image| to the removable |device|.

  Args:
    image: Path to the image to copy.
    device: Device to copy to.
  """
  cmd = ['dd', 'if=%s' % image, 'of=%s' % device, 'bs=4M',
         'iflag=fullblock', 'oflag=sync']

  level = logging.getLogger().getEffectiveLevel()
  if level <= logging.NOTICE:
    # Interactive log levels: wrap dd in a progress-bar operation.
    imager = UsbImagerOperation(image)
    imager.Run(cros_build_lib.SudoRunCommand, cmd,
               debug_level=logging.NOTICE, update_period=0.5)
  else:
    cros_build_lib.SudoRunCommand(cmd, debug_level=logging.NOTICE,
                                  print_cmd=level < logging.NOTICE)

  # Make sure the image is fully flushed to the device before returning.
  cros_build_lib.SudoRunCommand(['sync'], debug_level=self.debug_level)
def CopyPayload(request_data):
  """Copy a payload file to a target DUT.

  This constructs a `RemoteDevice`, and calls its `CopyToDevice()` method to
  copy a payload file to the target device. A payload file is either the
  `stateful.tgz` tarball, or a Chrome OS AU payload (typically a full
  payload).

  The `request_data` argument has the following fields:
    * hostname: Name of the target DUT where the payload will be copied.
    * localpath: Path on this system to the payload file.
    * remotepath: Path on the DUT where the payload will be copied.
    * kwargs: Keyword arguments dictionary to be passed to
      `RemoteDevice.CopyToDevice()`.
    * kwargs['log_stdout_to_file']: If present, a file to which logger
      output will be written.  The output will typically include the
      command used for the copy, and standard input/output of that copy
      command.
  """
  log_file = request_data.kwargs.get('log_stdout_to_file')
  file_handler = None
  if log_file:
    # Mirror root-logger output into the requested log file for this copy.
    file_handler = logging.FileHandler(log_file)
    file_handler.setFormatter(LOG_FORMATTER)
    logging.getLogger().addHandler(file_handler)
  try:
    device = remote_access.RemoteDevice(request_data.hostname)
    device.CopyToDevice(request_data.localpath, request_data.remotepath,
                        mode='scp', **request_data.kwargs)
  finally:
    # Detach the per-request handler so later tasks don't write to this file.
    if file_handler:
      logging.getLogger().removeHandler(file_handler)
def main(_argv):
  """Repeatedly start and stop a dev server to exercise its lifecycle."""
  # DEBUG so the wrapper's internal command output is visible per iteration.
  logging.getLogger().setLevel(logging.DEBUG)

  iteration = 0
  while iteration < _ITERATIONS:
    print('Iteration {}'.format(iteration))
    wrapper = dev_server_wrapper.DevServerWrapper()
    print('Starting')
    wrapper.Start()
    print('Stopping')
    wrapper.Stop()
    iteration += 1
def _CaptureLogOutput(self, cmd, **kwargs):
  """Capture logging output of run.

  Args:
    cmd: Command to execute via cros_build_lib.run.
    **kwargs: Extra keyword arguments forwarded to run().

  Returns:
    The logging output produced while the command ran, as a string.
  """
  log = os.path.join(self.tempdir, 'output')
  fh = logging.FileHandler(log)
  fh.setLevel(logging.DEBUG)
  logging.getLogger().addHandler(fh)
  try:
    cros_build_lib.run(cmd, **kwargs)
  finally:
    # Bug fix: the original only removed/closed the handler on success,
    # leaking the handler (and fd) on the root logger when run() raised.
    # Also close before reading so buffered records are flushed to disk.
    logging.getLogger().removeHandler(fh)
    fh.close()
  return osutils.ReadFile(log)
def main(argv):
  """Confirm and run a cidb schema migration or database wipe."""
  parser = GetParser()
  options = parser.parse_args(argv)

  logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)

  # Check that we have no uncommitted files, and that our checkout's HEAD is
  # contained in a remote branch. This is to ensure that we don't accidentally
  # run uncommitted migrations.
  if git.RunGit(os.getcwd(), ['status', '-s']).output:
    cros_build_lib.Die('You appear to have uncommitted files. Aborting!')

  if not git.RunGit(os.getcwd(), ['branch', '-r', '--contains']).output:
    cros_build_lib.Die(
        'You appear to be on a local branch of chromite. Aborting!')

  if options.command == MIGRATE:
    positive_confirmation = 'please modify my database'
    warn = ('This option will apply schema changes to your existing database. '
            'You should not run this against the production database unless '
            'your changes are thoroughly tested, and those tests included '
            'in cidb_integration_test.py (including tests that old data is '
            'sanely migrated forward). Database corruption could otherwise '
            'result. Are you sure you want to proceed? If so, type "%s" '
            'now.\n') % positive_confirmation
  elif options.command == WIPE:
    positive_confirmation = 'please delete my data'
    warn = ('This operation will wipe (i.e. DELETE!) the entire contents of '
            'the database pointed at by %s. Are you sure you want to proceed? '
            'If so, type "%s" now.\n') % (
                os.path.join(options.cred_dir, 'host.txt'),
                positive_confirmation)
  else:
    cros_build_lib.Die('No command or unsupported command. Exiting.')

  print(warn)
  conf_string = cros_build_lib.GetInput('(%s)?: ' % positive_confirmation)
  if conf_string != positive_confirmation:
    cros_build_lib.Die('You changed your mind. Aborting.')

  if options.command == MIGRATE:
    print('OK, applying migrations...')
    db = cidb.CIDBConnection(options.cred_dir)
    db.ApplySchemaMigrations(maxVersion=options.migrate_version)
  elif options.command == WIPE:
    print('OK, wiping database...')
    db = cidb.CIDBConnection(options.cred_dir)
    db.DropDatabase()

  print('Done.')
def SetupLogging(logdir):
  """Configure root logging: a primary log file plus a size-rotated copy."""
  log_path = os.path.join(logdir, LOGFILE)
  # basicConfig installs the primary (truncate-on-start) file handler.
  logging.basicConfig(
      level=logging.DEBUG,
      format='%(asctime)s:%(name)s:%(levelname)-8s %(message)s',
      datefmt='%Y-%m-%d %H:%M',
      filename=log_path,
      filemode='w')
  # A rotating handler bounds on-disk log growth.
  rotating = logging_handlers.RotatingFileHandler(
      log_path, maxBytes=LOGFILE_SIZE_BYTES, backupCount=LOGFILE_COUNT)
  logging.getLogger().addHandler(rotating)
def Run(self, func, *args, **kwargs):
  """Run func, parse its output, and update the progress bar.

  Args:
    func: Function to execute in the background and whose output is to be
      captured.
    update_period: Optional argument to specify the period that output
      should be read.
    log_level: Logging level to run the func at. By default, it runs at log
      level info.
  """
  update_period = kwargs.pop('update_period',
                             self._PROGRESS_BAR_UPDATE_INTERVAL)

  # If we are not running in a terminal device, do not display the progress
  # bar.
  if not self._isatty:
    log_level = kwargs.pop('log_level', logging.INFO)
    restore_log_level = logging.getLogger().getEffectiveLevel()
    # Temporarily adjust the root logger level for the duration of func,
    # restoring it even if func raises.
    logging.getLogger().setLevel(log_level)
    try:
      func(*args, **kwargs)
    finally:
      logging.getLogger().setLevel(restore_log_level)
    return

  with osutils.TempDir() as tempdir:
    # Scratch files that capture the background task's stdout/stderr; the
    # ParseOutput loop below reads them incrementally.
    self._stdout_path = os.path.join(tempdir, STDOUT_FILE)
    self._stderr_path = os.path.join(tempdir, STDERR_FILE)
    osutils.Touch(self._stdout_path)
    osutils.Touch(self._stderr_path)
    try:
      # queue.put([]) dispatches exactly one background invocation of
      # CaptureOutputInBackground(func, *args, **kwargs).
      with parallel.BackgroundTaskRunner(
          self.CaptureOutputInBackground, func, *args, **kwargs) as queue:
        queue.put([])
        self.OpenStdoutStderr()
        while True:
          self.ParseOutput()
          if self.WaitUntilComplete(update_period):
            break
      # Before we exit, parse the output again to update progress bar.
      self.ParseOutput()
      # Final sanity check to update the progress bar to 100% if it was used
      # by ParseOutput
      self.Cleanup()
    except:
      # Add a blank line before the logging message so the message isn't
      # touching the progress bar.
      sys.stdout.write('\n')
      logging.error('Oops. Something went wrong.')
      # Raise the exception so it can be caught again.
      raise
def main(argv):
  """Confirm and run a cidb schema migration or database wipe."""
  parser = GetParser()
  options = parser.parse_args(argv)

  logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)

  # Check that we have no uncommitted files, and that our checkout's HEAD is
  # contained in a remote branch. This is to ensure that we don't accidentally
  # run uncommitted migrations.
  status_output = git.RunGit(os.getcwd(), ['status', '-s']).output
  if status_output:
    cros_build_lib.Die('You appear to have uncommitted files. Aborting!')

  containing_branches = git.RunGit(os.getcwd(),
                                   ['branch', '-r', '--contains']).output
  if not containing_branches:
    cros_build_lib.Die(
        'You appear to be on a local branch of chromite. Aborting!')

  if options.command == MIGRATE:
    positive_confirmation = 'please modify my database'
    warn = (
        'This option will apply schema changes to your existing database. '
        'You should not run this against the production database unless '
        'your changes are thoroughly tested, and those tests included '
        'in cidb_integration_test.py (including tests that old data is '
        'sanely migrated forward). Database corruption could otherwise '
        'result. Are you sure you want to proceed? If so, type "%s" '
        'now.\n') % positive_confirmation
  elif options.command == WIPE:
    positive_confirmation = 'please delete my data'
    warn = (
        'This operation will wipe (i.e. DELETE!) the entire contents of '
        'the database pointed at by %s. Are you sure you want to proceed? '
        'If so, type "%s" now.\n') % (
            os.path.join(options.cred_dir, 'host.txt'),
            positive_confirmation)
  else:
    cros_build_lib.Die('No command or unsupported command. Exiting.')

  print(warn)
  answer = cros_build_lib.GetInput('(%s)?: ' % positive_confirmation)
  if answer != positive_confirmation:
    cros_build_lib.Die('You changed your mind. Aborting.')

  if options.command == MIGRATE:
    print('OK, applying migrations...')
    db = cidb.CIDBConnection(options.cred_dir)
    db.ApplySchemaMigrations(maxVersion=options.migrate_version)
  elif options.command == WIPE:
    print('OK, wiping database...')
    db = cidb.CIDBConnection(options.cred_dir)
    db.DropDatabase()

  print('Done.')
def SetupLogging(logdir):
  """Set up DEBUG-level logging to LOGFILE under |logdir|, with rotation."""
  logfile_path = os.path.join(logdir, LOGFILE)
  logging.basicConfig(level=logging.DEBUG,
                      format='%(asctime)s:%(name)s:%(levelname)-8s %(message)s',
                      datefmt='%Y-%m-%d %H:%M',
                      filename=logfile_path,
                      filemode='w')
  # Add a size-capped rotating handler alongside the basicConfig one.
  rotate = logging_handlers.RotatingFileHandler(logfile_path,
                                                maxBytes=LOGFILE_SIZE_BYTES,
                                                backupCount=LOGFILE_COUNT)
  logging.getLogger().addHandler(rotate)
def main(argv):
  """Print the list of modified workon packages for the chosen sysroot."""
  logging.getLogger().setLevel(logging.INFO)
  flags = _ParseArguments(argv)

  # Resolve the sysroot from whichever flag was given: board, host, or an
  # explicit sysroot path.
  if flags.board:
    sysroot = cros_build_lib.GetSysroot(flags.board)
  elif flags.host:
    sysroot = '/'
  else:
    sysroot = flags.sysroot

  modified = ListModifiedWorkonPackages(sysroot_lib.Sysroot(sysroot))
  print(' '.join(sorted(modified)))
def main(argv):
  """Upload a local GCE image tarball to GS and create a GCE image from it.

  Args:
    argv: Command line arguments.

  Raises:
    UploadGceImageRuntimError: If the source image path is not a file.
  """
  parser = _GetParser()
  opts = parser.parse_args(argv)
  opts.Freeze()

  if opts.dry_run:
    logging.getLogger().setLevel(logging.DEBUG)

  if not os.path.isfile(opts.source_image):
    # Bug fix: the original raised with a bare '%s' placeholder and never
    # interpolated the offending path into the message.
    raise UploadGceImageRuntimError('%s is not a valid file.'
                                    % opts.source_image)

  source_dir, source_image_name = os.path.split(opts.source_image)
  with osutils.TempDir() as tempdir:
    logging.info('Generating tarball from %s', opts.source_image)
    tarball_name = commands.BuildGceTarball(tempdir, source_dir,
                                            source_image_name)
    # We must generate a uuid when uploading the tarball because repeated
    # uploads are likely to be named similarly. We'll just use tempdir to
    # keep files separate.
    temp_tarball_dir = os.path.join(opts.temp_gcs_path,
                                    os.path.basename(tempdir))
    gs_context = gs.GSContext(init_boto=True, retries=5, acl='private',
                              dry_run=opts.dry_run)
    gc_context = gcloud.GCContext(opts.project, dry_run=opts.dry_run)
    try:
      logging.info('Uploading tarball %s to %s',
                   tarball_name, temp_tarball_dir)
      gs_context.CopyInto(os.path.join(tempdir, tarball_name),
                          temp_tarball_dir)
      logging.info('Creating image %s', opts.target_name)
      gc_context.CreateImage(opts.target_name,
                             source_uri=os.path.join(temp_tarball_dir,
                                                     tarball_name))
    except:
      # Bare except is deliberate: best-effort cleanup must also run for
      # KeyboardInterrupt etc. before the original exception is re-raised.
      logging.error('Oops! Something went wonky.')
      logging.error('Trying to clean up temporary artifacts...')
      try:
        with cros_build_lib.OutputCapturer() as output_capturer:
          gc_context.ListImages()
        if opts.target_name in ''.join(output_capturer.GetStdoutLines()):
          logging.info('Removing image %s', opts.target_name)
          gc_context.DeleteImage(opts.target_name, quiet=True)
      except gcloud.GCContextException:
        # Gobble up this error so external error is visible.
        logging.error('Failed to clean up image %s', opts.target_name)
      raise
    finally:
      logging.info('Removing GS tempdir %s', temp_tarball_dir)
      gs_context.Remove(temp_tarball_dir, ignore_missing=True)
  logging.info('All done!')
def testSetupFileLoggerLogLevels(self):
  """Test that the logger operates at the right level."""
  command.SetupFileLogger('foo.log', log_level=logging.INFO)
  logging.getLogger().setLevel(logging.DEBUG)
  logging.debug('debug')
  logging.info('info')
  logging.notice('notice')

  # Test that the logs are correct.  The file handler was installed at INFO,
  # so 'debug' must be filtered out while 'info' and 'notice' get through.
  log_path = os.path.join(self.workspace_path,
                          workspace_lib.WORKSPACE_LOGS_DIR, 'foo.log')
  # Fix: use a context manager instead of a bare open().read(), which
  # leaked the file handle.
  with open(log_path, 'r') as log_file:
    logs = log_file.read()
  self.assertNotIn('debug', logs)
  self.assertIn('info', logs)
  self.assertIn('notice', logs)
def Kill(self, sig, log_level, first=False):
  """Kill process with signal, ignoring if the process is dead.

  Args:
    sig: Signal to send.
    log_level: The log level of log messages.
    first: Whether this is the first signal we've sent.
  """
  self._killing.set()
  self._WaitForStartup()
  if logging.getLogger().isEnabledFor(log_level):
    # Dump debug information about the hanging process.
    logging.log(log_level, 'Killing %r (sig=%r %s)',
                self.pid, sig, signals.StrSignal(sig))

    if first:
      # On the first signal, also dump this process and its direct children
      # (found via pgrep -P) for post-mortem debugging.
      ppid = str(self.pid)
      output = self._DebugRunCommand(
          ('pgrep', '-P', ppid), debug_level=log_level, print_cmd=False,
          error_code_ok=True, capture_output=True)
      for pid in [ppid] + output.splitlines():
        self._DumpDebugPid(log_level, pid)

  try:
    os.kill(self.pid, sig)
  except OSError as ex:
    # ESRCH means the process already exited; that's fine — anything else
    # is a real error and is propagated.
    if ex.errno != errno.ESRCH:
      raise
def UseProgressBar():
  """Determine whether the progress bar is to be used or not.

  We only want the progress bar to display for the brillo commands which
  operate at logging level NOTICE. If the user wants to see the noisy
  output, then they can execute the command at logging level INFO or DEBUG.
  """
  current_level = logging.getLogger().getEffectiveLevel()
  return current_level == logging.NOTICE
def inner(*args, **kwargs):
  # Wrapper around `functor` (captured from the enclosing decorator scope,
  # not visible here): runs it with the root logger forced to INFO.
  logger = logging.getLogger()
  current = logger.getEffectiveLevel()
  try:
    # Raise the level to INFO so DEBUG spew from the wrapped call is hidden.
    logger.setLevel(logging.INFO)
    return functor(*args, **kwargs)
  finally:
    # Always restore the previous level, even if functor raises.
    logger.setLevel(current)
def CreateHttpConn(host, path, reqtype='GET', headers=None, body=None):
  """Opens an https connection to a gerrit service, and sends a request."""
  # All gerrit API paths are rooted under the authenticated '/a/' prefix.
  path = '/a/' + path.lstrip('/')
  headers = headers or {}

  if _InAppengine():
    # TODO(phobbs) how can we choose to only run this on GCE / AppEngine?
    credentials = _GetAppCredentials()
    try:
      headers.setdefault(
          'Authorization',
          'Bearer %s' % credentials.get_access_token().access_token)
    except gce.HttpAccessTokenRefreshError as e:
      logging.debug('Failed to retreive gce access token: %s', e)
    # Not in an Appengine or GCE environment.
    except httplib2.ServerNotFoundError as e:
      pass

  if 'Cookie' not in headers:
    # Fall back to gitcookies-based auth when no cookie was supplied.
    cookies = GetCookies(host, path)
    headers['Cookie'] = '; '.join('%s=%s' % (n, v)
                                  for n, v in cookies.items())
  elif 'Authorization' not in headers:
    logging.debug('No gitcookies file or Appengine credentials found.')

  if 'User-Agent' not in headers:
    # Identify ourselves with the chromite revision when available.
    # We may not be in a git repository.
    try:
      version = git.GetGitRepoRevision(
          os.path.dirname(os.path.realpath(__file__)))
    except cros_build_lib.RunCommandError:
      version = 'unknown'
    headers['User-Agent'] = ' '.join((
        'chromite.lib.gob_util',
        os.path.basename(sys.argv[0]),
        version,
    ))

  if body:
    body = json.JSONEncoder().encode(body)
    headers.setdefault('Content-Type', 'application/json')

  if logging.getLogger().isEnabledFor(logging.DEBUG):
    logging.debug('%s https://%s%s', reqtype, host, path)
    for key, val in headers.items():
      # Never log credentials, even at DEBUG.
      if key.lower() in ('authorization', 'cookie'):
        val = 'HIDDEN'
      logging.debug('%s: %s', key, val)
    if body:
      logging.debug(body)

  conn = httplib.HTTPSConnection(host)
  conn.req_host = host
  # Stash the request parameters on the connection so callers can retry.
  conn.req_params = {
      'url': path,
      'method': reqtype,
      'headers': headers,
      'body': body,
  }
  conn.request(**conn.req_params)
  return conn
def main(argv):
  """Print the list of modified workon packages for the chosen target."""
  logging.getLogger().setLevel(logging.INFO)
  flags = _ParseArguments(argv)

  # Resolve the sysroot from whichever flag was given: brick, board, host,
  # or an explicit sysroot path.
  if flags.brick:
    try:
      sysroot = cros_build_lib.GetSysroot(brick_lib.Brick(flags.brick))
    except brick_lib.BrickNotFound:
      cros_build_lib.Die('Could not load brick %s.' % flags.brick)
  elif flags.board:
    sysroot = cros_build_lib.GetSysroot(flags.board)
  elif flags.host:
    sysroot = '/'
  else:
    sysroot = flags.sysroot

  modified = ListModifiedWorkonPackages(sysroot_lib.Sysroot(sysroot))
  print(' '.join(sorted(modified)))
def _ExecuteTask(handler, request_data): """Wrapper for the task handler function.""" root_logger = logging.getLogger() for h in list(root_logger.handlers): root_logger.removeHandler(h) try: return handler(request_data) except Exception as e: return e
def testLogLevel(self):
  """Test that the log level of the function running is set correctly."""
  func_log_level = logging.DEBUG
  test_log_level = logging.NOTICE
  expected_output = 'hi'

  def func():
    # Only prints when Run() really switched the effective level to
    # func_log_level while the function was executing.
    if logging.getLogger().getEffectiveLevel() == func_log_level:
      print(expected_output)

  logging.getLogger().setLevel(test_log_level)
  op = TestWrapperProgressBarOperation()
  with self.OutputCapturer():
    op.Run(func, update_period=0.05, log_level=func_log_level)

  # Check that OutputCapturer contains the expected output. This means that
  # the log level was changed.
  self.AssertOutputContainsLine(expected_output)
  # Check that the log level was restored after the function executed.
  self.assertEqual(logging.getLogger().getEffectiveLevel(), test_log_level)
def CopyImageToDevice(self, image, device):
  """Copies |image| to the removable |device|.

  Args:
    image: Path to the image to copy.
    device: Device to copy to.
  """
  cmd = ['dd', 'if=%s' % image, 'of=%s' % device, 'bs=4M',
         'iflag=fullblock', 'oflag=sync']
  # At NOTICE or more verbose, show a progress bar while dd runs; otherwise
  # run dd quietly in the foreground.
  if logging.getLogger().getEffectiveLevel() <= logging.NOTICE:
    op = UsbImagerOperation(image)
    op.Run(cros_build_lib.SudoRunCommand, cmd, debug_level=logging.NOTICE,
           update_period=0.5)
  else:
    # In this branch the effective level is above NOTICE, so print_cmd
    # evaluates to False — the command line itself is not echoed.
    cros_build_lib.SudoRunCommand(
        cmd, debug_level=logging.NOTICE,
        print_cmd=logging.getLogger().getEffectiveLevel() < logging.NOTICE)

  # Flush caches so the image is fully written to the device.
  cros_build_lib.SudoRunCommand(['sync'], debug_level=self.debug_level)
def CaptureOutputInBackground(self, func, *args, **kwargs):
  """Launch func in background and capture its output.

  Args:
    func: Function to execute in the background and whose output is to be
      captured.
    log_level: Logging level to run the func at. By default, it runs at log
      level info.
  """
  requested_level = kwargs.pop('log_level', logging.INFO)
  root_logger = logging.getLogger()
  saved_level = root_logger.getEffectiveLevel()
  root_logger.setLevel(requested_level)
  try:
    capturer = cros_build_lib.OutputCapturer(stdout_path=self._stdout_path,
                                             stderr_path=self._stderr_path,
                                             quiet_fail=False)
    with capturer:
      func(*args, **kwargs)
  finally:
    # Tell the foreground loop we're done, then restore the logger level —
    # both must happen even if func raised.
    self._queue.put(_BackgroundTaskComplete())
    root_logger.setLevel(saved_level)
def CaptureOutputInBackground(self, func, *args, **kwargs):
  """Launch func in background and capture its output.

  Args:
    func: Function to execute in the background and whose output is to be
      captured.
    log_level: Logging level to run the func at. By default, it runs at log
      level info.
  """
  log_level = kwargs.pop('log_level', logging.INFO)
  restore_log_level = logging.getLogger().getEffectiveLevel()
  logging.getLogger().setLevel(log_level)
  try:
    # quiet_fail: only suppress capture errors when running inside a
    # workspace (where logs are persisted elsewhere).
    with cros_build_lib.OutputCapturer(
        stdout_path=self._stdout_path, stderr_path=self._stderr_path,
        quiet_fail=self._workspace_path is not None):
      func(*args, **kwargs)
  finally:
    # Signal completion to the foreground loop and restore the logger
    # level, even if func raised.
    self._queue.put(_BackgroundTaskComplete())
    logging.getLogger().setLevel(restore_log_level)
def main(argv):
  """Confirm and run a cidb schema migration or database wipe."""
  options = GetParser().parse_args(argv)

  logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)

  if options.command == MIGRATE:
    positive_confirmation = 'please modify my database'
    warn = ('This option will apply schema changes to your existing database. '
            'You should not run this against the production database unless '
            'your changes are thoroughly tested, and those tests included '
            'in cidb_integration_test.py (including tests that old data is '
            'sanely migrated forward). Database corruption could otherwise '
            'result. Are you sure you want to proceed? If so, type "%s" '
            'now.\n') % positive_confirmation
  elif options.command == WIPE:
    positive_confirmation = 'please delete my data'
    warn = ('This operation will wipe (i.e. DELETE!) the entire contents of '
            'the database pointed at by %s. Are you sure you want to proceed? '
            'If so, type "%s" now.\n') % (
                os.path.join(options.cred_dir, 'host.txt'),
                positive_confirmation)
  else:
    print('No command or unsupported command. Exiting.')
    exit()

  # Require the user to type the exact confirmation phrase before anything
  # destructive happens.
  print(warn)
  answer = cros_build_lib.GetInput('(%s)?: ' % positive_confirmation)
  if answer != positive_confirmation:
    print('You changed your mind. Aborting.')
    exit()

  if options.command == MIGRATE:
    print('OK, applying migrations...')
    db = cidb.CIDBConnection(options.cred_dir)
    db.ApplySchemaMigrations(maxVersion=options.migrate_version)
  elif options.command == WIPE:
    print('OK, wiping database...')
    db = cidb.CIDBConnection(options.cred_dir)
    db.DropDatabase()

  print('Done.')
def SetupLogging(args):
  """Sets up logging based on the parsed arguments."""
  root = logging.getLogger()
  if args.output_logfile:
    # Log to a file, rotating at ~1MB and keeping 5 backups.
    root.addHandler(handlers.RotatingFileHandler(
        args.output_logfile, maxBytes=10**6, backupCount=5))
  else:
    # No log file requested: log to stdout instead.
    root.addHandler(logging.StreamHandler(sys.stdout))
  root.setLevel(logging.DEBUG)
def main(argv):
  """Entry point: stage and deploy Chrome to a device."""
  options = _ParseCommandLine(argv)
  _PostParseCheck(options)

  # Set cros_build_lib debug level to hide RunCommand spew.
  logging.getLogger().setLevel(
      logging.DEBUG if options.verbose else logging.INFO)

  with osutils.TempDir(set_global=True) as tempdir:
    # Stage into the user-supplied dir when given, else a scratch dir.
    staging_dir = options.staging_dir or os.path.join(tempdir, 'chrome')
    deploy = DeployChrome(options, tempdir, staging_dir)
    try:
      deploy.Perform()
    except failures_lib.StepFailure as ex:
      # Surface step failures as a clean exit message, not a traceback.
      raise SystemExit(str(ex).strip())
    deploy.Cleanup()
def CopyImageToDevice(self, image, device):
  """Copies |image| to the removable |device|.

  Args:
    image: Path to the image to copy.
    device: Device to copy to.
  """
  cmd = [
      'dd', 'if=%s' % image, 'of=%s' % device, 'bs=4M', 'iflag=fullblock',
      'oflag=direct', 'conv=fdatasync'
  ]
  # At NOTICE or more verbose, show a progress bar while dd runs; otherwise
  # run dd quietly in the foreground.
  if logging.getLogger().getEffectiveLevel() <= logging.NOTICE:
    op = UsbImagerOperation(image)
    op.Run(cros_build_lib.sudo_run, cmd, debug_level=logging.NOTICE,
           encoding='utf-8', update_period=0.5)
  else:
    cros_build_lib.sudo_run(
        cmd, debug_level=logging.NOTICE,
        print_cmd=logging.getLogger().getEffectiveLevel() < logging.NOTICE)

  # dd likely didn't put the backup GPT in the last block. sfdisk fixes this
  # up for us with a 'write' command, so we have a standards-conforming GPT.
  # Ignore errors because sfdisk (util-linux < v2.32) isn't always happy to
  # fix GPT sanity issues.
  cros_build_lib.sudo_run(['sfdisk', device], input='write\n',
                          check=False, debug_level=self.debug_level)

  # Re-read the partition table, then flush writes to this device only.
  cros_build_lib.sudo_run(['partx', '-u', device],
                          debug_level=self.debug_level)
  cros_build_lib.sudo_run(['sync', '-d', device],
                          debug_level=self.debug_level)
def __init__(self, port, duration, timeout=1, baudrate=BAUDRATE, cr50_workload=False): """Initialize UartSerial Args: port: UART device path. e.g. /dev/ttyUSB0 duration: Time to test, in seconds timeout: Read timeout value. baudrate: Baud rate such as 9600 or 115200. cr50_workload: True if a workload should be generated on cr50 Attributes: char_loss_occurrences: Number that character loss happens cleanup_cli: Command list to perform before the test exits cr50_workload: True if cr50 should be stressed, or False otherwise dev_prof: Dictionary of device profile duration: Time to keep chargen running eol: Characters to add at the end of input logger: object that store the log num_ch_exp: Expected number of characters in output num_ch_cap: Number of captured characters in output test_cli: Command list to run for chargen test test_thread: Thread object that captures the UART output serial: serial.Serial object """ # Initialize serial object self.serial = serial.Serial() self.serial.port = port self.serial.timeout = timeout self.serial.baudrate = baudrate self.duration = duration self.cr50_workload = cr50_workload self.logger = logging.getLogger(type(self).__name__ + '| ' + port) self.test_thread = threading.Thread(target=self.stress_test_thread) self.dev_prof = {} self.cleanup_cli = [] self.test_cli = [] self.eol = CRLF self.num_ch_exp = 0 self.num_ch_cap = 0 self.char_loss_occurrences = 0 atexit.register(self.cleanup)
def setUp(self):
  """Prepare default paths and pin the root logger level for the test."""
  self.default_chromium_dir = os.path.join(self.tempdir, 'chromium')
  self.default_repo_dir = os.path.join(self.tempdir, 'chromium', 'src')
  self.default_archive_base = os.path.join(self.tempdir, 'build')
  self.gclient_path = os.path.join(self.tempdir, 'gclient')
  self.log_output_args = {'log_output': True}

  # The SimpleChromeBuilder class sets a 'verbose' setting based on the
  # ambient logging level, so we must set the logger to the default setting
  # of INFO for this test and then restore the logger to whatever it was
  # originally set to when we clean the test up.
  logger = logging.getLogger()
  self._prev_logging_level = logger.getEffectiveLevel()
  logger.setLevel(logging.INFO)
def Cleanup(self, silent=False):
  """Wait for a process to exit.

  Args:
    silent: If True, skip replaying the subprocess output to the log.
  """
  # Only the parent process that owns the output file may clean up, and
  # only once (self._output is set to None below).
  if os.getpid() != self._parent_pid or self._output is None:
    return
  try:
    # Print output from subprocess.
    if not silent and logging.getLogger().isEnabledFor(logging.DEBUG):
      with open(self._output.name, 'r') as f:
        for line in f:
          logging.debug(line.rstrip('\n'))
  finally:
    # Clean up our temporary file.
    osutils.SafeUnlink(self._output.name)
    self._output.close()
    self._output = None
def SetupLogging(self, opts):
  """Sets up logging based on |opts|.

  Returns:
    The chosen log level name (upper-cased), e.g. 'DEBUG'.
  """
  value = opts.log_level.upper()
  root = logging.getLogger()
  root.setLevel(getattr(logging, value))

  formatter = ColoredFormatter(fmt=opts.log_format,
                               datefmt=constants.LOGGER_DATE_FMT,
                               enable_color=opts.color)

  # Only set colored formatter for ChromiteStreamHandler instances,
  # which could have been added by ScriptWrapperMain() below.
  for stream_handler in root.handlers:
    if isinstance(stream_handler, ChromiteStreamHandler):
      stream_handler.setFormatter(formatter)

  return value
def CreateHttpConn(host, path, reqtype='GET', headers=None, body=None):
  """Opens an https connection to a gerrit service, and sends a request."""
  headers = headers or {}
  # Strip any :port suffix before the netrc lookup.
  bare_host = host.partition(':')[0]
  auth = NETRC.authenticators(bare_host)
  if auth:
    # HTTP basic auth from ~/.netrc (login/password fields).
    headers.setdefault(
        'Authorization',
        'Basic %s' % (base64.b64encode('%s:%s' % (auth[0], auth[2]))))
  else:
    logging.debug('No netrc file found')
  if 'Cookie' not in headers:
    cookies = GetCookies(host, '/a/%s' % path)
    headers['Cookie'] = '; '.join('%s=%s' % (n, v)
                                  for n, v in cookies.items())
  if 'User-Agent' not in headers:
    headers['User-Agent'] = ' '.join((
        'chromite.lib.gob_util',
        os.path.basename(sys.argv[0]),
        git.GetGitRepoRevision(os.path.dirname(
            os.path.realpath(__file__))),
    ))
  if body:
    body = json.JSONEncoder().encode(body)
    headers.setdefault('Content-Type', 'application/json')
  if logging.getLogger().isEnabledFor(logging.DEBUG):
    logging.debug('%s https://%s/a/%s', reqtype, host, path)
    # NOTE: iteritems/httplib — this variant is Python 2 only.
    for key, val in headers.iteritems():
      # Never log credentials, even at DEBUG.
      if key.lower() in ('authorization', 'cookie'):
        val = 'HIDDEN'
      logging.debug('%s: %s', key, val)
    if body:
      logging.debug(body)
  conn = httplib.HTTPSConnection(host)
  conn.req_host = host
  # Stash request parameters on the connection so callers can retry.
  conn.req_params = {
      'url': '/a/%s' % path,
      'method': reqtype,
      'headers': headers,
      'body': body,
  }
  conn.request(**conn.req_params)
  return conn
def testNotice(self):
  """Test logging.notice works and is between INFO and WARNING."""
  logger = logging.getLogger()
  sh = logging.StreamHandler(sys.stdout)
  logger.addHandler(sh)
  msg = 'notice message'

  # At INFO the NOTICE record must pass through the logger.
  logger.setLevel(logging.INFO)
  with self.OutputCapturer():
    logging.notice(msg)
  self.AssertOutputContainsLine(msg)

  # At WARNING the NOTICE record must be filtered out.
  logger.setLevel(logging.WARNING)
  with self.OutputCapturer():
    logging.notice(msg)
  self.AssertOutputContainsLine(msg, invert=True)
def SetupFileLogger(filename='brillo.log', log_level=logging.DEBUG):
  """Store log messages to a file.

  In case of an error, this file can be made visible to the user.
  """
  workspace_path = workspace_lib.WorkspacePath()
  # Outside a workspace there is nowhere to put the log; do nothing.
  if workspace_path is None:
    return

  path = os.path.join(workspace_path, workspace_lib.WORKSPACE_LOGS_DIR,
                      filename)
  osutils.Touch(path, makedirs=True)

  file_handler = logging.FileHandler(path, mode='w')
  file_handler.setLevel(log_level)
  file_handler.setFormatter(logging.Formatter(
      fmt=constants.LOGGER_FMT, datefmt=constants.LOGGER_DATE_FMT))
  logging.getLogger().addHandler(file_handler)
def __init__(self, options):
  """Constructor.

  Args:
    options: In addition to the flags required by the base class, need to
      specify: * chromium_dir: Optional. If specified, use the chromium
      repo the path points to. Otherwise, use base_dir/chromium/src. *
      build_dir: Optional. Store build result to it if specified. Default:
      base_dir/build. * archive_build: True to archive build. *
      reuse_build: True to reuse previous build.
  """
  super(SimpleChromeBuilder, self).__init__(options)
  self.reuse_build = options.reuse_build
  self.archive_build = options.archive_build

  # Chromium checkout location: explicit option wins over base_dir default.
  if 'chromium_dir' in options and options.chromium_dir:
    self.chromium_dir = options.chromium_dir
    self.repo_dir = os.path.join(self.chromium_dir, 'src')
  else:
    self.chromium_dir = os.path.join(self.base_dir, CHROMIUM_DIR)
    self.repo_dir = os.path.join(self.base_dir, self.DEFAULT_REPO_DIR)

  # Archive destination: explicit build_dir wins over base_dir/build.
  if 'build_dir' in options and options.build_dir:
    self.archive_base = options.build_dir
  else:
    self.archive_base = os.path.join(self.base_dir, 'build')
  if self.archive_build:
    osutils.SafeMakedirs(self.archive_base)

  # Prefer gclient from PATH, falling back to the bundled depot_tools copy.
  self.gclient = osutils.Which('gclient')
  if not self.gclient:
    self.gclient = os.path.join(constants.DEPOT_TOOLS_DIR, 'gclient')

  # Verbosity follows the ambient root-logger level at construction time.
  self.verbose = logging.getLogger().isEnabledFor(logging.DEBUG)
  self.chrome_sdk = commands.ChromeSDK(self.repo_dir, self.board, goma=True,
                                       debug_log=self.verbose)

  # log_output=True: Instead of emitting output to STDOUT, redirect output
  # to logging.info.
  self.log_output_args = {'log_output': True}
def CreateHttpConn(host, path, reqtype='GET', headers=None, body=None):
  """Opens an https connection to a gerrit service, and sends a request."""
  headers = headers or {}
  # Strip any :port suffix before the netrc lookup.
  bare_host = host.partition(':')[0]
  auth = NETRC.authenticators(bare_host)
  if auth:
    # HTTP basic auth from ~/.netrc (login/password fields).
    headers.setdefault('Authorization', 'Basic %s' % (
        base64.b64encode('%s:%s' % (auth[0], auth[2]))))
  else:
    logging.debug('No netrc file found')
  if 'Cookie' not in headers:
    cookies = GetCookies(host, '/a/%s' % path)
    headers['Cookie'] = '; '.join('%s=%s' % (n, v)
                                  for n, v in cookies.items())
  if 'User-Agent' not in headers:
    headers['User-Agent'] = ' '.join((
        'chromite.lib.gob_util',
        os.path.basename(sys.argv[0]),
        git.GetGitRepoRevision(os.path.dirname(os.path.realpath(__file__))),
    ))
  if body:
    body = json.JSONEncoder().encode(body)
    headers.setdefault('Content-Type', 'application/json')
  if logging.getLogger().isEnabledFor(logging.DEBUG):
    logging.debug('%s https://%s/a/%s', reqtype, host, path)
    # NOTE: iteritems/httplib — this variant is Python 2 only.
    for key, val in headers.iteritems():
      # Never log credentials, even at DEBUG.
      if key.lower() in ('authorization', 'cookie'):
        val = 'HIDDEN'
      logging.debug('%s: %s', key, val)
    if body:
      logging.debug(body)
  conn = httplib.HTTPSConnection(host)
  conn.req_host = host
  # Stash request parameters on the connection so callers can retry.
  conn.req_params = {
      'url': '/a/%s' % path,
      'method': reqtype,
      'headers': headers,
      'body': body,
  }
  conn.request(**conn.req_params)
  return conn
def _UpdateGnArgsIfStale(self, out_dir, build_label, gn_args, board):
  """Runs 'gn gen' if gn args are stale or logs a warning.

  Args:
    out_dir: Output directory name under the chrome src tree.
    build_label: Build flavor subdirectory (e.g. 'Release').
    gn_args: The desired gn args to compare against args.gn on disk.
    board: Board name used to form the out_<board>/Release directory.
  """
  gn_args_file_path = os.path.join(
      self.options.chrome_src, out_dir, build_label, 'args.gn')
  # Nothing to do when the on-disk args already match.
  if not self._StaleGnArgs(gn_args, gn_args_file_path):
    return

  # Without --gn-gen we only tell the user how to regenerate manually.
  if not self.options.gn_gen:
    logging.warning('To update gn args run:')
    logging.warning('gn gen out_$SDK_BOARD/Release --args="$GN_ARGS"')
    return

  logging.warning('Running gn gen')
  cros_build_lib.RunCommand(
      ['gn', 'gen', 'out_%s/Release' % board,
       '--args=%s' % gn_helpers.ToGNString(gn_args)],
      print_cmd=logging.getLogger().isEnabledFor(logging.DEBUG),
      cwd=self.options.chrome_src)
def _SetupLogging(options):
  """Set up default logging for the command.

  Removes any pre-installed logging handlers, and installs a single
  `TimedRotatingFileHandler` that will write to the default log file in
  the log directory specified on the command line.

  Args:
    options: Results of parsing the command line; used to obtain the log
      directory path.
  """
  root = logging.getLogger()
  # Drop whatever handlers library code may have installed before us.
  while root.handlers:
    root.removeHandler(root.handlers[0])

  logfile = os.path.join(options.logdir, _LOG_FILENAME)
  # Rotate weekly, keeping 13 backups (~one quarter of history).
  file_handler = handlers.TimedRotatingFileHandler(
      logfile, when='W4', backupCount=13)
  file_handler.setFormatter(copy_handler.LOG_FORMATTER)
  root.setLevel(logging.DEBUG)
  root.addHandler(file_handler)
def __init__(self, ports, duration, cr50_workload=False):
  """Initialize UART stress tester.

  Args:
    ports: List of UART ports to test.
    duration: Time to keep testing in seconds.
    cr50_workload: True if a workload should be generated on cr50.

  Raises:
    ChargenTestError: if any of ports is not a valid character device,
        or if duration is not positive.
  """
  # Save the arguments.
  self.ports = ports
  # Every port must exist and be a character device.
  for dev_path in ports:
    try:
      dev_mode = os.stat(dev_path).st_mode
    except OSError as e:
      raise ChargenTestError(e)
    if not stat.S_ISCHR(dev_mode):
      raise ChargenTestError('%s is not a character device.' % dev_path)

  if duration <= 0:
    raise ChargenTestError('Input error: duration is not positive.')
  self.duration = duration

  self.cr50_workload = cr50_workload

  # Logger named after the concrete subclass for clearer log lines.
  self.logger = logging.getLogger(type(self).__name__)

  # Create one UartSerial object per UART port.
  self.serials = {
      dev_path: UartSerial(port=dev_path,
                           duration=self.duration,
                           cr50_workload=self.cr50_workload)
      for dev_path in self.ports
  }
from __future__ import absolute_import
from __future__ import print_function

import collections
import json
import subprocess

from chromite.lib import cros_logging as logging
from chromite.lib import metrics
from infra_libs import ts_mon

# Prefix under which all prod host metrics are reported to ts_mon.
_METRIC_ROOT_PATH = 'prod_hosts/'
# Location of the autotest CLI binary used to enumerate servers.
_ATEST_PROGRAM = '/usr/local/autotest/cli/atest'

logger = logging.getLogger(__name__)


def collect_prod_hosts():
  """Collect the current server list and write it to the ts_mon sink."""
  servers = list(_get_servers())
  sink = _TsMonSink(_METRIC_ROOT_PATH)
  sink.write_servers(servers)


def _get_servers():
  """Get server information from atest.

  Returns:
    Iterable of Server instances.
  """
  output = subprocess.check_output(
      [_ATEST_PROGRAM, 'server', 'list', '--json'])
def func():
  """Print expected_output iff the root logger level equals func_log_level."""
  root_level = logging.getLogger().getEffectiveLevel()
  if func_log_level == root_level:
    print(expected_output)
def SetupBuild(options):
  """Set up parameters needed for the build.

  This checks the current environment and options and sets up various things
  needed for the build, including 'base' which holds the base flags for
  passing to the U-Boot Makefile.

  Args:
    options: Command line options

  Returns:
    Base flags to use for U-Boot, as a list.
  """
  # pylint: disable=W0603
  # This function fills in module-level build state shared by the rest of
  # the script.
  global arch, board, compiler, family, outdir, smdk, uboard, vendor, verbose

  if not verbose:
    verbose = options.verbose != 0

  # NOTE(review): this passes the raw verbosity count straight through as a
  # logging level (levels are normally multiples of 10) — confirm intended.
  logging.getLogger().setLevel(options.verbose)

  Log('Building for %s' % options.board)

  # Separate out board_variant string: "peach_pit" becomes "peach", "pit".
  # But don't mess up upstream boards which use _ in their name.
  parts = options.board.split('_')
  if parts[0] in ['daisy', 'peach']:
    board = parts[0]
  else:
    board = options.board

  # To allow this to be run from 'cros_sdk'
  if in_chroot:
    os.chdir(os.path.join(src_root, 'third_party', 'u-boot', 'files'))

  base_board = board

  # Verified boot builds use a 'chromeos_'-prefixed board config.
  if options.verified:
    base_board = 'chromeos_%s' % base_board

  uboard = UBOARDS.get(base_board, base_board)
  Log('U-Boot board is %s' % uboard)

  # Pull out some information from the U-Boot boards config file.
  # Columns: status, arch, cpu, soc, vendor, family (missing trailing
  # columns are padded with None below).
  family = None
  with open('boards.cfg') as f:
    for line in f:
      if uboard in line:
        if line[0] == '#':
          continue
        fields = line.split()
        if not fields:
          continue
        arch = fields[1]
        fields += [None, None, None]
        smdk = fields[3]
        vendor = fields[4]
        family = fields[5]
        break
  if not arch:
    cros_build_lib.Die("Selected board '%s' not found in boards.cfg." % board)

  # Path handed to the Makefile as VBOOT when building verified boot.
  vboot = os.path.join('build', board, 'usr')
  if arch == 'x86':
    family = 'em100'
    if in_chroot:
      compiler = 'i686-pc-linux-gnu-'
    else:
      compiler = '/opt/i686/bin/i686-unknown-elf-'
  elif arch == 'arm':
    if in_chroot:
      # Use the Chrome OS toolchain
      compiler = 'armv7a-cros-linux-gnueabi-'
    else:
      compiler = glob.glob('/opt/linaro/gcc-linaro-arm-linux-*/bin/*gcc')
      if not compiler:
        cros_build_lib.Die("""Please install an ARM toolchain for your machine.
'Install a Linaro toolchain from:'
'https://launchpad.net/linaro-toolchain-binaries'
'or see cros/commands/cros_chrome_sdk.py.""")
      compiler = compiler[0]
      # Strip the trailing 'gcc' to get the cross-compile prefix.
      compiler = re.sub('gcc$', '', compiler)
  elif arch == 'sandbox':
    compiler = ''
  else:
    cros_build_lib.Die("Selected arch '%s' not supported." % arch)

  if not options.build:
    options.incremental = True

  cpus = multiprocessing.cpu_count()

  outdir = os.path.join(OUT_DIR, uboard)
  base = [
      'make',
      '-j%d' % cpus,
      'O=%s' % outdir,
      'ARCH=%s' % arch,
      'CROSS_COMPILE=%s' % compiler,
      '--no-print-directory',
      'HOSTSTRIP=true',
      'DEV_TREE_SRC=%s-%s' % (family, options.dt),
      'QEMU_ARCH=']

  if options.verbose < 2:
    base.append('-s')

  if options.ro and options.rw:
    cros_build_lib.Die('Cannot specify both --ro and --rw options')
  if options.ro:
    base.append('CROS_RO=1')
    options.small = True

  if options.rw:
    base.append('CROS_RW=1')
    options.small = True

  if options.small:
    base.append('CROS_SMALL=1')
  else:
    base.append('CROS_FULL=1')

  if options.verified:
    base += [
        'VBOOT=%s' % vboot,
        'MAKEFLAGS_VBOOT=DEBUG=1',
        'QUIET=1',
        'CFLAGS_EXTRA_VBOOT=-DUNROLL_LOOPS',
        'VBOOT_SOURCE=%s/platform/vboot_reference' % src_root]
    base.append('VBOOT_DEBUG=1')

  # Handle the Chrome OS USE_STDINT workaround. Vboot needs <stdint.h> due
  # to a recent change, the need for which I didn't fully understand. But
  # U-Boot doesn't normally use this. We have added an option to U-Boot to
  # enable use of <stdint.h> and without it vboot will fail to build. So we
  # need to enable it where we can. We can't just enable it always since
  # that would prevent this script from building other non-Chrome OS boards
  # with a different (older) toolchain, or Chrome OS boards without vboot.
  # So use USE_STDINT if the toolchain supports it, and not if not. This
  # file was originally part of glibc but has recently migrated to the
  # compiler so it is reasonable to use it with a stand-alone program like
  # U-Boot. At this point the comment has got long enough that we may as
  # well include some poetry which seems to be sorely lacking in the code
  # base, so this is from Ogden Nash:
  #    To keep your marriage brimming
  #    With love in the loving cup,
  #    Whenever you're wrong, admit it;
  #    Whenever you're right, shut up.
  cmd = [CompilerTool('gcc'), '-ffreestanding', '-x', 'c', '-c', '-']
  result = cros_build_lib.RunCommand(cmd,
                                     input='#include <stdint.h>',
                                     capture_output=True,
                                     **kwargs)
  if result.returncode == 0:
    base.append('USE_STDINT=1')

  if options.trace:
    base.append('FTRACE=1')
  if options.separate:
    base.append('DEV_TREE_SEPARATE=1')

  if options.incremental:
    # Get the correct board for cros_write_firmware
    config_mk = '%s/include/config.mk' % outdir
    if not os.path.exists(config_mk):
      logging.warning('No build found for %s - dropping -i' % board)
      options.incremental = False

  # A stale in-tree config indicates a previous non-out-of-tree build.
  config_mk = 'include/config.mk'
  if os.path.exists(config_mk):
    logging.warning("Warning: '%s' exists, try 'make distclean'" % config_mk)

  # For when U-Boot supports ccache
  # See http://patchwork.ozlabs.org/patch/245079/
  if use_ccache:
    os.environ['CCACHE'] = 'ccache'

  return base
def ScriptWrapperMain(find_target_func, argv=None, log_level=logging.DEBUG,
                      log_format=constants.LOGGER_FMT):
  """Function usable for chromite.script.* style wrapping.

  Note that this function invokes sys.exit on the way out by default.

  Args:
    find_target_func: a function, which, when given the absolute
      pathway the script was invoked via (for example,
      /home/ferringb/cros/trunk/chromite/bin/cros_sdk; note that any
      trailing .py from the path name will be removed),
      will return the main function to invoke (that functor will take
      a single arg- a list of arguments, and shall return either None
      or an integer, to indicate the exit code).
    argv: sys.argv, or an equivalent tuple for testing.  If nothing is
      given, sys.argv is defaulted to.
    log_level: Default logging level to start at.
    log_format: Default logging format to use.
  """
  if argv is None:
    argv = sys.argv[:]
  target = os.path.abspath(argv[0])
  name = os.path.basename(target)
  # Resolve the invocation path to the script's main functor.
  if target.endswith('.py'):
    target = os.path.splitext(target)[0]
  target = find_target_func(target)
  if target is None:
    print('Internal error detected- no main functor found in module %r.' %
          (name,), file=sys.stderr)
    sys.exit(100)

  # Set up basic logging information for all modules that use logging.
  # Note a script target may setup default logging in its module namespace
  # which will take precedence over this.
  logger = logging.getLogger()
  logger.setLevel(log_level)
  logger_handler = ChromiteStreamHandler()
  logger_handler.setFormatter(
      logging.Formatter(fmt=log_format, datefmt=constants.LOGGER_DATE_FMT))
  logger.addHandler(logger_handler)

  signal.signal(signal.SIGTERM, _DefaultHandler)

  # Default to failure unless the target explicitly succeeds.
  ret = 1
  try:
    ret = target(argv[1:])
  except _ShutDownException as e:
    sys.stdout.flush()
    print('%s: Signaled to shutdown: caught %i signal.' % (name, e.signal),
          file=sys.stderr)
    sys.stderr.flush()
  except SystemExit as e:
    # Right now, let this crash through- longer term, we'll update the scripts
    # in question to not use sys.exit, and make this into a flagged error.
    raise
  except ChrootRequiredError as e:
    ret = _RestartInChroot(e.cmd, e.chroot_args)
  except ExecRequiredError as e:
    # Flush log buffers before replacing this process image.
    logging.shutdown()
    # This does not return.
    os.execv(e.cmd[0], e.cmd)
  except Exception as e:
    sys.stdout.flush()
    print('%s: Unhandled exception:' % (name,), file=sys.stderr)
    sys.stderr.flush()
    raise
  finally:
    logging.shutdown()

  # A None return from the target means success.
  if ret is None:
    ret = 0
  sys.exit(ret)
def main(argv):
  """Merge each listed Gerrit change onto the given branch and upload it."""
  parser = _GetParser()
  options, args = parser.parse_args(argv)

  # Positional args are: one or more changes followed by the target branch.
  if len(args) < 2:
    parser.error('Not enough arguments specified')

  changes = args[0:-1]
  try:
    patches = gerrit.GetGerritPatchInfo(changes)
  except ValueError as e:
    logging.error('Invalid patch: %s', e)
    cros_build_lib.Die('Did you swap the branch/gerrit number?')
  branch = args[-1]

  # Suppress all logging info output unless we're running debug.
  if not options.debug:
    logging.getLogger().setLevel(logging.NOTICE)

  # Get a pointer to your repo checkout to look up the local project paths for
  # both email addresses and for using your checkout as a git mirror.
  manifest = None
  if options.mirror:
    try:
      manifest = git.ManifestCheckout.Cached(constants.SOURCE_ROOT)
    except OSError as e:
      if e.errno == errno.ENOENT:
        logging.error('Unable to locate ChromiumOS checkout: %s',
                      constants.SOURCE_ROOT)
        logging.error('Did you mean to use --nomirror?')
        return 1
      raise
    if not _ManifestContainsAllPatches(manifest, patches):
      return 1
  else:
    if not options.email:
      # NOTE(review): this format string appears to lack a conversion
      # specifier (looks redacted) — confirm it interpolates $USER.
      chromium_email = '*****@*****.**' % os.environ['USER']
      logging.notice('--nomirror set without email, using %s', chromium_email)
      options.email = chromium_email

  index = 0
  work_dir = None
  root_work_dir = tempfile.mkdtemp(prefix='cros_merge_to_branch')
  try:
    for index, (change, patch) in enumerate(zip(changes, patches)):
      # We only clone the project and set the committer the first time.
      work_dir = os.path.join(root_work_dir, patch.project)
      if not os.path.isdir(work_dir):
        branch = _SetupWorkDirectoryForPatch(work_dir, patch, branch, manifest,
                                             options.email)

      # Now that we have the project checked out, let's apply our change and
      # create a new change on Gerrit.
      logging.notice('Uploading change %s to branch %s', change, branch)
      urls = _UploadChangeToBranch(work_dir, patch, branch, options.draft,
                                   options.dryrun)
      logging.notice('Successfully uploaded %s to %s', change, branch)
      for url in urls:
        if url.endswith('\x1b[K'):
          # Git will often times emit these escape sequences.
          url = url[0:-3]
        logging.notice(' URL: %s', url)

  except (cros_build_lib.RunCommandError, cros_patch.ApplyPatchException,
          git.AmbiguousBranchName, OSError) as e:
    # Tell the user how far we got.
    good_changes = changes[:index]
    bad_changes = changes[index:]

    logging.warning('############## SOME CHANGES FAILED TO UPLOAD ############')

    if good_changes:
      logging.notice(
          'Successfully uploaded change(s) %s', ' '.join(good_changes))

    # Printing out the error here so that we can see exactly what failed. This
    # is especially useful to debug without using --debug.
    logging.error('Upload failed with %s', str(e).strip())
    if not options.wipe:
      logging.error('Not wiping the directory. You can inspect the failed '
                    'change at %s; After fixing the change (if trivial) you can'
                    ' try to upload the change by running:\n'
                    'git commit -a -c CHERRY_PICK_HEAD\n'
                    'git push %s HEAD:refs/for/%s', work_dir, patch.project_url,
                    branch)
    else:
      logging.error('--nowipe not set thus deleting the work directory. If you '
                    'wish to debug this, re-run the script with change(s) '
                    '%s and --nowipe by running:\n %s %s %s --nowipe',
                    ' '.join(bad_changes), sys.argv[0], ' '.join(bad_changes),
                    branch)

    # Suppress the stack trace if we're not debugging.
    if options.debug:
      raise
    else:
      return 1
  finally:
    if options.wipe:
      shutil.rmtree(root_work_dir)

  if options.dryrun:
    logging.notice('Success! To actually upload changes, re-run without '
                   '--dry-run.')
  else:
    logging.notice('Successfully uploaded all changes requested.')

  return 0
# Autotest uses this library and can not guarantee existence of the statsd # module. try: import statsd except ImportError: from chromite.lib.graphite_lib import statsd_mock as statsd # This is _type for all metadata logged to elasticsearch from here. STATS_ES_TYPE = 'stats_metadata' # statsd logs details about what its sending at the DEBUG level, which I really # don't want to see tons of stats in logs, so all of these are silenced by # setting the logging level for all of statsdto WARNING. logging.getLogger('statsd').setLevel(logging.WARNING) def _prepend_init(_es, _conn, _prefix): def wrapper(original): """Decorator to override __init__.""" class _Derived(original): """Derived stats class.""" # pylint: disable=super-on-old-class def __init__(self, name, connection=None, bare=False, metadata=None): name = self._add_prefix(name, _prefix, bare) conn = connection if connection else _conn super(_Derived, self).__init__(name, conn) self.metadata = metadata
def setUp(self):
  """Attach a stdout StreamHandler to the root logger for this test."""
  root_logger = logging.getLogger()
  stdout_handler = logging.StreamHandler(sys.stdout)
  root_logger.addHandler(stdout_handler)
  self.logger = root_logger
"""Store and manage Mob* Monitor checkfiles.""" from __future__ import print_function import cherrypy import collections import imp import inspect import os import time from cherrypy.process import plugins from chromite.lib import cros_logging as logging LOGGER = logging.getLogger(__name__) HCEXECUTION_IN_PROGRESS = 0 HCEXECUTION_COMPLETED = 1 HCSTATUS_HEALTHY = 0 IN_PROGRESS_DESCRIPTION = 'Health check is currently executing.' NULL_DESCRIPTION = '' EMPTY_ACTIONS = [] HEALTHCHECK_STATUS = collections.namedtuple('healthcheck_status', ['name', 'health', 'description', 'actions']) HEALTH_CHECK_METHODS = ['Check', 'Diagnose']
from chromite.lib import cros_build_lib
from chromite.lib import cros_logging as logging
from chromite.lib import osutils
from chromite.lib import parallel

# Module-level build state; filled in later (e.g. by SetupBuild()).
arch = None
board = None
compiler = None
default_board = None
family = None
in_chroot = True

# Bare message format; this script does its own output labeling.
logging.basicConfig(format='%(message)s')

# Common RunCommand keyword arguments: quiet, tolerate non-zero exits, and
# log at whatever level the root logger currently has.
kwargs = {'print_cmd': False, 'error_code_ok': True,
          'debug_level': logging.getLogger().getEffectiveLevel()}

outdir = ''

# If you have multiple boards connected on different servo ports, put lines
# like 'SERVO_PORT{"peach_pit"} = 7777' in your ~/.crosfwrc
SERVO_PORT = {}

smdk = None
src_root = os.path.join(constants.SOURCE_ROOT, 'src')
# NOTE(review): in_chroot and default_board are re-assigned below after
# their placeholder values above — confirm the duplicates are intentional.
in_chroot = cros_build_lib.IsInsideChroot()
uboard = ''
default_board = 'peach_pit'
use_ccache = False