def _AddRecord(self, record, delimit=False): """Prints the current record as CSV. Printer attributes: noheading: bool, Disable the initial key name heading record. Args: record: A list of JSON-serializable object columns. delimit: bool, Print resource delimiters -- ignored. Raises: ToolException: A data value has a type error. """ # The CSV heading has three states: # 1: No heading, used by ValuePrinter and CSV when 2. and 3. are empty. # 2: Heading via AddHeading(). # 3: Default heading from format labels, if specified. if not self._heading_printed: self._heading_printed = True if 'no-heading' not in self.attributes: if self._heading: labels = self._heading else: labels = self.column_attributes.Labels() if labels: labels = [x.lower() for x in labels] if labels: self._out.write( self._separator.join( [self._QuoteField(label) for label in labels]) + self._terminator) line = [] for col in record: if col is None: val = '' elif isinstance(col, dict): val = self._delimiter.join([ self._QuoteField(u'{0}={1}'.format( console_attr.DecodeFromInput(k), console_attr.DecodeFromInput(v))) for k, v in sorted(col.iteritems()) ]) elif isinstance(col, list): val = self._delimiter.join([ self._QuoteField(console_attr.DecodeFromInput(x)) for x in col ]) elif isinstance(col, float): val = self._QuoteField(resource_transform.TransformFloat(col)) else: val = self._QuoteField(console_attr.DecodeFromInput(col)) line.append(val) self._out.write(self._separator.join(line) + self._terminator)
def __exit__(self, prev_exc_type, prev_exc_val, prev_exc_trace):
  # Context-manager exit: close the underlying resource without masking an
  # exception that is already propagating out of the `with` body.
  try:
    self.Close()
  except:  # pylint: disable=bare-except
    if not prev_exc_type:
      # No prior exception was active, so the Close() failure is the real
      # error -- let it propagate as-is.
      raise
    # A prior exception is already propagating.  Fold the Close() failure's
    # traceback into the message, then re-raise the ORIGINAL exception type
    # with the ORIGINAL traceback (Python 2 three-expression raise) so the
    # caller sees the first failure, annotated with the second.
    # NOTE(review): the message is missing a space between '{0}' and
    # 'while' -- runtime string, left unchanged here.
    message = (u'Got exception {0}'
               u'while another exception was active {1} [{2}]'
               .format(console_attr.DecodeFromInput(traceback.format_exc()),
                       prev_exc_type,
                       console_attr.DecodeFromInput(prev_exc_val)))
    raise prev_exc_type, message, prev_exc_trace
  # always return False so any exceptions will be re-raised
  return False
def GetHttpErrorMessage(error):
  """Returns a human readable string representation from the http response.

  Args:
    error: HttpException representing the error response.

  Returns:
    A human readable string representation of the error.
  """
  status = error.response.status
  code = error.response.reason
  message = ''
  try:
    payload = json.loads(error.content)
    if 'error' in payload:
      details = payload['error']
      if 'message' in details:
        message = details['message']
      problems = _GetViolationsFromError(details)
      if problems:
        message = message + '\nProblems:\n' + problems
  except (ValueError, TypeError):
    # Content is not JSON (or not a mapping): report it verbatim.
    message = error.content
  return u'ResponseError: status=[{0}], code=[{1}], message=[{2}]'.format(
      status, code, console_attr.DecodeFromInput(message))
def ReportMetrics(self, wait_for_report=False):
  """Reports the collected metrics using a separate async process.

  Args:
    wait_for_report: bool, If True, block until the reporting subprocess
      finishes.  Uses communicate() rather than wait() to avoid the
      pipe-buffer deadlock documented for subprocess.
  """
  if not self._metrics:
    return

  # Hand the queued metrics to the child process through a temp file that
  # the child is responsible for consuming.
  temp_metrics_file = tempfile.NamedTemporaryFile(delete=False)
  with temp_metrics_file:
    pickle.dump(self._metrics, temp_metrics_file)
    self._metrics = []

  # TODO(user): make this not depend on the file.
  this_file = console_attr.DecodeFromInput(__file__)
  reporting_script_path = os.path.realpath(
      os.path.join(os.path.dirname(this_file), 'metrics_reporter.py'))
  execution_args = execution_utils.ArgsForPythonTool(
      reporting_script_path, temp_metrics_file.name)

  exec_env = os.environ.copy()
  exec_env['PYTHONPATH'] = os.pathsep.join(sys.path)

  try:
    p = subprocess.Popen(execution_args, env=exec_env,
                         **self._async_popen_args)
    log.debug('Metrics reporting process started...')
  except OSError:
    # This can happen specifically if the Python executable moves between the
    # start of this process and now.
    log.debug('Metrics reporting process failed to start.')
    # BUG FIX: 'p' is unbound on this path; previously control fell through
    # to the wait_for_report branch below and raised NameError.
    return

  if wait_for_report:
    # NOTE: p.wait() can cause a deadlock. p.communicate() is recommended.
    # See python docs for more information.
    p.communicate()
    log.debug('Metrics reporting process finished.')
def DisplayProposedDeployment(app, project, app_config, version, promote):
  """Prints the details of the proposed deployment.

  Args:
    app: Application resource for the current application (required if any
      services are deployed, otherwise ignored).
    project: The name of the current project.
    app_config: yaml_parsing.AppConfigSet, The configurations being deployed.
    version: The version identifier of the application to be deployed.
    promote: Whether the newly deployed version will receive all traffic
      (this affects deployed URLs).

  Returns:
    dict (str->str), a mapping of service names to deployed service URLs

  This includes information on to-be-deployed services (including service
  name, version number, and deployed URLs) as well as configurations.
  """
  deployed_urls = {}

  if app_config.Services():
    if app is None:
      raise TypeError(
          'If services are deployed, must provide `app` parameter.')
    messages = []
    for name, info in app_config.Services().iteritems():
      use_ssl = deploy_command_util.UseSsl(info.parsed.handlers)
      text = DEPLOY_MESSAGE_TEMPLATE.format(
          project=project, service=name, version=version,
          file=console_attr.DecodeFromInput(info.file))
      url = deploy_command_util.GetAppHostname(
          app=app, service=info.module,
          version=None if promote else version, use_ssl=use_ssl)
      deployed_urls[name] = url
      text += DEPLOYED_URL_TEMPLATE.format(url=url)
      if not promote:
        # Without promotion the default URL keeps serving the old version;
        # surface it alongside the versioned URL.
        default_url = deploy_command_util.GetAppHostname(
            app=app, service=info.module, use_ssl=use_ssl)
        text += PROMOTE_MESSAGE_TEMPLATE.format(default_url=default_url)
      messages.append(text)
    fmt = 'list[title="You are about to deploy the following services:"]'
    resource_printer.Print(messages, fmt, out=log.status)

  if app_config.Configs():
    fmt = 'list[title="You are about to deploy the following configurations:"]'
    config_lines = [
        u'{0}/{1} (from [{2}])'.format(project, c.config, c.file)
        for c in app_config.Configs().values()
    ]
    resource_printer.Print(config_lines, fmt, out=log.status)

  return deployed_urls
def sdk_root(self):
  """Searches for the Cloud SDK root directory.

  Returns:
    str, The path to the root of the Cloud SDK or None if it could not be
      found.
  """
  # Walk upward from this module's directory looking for the SDK state dir.
  here = os.path.dirname(console_attr.DecodeFromInput(__file__))
  return file_utils.FindDirectoryContaining(here, Paths.CLOUDSDK_STATE_DIR)
def GetOperationError(error):
  """Returns a human readable string representation from the operation.

  Args:
    error: A string representing the raw json of the operation error.

  Returns:
    A human readable string representation of the error.
  """
  detail = console_attr.DecodeFromInput(error.message)
  return u'OperationError: code={0}, message={1}'.format(error.code, detail)
def _ExtractResponseAndJsonContent(self, http_error):
  """Extracts the response and JSON content from the HttpError."""
  # The error may carry an httplib2-style response mapping; it can be
  # absent entirely, so default to None and skip if missing/empty.
  response = getattr(http_error, 'response', None)
  if response:
    self.status_code = int(response.get('status', 0))
    self.status_description = console_attr.DecodeFromInput(
        response.get('reason', ''))
  content = console_attr.DecodeFromInput(http_error.content)
  try:
    # Parse the body as JSON and pull the standard 'error' envelope.
    self.content = _JsonSortedDict(json.loads(content))
    self.error_info = _JsonSortedDict(self.content['error'])
    if not self.status_code:  # Could have been set above.
      self.status_code = int(self.error_info.get('code', 0))
    if not self.status_description:  # Could have been set above.
      self.status_description = self.error_info.get('status', '')
    self.status_message = self.error_info.get('message', '')
  except (KeyError, TypeError, ValueError):
    # Body is not JSON, has no 'error' key, or has an unexpected shape:
    # fall back to the raw decoded content as the message.
    self.status_message = content
  except AttributeError:
    # NOTE(review): unclear which call in the try block raises
    # AttributeError -- presumably non-mapping parse results; the intent
    # appears to be "leave status_message unset" in that case. Confirm.
    pass
def _Stringify(value): # pylint: disable=invalid-name """Represents value as a JSON string if it's not a string.""" if value is None: return '' elif isinstance(value, console_attr.Colorizer): return value elif isinstance(value, basestring): return console_attr.DecodeFromInput(value) elif isinstance(value, float): return resource_transform.TransformFloat(value) elif hasattr(value, '__str__'): return unicode(value) else: return json.dumps(value, sort_keys=True)
def Print(self, *msg):
  """Writes the given messages to the output stream, followed by a newline.

  Behaves like the built-in print but respects the configured verbosity.

  Args:
    *msg: str, The messages to print, joined by single spaces.
  """
  # pylint: disable=g-import-not-at-top, avoid import loop
  from googlecloudsdk.core.console import console_attr
  decoded = [part if isinstance(part, unicode)
             else console_attr.DecodeFromInput(part)
             for part in msg]
  self.write(u' '.join(decoded) + u'\n')
def CompilePythonFiles(self):
  """Attempts to compile all the python files into .pyc files.

  This does not raise exceptions if compiling a given file fails.
  """
  # The self.sdk_root pathname could contain unicode chars and py_compile
  # chokes on unicode paths. Using relative paths from self.sdk_root works
  # around the problem.
  targets = (os.path.join('bin', 'bootstrapping'), 'lib', 'platform')
  with file_utils.ChDir(self.sdk_root):
    for rel_dir in targets:
      compileall.compile_dir(console_attr.DecodeFromInput(rel_dir),
                             quiet=True)
def _ProcessRecord(self, record): """Applies process_record_orig to dict, list and default repr records. Args: record: A JSON-serializable object. Returns: The processed record. """ if isinstance(record, (dict, list)) or _HasDefaultRepr(record): record = self._process_record_orig(record) if isinstance(record, dict): return [u'{0}: {1}'.format(k, v) for k, v in sorted(record.iteritems()) if v is not None] if isinstance(record, list): return [i for i in record if i is not None] return [console_attr.DecodeFromInput(record or '')]
def __init__(self, sdk_root):
  """Initializes the installation state for the given sdk install.

  Args:
    sdk_root: str, The file path of the root of the SDK installation.

  Raises:
    ValueError: If the given SDK root does not exist.
  """
  if not os.path.isdir(sdk_root):
    raise ValueError(
        'The given Cloud SDK root does not exist: [{0}]'.format(
            sdk_root))

  self.__sdk_root = console_attr.DecodeFromInput(sdk_root)
  # CONSISTENCY FIX: derive every state path from the decoded root.  The
  # original joined _state_directory from the raw sdk_root argument, which
  # mixes byte and unicode paths on non-ASCII installs.
  self._state_directory = os.path.join(self.__sdk_root,
                                       InstallationState.STATE_DIR_NAME)
  self.__backup_directory = os.path.join(self._state_directory,
                                         InstallationState.BACKUP_DIR_NAME)
  self.__trash_directory = os.path.join(self._state_directory,
                                        InstallationState.TRASH_DIR_NAME)

  self.__sdk_staging_root = (os.path.normpath(self.__sdk_root) +
                             InstallationState.STAGING_ROOT_SUFFIX)
def FormatDateTime(dt, fmt=None, tzinfo=None):
  """Returns a string of a datetime object formatted by an extended strftime().

  fmt handles these modifier extensions to the standard formatting chars:

    %Nf   Limit the fractional seconds to N digits. The default is N=6.
    %Ez   Format +/-HHMM offsets as ISO RFC 3339 Z for +0000 otherwise
          +/-HH:MM.
    %Oz   Format +/-HHMM offsets as ISO RFC 3339 +/-HH:MM.

  NOTE: The standard Python 2 strftime() borks non-ascii time parts. It does
  so by encoding non-ascii names to bytes, presumably under the assumption
  that the return value will be immediately output. This code works around
  that by decoding strftime() values to unicode if necessary and then
  returning either an ASCII or UNICODE string.

  Args:
    dt: The datetime object to be formatted.
    fmt: The strftime(3) format string, None for the RFC 3339 format in the
      dt timezone ('%Y-%m-%dT%H:%M:%S.%3f%Ez').
    tzinfo: Format dt relative to this timezone.

  Raises:
    DateTimeValueError: A DateTime numeric constant exceeded its range.

  Returns:
    A string of a datetime object formatted by an extended strftime().
  """
  if tzinfo:
    dt = LocalizeDateTime(dt, tzinfo)
  if not fmt:
    fmt = '%Y-%m-%dT%H:%M:%S.%3f%Ez'
  # Matches one extended spec: optional digit count, optional E/O modifier,
  # then the 'f' or 'z' conversion char.
  extension = re.compile('%[1-9]?[EO]?[fz]')
  m = extension.search(fmt)
  if not m:
    # No extensions present: plain strftime handles the whole format.
    return console_attr.DecodeFromInput(_StrFtime(dt, fmt))

  # Split the format into standard and extension parts.
  parts = []
  start = 0
  while m:
    # m was matched against fmt[start:], so its offsets are relative to
    # start; 'match' is the absolute index of the '%' in fmt.
    match = start + m.start()
    if start < match:
      # Format the preceding standard part.
      parts.append(
          console_attr.DecodeFromInput(_StrFtime(dt, fmt[start:match])))

    # Format the standard variant of the extended spec. The extensions only
    # have one modifier char.
    match += 1
    if fmt[match].isdigit():
      n = int(fmt[match])
      match += 1
    else:
      n = None
    if fmt[match] in ('E', 'O'):
      alternate = fmt[match]
      match += 1
    else:
      alternate = None
    spec = fmt[match]
    std_fmt = '%' + spec
    # Let strftime produce the standard value, then post-process it below.
    val = _StrFtime(dt, std_fmt)
    if spec == 'f':
      # Round the fractional part to n digits.
      if n and n < len(val):
        round_format = '{{0:0{n}.0f}}'.format(n=n)
        rounded = round_format.format(float(val) / 10**(len(val) - n))
        if len(rounded) == n:
          val = rounded
        else:
          # Rounding carried into an extra digit (e.g. 99.9 -> 100);
          # fall back to plain truncation.
          val = val[:n]
    elif spec == 'z':
      # Convert the time zone offset to RFC 3339 format.
      if alternate:
        if alternate == 'E' and val == '+0000':
          val = 'Z'
        elif len(val) == 5:
          val = val[:3] + ':' + val[3:]
    if val:
      parts.append(console_attr.DecodeFromInput(val))
    # Advance past this extension and search the remainder of fmt.
    start += m.end()
    m = extension.search(fmt[start:])

  # Format the trailing part if any.
  if start < len(fmt):
    parts.append(console_attr.DecodeFromInput(_StrFtime(dt, fmt[start:])))

  # Combine the parts.
  return ''.join(parts)