def Generate(discovery_doc, package_writer, include_timestamp=False, version_package=False, package_path=None, output_type='plain', language='java', language_variant='default', callback=None):
    """Generate a library package from discovery and options.

    Args:
      discovery_doc: parsed discovery document (dict) describing the API.
      package_writer: writer that receives the generated package files.
      include_timestamp: if True, embed the timestamp in the generated library.
      version_package: if True, put the API version in the package.
      package_path: optional custom package name/path.
      output_type: 'full' to also include compile-time dependencies
        (e.g. base jar files); anything else omits them.
      language: target language for the generated library.
      language_variant: variation of the language to generate for.
      callback: optional callable invoked with all generation arguments after
        the archive has been written.

    Raises:
      app.UsageError: if the language or language variant is unsupported.
    """
    options = {
        # Include other files needed to compile (e.g. base jar files).
        'include_dependencies': output_type == 'full',
        # Include the timestamp in the generated library.
        'include_timestamp': include_timestamp,
        # Put API version in the package.
        'version_package': version_package,
        # Custom package name.
        'package_path': package_path,
    }
    if FLAGS.monolithic_source_name:
        options['useSingleSourceFile'] = True
    if FLAGS.reparent_methods_using_id:
        discovery_doc['reparentMethodsUsingId'] = True

    # Determine language version from language variant.
    language_variations = Targets().VariationsForLanguage(language)
    if not language_variations:
        raise app.UsageError('Language %s missing from '
                             'apiserving/libgen/gen/targets.json' % language)
    features = language_variations.GetFeatures(language_variant)
    if not features:
        # BUG FIX: the original wrote "% language, language_variant", which
        # formats with a single argument (and passes language_variant as a
        # second positional arg to UsageError), raising TypeError at format
        # time instead of producing the intended message.
        raise app.UsageError(
            'Unsupported language variant: '
            '%s/%s/features.json is missing' % (language, language_variant))
    try:
        generator_class = generator_lookup.GetGeneratorByLanguage(
            features.get('generator', language))
    except ValueError:
        raise app.UsageError('Unsupported language: %s' % language)

    generator = generator_class(discovery_doc, options=options)
    if FLAGS.monolithic_source_name:
        generator.api.SetTemplateValue('monolithicSourceName',
                                       FLAGS.monolithic_source_name)
    generator.SetTemplateDir(features.template_dir)
    generator.SetFeatures(features)
    generator.GeneratePackage(package_writer)
    package_writer.DoneWritingArchive()
    if callback:
        callback(discovery_doc=discovery_doc, package_writer=package_writer,
                 include_timestamp=include_timestamp,
                 version_package=version_package, package_path=package_path,
                 output_type=output_type, language=language,
                 language_variant=language_variant)
def RunCommand(self, argv):
    """Move instances to a destination zone, or continue a failed move.

    Args:
      argv: positional args; one or more instance regexes followed by a
        destination zone, or empty when --continue is set.

    Returns:
      True on completion.

    Raises:
      app.UsageError: if the arguments are inconsistent with --continue.
    """
    continuing = FLAGS['continue'].value
    if not continuing and len(argv) < 2:
        raise app.UsageError(
            'You must specify at least one instance and a destination zone.')
    if continuing and argv:
        raise app.UsageError(
            'You cannot specify instances or a destination zone when continuing '
            'a failed move.')
    credentials = gce_util.get_credentials()
    api = gce.get_api(
        credentials,
        version=FLAGS.service_version,
        default_project=FLAGS.project)
    if continuing:
        name_regexes, dest_zone = None, None
    else:
        name_regexes, dest_zone = argv[:-1], argv[-1]
    # Pack the parsed arguments into a lightweight namespace for the migrator.
    args = collections.namedtuple(
        'Namespace', ['name_regexes', 'dest_zone', 'replay_log_file'])(
            name_regexes, dest_zone, continuing)
    InstanceMigrator(api)(args)
    return True
def _ValidateFlags(self):
    """Raises a UsageError if there is any problem with the flags."""
    source = self._flags.source_zone
    destination = self._flags.destination_zone
    if not source:
        raise app.UsageError(
            'You must specify a source zone through the --source_zone flag.')
    if not destination:
        raise app.UsageError('You must specify a destination zone '
                             'through the --destination_zone flag.')
    if source == destination:
        raise app.UsageError('The destination and source zones cannot be equal.')
def _CheckSchemaFile(self, schema):
    """Reject inline text schemas and URI lists; require a schema JSON file.

    Raises:
      app.UsageError: if schema looks like a text schema (contains ':')
        or a comma-separated URI list.
    """
    if ':' in schema:
        raise app.UsageError(
            '\nMust specify an extended schema JSON file as opposed to '
            'text schema.\n ebq requires every command to provide the '
            'extended schema file.')
    if ',' in schema:
        raise app.UsageError(
            '\nMust specify a local source file, cannot upload '
            'URIs with encryption yet.')
def Handle(self, image_name, root_source_tarball=None):
    """Add the specified image.

    Args:
      image_name: The name of the image to add.
      root_source_tarball: Tarball in Google Storage containing the desired
        root directory for the resulting image.

    Returns:
      The result of inserting the image.

    Raises:
      app.UsageError: if the tarball/source-disk arguments are inconsistent.
    """
    image_context = self._context_parser.ParseContextOrPrompt(
        'images', image_name)

    # Source tarball and source disk are mutually exclusive parameters.
    if self.api.version >= version.get('v1'):
        if root_source_tarball and self._flags.source_disk:
            raise app.UsageError(
                'You cannot specify both root_source_tarball and '
                'source_disk. Only one or the other.')
        if not root_source_tarball and not self._flags.source_disk:
            raise app.UsageError(
                'You must specify either a root_source_tarball or '
                'a source_disk.')
    elif not root_source_tarball:
        raise app.UsageError('You must specify a root_source_tarball.')

    image_resource = {
        'kind': self._GetResourceApiKind('image'),
        'name': image_context['image'],
        'description': self._flags.description,
        'sourceType': 'RAW',
    }
    if root_source_tarball:
        # Accept gs:// URLs by rewriting them to their HTTP equivalent.
        if root_source_tarball.startswith('gs://'):
            root_source_tarball = ('http://storage.googleapis.com/' +
                                   root_source_tarball[len('gs://'):])
        image_resource['rawDisk'] = {
            'source': root_source_tarball,
            'containerType': 'TAR',
        }
    elif self._flags.source_disk:
        self._AutoDetectZone()
        image_resource['sourceDisk'] = self._context_parser.NormalizeOrPrompt(
            'disks', self._flags.source_disk)

    image_request = self.api.images.insert(
        project=image_context['project'], body=image_resource)
    return image_request.execute()
def _Check(self):
    """Perform argument checking and expansion.

    Raises:
      app.UsageError: if any required codebase attribute is unset.
    """
    # Each of these must have been supplied before we can merge.
    for attribute in ('generated_codebase', 'public_codebase',
                      'previous_codebase'):
        if not getattr(self, attribute):
            raise app.UsageError('%s not set' % attribute)
    # Expand into a fresh scratch directory that receives the merge result.
    self.merged_codebase = tempfile.mkdtemp(
        dir=moe_app.RUN.temp_dir, prefix='merged_codebase')
    print('Writing merged codebase to %s' % self.merged_codebase)
def GatherFromFiles(metadata_files, metadata_dict):
    """Read key:filename metadata entries into metadata_dict.

    Args:
      metadata_files: iterable of 'key:filename' strings.
      metadata_dict: dict updated in place with key -> file contents.

    Raises:
      app.UsageError: on a malformed entry or an empty filename.
    """
    for metadata_entry in metadata_files:
        if ':' not in metadata_entry:
            raise app.UsageError(
                'Wrong syntax for metadata_from_file %s. '
                'Use key:filename.' % metadata_entry)
        key, filename = metadata_entry.split(':', 1)
        CheckKey(key, metadata_dict)
        # FIX: the original checked len(split(':', 1)) != 2, which can never
        # trigger once ':' is known to be present, so an entry like 'key:'
        # fell through to an IOError from open('').  Check for the empty
        # filename explicitly so the intended UsageError is raised.
        if not filename:
            raise app.UsageError('No metadata file specified for %s.' % key)
        with open(filename, 'r') as f:
            metadata_dict[key] = f.read()
def Run(self, argv):
    """Run the command, printing the result.

    Args:
      argv: The non-flag arguments to the command.
    """
    if not FLAGS.project_name:
        raise app.UsageError('You must specify a project name'
                             ' using the "--project_name" flag.')
    discovery_uri = (
        FLAGS.api_host + 'discovery/v1/apis/{api}/{apiVersion}/rest')
    try:
        # If the Credentials don't exist or are invalid, run through the
        # native client flow; Storage writes refreshed credentials back to
        # the file on success.  auth_local_webserver is disabled because this
        # tool may run on Virtual Machines where we do not want to run the
        # webserver.
        FLAGS.auth_local_webserver = False
        storage = Storage(FLAGS.credentials_file)
        credentials = storage.get()
        if credentials is None or credentials.invalid == True:
            credentials = run(FLOW, storage)
        authorized_http = credentials.authorize(self._dump_request_wrapper(
            httplib2.Http()))
        api = build('taskqueue', FLAGS.service_version, http=authorized_http,
                    discoveryServiceUrl=discovery_uri)
        result = self.run_with_api_and_flags_and_args(api, FLAGS, argv)
        self.print_result(result)
    except HttpError as http_error:
        print('Error Processing request: %s' % str(http_error))
def Run(self, argv):
    """Reset the database by deleting its backing file, if present.

    Args:
      argv: remaining args; must contain only the command name itself.

    Raises:
      app.UsageError: if extra positional arguments were given.
    """
    super(ResetDatabase, self).Run(argv)
    if len(argv) != 1:
        raise app.UsageError('Too many args: %s' % repr(argv))
    database_path = FLAGS.database_filename
    if os.path.exists(database_path):
        os.remove(database_path)
    print('Database successfully reset.')
def Run(self, unused_argv):
    """Create a codebase at the head revision, translated to a project space.

    Raises:
      app.UsageError: if --source_repository is not a known repository.
    """
    project = db_client.MakeProjectContext()
    try:
        source_revision = FLAGS.source_revision
        source = FLAGS.source_repository
        if source not in base.REPOSITORIES:
            raise app.UsageError('source_repository should be one of %s' %
                                 str(base.REPOSITORIES))
        # Pick the repository/creator pair matching the requested side.
        if source == base.INTERNAL_STR:
            repository = project.internal_repository
            codebase_creator = project.internal_codebase_creator
        elif source == base.PUBLIC_STR:
            repository = project.public_repository
            codebase_creator = project.public_codebase_creator
        else:
            raise base.Error('Unexpected source: %s' % source)
        with moe_app.RUN.ui.BeginImmediateTask(
            'head_revision', 'Determining Head Revision') as t:
            head_revision = repository.GetHeadRevision(source_revision)
            t.SetResult(head_revision)
        source_codebase = codebase_creator.Create(revision=head_revision)
        translated_codebase = translators.TranslateToProjectSpace(
            source_codebase, FLAGS.target_project_space, project.translators)
        moe_app.RUN.ui.Info('Codebase created at %s' %
                            translated_codebase.Path())
    finally:
        project.db.Disconnect()
def Run(self, argv):
    """Execute a batch of commands read from a file ('-' means stdin).

    Args:
      argv: [command_name, filename].

    Returns:
      1 if deserialization of a command failed; None otherwise.

    Raises:
      app.UsageError: on a wrong argument count or a missing file.
    """
    super(Batch, self).Run(argv)
    if len(argv) != 2:
        raise app.UsageError('Needs one argument, the filename of the file '
                             'where each line is a command.')
    # '-' is shorthand for standard input.
    if argv[-1] == '-':
        argv[-1] = '/dev/stdin'
    batch_path = argv[-1]
    if not os.path.exists(batch_path):
        raise app.UsageError('File specified does not exist: %s' % batch_path)
    try:
        with open(batch_path) as batch_file:
            ApplyBatchOfCommands(batch_file)
    except serialization.DeserializationError as error:
        _Print(error)
        _Print('Aborting.')
        return 1
def Run(self, argv):
    """Run the command, printing the result.

    Args:
      argv: The non-flag arguments to the command.
    """
    if not FLAGS.project_name:
        raise app.UsageError('You must specify a project name'
                             ' using the "--project_name" flag.')
    discovery_uri = (FLAGS.api_host +
                     'discovery/v1/apis/{api}/{apiVersion}/rest')
    try:
        # Load the credentials from the service account credentials json
        # file, falling back to the standard environment variable.
        if not FLAGS.service_account_file:
            FLAGS.service_account_file = get_env_variable(
                'GOOGLE_APPLICATION_CREDENTIALS')
        credentials = ServiceAccountCredentials.from_json_keyfile_name(
            FLAGS.service_account_file, scopes=SCOPES)
        authorized_http = credentials.authorize(
            self._dump_request_wrapper(httplib2.Http()))
        api = build('cloudtasks', FLAGS.service_version, http=authorized_http,
                    discoveryServiceUrl=discovery_uri)
        result = self.run_with_api_and_flags_and_args(api, FLAGS, argv)
        self.print_result(result)
    except HttpError as http_error:
        print('Error Processing request: %s' % str(http_error))
def main(unused_argv):
    """Initialize the MOE project, or just register it on the db.

    Raises:
      app.UsageError: if --internal_revision was not supplied.
    """
    project = db_client.MakeProjectContext(create_project=True,
                                           acquire_lock=False)
    try:
        if FLAGS.install_on_db_only:
            # Registration-only mode: no codebases are created.
            moe_app.RUN.ui.Info((
                'Project %s now exists on the MOE db. '
                'You can run local MOE commands (create_codebase, e.g.) to prepare. '
                'When you are ready to start using MOE for real:\n'
                '1) run moe change to create a change\n'
                '2) submit the change and note the equivalence\n'
                '3) start running moe auto\n') % project.config.name)
            return
        internal_revision = FLAGS.internal_revision
        if not internal_revision:
            raise app.UsageError('Must supply a revision using '
                                 '--internal_revision flag.')
        context = InitCodebasesContext(
            project, internal_revision, FLAGS.public_revision)
        context.InitializeProject()
        moe_app.RUN.report.PrintSummary()
    finally:
        project.db.Disconnect()
def __init__(self):
    """Build an authorized TaskQueue API client from command-line flags.

    Raises:
      app.UsageError: if --project_name was not supplied.
    """
    if not FLAGS.project_name:
        raise app.UsageError('You must specify a project name'
                             ' using the "--project_name" flag.')
    discovery_uri = (
        FLAGS.api_host + 'discovery/v1/apis/{api}/{apiVersion}/rest')
    logger.info(discovery_uri)
    try:
        # If the Credentials don't exist or are invalid run through the
        # native client flow. The Storage object will ensure that if
        # successful the good Credentials will get written back to a file.
        # Setting FLAGS.auth_local_webserver to false since we can run our
        # tool on Virtual Machines and we do not want to run the webserver
        # on VMs.
        FLAGS.auth_local_webserver = False
        storage = Storage(FLAGS.credentials_file)
        credentials = storage.get()
        if credentials is None or credentials.invalid == True:
            credentials = run(FLOW, storage)
        http = credentials.authorize(self._dump_request_wrapper(
            httplib2.Http()))
        self.task_api = build('taskqueue', FLAGS.service_version, http=http,
                              discoveryServiceUrl=discovery_uri)
    except HttpError as http_error:
        # FIX: corrected the "gettin" typo in the logged error message.
        logger.error('Error getting task_api: %s' % http_error)
def GatherFromList(metadata_entries, metadata_dict):
    """Parse key:value metadata entries into metadata_dict.

    Args:
      metadata_entries: iterable of 'key:value' strings.
      metadata_dict: dict updated in place.

    Raises:
      app.UsageError: on a malformed entry or a banned key.
    """
    for entry in metadata_entries:
        if ':' not in entry:
            raise app.UsageError(
                'Wrong syntax for metadata %s. Use key:value.' % entry)
        key, _, value = entry.partition(':')
        CheckKey(key, metadata_dict)
        # Some attributes may only be supplied from files, never inline.
        if key in MetadataFlagsProcessor._BANNED_ON_COMMAND_LINE:
            raise app.UsageError(
                'Metadata attribute %s cannot be given on command line.' % key)
        metadata_dict[key] = value
def _CheckKeyfileFlag(self):
    """Ensure a master key file path was supplied.

    Raises:
      app.UsageError: if self.master_key_filename is unset.
    """
    if self.master_key_filename:
        return
    raise app.UsageError(
        'Must specify a master key to encrypt/decrypt values.\n'
        'If you do not want any encryption/decryption to occur, consider\n'
        'using the Bigquery client. If you wish to generate a key_file during'
        ' a load command, just specify the path file to where the new key '
        'will be placed. This file must not exist beforehand.')
def Run(self, argv):
    """Run the interactive command loop.

    Args:
      argv: remaining args; must contain only the command name itself.

    Returns:
      1 if deserialization of a command failed; None otherwise.

    Raises:
      app.UsageError: if extra positional arguments were given.
    """
    super(Interactive, self).Run(argv)
    if len(argv) != 1:
        raise app.UsageError('Too many args: %s' % repr(argv))
    try:
        LoopInteractively()
    except serialization.DeserializationError as error:
        _Print(error)
        _Print('Aborting.')
        return 1
def main(unused_argv):
    """Generate a client library package from a discovery document.

    Exactly one of --api_name/--input and exactly one of
    --output_dir/--output_file must be supplied.

    Returns:
      0 on success.

    Raises:
      app.UsageError: on a missing or conflicting flag combination.
    """
    if not (FLAGS.api_name or FLAGS.input):
        raise app.UsageError('You must specify one of --api_name or --input')
    if FLAGS.api_name and FLAGS.input:
        raise app.UsageError(
            'You can only specify one of --api_name or --input')
    if not (FLAGS.output_dir or FLAGS.output_file):
        raise app.UsageError(
            'You must specify one of --output_dir or --output_file')
    if FLAGS.output_dir and FLAGS.output_file:
        raise app.UsageError(
            'You can only specify one of --output_dir or --output_file')
    if FLAGS.verbose:
        logging.basicConfig(level=logging.DEBUG)

    # Get the discovery document.
    if FLAGS.api_name:
        if not FLAGS.api_version:
            raise app.UsageError(
                'You must specify --api_version with --api_name')
        content = GetApiDiscovery(FLAGS.api_name, FLAGS.api_version)
    else:
        # FIX: use a context manager so the input file is closed even when
        # read() raises (the original used open/read/close without try).
        with open(FLAGS.input) as f:
            content = f.read()
    # OrderedDict keeps the document's member order stable in the output.
    discovery_doc = json.loads(content,
                               object_pairs_hook=collections.OrderedDict)
    package_writer = package_writer_foundry.GetPackageWriter(
        output_dir=FLAGS.output_dir, output_file=FLAGS.output_file,
        output_format=FLAGS.output_format)
    Generate(discovery_doc=discovery_doc,
             package_writer=package_writer,
             include_timestamp=FLAGS.include_timestamp,
             version_package=FLAGS.version_package,
             package_path=FLAGS.package_path,
             output_type=FLAGS.output_type,
             language=FLAGS.language,
             language_variant=FLAGS.language_variant)
    return 0
def RunWithArgs(self, identifier=''): """Show all information about an object. All fields that are encrypted are of type ciphertext. Examples: ebq show -j <job_id> ebq show dataset ebq show dataset.table """ # pylint: disable=g-doc-exception client = bq.Client.Get() if self.j: reference = client.GetJobReference(identifier) elif self.d: reference = client.GetDatasetReference(identifier) else: reference = client.GetReference(identifier) if reference is None: raise app.UsageError('Must provide an identifier for show.') object_info = client.GetObjectInfo(reference) # Remove prefixes that were prepended during load/query. object_info = show_lib.RewriteShowSchema(object_info) # The JSON formats are handled separately so that they don't print # the record as a list of one record. if FLAGS.format in ['prettyjson', 'json']: bq._PrintFormattedJsonObject(object_info) # pylint: disable=protected-access elif FLAGS.format in [None, 'sparse', 'pretty']: formatter = bq._GetFormatterFromFlags() # pylint: disable=protected-access bigquery_client.BigqueryClient.ConfigureFormatter( formatter, type(reference), print_format='show') object_info = bigquery_client.BigqueryClient.FormatInfoByKind( object_info) formatter.AddDict(object_info) print '%s %s\n' % (reference.typename.capitalize(), reference) formatter.Print() print if (isinstance(reference, bigquery_client.ApiClientHelper.JobReference) and object_info['State'] == 'FAILURE'): error_result = object_info['status']['errorResult'] error_ls = object_info['status'].get('errors', []) error = bigquery_client.BigqueryError.Create( error_result, error_result, error_ls) print 'Errors encountered during job execution. %s\n' % ( error, ) else: formatter = bq._GetFormatterFromFlags() # pylint: disable=protected-access formatter.AddColumns(object_info.keys()) formatter.AddDict(object_info) formatter.Print()
def GatherFromList(metadata_entries, metadata_dict):
    """Parse key:value metadata entries into metadata_dict.

    Args:
      metadata_entries: iterable of 'key:value' strings.
      metadata_dict: dict updated in place.

    Raises:
      app.UsageError: on a malformed entry.
    """
    for metadata in metadata_entries:
        if ':' not in metadata:
            # FIX: the original passed the entry as a second positional
            # argument to UsageError ("'... Use key:value.', metadata")
            # instead of %-formatting it into the message.
            raise app.UsageError(
                'Wrong syntax for metadata %s. Use key:value.' % metadata)
        key_value = metadata.split(':', 1)
        key = key_value[0]
        CheckKey(key, metadata_dict)
        value = ''
        if len(key_value) > 1:
            value = key_value[1]
        metadata_dict[key] = value
def main(argv):
    """Run the selected Kubernetes login flow for the configured cluster.

    Args:
      argv: positional args; none are expected.

    Raises:
      app.UsageError: if positional arguments were supplied.
    """
    if len(argv) != 1:
        raise app.UsageError("Expected 0 positional args")
    try:
        mode = FLAGS.mode
        if mode == "oob":
            run_oob_flow(cluster=FLAGS.cluster)
        elif mode == "callback":
            run_callback_flow(cluster=FLAGS.cluster)
        else:
            # Flag validation should make this unreachable.
            raise AssertionError("Internal error! Unknown mode: %s" % mode)
    except K8sLoginError as err:
        log.error(err)
        sys.exit(1)
def ProcessBigqueryrcSection(section_name, flag_values):
    """Read the bigqueryrc file into flag_values for section section_name.

    Args:
      section_name: if None, read the global flag settings.
      flag_values: FLAGS instance.

    Raises:
      UsageError: Unknown flag found.
    """
    bigqueryrc = GetBigqueryRcFilename()
    if not os.path.exists(bigqueryrc):
        return
    with open(bigqueryrc) as rcfile:
        # Lines before any [section] header are "global"; they apply when no
        # section_name was requested.
        in_section = not section_name
        for line in rcfile:
            if line.lstrip().startswith('[') and line.rstrip().endswith(']'):
                # Entering a new section: subsequent lines apply only if the
                # header matches the requested section.
                next_section = line.strip()[1:-1]
                in_section = section_name == next_section
                continue
            elif not in_section:
                continue
            elif line.lstrip().startswith('#') or not line.strip():
                # Skip comments and blank lines.
                continue
            flag, equalsign, value = line.partition('=')
            # if no value given, assume stringified boolean true
            if not equalsign:
                value = 'true'
            flag = flag.strip()
            value = value.strip()
            # Accept flags written with leading dashes (e.g. --flag=...).
            while flag.startswith('-'):
                flag = flag[1:]
            # We want flags specified at the command line to override
            # those in the flagfile.
            if flag not in flag_values:
                raise app.UsageError(
                    'Unknown flag %s found in bigqueryrc file in section %s' %
                    (flag, section_name if section_name else 'global'))
            if not flag_values[flag].present:
                flag_values[flag].Parse(value)
            else:
                flag_type = flag_values[flag].Type()
                # Multi-flags accumulate: keep the command-line values and
                # append the rc-file values after them.
                if flag_type.startswith('multi'):
                    old_value = getattr(flag_values, flag)
                    flag_values[flag].Parse(value)
                    setattr(flag_values, flag,
                            old_value + getattr(flag_values, flag))
def Handle(self, *args, **kwargs):
    """The point of entry to the command.

    This dispatches the subclass' HandleMove method.

    Raises:
      UsageError: If the service version is not v1beta14 or higher. The
        dependency on the version is due to the fact that snapshots were
        introduced in v1beta14.
    """
    if not self._IsUsingAtLeastApiVersion('v1beta14'):
        raise app.UsageError(
            'This command requires using API version v1beta14 or higher.')
    # Cache the project resource for use by the subclass' move logic.
    self._project_resource = self._projects_api.get(
        project=self._project).execute()
    self.HandleMove(*args, **kwargs)
    print('The move completed successfully.')
def Handle(self, instance_name):
    """Get the specified instance's serial port output.

    Args:
      instance_name: The name of the instance.

    Returns:
      The output of the instance's serial port.

    Raises:
      app.UsageError: if the API version is older than v1beta13.
    """
    if not self._IsUsingAtLeastApiVersion('v1beta13'):
        raise app.UsageError(
            'Serial port output is only supported in v1beta13 and above.')
    serial_request = self._instances_api.getSerialPortOutput(
        project=self._project,
        instance=self._DenormalizeResourceName(instance_name))
    return serial_request.execute()
def Handle(self, address_name):
    """Reserve the specified address.

    Args:
      address_name: The name of the address to add.

    Returns:
      The result of the reservation request.

    Raises:
      CommandError: If the command is unsupported in this API version.
      UsageError: If no address name is specified.
    """
    if not address_name:
        raise app.UsageError('Please specify an address name.')
    address_context = self._context_parser.ParseContextOrPrompt(
        'addresses', address_name)
    address_body = {
        'kind': self._GetResourceApiKind('address'),
        'name': address_context['address'],
        'description': self._flags.description,
    }
    # Only attach a specific IP when one was requested.
    if self._flags.source_address is not None:
        address_body['address'] = self._flags.source_address
    request = self.api.addresses.insert(
        project=address_context['project'], body=address_body,
        region=address_context['region'])
    if self._flags.wait_until_complete and not self._flags.synchronous_mode:
        LOGGER.warn(
            'wait_until_complete specified. Implying synchronous_mode.')
        self._flags.synchronous_mode = True
    return request.execute()
def main(_):
    """Diff two fonts or two directories of fonts given on the command line.

    Raises:
      app.UsageError: if fewer than two positional arguments were given.
    """
    if len(sys.argv) < 3:
        raise app.UsageError('Must pass at least two arguments, font file or font'
                             ' dir to diff')
    font1, font2 = sys.argv[1], sys.argv[2]
    both_dirs = os.path.isdir(font1) and os.path.isdir(font2)
    both_files = os.path.isfile(font1) and os.path.isfile(font2)
    # The two arguments must be of the same kind: both dirs or both files.
    if not both_dirs and not both_files:
        print('%s and %s must both point to directories or font files' % (
            font1, font2))
        sys.exit(1)
    if both_dirs:
        CompareDirs(font1, font2)
    if both_files:
        CompareFiles(font1, font2)
def build_request(self, task_api, flag_values):
    """Build a request to lease a pending task from the TaskQueue.

    Args:
      task_api: The handle to the task collection API.
      flag_values: The parsed command flags.

    Returns:
      A new leased task.

    Raises:
      app.UsageError: if --lease_secs was not supplied.
    """
    if not flag_values.lease_secs:
        raise app.UsageError('lease_secs must be specified')
    queue_name = build_cloudtasks_task_name(flag_values.project_name,
                                            flag_values.project_location,
                                            flag_values.taskqueue_name)
    lease_body = {
        'maxTasks': flag_values.num_tasks,
        'leaseDuration': '%ss' % flag_values.lease_secs,
        'responseView': 'FULL',
    }
    return task_api.lease(parent=queue_name, body=lease_body)
def Handle(self):
    """Set the cloud storage bucket where usage reports will be exported to.

    Returns:
      The result of setting the cloud storage bucket.

    Raises:
      UsageError: If the user does not specify a bucket.
    """
    if not self._flags['bucket'].present:
        raise app.UsageError('You must specify a bucket name. '
                             'To unset this feature run clearusagebucket')
    # The prefix is optional; pass None when it was not supplied.
    prefix = self._flags.prefix if self._flags['prefix'].present else None
    return self.ExecuteUsageExportRequest(self._flags.bucket, prefix)
def __init__(self):
    """Build an authorized Cloud Tasks API client from command-line flags.

    Raises:
      app.UsageError: if --project_name was not supplied.
    """
    if not FLAGS.project_name:
        raise app.UsageError('You must specify a project name'
                             ' using the "--project_name" flag.')
    discovery_uri = (FLAGS.api_host +
                     'discovery/v1/apis/{api}/{apiVersion}/rest')
    logger.info(discovery_uri)
    try:
        # Load the credentials from the service account credentials json
        # file, falling back to the standard environment variable.
        if not FLAGS.service_account_file:
            FLAGS.service_account_file = get_env_variable(
                'GOOGLE_APPLICATION_CREDENTIALS')
        credentials = ServiceAccountCredentials.from_json_keyfile_name(
            FLAGS.service_account_file, scopes=SCOPES)
        http = credentials.authorize(
            self._dump_request_wrapper(httplib2.Http()))
        self.task_api = build('cloudtasks', FLAGS.service_version, http=http,
                              discoveryServiceUrl=discovery_uri)
    except HttpError as http_error:
        # FIX: corrected the "gettin" typo in the logged error message.
        logger.error('Error getting task_api: %s' % http_error)
def __init__(self, repository, temp_dir, username='', password='', existing_checkout=''):
    """Create SvnClient.

    Args:
      repository: SvnRepository from which this client is checked out.
      temp_dir: temporary directory to use for our client.
      username: svn client username
      password: svn client password
      existing_checkout: an existing checkout we can use.
        (NB: if this directory is not actually an existing checkout, results
        will be wonky. But this can save much time. Recommended for
        debugging.)
    """
    self.repository_url = repository.Url()
    self.username = username
    self.password = password
    self.checked_out = False
    self.authenticated = False
    self._CheckSvnVersion()
    if existing_checkout:
        # Sanity check: the directory must look like an svn working copy.
        if not os.path.isdir(os.path.join(existing_checkout, '.svn')):
            raise app.UsageError(
                '%s does not appear to be an svn checkout' % existing_checkout)
        self.checkout = existing_checkout
        self.checked_out = True
    else:
        self.checkout = tempfile.mkdtemp(dir=temp_dir, prefix='svn_')
    # Normalize whichever checkout path we ended up with.
    self.checkout = os.path.abspath(self.checkout)
    self._repository = repository