def get_new_project_setting(self, setting_name, current_value=None):
    value = None
    if setting_name == 'project_home':
        value = cli.InputPrompt(setting_name, current_value).show()
    if setting_name == 'datasource_module':
        value = cli.MenuPrompt(
            'Datasource module',
            generate_module_options_from_directory(os.getcwd())).show()
    if setting_name == 'service_module':
        value = cli.MenuPrompt(
            'Service module',
            generate_module_options_from_directory(os.getcwd())).show()
    return value
def create_bucket(self):
    bucket_name = None
    quota = None
    num_replicas = None

    with mandatory_input(cli.InputPrompt('bucket name'),
                         warning_message='a bucket must have a name',
                         failure_message='bad or missing bucket name') as input_result:
        bucket_name = input_result.data

    bucket_type = cli.MenuPrompt('bucket type', BUCKET_TYPE_OPTIONS).show()

    with required_input_format(INT_REGEX,
                               cli.InputPrompt('bucket RAM quota (GB)'),
                               warning_message='RAM quota must be a positive integer',
                               failure_message='bad RAM quota value') as input_result:
        quota = input_result.data

    value_test_func = lambda x: re.compile(INT_REGEX).match(x) and int(x) < 5
    with constrained_input_value(value_test_func,
                                 cli.InputPrompt('number of replicas'),
                                 warning_message='# of replicas must be less than 5',
                                 failure_message='# of replicas outside of allowed limits') as input_result:
        num_replicas = input_result.data

    return CouchbaseBucketSpec(name=bucket_name,
                               type=bucket_type,
                               ram_quota=quota,
                               num_replicas=num_replicas)
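# --- Illustrative sketch (not part of the original source) ---
# create_bucket() above relies on validation context managers (mandatory_input,
# required_input_format, constrained_input_value) whose implementations are not
# shown in this excerpt. The sketch below is an assumption about their general
# shape: show the prompt, test the raw value, and expose it through a result
# object with a .data attribute, warning and failing when the test does not pass.
from collections import namedtuple
from contextlib import contextmanager

InputResult = namedtuple('InputResult', 'data')

@contextmanager
def constrained_input_value(value_test_func, prompt,
                            warning_message=None, failure_message=None):
    # hypothetical behavior: validate the prompt response before yielding it
    raw_value = prompt.show()
    if raw_value is None or not value_test_func(raw_value):
        if warning_message:
            print(warning_message)
        raise ValueError(failure_message or 'input value failed constraint check')
    yield InputResult(data=raw_value)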
def configure_datasource(self, cmd_args):
    '''Configures a new lookup datasource.'''
    if not self.project_home_contains_python_source():
        print('No python source files in project home directory "%s".'
              % self.globals['project_home'])
        return None

    # TODO: parameterize source dir for modules
    source_module = cli.MenuPrompt('datasource_module',
                                   generate_module_options(os.getcwd())).show()
    if source_module:
        source_name = cli.InputPrompt('Datasource name').show()
        if source_name:
            self.datasources[source_name] = source_module
def configure_datasource(self, cmd_args):
    '''Configures a new lookup datasource.'''
    if not self.project_home_contains_python_source():
        print('No python source files in project home directory "%s".'
              % self.project_home)
        return None

    if not self.datasource_module:
        print('The lookup datasource module has not been configured for this project.')
        should_set = cli.InputPrompt('Set it now (Y/n)?', 'y').show()
        if should_set == 'y':
            self.do_globals({'update': True})
        if not self.datasource_module:
            print('project datasource module not updated.')
            return

    source_module_name = self.datasource_module
    print('scanning python module %s for datasources...' % source_module_name)
    class_options = generate_class_options_from_module(source_module_name)
    if not len(class_options):
        print('\nThe python module "%s" contains no eligible types. Please check your code.\n'
              % source_module_name)
        return

    class_name = cli.MenuPrompt('Datasource name', class_options).show()
    if not class_name:
        return
    print('\nUsing datasource class %s from module %s.\n'
          % (class_name, source_module_name))

    datasource_label = cli.InputPrompt('Enter an alias for this datasource').show()
    if not datasource_label:
        return

    should_create = cli.InputPrompt('Register datasource "%s" using class %s (Y/n)?'
                                    % (datasource_label, class_name), 'y').show()
    if should_create == 'y':
        return DatasourceSpec(name=datasource_label, klass=class_name)
def do_delete(self, cmd_args):
    '''Usage: delete (cluster | bucket)'''
    if cmd_args['cluster']:
        print('### This action will delete your cluster config and all bucket configurations.')
        should_clear = cli.InputPrompt('Are you sure (y/N)', 'n').show()
        if should_clear == 'y':
            self.cluster_config = {}
    elif cmd_args['bucket']:
        if not len(self.cluster_config['buckets']):
            print('\nNo bucket specs created.')
            return
        options = self.generate_bucket_options()
        bucket_name = cli.MenuPrompt('Bucket to delete', options).show()
        print('### This action will delete the bucket config "%s".' % bucket_name)
        should_delete = cli.InputPrompt('Are you sure (y/N)', 'n').show()
        if should_delete == 'y':
            index = self.get_bucket_index_by_name(bucket_name)
            self.cluster_config['buckets'].pop(index)
def do_globals(self, cmd_args):
    '''Usage:
        globals
        globals update [<setting_name>]
    '''
    if cmd_args['update']:
        if not cmd_args['<setting_name>']:
            setting_name = cli.MenuPrompt('update project setting', GLOBAL_OPTIONS).show()
        else:
            setting_name = cmd_args['<setting_name>']
        new_setting_value = self.get_new_project_setting(
            setting_name, self.get_current_project_setting(setting_name))
        if new_setting_value:
            print('\nSetting project parameter "%s" to "%s".\n'
                  % (setting_name, new_setting_value))
            self.update_project_setting(setting_name, new_setting_value)

    print('_______________________\n')
    print('Global project settings:\n')
    print('\n'.join(['%s: %s' % (g.name, g.value) for g in self.globals]))
    print('_______________________\n')
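# --- Illustrative sketch (not part of the original source) ---
# GLOBAL_OPTIONS is not defined in this excerpt. Based on the setting names that
# get_new_project_setting() dispatches on, it is presumably a MenuPrompt option
# list along these lines (an assumption, not the actual constant):
#
# GLOBAL_OPTIONS = [
#     {'label': 'project home directory', 'value': 'project_home'},
#     {'label': 'datasource module', 'value': 'datasource_module'},
#     {'label': 'service module', 'value': 'service_module'}
# ]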
def select_ssh_key(self, keyset_dict):
    print('\n____ SSH keys in target directory %s:\n' % keyset_dict['location'])
    key_options = self.generate_key_options(keyset_dict)
    key_selection = cli.MenuPrompt('SSH keypair', key_options).show()
    return key_selection
def do_new(self, cmd_args):
    '''Create a new datamap or map-related object.

    Usage:
        new (map | datasource)
    '''
    if not self.project_home:
        print('\nTo create a new map or datasource, you must specify a valid project home directory.')
        should_update_globals = cli.InputPrompt('Run globals command now (Y/n)?', 'y').show()
        if should_update_globals == 'y':
            self.do_globals({'update': True})
            if not self.project_home:
                print('project home not updated.')
                return
            print('Returning to "new map" command...')
        else:
            return

    if not self.datasource_module:
        print('\nTo create a new map or datasource, you must specify the Python module containing your lookup-datasource class.')
        should_update_globals = cli.InputPrompt('Configure now (Y/n)?', 'y').show()
        if should_update_globals == 'y':
            self.do_globals({'update': True})
            if not self.datasource_module:
                print('datasource module not set.')
                return
            print('Returning to "new map" command...')
        else:
            return

    if cmd_args['datasource']:
        source_spec = self.configure_datasource(cmd_args)
        if source_spec:
            self.datasource_specs.append(source_spec)

    elif cmd_args['map']:
        if self.initial_datafile:
            print('\nThis mkmap CLI session was initialized with datafile %s.\n'
                  % self.initial_datafile)
            should_generate_from_datafile = cli.InputPrompt(
                'Use this datafile to generate a map (Y/n)?', 'y').show()
            if should_generate_from_datafile == 'y':
                separator_char = cli.InputPrompt('separator character used in this file').show()
                if not len(separator_char):  # in case whitespace input (which is valid) is stripped
                    print('Cannot create a CSV map without specifying a separator character.')
                    return
                print('Will use separator character: ' + separator_char)

                should_create_source = False
                if not self.get_current_project_setting('datasource_module'):
                    should_create_source = True
                    print('\nPlease set the lookup source to a valid Python module containing at least one Datasource class.\n')
                    datasource_module = cli.MenuPrompt(
                        'Datasource module',
                        generate_module_options_from_directory(os.getcwd())).show()
                    if not datasource_module:
                        print('Cannot continue without setting the project-wide datasource module.\n')
                        return
                    self.update_project_setting('datasource_module', datasource_module)

                if not len(self.datasource_specs):
                    create_response = cli.InputPrompt(
                        'No datasources registered. Register one now (Y/n)?', 'y').show()
                    if create_response == 'y':
                        should_create_source = True
                    else:
                        print('Cannot create a map without a datasource.')
                        return

                if should_create_source:
                    datasource_spec = self.configure_datasource(cmd_args)
                    if not datasource_spec:
                        print('Cannot create a map without a datasource.')
                        return
                    print('\nRegistered new datasource %s: %s.\n'
                          % (datasource_spec.name, datasource_spec.klass))
                    self.datasource_specs.append(datasource_spec)

                datasource_options = self.generate_datasource_options()
                selected_source = cli.MenuPrompt('Select a datasource', datasource_options).show()
                if not selected_source:
                    print('Cannot create a map without a datasource.')
                    return
                print('\nSelected datasource "%s" for this datamap.\n' % selected_source)

                map_name = cli.InputPrompt('Please enter a name for this datamap').show()
                if not map_name:
                    return

                print('Scanning initial file with separator character: %s' % separator_char)
                mapspec = create_default_map_from_csv_file(self.initial_datafile,
                                                           map_name,
                                                           selected_source,
                                                           separator_char)
                if not mapspec:
                    return

                confirm = cli.InputPrompt('Create datamap "%s" (Y/n)?' % map_name, 'y').show()
                if confirm == 'y':
                    print('\nDatamap "%s" created.\n' % map_name)
                    self.map_specs.append(mapspec)
                    # set this to None so that we don't visit this path again
                    self.initial_datafile = None
            else:
                self.initial_datafile = None
        else:
            map_name = cli.InputPrompt('alias for this new map').show()
            if not map_name:
                return
            mapspec = self.configure_map(map_name)
            if not mapspec:
                return
            self.map_specs.append(mapspec)
def create_ngst_datastore_prompt(live_config):
    menudata = []
    for dstore in live_config['datastores']:
        menudata.append({'label': dstore.alias, 'value': dstore.alias})
    return cli.MenuPrompt('select a datastore', menudata)
def create_map_editor_prompt(live_config):
    menudata = []
    for datasource in live_config.get('sources', []):
        menudata.append({'label': datasource.name, 'value': datasource.name})
    return cli.MenuPrompt('select lookup source', menudata)
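# --- Illustrative usage (not part of the original source) ---
# Both factories above build MenuPrompt option lists from dicts with 'label' and
# 'value' keys; calling .show() on the returned prompt yields the selected value.
# The SimpleNamespace stand-ins below are assumptions used only to make the
# example self-contained; the real live_config objects may differ.
if __name__ == '__main__':
    from types import SimpleNamespace

    demo_config = {
        'sources': [SimpleNamespace(name='customers'),
                    SimpleNamespace(name='orders')]
    }
    source_prompt = create_map_editor_prompt(demo_config)
    selected_source_name = source_prompt.show()
    print('selected lookup source: %s' % selected_source_name)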
    ''',
    'builder_func': new_xfile_fieldspec,
    'steps': [
        {
            'type': 'static_prompt',
            'field_name': 'field_name',
            'prompt': cli.InputPrompt('output field name'),
            'required': True
        },
        {
            'type': 'sequence_select',
            'field_name': 'field_params',
            'prompt': cli.MenuPrompt('field source:', XFILE_FIELD_SOURCE_TYPES),
            'required': True,
            'conditions': {
                'lambda': {'sequence': xfile_field_src_lambda},
                'record': {'sequence': xfile_field_src_record},
                'lookup': {'sequence': xfile_field_src_lookup}
            }
        }
    ]
}