def createPostgresDB(self):
    """Construct and return a handle to the local 'scratch' PostgreSQL database.

    Sets db_username/db_password/db_name on self as a side effect; the
    password is resolved from the $POSTGRESQL_PASSWORD environment variable.
    """
    self.db_username = '******'
    self.db_password = common.load_config_var('$POSTGRESQL_PASSWORD')
    self.db_name = 'scratch'
    db_host = 'localhost'
    db_port = 5432
    return sqldbx.PostgreSQLDatabase(db_host, self.db_name, db_port)
def load_config_dictionary(config_dict, yaml_config):
    """Resolve every value in config_dict into a concrete value.

    Each value may be a macro reference (executed via eval_macro), a template
    (env vars resolved via resolve_env_vars_in_template), or a plain value /
    env-var reference (resolved via common.load_config_var).

    :param config_dict: mapping of config keys to raw values (may be falsy)
    :param yaml_config: parsed YAML config, passed through to macros
    :return: dict of resolved key/value pairs (empty if config_dict is falsy)
    """
    data = {}
    if not config_dict:
        return data
    for key, value in config_dict.items():
        raw_value = str(value)
        macro_rx_match = MACRO_RX.match(raw_value)
        template_rx_match = TEMPLATE_RX.match(raw_value)
        # if we have a macro as the value, load and execute it
        if macro_rx_match:
            # BUGFIX: lstrip()/rstrip() strip *character sets*, not literal
            # prefixes/suffixes -- a macro name beginning or ending with any
            # character found in the delimiter strings would be mangled.
            # Slice the exact delimiters off instead.
            macro_function = raw_value[len(MACRO_PREFIX):]
            if macro_function.endswith(MACRO_SUFFIX):
                macro_function = macro_function[:-len(MACRO_SUFFIX)]
            data[key] = eval_macro(macro_function, yaml_config)
        # if we have a template as the value, populate it
        elif template_rx_match:
            template_str = raw_value[len(TEMPLATE_PREFIX):]
            if template_str.endswith(TEMPLATE_SUFFIX):
                template_str = template_str[:-len(TEMPLATE_SUFFIX)]
            data[key] = resolve_env_vars_in_template(template_str)
        # if the value is not a macro or a template, see if it's an env var
        else:
            data[key] = common.load_config_var(value)
    return data
def update_project_setting(self, name, value):
    # Replace the named global project parameter in self.globals in place.
    # 'project_home' is special-cased: its value is validated through
    # common.load_config_var() before being stored, and a missing environment
    # variable aborts the update with a message instead of raising to callers.
    # All other parameter names are stored unconditionally.
    for index in range(len(self.globals)):
        if self.globals[index].name == name:
            if name == 'project_home':
                try:
                    if not value:
                        print(
                            '### WARNING: setting project parameter %s to an empty value.' % name)
                    # validation only -- the resolved value is discarded;
                    # raises MissingEnvironmentVarException if an env var
                    # referenced by `value` is unset
                    common.load_config_var(value)
                    self.globals[index] = ParamSpec(name=name, value=value)
                except common.MissingEnvironmentVarException as err:
                    print(
                        '\nThe environment variable %s has not been set.' % value)
                    return
            else:
                self.globals[index] = ParamSpec(name=name, value=value)
            # parameter names are assumed unique; stop at the first match
            break
def load_profiler_classes(cls, yaml_config):
    """Load and return the profiler classes declared in the YAML config.

    :param yaml_config: parsed YAML config with 'globals' and 'profilers' sections
    :return: dict mapping profiler alias -> loaded class object
    """
    profiler_classes = {}
    # Hoisted out of the loop: the profiler module name is a global setting,
    # not a per-profiler one. (Also removed a duplicate unused local that
    # shadowed this same lookup.)
    profiler_module_name = yaml_config['globals']['profiler_module']
    # NOTE(review): the resolved value was discarded in the original too; the
    # call is kept because load_config_var may raise on an unset env var,
    # i.e. it acts as validation of 'project_home' -- confirm this side
    # effect is intended.
    common.load_config_var(yaml_config['globals']['project_home'])
    for profiler_alias, profiler_config in yaml_config['profilers'].items():
        classname = profiler_config['class']
        profiler_classes[profiler_alias] = common.load_class(
            classname, profiler_module_name)
    return profiler_classes
def main(args):
    # Entry point: connect to Postgres pub/sub, subscribe to the channel named
    # on the command line, and print each event payload as it arrives.
    # Blocks forever in the event loop.
    print(args)
    # database credentials come from required environment variables
    local_env = common.LocalEnvironment('PGSQL_USER', 'PGSQL_PASSWORD')
    local_env.init()
    pgsql_user = local_env.get_variable('PGSQL_USER')
    pgsql_password = local_env.get_variable('PGSQL_PASSWORD')
    yaml_config = common.read_config_file(args['<initfile>'])
    print(common.jsonpretty(yaml_config))
    db_host = yaml_config['globals']['database_host']
    db_name = yaml_config['globals']['database_name']
    pubsub = pgpubsub.connect(host=db_host,
                              user=pgsql_user,
                              password=pgsql_password,
                              database=db_name)
    channel_id = args['<channel>']
    if not yaml_config['channels'].get(channel_id):
        raise NoSuchEventChannel(channel_id)
    # make the project's handler module importable, then import it by name
    handler_module_name = yaml_config['globals']['handler_module']
    project_dir = common.load_config_var(yaml_config['globals']['project_dir'])
    sys.path.append(project_dir)
    handlers = __import__(handler_module_name)
    handler_function_name = yaml_config['channels'][channel_id][
        'handler_function']
    if not hasattr(handlers, handler_function_name):
        raise NoSuchEventHandler(handler_function_name, handler_module_name)
    handler_function = getattr(handlers, handler_function_name)
    # NOTE(review): handler_function and service_objects are resolved but
    # never used below -- the loop only prints payloads. Confirm whether the
    # handler was meant to be invoked per event.
    service_objects = common.ServiceObjectRegistry(
        snap.initialize_services(yaml_config, logger))
    pubsub.listen(channel_id)
    print('listening on channel "%s"...' % channel_id)
    for event in pubsub.events():
        print(event.payload)
def configure_logging(yaml_config):
    """Initialize process-wide logging from the YAML config (idempotent).

    Renders the Jinja logging-config template with the project's logfile
    path, writes it to <project_dir>/logging_config.yaml for reference, and
    feeds the parsed result to logging.config.dictConfig. Subsequent calls
    are no-ops once the module-level logging_config is populated.
    """
    global logging_config
    # guard clause: logging is configured at most once per process
    if logging_config:
        return
    project_dir = common.load_config_var(
        yaml_config['globals']['project_directory'])
    logfile_full_path = os.path.join(
        project_dir, yaml_config['globals']['logfile'])
    j2env = jinja2.Environment()
    # (removed an unused common.JinjaTemplateManager local from the original)
    log_config_template = j2env.from_string(config_templates.LOGGING_CONFIG)
    log_config_file = os.path.join(project_dir, 'logging_config.yaml')
    rendered_config = log_config_template.render(
        log_filename=logfile_full_path)
    # persist the rendered config alongside the project for inspection
    with open(log_config_file, 'w') as f:
        f.write(rendered_config)
    # parse the string we just rendered directly instead of re-reading
    # the file we just wrote (same content, one less I/O round-trip)
    logging_config = yaml.safe_load(rendered_config)
    logging.config.dictConfig(logging_config)
def build_project(schema_filename: str, yaml_config: dict) -> GProjectConfig:
    """Assemble a GProjectConfig from the parsed YAML config and schema file.

    Populates home dir, schema file, resolver/handler module names, and the
    object-type, query, and mutation specs declared in the config.
    """
    globals_section = yaml_config['globals']
    config = GProjectConfig()
    config.set_home_dir(common.load_config_var(globals_section['project_home']))
    config.set_schema_file(schema_filename)
    config.set_resolver_module(globals_section['resolver_module'])
    config.set_handler_module(globals_section['handler_module'])
    for type_spec in load_type_specs(yaml_config):
        config.add_object_type(type_spec.name)
    for query_spec in load_query_specs(yaml_config):
        config.add_query_spec(query_spec)
    for mutation_spec in load_mutation_specs(yaml_config):
        config.add_mutation_spec(mutation_spec)
    return config
def initialize_services(yaml_config_obj):
    """Instantiate every service object declared in the config.

    Each entry under 'service_objects' names a class (loaded from the
    globals-level 'service_module') and a list of init params whose values
    are resolved through common.load_config_var (env-var expansion).

    :return: dict mapping service name -> instantiated service object
    """
    services = {}
    configured_services = yaml_config_obj.get('service_objects')
    for service_name in (configured_services or []):
        config_segment = yaml_config_obj['service_objects'][service_name]
        # resolve each declared init param into a constructor kwarg
        init_kwargs = {
            param['name']: common.load_config_var(param['value'])
            for param in (config_segment['init_params'] or [])
        }
        klass = common.load_class(config_segment['class'],
                                  yaml_config_obj['globals']['service_module'])
        services[service_name] = klass(**init_kwargs)
    return services
def main(args):
    """Poll an SQS queue forever, spawning a subprocess per received message.

    Reads queue source settings from the config file named in args, resolves
    the configured handler function from the project's consumer module, then
    long-polls SQS: each message is handed to a child Process and deleted
    from the queue. Runs until interrupted.
    """
    if args['--version']:
        print(show_version())
        return
    verbose_mode = False
    if args['--verbose']:
        verbose_mode = True
    configfile = args['<configfile>']
    yaml_config = common.read_config_file(configfile)
    source_name = args['<source_name>']
    if not yaml_config['sources'].get(source_name):
        # BUGFIX: the original raised this message with an unfilled "%s"
        # placeholder; interpolate the missing source name.
        raise Exception(
            'No queue source "%s" defined. Please check your config file.'
            % source_name)
    service_tbl = snap.initialize_services(yaml_config)
    service_registry = common.ServiceObjectRegistry(service_tbl)
    source_config = yaml_config['sources'][source_name]
    # Create SQS client
    region = source_config['region']
    polling_interval = int(source_config['polling_interval_seconds'])
    sqs = boto3.client('sqs', region_name=region)
    queue_url = common.load_config_var(source_config['queue_url'])
    msg_handler_name = source_config['handler']
    # make the project's consumer module importable, then load the handler
    project_dir = common.load_config_var(
        yaml_config['globals']['project_home'])
    sys.path.append(project_dir)
    msg_handler_module = yaml_config['globals']['consumer_module']
    msg_handler_func = common.load_class(msg_handler_name, msg_handler_module)
    child_procs = []
    print('### initiating polling loop.')
    # loop forever
    while True:
        current_time = datetime.datetime.now().isoformat()
        if verbose_mode:
            print('### checking SQS queue %s for messages at %s...' %
                  (queue_url, current_time),
                  file=sys.stderr)
        # Receive message from SQS queue
        response = sqs.receive_message(
            QueueUrl=queue_url,
            AttributeNames=['SentTimestamp'],
            MaxNumberOfMessages=1,
            MessageAttributeNames=['All'],
            # duration (seconds) received messages stay hidden from other
            # consumers after this ReceiveMessage call
            VisibilityTimeout=30,
            # long-poll: wait up to this many seconds for a message to arrive
            # before returning (returns sooner if one is available; returns an
            # empty message list if the wait expires)
            WaitTimeSeconds=3
        )
        inbound_msgs = response.get('Messages') or []
        if not len(inbound_msgs):
            if verbose_mode:
                print(
                    '### No messages pending, sleeping %d seconds before re-try...'
                    % polling_interval)
            time.sleep(polling_interval)
            continue
        # BUGFIX: reap finished children so child_procs (and the zombie
        # processes behind them) don't grow without bound across a
        # long-running poll loop. is_alive() reaps exited processes.
        child_procs = [p for p in child_procs if p.is_alive()]
        for message in inbound_msgs:
            receipt_handle = message['ReceiptHandle']
            current_time = datetime.datetime.now().isoformat()
            print('### spawning message processor at %s...' % current_time,
                  file=sys.stderr)
            try:
                # TODO: can we pickle a ServiceObjectRegistry?
                p = Process(target=msg_handler_func,
                            args=(message, receipt_handle, service_registry))
                p.start()
                child_procs.append(p)
                print('### Queued message-handling subprocess with PID %s.'
                      % p.pid,
                      file=sys.stderr)
                # Delete received message from queue
                # NOTE(review): deletion happens as soon as the handler process
                # starts, not when it succeeds -- a handler crash loses the
                # message. Confirm this at-most-once delivery is intended.
                sqs.delete_message(QueueUrl=queue_url,
                                   ReceiptHandle=receipt_handle)
            except Exception as err:
                print('!!! Error processing message with receipt: %s'
                      % receipt_handle,
                      file=sys.stderr)
                print(err)
def project_home(self):
    """Return the current project's home directory with env vars resolved."""
    raw_setting = self.get_current_project_setting('project_home')
    return common.load_config_var(raw_setting)
def process_template_input_params(param_dict, macro_args_dict, macro_module_name=None):
    """Resolve template input parameters into concrete values.

    Each value may be a macro reference (executed via eval_macro), a template
    (env vars resolved via resolve_env_vars_in_template), or a plain value /
    env-var reference (resolved via common.load_config_var).

    :param param_dict: mapping of parameter names to raw values (may be falsy)
    :param macro_args_dict: arguments passed through to macro functions
    :param macro_module_name: default module to load macro functions from;
        a dotted macro reference overrides it for that parameter only
    :return: dict of resolved key/value pairs (empty if param_dict is falsy)
    :raises Exception: if a macro is referenced without a module name
    """
    data = {}
    if not param_dict:
        return data
    for key, value in param_dict.items():
        raw_value = str(value)
        macro_rx_match = MACRO_RX.match(raw_value)
        template_rx_match = TEMPLATE_RX.match(raw_value)
        # if we have a macro as the value, load and execute it
        if macro_rx_match:
            macro_function_name = strip_tags(MACRO_PREFIX_RX, MACRO_SUFFIX_RX,
                                             macro_rx_match.group())
            # BUGFIX: resolve the module name into a per-parameter local
            # instead of overwriting the macro_module_name parameter --
            # previously, a single dotted macro ref ("modname.funcname")
            # silently changed the module used for every subsequent parameter
            # in the loop.
            module_name = macro_module_name
            if '.' in macro_function_name:
                # Some Mercury template-aware tools (such as beekeeper) let
                # the user give a macro module name at the command line or in
                # the config file. In explicit-command mode (no config file),
                # "dot notation" may also specify the module:
                #
                #   --params=<name>:~macro[modname.funcname]
                #
                # means "invoke macro function 'funcname' from Python module
                # 'modname'". When both an explicit macro module name AND a
                # dotted reference are given, the module named WITHIN the
                # macro reference takes precedence.
                #
                # Note: this decision is subject to change; it balances
                # ease-of-use against subtlety, and ambiguous macro refs may
                # be disallowed later. --DT
                macro_tokens = macro_function_name.split('.')
                module_name = macro_tokens[0]
                macro_function_name = macro_tokens[1]
            # With no dot in the reference, a module name must have been
            # supplied explicitly -- otherwise error out (politely).
            elif not module_name:
                raise Exception(
                    'Template input params specify a macro, but no macro module name was supplied. Please check your command line.'
                )
            data[key] = eval_macro(macro_function_name, macro_args_dict,
                                   module_name)
        # if we have a template as the value, populate it
        elif template_rx_match:
            template_str = strip_tags(TEMPLATE_PREFIX_RX, TEMPLATE_SUFFIX_RX,
                                      template_rx_match.group())
            data[key] = resolve_env_vars_in_template(template_str)
        # if the value is not a macro or a template, see if it's an env var
        else:
            data[key] = common.load_config_var(value)
    return data