def load_datasource(self, src_name, transform_config, service_object_registry):
    src_module_name = self._transform_config['globals']['lookup_source_module']
    datasource_class_name = self._transform_config['sources'][src_name]['class']
    klass = common.load_class(datasource_class_name, src_module_name)
    #init_params = self._transform_config['sources'][src_name].get('init_params', {})
    return klass(service_object_registry)
def load_datasource(self, src_name, transform_config, service_object_registry):
    src_module_name = self._transform_config['globals']['datasource_module']
    datasource_class_name = self._transform_config['sources'][src_name]['class']
    klass = common.load_class(datasource_class_name, src_module_name)
    return klass(service_object_registry)
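# The loaders above delegate dynamic class resolution to common.load_class,
# whose implementation is not shown here. A minimal sketch of such a helper,
# assuming it simply imports the named module and returns the named attribute
# (illustrative only; the real helper may differ):

import importlib

def load_class_sketch(classname, module_name):
    '''Import module_name and return the attribute named classname.'''
    module = importlib.import_module(module_name)
    return getattr(module, classname)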
def load_line_array_generator(module_name):
    '''Load a function called "line_array_generator" (taking a **kwargs argument)
    from the Python module passed as a parameter.
    '''
    # woof, bad function name -- this function will actually load
    # any module attribute, not just a class. In this case, of course,
    # it's loading a function
    return common.load_class('line_array_generator', module_name)
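# The attribute loaded above is expected to be a generator function named
# line_array_generator that accepts keyword arguments. Its exact contract is
# not shown in this code; the sketch below is a hypothetical example of such a
# generator yielding one list of fields per input line, assuming a "filename"
# keyword argument:

import csv

def line_array_generator(**kwargs):
    '''Hypothetical example: yield one list of fields per line of a CSV file.'''
    with open(kwargs['filename'], newline='') as srcfile:
        for row in csv.reader(srcfile):
            yield row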
def eval_macro(macro_funcname: str, macro_args: dict, macro_module_name: str):
    macro_func = None
    try:
        macro_func = common.load_class(macro_funcname, macro_module_name)
    except AttributeError:
        raise Exception(
            f'The macro function "{macro_funcname}" was not found in the specified macro_module {macro_module_name}.')
    return macro_func(**macro_args)
def load_profiler_classes(cls, yaml_config):
    profiler_classes = {}
    profiler_module = yaml_config['globals']['profiler_module']
    # project_home is resolved here but not used further in this function
    project_home = common.load_config_var(yaml_config['globals']['project_home'])

    for profiler_alias in yaml_config['profilers']:
        profiler_config = yaml_config['profilers'][profiler_alias]
        classname = profiler_config['class']
        klass = common.load_class(classname, profiler_module)
        profiler_classes[profiler_alias] = klass

    return profiler_classes
def load_datastore(self, name, transform_config, service_object_registry):
    ds_module_name = transform_config['globals']['datastore_module']
    if not transform_config['datastores'].get(name):
        raise DatastoreNotRegisteredUnderName(name)

    datastore_class_name = transform_config['datastores'][name]['class']
    klass = common.load_class(datastore_class_name, ds_module_name)

    init_params = {}
    for param in transform_config['datastores'][name].get('init_params') or []:
        init_params[param['name']] = param['value']

    return klass(service_object_registry, **init_params)
def eval_macro(macro_funcname, yaml_config, **kwargs):
    macro_module = yaml_config['globals'].get('macro_module')
    if not macro_module:
        raise Exception(
            'You have defined a macro, but there is no macro_module in the globals section. Please check your config file.')
    macro_func = None
    try:
        macro_func = common.load_class(macro_funcname, macro_module)
    except AttributeError:
        raise Exception(
            f'The code macro "{macro_funcname}" was not found in the specified macro_module {macro_module}.')
    return macro_func(**kwargs)
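# Both eval_macro variants resolve a function by name from a user-supplied
# macro module and call it. The contents of such a module are not shown here;
# a hypothetical macro function living in the module named by
# globals.macro_module might look like this (illustrative, zero-argument):

import datetime

def today_iso():
    '''Hypothetical config macro: return today's date as an ISO-8601 string.'''
    return datetime.date.today().isoformat()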
def load_datasource(self, src_name, transform_config, service_object_registry):
    src_module_name = self._transform_config['globals']['datasource_module']
    datasource_class_name = self._transform_config['sources'][src_name]['class']

    module_path_tokens = src_module_name.split('.')
    module = None
    if len(module_path_tokens) == 1:
        module = __import__(module_path_tokens[0])
    else:
        module = __import__(src_module_name)
        for index in range(1, len(module_path_tokens)):
            module = getattr(module, module_path_tokens[index])

    if not hasattr(module, datasource_class_name):
        raise NonexistentDatasource(datasource_class_name, src_module_name)

    klass = common.load_class(datasource_class_name, src_module_name)
    return klass(service_object_registry)
def initialize_datastores(transform_config, service_object_registry):
    datastores = {}
    ds_module_name = transform_config['globals']['datastore_module']
    if not len(transform_config['datastores']):
        return datastores

    for datastore_name in transform_config['datastores']:
        datastore_class_name = transform_config['datastores'][datastore_name]['class']
        klass = common.load_class(datastore_class_name, ds_module_name)

        init_params = {}
        param_config_section = transform_config['datastores'][datastore_name]['init_params']
        if param_config_section:
            #for param in transform_config['datastores'][datastore_name].get('init_params', []):
            for param in param_config_section:
                init_params[param['name']] = param['value']

        datastore_instance = klass(service_object_registry, **init_params)
        datastores[datastore_name] = datastore_instance

    return datastores
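# initialize_datastores expects each entry in the "datastores" config section
# to supply a class name plus an optional init_params list of name/value
# pairs, which it flattens into constructor keyword arguments. A minimal
# sketch of that mapping, using a plain dict in place of the parsed YAML and a
# stand-in datastore class (both hypothetical):

class CsvFileDatastore:
    '''Stand-in datastore: records its constructor keyword arguments.'''
    def __init__(self, service_object_registry, **kwargs):
        self.services = service_object_registry
        self.settings = kwargs

datastore_section = {
    'output_file': {
        'class': 'CsvFileDatastore',
        'init_params': [
            {'name': 'filename', 'value': 'output.csv'},
            {'name': 'delimiter', 'value': '|'}
        ]
    }
}

init_params = {p['name']: p['value']
               for p in datastore_section['output_file']['init_params']}
store = CsvFileDatastore(None, **init_params)
# store.settings == {'filename': 'output.csv', 'delimiter': '|'}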
def initialize_services(yaml_config_obj):
    service_objects = {}
    configured_services = yaml_config_obj.get('service_objects')
    if configured_services is None:
        configured_services = []

    for service_object_name in configured_services:
        config_segment = yaml_config_obj['service_objects'][service_object_name]
        service_object_classname = config_segment['class']
        service_module_name = yaml_config_obj['globals']['service_module']
        parameter_array = config_segment['init_params'] or []

        param_tbl = {}
        for param in parameter_array:
            param_name = param['name']
            raw_param_value = param['value']
            param_value = common.load_config_var(raw_param_value)
            param_tbl[param_name] = param_value

        klass = common.load_class(service_object_classname, service_module_name)
        service_object = klass(**param_tbl)
        service_objects[service_object_name] = service_object

    return service_objects
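# initialize_services reads a "service_objects" section keyed by service name,
# resolving each class against globals.service_module and passing each
# init_params value through common.load_config_var before handing the result
# to the constructor as keyword arguments. A config fragment with that shape
# (section names taken from the code above, service names and values
# hypothetical) might look like:
#
#   globals:
#     service_module: my_project_services
#
#   service_objects:
#     postgres:
#       class: PostgreSQLServiceObject
#       init_params:
#         - name: host
#           value: localhost
#         - name: database
#           value: my_database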
def main(args):
    if args['--version']:
        print(show_version())
        return

    verbose_mode = False
    if args['--verbose']:
        verbose_mode = True

    configfile = args['<configfile>']
    yaml_config = common.read_config_file(configfile)

    source_name = args['<source_name>']
    if not yaml_config['sources'].get(source_name):
        raise Exception('No queue source "%s" defined. Please check your config file.' % source_name)

    service_tbl = snap.initialize_services(yaml_config)
    service_registry = common.ServiceObjectRegistry(service_tbl)

    source_config = yaml_config['sources'][source_name]

    # Create SQS client
    region = source_config['region']
    polling_interval = int(source_config['polling_interval_seconds'])
    sqs = boto3.client('sqs', region_name=region)
    queue_url = common.load_config_var(source_config['queue_url'])

    msg_handler_name = source_config['handler']
    project_dir = common.load_config_var(yaml_config['globals']['project_home'])
    sys.path.append(project_dir)
    msg_handler_module = yaml_config['globals']['consumer_module']
    msg_handler_func = common.load_class(msg_handler_name, msg_handler_module)

    child_procs = []
    print('### initiating polling loop.')

    # loop forever
    while True:
        current_time = datetime.datetime.now().isoformat()
        if verbose_mode:
            print('### checking SQS queue %s for messages at %s...' % (queue_url, current_time),
                  file=sys.stderr)

        # Receive message from SQS queue
        response = sqs.receive_message(
            QueueUrl=queue_url,
            AttributeNames=['SentTimestamp'],
            MaxNumberOfMessages=1,
            MessageAttributeNames=['All'],
            VisibilityTimeout=30,
            # VisibilityTimeout (integer) -- The duration (in seconds) that the received messages
            # are hidden from subsequent retrieve requests after being retrieved by a ReceiveMessage request.
            WaitTimeSeconds=3
            # WaitTimeSeconds (integer) -- The duration (in seconds) for which the call waits for a message
            # to arrive in the queue before returning. If a message is available, the call returns sooner
            # than WaitTimeSeconds. If no messages are available and the wait time expires, the call
            # returns successfully with an empty list of messages.
        )

        inbound_msgs = response.get('Messages') or []
        if not len(inbound_msgs):
            if verbose_mode:
                print('### No messages pending, sleeping %d seconds before re-try...' % polling_interval)
            time.sleep(polling_interval)
            continue

        for message in inbound_msgs:
            receipt_handle = message['ReceiptHandle']
            current_time = datetime.datetime.now().isoformat()
            print('### spawning message processor at %s...' % current_time, file=sys.stderr)
            try:
                # TODO: can we pickle a ServiceObjectRegistry?
                p = Process(target=msg_handler_func,
                            args=(message, receipt_handle, service_registry))
                p.start()
                child_procs.append(p)
                print('### Queued message-handling subprocess with PID %s.' % p.pid, file=sys.stderr)

                # Delete received message from queue
                sqs.delete_message(QueueUrl=queue_url, ReceiptHandle=receipt_handle)
            except Exception as err:
                print('!!! Error processing message with receipt: %s' % receipt_handle, file=sys.stderr)
                print(err)
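# The consumer loop above hands each received message to a handler function
# loaded by name from globals.consumer_module and invoked in a child process
# with (message, receipt_handle, service_registry). A hypothetical handler
# matching that calling convention might look like this:

import json
import sys

def route_order_message(message, receipt_handle, service_registry):
    '''Hypothetical SQS message handler: parse the message body and log it.'''
    body = json.loads(message.get('Body', '{}'))
    print('handling message with receipt %s: %s' % (receipt_handle, body),
          file=sys.stderr)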
def load_fact_pk_sqltype_class(self, classname, **kwargs):
    pk_type_module = self._yaml_config['globals']['primary_key_datatype_module']
    klass = common.load_class(classname, pk_type_module)
    return klass