Ejemplo n.º 1
0
def get_from_pipeline(componentMap, **arg):
    """Pop one message off the named pipeline and store it in the component map.

    Expects in ``arg``: pipelineName (key of the pipeline in the process
    component map) and componentName (key to store the message under).
    """
    target_component = arg['componentName']
    pipeline = pkg.components.processComponentMap[arg['pipelineName']]
    message = pipeline.get_message()
    log.info(f"Get {message} from pipeline. [size: {pipeline.qsize()}]")
    componentMap[target_component] = message
Ejemplo n.º 2
0
def initialize():
    """Load this process's rule file and run every configured initializer.

    Reads ``../../rules/<processName>.yaml`` into the process component map,
    then dispatches each entry under ``Initializers`` through its extension,
    finally logging the resulting process status.
    """
    component_map = pkg.components.processComponentMap
    rules_path = '../../rules/' + component_map["processName"] + '.yaml'
    component_map["rules"] = read_yaml(rules_path)
    for init_rule in component_map["rules"]["Initializers"].values():
        extension_map[init_rule["extension"]].process(
            function=init_rule["operation"],
            componentMap=component_map,
            **init_rule)
    log.info(component_map["status"])
Ejemplo n.º 3
0
def consumer(pipeline_name, consumer_name, event_name, worker_id):
    """Run this consumer's extension rules until the exit event fires and the pipeline drains.

    Looks up the consumer's rule set, its pipeline, and its exit event in the
    process component map, then repeatedly dispatches every configured
    extension rule against a worker-local component map.
    """
    process_map = pkg.components.processComponentMap
    consumer_rules = process_map["rules"][consumer_name]
    pipeline = process_map[pipeline_name]
    exit_event = process_map[event_name]
    local_components = {}
    # Keep consuming until BOTH the exit event is set and the pipeline is empty.
    while not (exit_event.is_set() and pipeline.empty()):
        log.info(f"Starting consumer {consumer_name}-{worker_id}")
        for rule in consumer_rules["extensionList"].values():
            extension_map[rule["extension"]].process(
                function=rule["operation"], componentMap=local_components, **rule)
    log.info(f"{consumer_name}-{worker_id} received EXIT event. Exiting")
def send(componentMap, **arg):
    """Publish a component's JSON payload plus a timestamp message to a Kafka topic.

    Expects in ``arg``: processComponentName (key of the Kafka producer in the
    process component map), componentName (key of the payload in componentMap),
    and topicName (destination topic).
    """
    processComponentName = arg["processComponentName"]
    now = datetime.datetime.now()
    # Human-readable local timestamp, sent as its own companion message.
    nowDict = {"time": now.strftime("%c")}
    msg = json.dumps(componentMap[arg["componentName"]])
    msgtime = json.dumps(nowDict)
    log.info(f"Msg to send: {msg}")
    kafkaProducer = pkg.components.processComponentMap[processComponentName]
    # Fire both sends, then flush so the messages are on the wire before
    # returning. (The futures returned by send() were previously assigned to
    # an unused local and never awaited — dropped them.)
    kafkaProducer.send(arg["topicName"], msg.encode('ascii'))
    kafkaProducer.send(arg["topicName"], msgtime.encode('ascii'))
    kafkaProducer.flush()
Ejemplo n.º 5
0
def fileSystemEventHandler(event):
    """Process a newly created CSV file through this process's rule chain, then delete it.

    Each CSV row becomes a fresh component map holding the record under the
    configured componentName; every extension rule for the current process is
    dispatched against it. The source file is removed once fully processed.
    """
    log.info(f"Processing file {event.src_path}")
    pName = pkg.components.processComponentMap["processName"]
    processRules = pkg.components.processComponentMap["rules"][pName]
    # Brief pause so the writer has (presumably) finished flushing the file
    # before we read it — TODO confirm this race is the reason for the sleep.
    time.sleep(2)
    with open(event.src_path, 'r') as file:
        for rec in csv.DictReader(file):
            componentMap = {processRules["componentName"]: rec}
            for rule in processRules["extensionList"].values():
                # (Removed the unused `extname` local that shadowed this lookup.)
                extension_map[rule["extension"]].process(
                    function=rule["operation"], componentMap=componentMap, **rule)
    os.remove(event.src_path)
Ejemplo n.º 6
0
def fileWatcher():
    """Watch the process's configured directory and dispatch newly created files.

    Wires ``fileSystemEventHandler`` to the watchdog on_created hook, watches
    the directory named in this process's rules recursively, and spins until
    interrupted with Ctrl-C, at which point the observer is stopped and joined.
    """
    handler = FileSystemEventHandler()
    handler.on_created = fileSystemEventHandler
    process_name = pkg.components.processComponentMap["processName"]
    watch_dir = pkg.components.processComponentMap["rules"][process_name]["directory"]
    observer = Observer()
    observer.schedule(handler, watch_dir, recursive=True)
    observer.start()
    log.info("File watcher started")
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
        observer.join()
Ejemplo n.º 7
0
def add_to_pipeline(componentMap, **arg):
    """Push every element of a list component onto a bounded pipeline.

    Blocks (sleeping 1s at a time) whenever the pipeline already holds
    pipelineSize messages, so producers throttle instead of overflowing.

    Expects in ``arg``: pipelineName (key of the pipeline in the process
    component map), pipelineSize (soft capacity), and listComponentName
    (key of the source list in componentMap).
    """
    pipeline = pkg.components.processComponentMap[arg['pipelineName']]
    pipeline_size = arg['pipelineSize']
    # A missing list component simply means there is nothing to enqueue.
    list_component = componentMap.get(arg['listComponentName'], [])
    length = len(list_component)
    i = 0
    while i < length:
        if pipeline.qsize() < pipeline_size:
            pipeline.set_message(list_component[i])
            log.info(f"Added {list_component[i]} to pipeline [i={i}, size: {pipeline.qsize()}]")
            i = i + 1
        else:
            # Capacity reached — back off and retry the same element.
            log.info(f"Pipeline is full, sleeping for 1s [i={i}, size: {pipeline.qsize()}]")
            time.sleep(1)
Ejemplo n.º 8
0
def print(componentMap, **arg):
    """Log a named component from the map, or the whole map when no name is given.

    NOTE(review): this shadows the builtin ``print`` within this module; the
    name is kept because rule dispatch appears to resolve operations by name.
    """
    if "componentName" not in arg:
        log.info(componentMap)
        return
    name = arg["componentName"]
    if name in componentMap:
        log.info(componentMap[name])
    else:
        log.info("Component " + name + " does not exist in the map")
def produce_employee_profile(componentMap, **arg):
    """Simulate fetching a list of employee ids and publish it into the component map.

    After a mocked 5-second API delay, stores ids 100..119 (a stand-in for a
    real db/api result) under ``arg['listComponent']`` in componentMap.
    """
    log.info("get the employee id list here, api fetch started")
    time.sleep(5)  # simulate an api fetch call delay
    log.info("api fetch complete.")
    # Simulated db/api fetch result (replaces the manual append loop).
    arr = list(range(100, 120))
    log.info(f"Adding {str(arr)} to component map from producer")
    componentMap[arg['listComponent']] = arr
def initialize(componentMap, **arg):
    """Placeholder initializer hook; currently only logs that it ran."""
    log.info("Initialize something here")
def consume_company_details(componentMap, **arg):
    """Simulate fetching company details for the employee id pulled off the pipeline."""
    emp_id = componentMap['employeePipelineElement']
    log.info(f"get the company details here for the employee  id {emp_id}")
    # A real implementation would make another api call with the id here.
    time.sleep(3)
Ejemplo n.º 12
0
def set_event(componentMap, **arg):
    """Fire the shutdown event associated with the named pipeline.

    The event is looked up in the process component map under the derived
    key ``<pipelineName>-event``.
    """
    event_name = arg['pipelineName'] + '-event'
    exit_event = pkg.components.processComponentMap[event_name]
    log.info(f'Setting event for {event_name}')
    exit_event.set()
Ejemplo n.º 13
0
if __name__ == "__main__":
    initialize()
    # Worker counts and pipeline wiring come from the 'ProducerConsumer' rules.
    processRules = pkg.components.processComponentMap["rules"][
        'ProducerConsumer']
    componentMap = {}
    pipeline_name = processRules['pipelineName']
    producers = processRules['producers']
    consumers = processRules['consumers']
    event_name = pipeline_name + '-event'
    pkg.components.processComponentMap['employee-pipeline'] = Pipeline()
    pkg.components.processComponentMap[
        'employee-pipeline-event'] = threading.Event()
    # One thread per configured producer/consumer worker (replaces the two
    # manual accumulation loops).
    max_worker_count = sum(producers.values()) + sum(consumers.values())
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=max_worker_count) as executor:
        for name, count in producers.items():
            # range(1, count+1) so worker ids are 1-based; `worker_id` also
            # avoids shadowing the builtin `id`.
            for worker_id in range(1, count + 1):
                log.info(f"submitting producer {name}-{worker_id}")
                executor.submit(producer, pipeline_name, name, event_name,
                                worker_id)

        for name, count in consumers.items():
            for worker_id in range(1, count + 1):
                log.info(f"submitting consumer {name}-{worker_id}")
                executor.submit(consumer, pipeline_name, name, event_name,
                                worker_id)
        # Signal EXIT so consumers drain the pipeline and stop; the executor
        # context then joins all workers.
        pkg.components.processComponentMap[event_name].set()
    log.info(f"Max worker count is {max_worker_count}")