def execute_workflow(generate_config, component=None):
    """Run a full Obsei source -> analyzer -> sink workflow.

    :param generate_config: dict-style configuration consumed by
        ``ObseiConfiguration``; must declare ``source_config``, ``source``,
        ``analyzer``, ``analyzer_config``, ``sink_config`` and ``sink`` keys.
    :param component: optional UI container (Streamlit-like, exposing
        ``empty()``/``code()``) used to render progress; when ``None`` no
        progress is shown.
    :raises Exception: re-raises whatever a workflow step raises, after
        updating the progress widget with a failure message.
    """
    progress_show = None
    if component:
        progress_show = component.empty()
        progress_show.code("🏄🏄🏄 Processing 🐢🐢🐢")
    try:
        obsei_configuration = ObseiConfiguration(configuration=generate_config)

        # Build every workflow participant from the configuration.
        source_config = obsei_configuration.initialize_instance("source_config")
        source = obsei_configuration.initialize_instance("source")
        analyzer = obsei_configuration.initialize_instance("analyzer")
        analyzer_config = obsei_configuration.initialize_instance("analyzer_config")
        sink_config = obsei_configuration.initialize_instance("sink_config")
        sink = obsei_configuration.initialize_instance("sink")

        # Fetch -> analyze -> deliver.
        source_response_list = source.lookup(source_config)
        analyzer_response_list = analyzer.analyze_input(
            source_response_list=source_response_list,
            analyzer_config=analyzer_config,
        )
        sink.send_data(analyzer_response_list, sink_config)

        if progress_show:
            progress_show.code("🎉🎉🎉 Processing Complete!! 🍾🍾🍾")
    except Exception as ex:
        if progress_show:
            progress_show.code(f"❗❗❗ Processing Failed!! 😞😞😞 \n 👉 ({str(ex)})")
        # Bare raise keeps the original traceback intact (raise ex would
        # re-anchor the traceback at this line).
        raise
def execute_workflow(generate_config, component=None, log_components=None):
    """Run a full Obsei source -> analyzer -> sink workflow with optional
    per-stage logging into UI components.

    :param generate_config: dict-style configuration consumed by
        ``ObseiConfiguration``; must declare ``source_config``, ``source``,
        ``analyzer``, ``analyzer_config``, ``sink_config`` and ``sink`` keys.
    :param component: optional UI container (Streamlit-like, exposing
        ``empty()``/``code()``) used to render progress.
    :param log_components: optional mapping with ``"source"``, ``"analyzer"``
        and ``"sink"`` entries, each exposing ``write()``. Fix: this argument
        defaults to None but was previously dereferenced unconditionally,
        raising TypeError whenever it was omitted — stage logging is now
        skipped when it is not supplied.
    :raises Exception: re-raises whatever a workflow step raises, after
        updating the progress widget with a failure message.
    """
    progress_show = None
    if component:
        progress_show = component.empty()
        progress_show.code("🏄🏄🏄 Processing 🐢🐢🐢")
    try:
        obsei_configuration = ObseiConfiguration(configuration=generate_config)

        # Build every workflow participant from the configuration.
        source_config = obsei_configuration.initialize_instance("source_config")
        source = obsei_configuration.initialize_instance("source")
        analyzer = obsei_configuration.initialize_instance("analyzer")
        analyzer_config = obsei_configuration.initialize_instance("analyzer_config")
        sink_config = obsei_configuration.initialize_instance("sink_config")
        sink = obsei_configuration.initialize_instance("sink")

        source_response_list = source.lookup(source_config)
        if log_components:
            log_components["source"].write(
                [vars(response) for response in source_response_list]
            )

        analyzer_response_list = analyzer.analyze_input(
            source_response_list=source_response_list,
            analyzer_config=analyzer_config,
        )
        if log_components:
            log_components["analyzer"].write(
                [vars(response) for response in analyzer_response_list]
            )

        sink_response_list = sink.send_data(analyzer_response_list, sink_config)
        if log_components:
            if sink.TYPE == 'Pandas':
                # The Pandas sink returns a DataFrame-like object that the
                # component can render directly.
                log_components["sink"].write(sink_response_list)
            elif sink_response_list is not None:
                log_components["sink"].write(
                    [vars(response) for response in sink_response_list]
                )
            else:
                log_components["sink"].write("No Data")

        if progress_show:
            progress_show.code("🎉🎉🎉 Processing Complete!! 🍾🍾🍾")
    except Exception as ex:
        if progress_show:
            progress_show.code(f"❗❗❗ Processing Failed!! 😞😞😞 \n 👉 ({str(ex)})")
        # Bare raise keeps the original traceback intact.
        raise
def get_obsei_config(current_path, file_name):
    """Load and return the raw configuration mapping from a YAML file.

    :param current_path: directory containing the configuration file.
    :param file_name: name of the YAML configuration file.
    :return: the parsed configuration held by ``ObseiConfiguration``.
    """
    loaded = ObseiConfiguration(
        config_path=current_path,
        config_filename=file_name,
    )
    return loaded.configuration
import logging
import sys

from obsei.configuration import ObseiConfiguration

logger = logging.getLogger(__name__)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)

# Load the example workflow configuration from ../example/sdk.yaml
# (paths are relative to the process working directory).
obsei_configuration = ObseiConfiguration(
    config_path="../example",
    config_filename="sdk.yaml",
)

# Source configurations: where observations are fetched from.
play_store_source_config = obsei_configuration.get_play_store_source_config()
twitter_source_config = obsei_configuration.get_twitter_source_config()

# Sink configurations: where analyzed data is delivered.
http_sink_config = obsei_configuration.get_http_sink_config()
daily_get_sink_config = obsei_configuration.get_daily_get_sink_config()

# Analyzer and its configuration (sentiment/classification etc.,
# per the YAML file — exact behavior depends on the config contents).
text_analyzer = obsei_configuration.get_analyzer()
analyzer_config = obsei_configuration.get_analyzer_config()

# Requires a running Elasticsearch instance, e.g.:
# docker run -d --name elasticsearch -p 9200:9200 -e "discovery.type=single-node" elasticsearch:7.9.2
elasticsearch_sink_config = obsei_configuration.get_elasticsearch_sink_config()

# Requires a running Jira instance; start one locally with
# `atlas-run-standalone --product jira`
jira_sink_config = obsei_configuration.get_jira_sink_config()
def config_init() -> None:
    """Initialize the module-level ``obsei_config`` singleton.

    The configuration location is taken from the OBSEI_CONFIG_PATH and
    OBSEI_CONFIG_FILENAME environment variables, falling back to
    ``../config/rest.yaml`` when they are unset.
    """
    global obsei_config
    config_path = os.getenv('OBSEI_CONFIG_PATH', "../config")
    config_filename = os.getenv('OBSEI_CONFIG_FILENAME', "rest.yaml")
    obsei_config = ObseiConfiguration(
        config_path=config_path,
        config_filename=config_filename,
    )
import logging
import sys

from obsei.configuration import ObseiConfiguration

logger = logging.getLogger(__name__)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)

# Load the example workflow configuration from ../example/sdk.yaml
# (paths are relative to the process working directory).
obsei_configuration = ObseiConfiguration(
    config_path="../example",
    config_filename="sdk.yaml",
)

# Analyzer and its configuration.
text_analyzer = obsei_configuration.initialize_instance("analyzer")
analyzer_config = obsei_configuration.initialize_instance("analyzer_config")

# NOTE(review): variable is named *_source_config but is built from the
# "slack_sink_config" YAML key — confirm which one is intended.
slack_source_config = obsei_configuration.initialize_instance("slack_sink_config")
slack_sink = obsei_configuration.initialize_instance("slack_sink")

# NOTE(review): the *_config variables below are initialized from YAML keys
# without the "_config" suffix ("play_store_source", "twitter_source",
# "http_sink", "daily_get_sink") — verify the keys match the YAML file.
play_store_source_config = obsei_configuration.initialize_instance("play_store_source")
twitter_source_config = obsei_configuration.initialize_instance("twitter_source")
http_sink_config = obsei_configuration.initialize_instance("http_sink")
daily_get_sink_config = obsei_configuration.initialize_instance("daily_get_sink")

# Requires a running Elasticsearch instance, e.g.:
# docker run -d --name elasticsearch -p 9200:9200 -e "discovery.type=single-node" elasticsearch:7.9.2
elasticsearch_sink_config = obsei_configuration.initialize_instance(
    "elasticsearch_sink"
)

# Requires a running Jira instance; start one locally with
# `atlas-run-standalone --product jira`
jira_sink_config = obsei_configuration.initialize_instance("jira_sink")
import logging

from obsei.configuration import ObseiConfiguration

logger = logging.getLogger(__name__)

# Extract config via yaml file using `config_path` and `config_filename`
# (ObseiConfiguration falls back to its own defaults when neither is given).
obsei_configuration = ObseiConfiguration()

# Initialize objects using configuration
source_config = obsei_configuration.initialize_instance("source_config")
source = obsei_configuration.initialize_instance("source")
analyzer = obsei_configuration.initialize_instance("analyzer")
analyzer_config = obsei_configuration.initialize_instance("analyzer_config")
sink_config = obsei_configuration.initialize_instance("sink_config")
sink = obsei_configuration.initialize_instance("sink")

# This will fetch information from configured source ie twitter, app store etc
source_response_list = source.lookup(source_config)
for idx, source_response in enumerate(source_response_list):
    logger.info(f"source_response#'{idx}'='{vars(source_response)}'")

# This will execute analyzer (Sentiment, classification etc) on source data
# with provided analyzer_config.
# Analyzer will write its output to `segmented_data` inside `analyzer_response`
analyzer_response_list = analyzer.analyze_input(
    source_response_list=source_response_list,
    analyzer_config=analyzer_config,
)
for idx, analyzer_response in enumerate(analyzer_response_list):
    # Fix: this log line previously carried the copy-pasted label
    # "source_response#" although it logs analyzer responses.
    logger.info(f"analyzer_response#'{idx}'='{vars(analyzer_response)}'")