def stop_process(options):
    """Stop specific daemon"""
    from system import process_helper
    from supervisor import supervisor_helper as helper
    from system.process_context import ProcessContext
    from supervisor.supervisor_constants import PROCESS_SUPERVISOR
    from constants import PROCESS_LAUNCH_PY

    log = ProcessContext.get_logger(PROCESS_LAUNCH_PY)
    current_box = helper.get_box_id(log)

    # Supervisor-managed shutdown: flip the process-state flag in the box
    # configuration and let the Supervisor take care of the actual stop.
    if options.supervisor is True and options.app != PROCESS_SUPERVISOR:
        from db.model import box_configuration
        from db.dao.box_configuration_dao import BoxConfigurationDao

        sys.stdout.write('INFO: Marking %r to be managed by Supervisor \n' % options.app)
        dao = BoxConfigurationDao(log)
        configuration = dao.get_one(current_box)
        configuration.set_process_state(options.app, box_configuration.STATE_OFF)
        dao.update(configuration)
        return

    # Direct shutdown: locate the PID and terminate the process ourselves.
    try:
        pid = process_helper.get_process_pid(options.app)
        # short-circuit keeps poll_process from running when there is no PID
        alive = pid is not None and process_helper.poll_process(options.app) is not False
        if not alive:
            sys.stderr.write('ERROR: Process %r is already terminated %r\n' % (options.app, pid))
            sys.exit(1)

        process_helper.kill_process(options.app)
    except Exception as e:
        # SystemExit from sys.exit above is not an Exception and passes through
        sys.stderr.write('Exception on killing %s : %s \n' % (options.app, str(e)))
        traceback.print_exc(file=sys.stderr)
def stop_process(options):
    """Stop specific daemon"""
    from system import process_helper
    from supervisor import supervisor_helper as helper
    from system.process_context import ProcessContext
    from supervisor.supervisor_constants import PROCESS_SUPERVISOR
    from constants import PROCESS_LAUNCH_PY

    logger = ProcessContext.get_logger(PROCESS_LAUNCH_PY)
    box_id = helper.get_box_id(logger)
    if options.supervisor is True and options.app != PROCESS_SUPERVISOR:
        # supervisor mode: record STATE_OFF in this box's configuration and
        # let the Supervisor daemon carry out the actual shutdown
        from db.model import box_configuration
        from db.dao.box_configuration_dao import BoxConfigurationDao

        message = 'INFO: Marking %r to be managed by Supervisor \n' % options.app
        sys.stdout.write(message)
        bc_dao = BoxConfigurationDao(logger)
        box_config = bc_dao.get_one(box_id)
        box_config.set_process_state(options.app, box_configuration.STATE_OFF)
        bc_dao.update(box_config)
        return

    try:
        # direct mode: resolve the PID and terminate the process ourselves
        pid = process_helper.get_process_pid(options.app)
        if pid is None or process_helper.poll_process(options.app) is False:
            message = 'ERROR: Process %r is already terminated %r\n' % (options.app, pid)
            sys.stderr.write(message)
            sys.exit(1)

        process_helper.kill_process(options.app)
    except Exception as e:
        # sys.exit above raises SystemExit, which is not caught by this handler
        sys.stderr.write('Exception on killing %s : %s \n' % (options.app, str(e)))
        traceback.print_exc(file=sys.stderr)
def query_configuration(options):
    """ Queries process state """
    from system import process_helper

    if not options.supervisor:
        # single-process mode: just poll the status of the requested process
        process_helper.poll_process(options.app)
        return

    # supervisor mode: dump the whole box configuration to the console
    from db.dao.box_configuration_dao import BoxConfigurationDao
    from supervisor import supervisor_helper as helper
    from system.process_context import ProcessContext
    from constants import PROCESS_LAUNCH_PY

    logger = ProcessContext.get_logger(PROCESS_LAUNCH_PY)
    box_id = helper.get_box_id(logger)
    bc_dao = BoxConfigurationDao(logger)
    sys.stdout.write('\nConfiguration for BOX_ID=%r:\n' % box_id)
    processes = bc_dao.get_one(box_id).get_process_list()
    for i, process in enumerate(processes, 1):
        sys.stdout.write('%d\t%r:%r \n' % (i, process, processes[process]))
    sys.stdout.write('\n')
def __init__(self, process_name):
    """ renames process to SynergyYYY and creates PID file

    :param process_name: logical name of the daemon; used for the logger
        lookup, the OS process title and the PID file
    """
    self.process_name = process_name
    self.logger = ProcessContext.get_logger(process_name)

    # process-related activities
    # retitle the OS-level process so it shows as <prefix><name> in ps output
    setproctitle.setproctitle(settings['process_prefix'] + self.process_name)
    ProcessContext.create_pid_file(self.process_name)
def create_and_insert_unit_of_work(process_name, start_id, end_id, state=unit_of_work.STATE_REQUESTED,
                                   timeperiod='INVALID_TIMEPERIOD'):
    """ method creates and inserts a unit_of_work into DB
    :return id of the created object in the db"""
    logger = ProcessContext.get_logger(process_name)
    unit = create_unit_of_work(process_name, start_id, end_id, timeperiod, state)
    return UnitOfWorkDao(logger).insert(unit)
def clean_session_entries():
    """ removes the single-session unit-test fixtures from the datasource """
    logger = ProcessContext.get_logger(PROCESS_UNIT_TEST)
    connection = ds_manager.ds_factory(logger).connection(COLLECTION_SINGLE_SESSION)

    total = base_fixtures.TOTAL_ENTRIES
    session_id_field = raw_data.FAMILY_USER_PROFILE + '.' + raw_data.SESSION_ID
    for index in range(total):
        composite_key = generate_session_composite_key(index, total)
        query = {raw_data.KEY: composite_key[0],
                 raw_data.TIMEPERIOD: composite_key[1],
                 session_id_field: 'session_id_%s' % str(index)}
        connection.remove(query)
def __init__(self, process_name, process_id=None):
    """ renames process to SynergyYYY and creates PID file """
    self.process_name = process_name
    self.process_id = process_id
    self.logger = ProcessContext.get_logger(process_name, process_id=self.process_id)

    # process-related activities:
    # OS-level title is <prefix><name>[<id>]; the id suffix appears only when given
    id_suffix = str(self.process_id) if self.process_id else ''
    setproctitle.setproctitle(settings['process_prefix'] + self.process_name + id_suffix)
    ProcessContext.create_pid_file(self.process_name, process_id=self.process_id)
def create_unit_of_work(process_name, first_object_id, last_object_id):
    """ method is used to insert unit_of_work """
    logger = ProcessContext.get_logger(process_name)
    source_collection = ProcessContext.get_source_collection(process_name)
    target_collection = ProcessContext.get_target_collection(process_name)

    # assemble a fresh REQUESTED entry covering [first_object_id, last_object_id]
    uow = UnitOfWorkEntry()
    uow.set_timestamp('UNIT_TEST')
    uow.set_start_id(first_object_id)
    uow.set_end_id(last_object_id)
    uow.set_source_collection(source_collection)
    uow.set_target_collection(target_collection)
    uow.set_state(UnitOfWorkEntry.STATE_REQUESTED)
    uow.set_process_name(process_name)
    uow.set_number_of_retries(0)
    return unit_of_work_helper.insert(logger, uow)
def create_site_stats(collection, composite_key_function, statistics_klass, seed='RANDOM_SEED_OBJECT'):
    """ seeds the datasource with per-site statistics fixtures;
    deterministic for a given seed
    :return: list of ids of the inserted documents """
    logger = ProcessContext.get_logger(PROCESS_UNIT_TEST)
    ds = ds_manager.ds_factory(logger)
    random.seed(seed)

    object_ids = []
    for i in range(TOTAL_ENTRIES):
        key = composite_key_function(i, TOTAL_ENTRIES)

        site_stat = statistics_klass()
        site_stat.key = (key[0], key[1])
        site_stat.number_of_visits = random.randint(1, 1000)
        site_stat.total_duration = random.randint(0, 100)
        site_stat.os = _generate_entries('os_', 5, i)
        site_stat.browsers = _generate_entries('browser_', 5, i)
        # fixed histograms shared by every fixture document
        site_stat.screen_res = {'(320, 240)': 3, '(640, 480)': 5, '(1024, 960)': 7, '(1280, 768)': 9}
        site_stat.languages = {'ca_en': 3, 'ca_fr': 5, 'ua_uk': 7, 'us_en': 9}
        site_stat.countries = {'ca': 3, 'fr': 5, 'uk': 7, 'us': 9}

        object_ids.append(ds.insert(site_stat.document))
    return object_ids
def run_tests(options):
    """ runs the unit-test suite and logs PASS/FAIL from the exit code

    :param options: parsed command-line options (not read directly here)
    :raises SystemExit: re-raised after the test outcome is logged
    """
    import unittest
    import settings
    settings.enable_test_mode()

    # NOTE(review): 'args' is neither a parameter nor a local here -
    # presumably a module-level variable holding the remaining CLI
    # arguments; confirm against the module scope
    argv = [sys.argv[0]] + args
    try:
        # unittest.main always terminates via SystemExit when it finishes
        unittest.main(module=None, defaultTest='__main__.load_all_tests', argv=argv)
    except SystemExit as e:
        from system.process_context import ProcessContext
        from constants import PROCESS_LAUNCH_PY

        logger = ProcessContext.get_logger(PROCESS_LAUNCH_PY)
        # exit code 0 means every test passed
        if e.code == 0:
            logger.info('PASS')
        else:
            logger.error('FAIL')
        raise
def start_process(options, args):
    """Start up specific daemon """
    import psutil
    from system import process_helper
    from supervisor import supervisor_helper as helper
    from system.process_context import ProcessContext
    from supervisor.supervisor_constants import PROCESS_SUPERVISOR
    from constants import PROCESS_LAUNCH_PY

    log = ProcessContext.get_logger(PROCESS_LAUNCH_PY)
    current_box = helper.get_box_id(log)

    # Supervisor-managed start: flip the process-state flag in the box
    # configuration and let the Supervisor take care of the actual launch.
    if options.supervisor is True and options.app != PROCESS_SUPERVISOR:
        from db.model import box_configuration
        from db.dao.box_configuration_dao import BoxConfigurationDao

        sys.stdout.write('INFO: Marking %r to be managed by Supervisor \n' % options.app)
        dao = BoxConfigurationDao(log)
        configuration = dao.get_one(current_box)
        configuration.set_process_state(options.app, box_configuration.STATE_ON)
        dao.update(configuration)
        return

    # Direct start: refuse to double-start, then spawn or run interactively.
    try:
        pid = process_helper.get_process_pid(options.app)
        if pid is not None and psutil.pid_exists(pid):
            sys.stderr.write('ERROR: Process %r is already running with pid %r\n' % (options.app, pid))
            sys.exit(1)

        if options.interactive:
            process_starter.start_by_process_name(options.app, args)
        else:
            # triggers when options.interactive is undefined or False
            process_helper.start_process(options.app, args)
    except Exception as e:
        # SystemExit from sys.exit above is not an Exception and passes through
        sys.stderr.write('Exception on starting %s : %s \n' % (options.app, str(e)))
        traceback.print_exc(file=sys.stderr)
def create_session_stats(composite_key_function, seed='RANDOM_SEED_OBJECT'):
    """ seeds the datasource with single-session fixtures;
    deterministic for a given seed
    :return: list of ids of the inserted documents """
    logger = ProcessContext.get_logger(PROCESS_UNIT_TEST)
    ss_dao = SingleSessionDao(logger)
    time_array = ['20010303102210', '20010303102212', '20010303102215', '20010303102250']
    random.seed(seed)

    object_ids = []
    for i in range(TOTAL_ENTRIES):
        key = composite_key_function(i, TOTAL_ENTRIES)

        session = SingleSession()
        session.key = (key[0], key[1])
        session.session_id = 'session_id_%s' % str(i)
        session.ip = '192.168.0.2'

        # screen resolution varies on divisibility by 3, then by 5
        if i % 3 == 0:
            session.screen_res = (240, 360)
        elif i % 5 == 0:
            session.screen_res = (360, 480)
        else:
            session.screen_res = (760, 980)

        # alternate between two fixed browser/OS/locale profiles
        even = i % 2 == 0
        session.os = 'Linux' if even else 'Windows'
        session.browser = 'FF %s' % str(i % 4) if even else 'IE %s' % str(i % 9)
        session.language = 'en_ca' if even else 'ua_uk'
        session.country = 'ca' if even else 'eu'

        session.total_duration = random.randint(0, 200)
        session.number_of_pageviews = random.randint(1, 5)
        for index in range(random.randint(1, 4)):
            session.number_of_entries = index + 1
            session.set_entry_timestamp(index, time_array[index])

        object_ids.append(ss_dao.insert(session))
    return object_ids
def start_process(options, args):
    """Start up specific daemon """
    import psutil
    from system import process_helper
    from supervisor import supervisor_helper as helper
    from system.process_context import ProcessContext
    from supervisor.supervisor_constants import PROCESS_SUPERVISOR
    from constants import PROCESS_LAUNCH_PY

    logger = ProcessContext.get_logger(PROCESS_LAUNCH_PY)
    box_id = helper.get_box_id(logger)
    if options.supervisor is True and options.app != PROCESS_SUPERVISOR:
        # supervisor mode: record STATE_ON in this box's configuration and
        # let the Supervisor daemon carry out the actual launch
        from db.model import box_configuration
        from db.dao.box_configuration_dao import BoxConfigurationDao

        message = 'INFO: Marking %r to be managed by Supervisor \n' % options.app
        sys.stdout.write(message)
        bc_dao = BoxConfigurationDao(logger)
        box_config = bc_dao.get_one(box_id)
        box_config.set_process_state(options.app, box_configuration.STATE_ON)
        bc_dao.update(box_config)
        return

    try:
        # direct mode: guard against double-start, then spawn the daemon
        pid = process_helper.get_process_pid(options.app)
        if pid is not None:
            if psutil.pid_exists(pid):
                message = 'ERROR: Process %r is already running with pid %r\n' % (options.app, pid)
                sys.stderr.write(message)
                sys.exit(1)

        if not options.interactive:
            # this block triggers if the options.interactive is not defined or is False
            process_helper.start_process(options.app, args)
        else:
            process_starter.start_by_process_name(options.app, args)
    except Exception as e:
        # sys.exit above raises SystemExit, which is not caught by this handler
        sys.stderr.write('Exception on starting %s : %s \n' % (options.app, str(e)))
        traceback.print_exc(file=sys.stderr)
# NOTE(review): this chunk starts mid-statement - the 'datefmt=...' line below
# closes a logging.Formatter constructed on lines outside this view, inside the
# __init__ of a stdout-redirecting logger class whose header is also not
# visible; indentation below is a reconstruction - confirm against the full file
        datefmt='%Y-%m-%d %H:%M:%S')
        roto_file_handler.setFormatter(roto_file_formatter)
        self.logger.addHandler(roto_file_handler)

        # ATTENTION: redirecting stdout logger
        # stderr should be redirected to stdout by Supervisor
        sys.stdout = self

    def get_logger(self):
        # accessor for the underlying logging.Logger instance
        return self.logger

    def write(self, msg, level=logging.INFO):
        """ method implements stream write interface, allowing to redirect stdout to logger """
        # skip empty/whitespace-only writes so the log is not flooded with blanks
        if msg is not None and len(msg.strip()) > 0:
            self.logger.log(level, msg)

    def flush(self):
        """ method implements stream flush interface, allowing to redirect stdout to logger """
        for handler in self.logger.handlers:
            handler.flush()


if __name__ == '__main__':
    # module self-test: emit one record via the logger and one via plain print
    from system.process_context import ProcessContext

    process_name = 'TestAggregator'
    logger = ProcessContext.get_logger(process_name)
    logger.info('test_message')
    # NOTE(review): Python 2 print statement - this module targets Python 2
    print 'regular print message'
    sys.stdout.flush()
def setUp(self):
    """ builds a ContinuousPipeline around a mocked Timetable """
    self.logger = ProcessContext.get_logger(PROCESS_UNIT_TEST)
    self.time_table_mocked = mock(Timetable)
    # any tree lookup by name returns a fresh mock, so the pipeline under
    # test can navigate the timetable without a real tree
    when(self.time_table_mocked).get_tree(any(str)).thenReturn(mock())
    self.pipeline_real = ContinuousPipeline(self.logger, self.time_table_mocked)
def setUp(self):
    """ extends the parent fixture setup with a unit-test logger """
    super(TestPublishersPool, self).setUp()
    self.logger = ProcessContext.get_logger(PROCESS_UNIT_TEST)
# NOTE(review): this chunk is the interior of a stdout-redirecting logger class
# (the three methods below) followed by the module's self-test entry point; the
# class header is outside this view and the indentation is a reconstruction -
# confirm against the full file
    def write(self, msg, level=logging.INFO):
        """ method implements stream write interface, allowing to redirect stdout to logger """
        # skip empty/whitespace-only writes so the log is not flooded with blanks
        if msg is not None and len(msg.strip()) > 0:
            self.logger.log(level, msg)

    def flush(self):
        """ method implements stream flush interface, allowing to redirect stdout to logger """
        for handler in self.logger.handlers:
            handler.flush()

    def isatty(self):
        """ is the sys.stdout attached to the terminal?
        python -c "import sys; print(sys.stdout.isatty())" (should write True)
        python -c "import sys; print(sys.stdout.isatty())" | grep . (should write False).
        :return: False, indicating that the output is pipped or redirected """
        return False


if __name__ == '__main__':
    # module self-test: emit one record via the logger and one via plain print
    from system.process_context import ProcessContext
    from tests.ut_process_context import PROCESS_UNIT_TEST, register_unit_test_context

    register_unit_test_context()
    logger = ProcessContext.get_logger(PROCESS_UNIT_TEST)
    logger.info('test_message')
    print('regular print message')
    sys.stdout.flush()