def sync():
    """Execute one full synchronisation run.

    Builds the task list from the bitbucket pull status, logs the planned
    work, runs the prepare tasks and then the sync tasks in the configured
    type order, and raises if any file failed, files were ignored in debug
    mode, or ROLLBACK is set.  Status saving, notification and task-registry
    cleanup always happen in the ``finally`` block, even on failure.
    """
    if DEBUG:
        logger.debug("Run in debug mode.")
    if INCLUDE:
        logger.debug("Only the files({}) will be processed.".format(
            ",".join(INCLUDE)))
    try:
        # Let every plugin module initialise itself before any task runs.
        for init_method in module_init_handlers:
            init_method()
        pull_status = SlaveSyncStatus.get_bitbucket_status()
        get_tasks(pull_status)
        # Best-effort notification: a failure here must not abort the sync.
        # The original bare ``except: pass`` hid real errors - log instead.
        try:
            slave_sync_notify.SlaveServerSyncNotify.send_last_sync_time(
                pull_status)
        except Exception:
            logger.warning("Failed to send last sync time. %s",
                           traceback.format_exc())
        logger.info("HG_NODE: {}".format(HG_NODE))
        # Log the execution plan before doing any work.
        for task_type in ordered_sync_task_type:
            for task_name, task in sync_tasks[task_type].items():
                if isinstance(task, list):
                    # shared task: several job files mapped to one task name
                    logger.info("Shared Task : {0} {1} = [{2}]".format(
                        task_type, task_name,
                        ",".join([t[0]['job_file'] for t in task])))
                else:
                    # unshared task
                    logger.info("Task : {0} {1} = {2}".format(
                        task_type, task_name, task[0]['job_file']))
        for task in notify_tasks:
            logger.info("Task : {0} {1} = {2}".format(
                "send_notify", taskname(task[0], task[1]),
                task[0]['job_file']))
        # prepare tasks
        for task in prepare_tasks:
            execute_prepare_task(*task)
        # execute tasks in the configured type order
        for task_type in ordered_sync_task_type:
            for task in sync_tasks[task_type].values():
                if isinstance(task, list):
                    # shared task
                    for shared_task in task:
                        execute_task(*shared_task)
                else:
                    # unshared task
                    execute_task(*task)
        if SlaveSyncStatus.all_succeed():
            logger.info("All done!")
        else:
            raise Exception("Some files({0}) are processed failed.".format(
                ' , '.join([
                    s.file
                    for s in SlaveSyncStatus.get_failed_status_objects()
                ])))
        if ignore_files:
            raise Exception(
                "{} files are ignored in debug mode,rollback!".format(
                    ignore_files))
        if ROLLBACK:
            raise Exception("Rollback for testing")
        return
    finally:
        # save notify status
        SlaveSyncStatus.save_all()
        # send notify
        for task in notify_tasks:
            execute_notify_task(*task)
        # clear all tasks so the next run starts from a clean registry
        for k in sync_tasks.keys():
            sync_tasks[k].clear()
        for reset_method in module_reset_handlers:
            reset_method()
# Registries populated while scanning the plugin modules below.
prepare_tasks = []
module_init_handlers = []
module_reset_handlers = []

# Plugin modules that may contribute task metadata and lifecycle hooks.
plugin_modules = [
    slave_sync_postgres,
    slave_sync_gs,
    slave_sync_gs_wms,
    slave_sync_gs_layergroup,
    slave_sync_gs_preview,
    slave_sync_gs_gwc,
    slave_sync_fastly,
    slave_sync_file,
    slave_catalogues
]
notify_modules = [slave_sync_notify]
prepare_modules = [slave_sync_prepare]

# Number of files skipped during a debug run.
ignore_files = 0

# Start with one empty metadata bucket per known task type.
for bucket_key in sync_tasks.keys():
    sync_tasks_metadata[bucket_key] = []

# Scan each plugin: collect its task metadata (paired with the logger the
# module prefers, falling back to the global one) and register any
# initialize/reset lifecycle hooks it exposes.
for module in plugin_modules:
    if hasattr(module, "tasks_metadata"):
        for metadata in module.tasks_metadata:
            metadata_type = metadata[TASK_TYPE_INDEX]
            if metadata_type not in sync_tasks_metadata:
                # Unknown task type - not handled by this slave; ignore it.
                continue
            module_logger = getattr(module, "logger", logger)
            sync_tasks_metadata[metadata_type].append((metadata, module_logger))
    if hasattr(module, "initialize"):
        module_init_handlers.append(module.initialize)
    if hasattr(module, "reset"):
        module_reset_handlers.append(module.reset)
slave_sync_gs_layergroup, slave_sync_gs_preview, slave_sync_gs_gwc, slave_sync_file, slave_catalogues ] notify_modules = [ slave_sync_notify ] prepare_modules = [ slave_sync_prepare ] ignore_files = 0 for key in sync_tasks.keys(): sync_tasks_metadata[key] = [] for m in plugin_modules: if hasattr(m,"tasks_metadata"): for task_metadata in m.tasks_metadata: if task_metadata[TASK_TYPE_INDEX] not in sync_tasks_metadata: continue sync_tasks_metadata[task_metadata[TASK_TYPE_INDEX]].append((task_metadata,m.logger if hasattr(m,"logger") else logger)) if hasattr(m,"initialize"): module_init_handlers.append(m.initialize) if hasattr(m,"reset"): module_reset_handlers.append(m.reset) for m in notify_modules:
def sync():
    """Execute one full synchronisation run.

    Builds the task list from the bitbucket pull status, logs the planned
    work, runs the prepare tasks and then the sync tasks in the configured
    type order, and raises if any file failed, files were ignored in debug
    mode, or ROLLBACK is set.  Status saving, notification and task-registry
    cleanup always happen in the ``finally`` block, even on failure.
    """
    if DEBUG:
        logger.debug("Run in debug mode.")
    if INCLUDE:
        logger.debug("Only the files({}) will be processed.".format(
            ",".join(INCLUDE)))
    try:
        # Let every plugin module initialise itself before any task runs.
        for init_method in module_init_handlers:
            init_method()
        pull_status = SlaveSyncStatus.get_bitbucket_status()
        get_tasks(pull_status)
        # Best-effort notification: previously unguarded, so a transient
        # notify failure aborted the whole sync before any task ran.
        try:
            slave_sync_notify.SlaveServerSyncNotify.send_last_sync_time(
                pull_status)
        except Exception:
            logger.warning("Failed to send last sync time. %s",
                           traceback.format_exc())
        logger.info("HG_NODE: {}".format(HG_NODE))
        # Log the execution plan before doing any work.
        for task_type in ordered_sync_task_type:
            for task_name, task in sync_tasks[task_type].items():
                if isinstance(task, list):
                    # shared task: several job files mapped to one task name
                    logger.info("Shared Task : {0} {1} = [{2}]".format(
                        task_type, task_name,
                        ",".join([t[0]['job_file'] for t in task])))
                else:
                    # unshared task
                    logger.info("Task : {0} {1} = {2}".format(
                        task_type, task_name, task[0]['job_file']))
        for task in notify_tasks:
            logger.info("Task : {0} {1} = {2}".format(
                "send_notify", taskname(task[0], task[1]),
                task[0]['job_file']))
        # prepare tasks
        for task in prepare_tasks:
            execute_prepare_task(*task)
        # execute tasks in the configured type order
        for task_type in ordered_sync_task_type:
            for task in sync_tasks[task_type].values():
                if isinstance(task, list):
                    # shared task
                    for shared_task in task:
                        execute_task(*shared_task)
                else:
                    # unshared task
                    execute_task(*task)
        if SlaveSyncStatus.all_succeed():
            logger.info("All done!")
        else:
            raise Exception("Some files({0}) are processed failed.".format(
                ' , '.join([
                    s.file
                    for s in SlaveSyncStatus.get_failed_status_objects()
                ])))
        if ignore_files:
            raise Exception(
                "{} files are ignored in debug mode,rollback!".format(
                    ignore_files))
        if ROLLBACK:
            raise Exception("Rollback for testing")
        return
    finally:
        # save notify status
        SlaveSyncStatus.save_all()
        # send notify
        for task in notify_tasks:
            execute_notify_task(*task)
        # clear all tasks so the next run starts from a clean registry
        for k in sync_tasks.keys():
            sync_tasks[k].clear()
        for reset_method in module_reset_handlers:
            reset_method()
def sync():
    """Execute one synchronisation run.

    Variant of the main sync loop that also supports stopping after a fixed
    number of tasks (debugging knob) and sends each notify by invoking the
    task's handler directly, logging success/failure per task.  Status
    saving, notification and task-registry cleanup always happen in the
    ``finally`` block, even on failure.
    """
    try:
        # Let every plugin module initialise itself before any task runs.
        for init_method in module_init_handlers:
            init_method()
        pull_status = SlaveSyncStatus.get_bitbucket_status()
        get_tasks(pull_status)
        slave_sync_notify.SlaveServerSyncNotify.send_last_sync_time(
            pull_status)
        logger.info("HG_NODE: {}".format(HG_NODE))
        # Log the execution plan before doing any work.
        for task_type in ordered_sync_task_type:
            for task_name, task in sync_tasks[task_type].items():
                if isinstance(task, list):
                    # shared task: several job files mapped to one task name
                    logger.info("Shared Task : {0} {1} = [{2}]".format(
                        task_type, task_name,
                        ",".join([t[0]['job_file'] for t in task])))
                else:
                    # unshared task
                    logger.info("Task : {0} {1} = {2}".format(
                        task_type, task_name, task[0]['job_file']))
        for task in notify_tasks:
            logger.info("Task : {0} {1} = {2}".format(
                "send_notify", taskname(task[0], task[1]),
                task[0]['job_file']))
        # Debugging knob: set to a non-negative number to stop after that
        # many task entries.  With -1 (the default) the limit never matches
        # because executed_task starts at 0 and only increments, so all
        # tasks run.
        expected_executed_tasks = -1
        executed_task = 0
        for task_type in ordered_sync_task_type:
            for task in sync_tasks[task_type].values():
                if executed_task == expected_executed_tasks:
                    break
                if isinstance(task, list):
                    # shared task
                    for shared_task in task:
                        execute_task(*shared_task)
                else:
                    # unshared task
                    execute_task(*task)
                executed_task += 1
            if executed_task == expected_executed_tasks:
                break
        if SlaveSyncStatus.all_succeed():
            logger.info("All done!")
        else:
            raise Exception("Some files({0}) are processed failed.".format(
                ' , '.join([
                    s.file
                    for s in SlaveSyncStatus.get_failed_status_objects()
                ])))
        #raise Exception("Rollback for testing")
        return
    finally:
        # save notify status
        SlaveSyncStatus.save_all()
        # send notify; each task's failure is logged and does not stop the
        # remaining notifies (bare ``except:`` narrowed to ``Exception``).
        for task in notify_tasks:
            logger.info("Begin to send notify for task ({0}).".format(
                taskname(task[0], task[1])))
            try:
                task[1][TASK_HANDLER_INDEX](*task)
                logger.info("Succeed to send notify for task ({0}).".format(
                    taskname(task[0], task[1])))
            except Exception:
                logger.error("Failed to send notify for task ({0}). {1}".format(
                    taskname(task[0], task[1]), traceback.format_exc()))
        # clear all tasks so the next run starts from a clean registry
        for k in sync_tasks.keys():
            sync_tasks[k].clear()
        for reset_method in module_reset_handlers:
            reset_method()