""" Given a workflow_name, and if the module class is already imported, create an object an return it """ full_path = "workflow." + workflow_name + "." + workflow_name f = eval(full_path) # Create the object workflow_object = f(settings, logger, conn, token, decision, maximum_page_size) return workflow_object if __name__ == "__main__": ENV = None forks = None # Add options parser = OptionParser() parser.add_option("-e", "--env", default="dev", action="store", type="string", dest="env", help="set the environment to run, either dev or live") (options, args) = parser.parse_args() if options.env: ENV = options.env process.monitor_interrupt(lambda flag: decide(ENV, flag))
# "token": None, # "datetime": datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ") # } m = Message() m.set_body(json.dumps(response_message)) output_queue.write(m) if __name__ == "__main__": ENV = None parser = OptionParser() parser.add_option( "-e", "--env", default="dev", action="store", type="string", dest="env", help="set the environment to run, either dev or live", ) (options, args) = parser.parse_args() ENV = options.env settings_lib = __import__("settings") settings = settings_lib.get_settings(ENV) lax_simulator = LaxSimulator(settings, logger) process.monitor_interrupt(lambda flag: lax_simulator.listen(flag))
    Given an SWF connection and logger as resources, plus the domain,
    workflow_id and optional run_id identifying a workflow execution,
    ask SWF to cancel that execution so the workflow can be abandoned
    """
    try:
        out = conn.request_cancel_workflow_execution(domain, workflow_id, run_id=run_id)
        logger.info("request_cancel_workflow_execution %s" % out)
    except boto.exception.SWFResponseError as e:
        _log_swf_response_error(logger, e)


if __name__ == "__main__":
    ENV = None

    parser = OptionParser()
    parser.add_option(
        "-e",
        "--env",
        default="dev",
        action="store",
        type="string",
        dest="env",
        help="set the environment to run, either dev or live",
    )
    (options, args) = parser.parse_args()
    if options.env:
        ENV = options.env

    process.monitor_interrupt(lambda flag: work(ENV, flag))
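# Hedged sketch (illustrative, not part of this module): how an SWF
# connection like the `conn` used above can be built with boto's Layer1
# client and asked to cancel one workflow execution. The settings attribute
# names, domain and workflow id are assumptions for the example.
def example_request_cancel(settings, logger):
    import boto.swf.layer1
    conn = boto.swf.layer1.Layer1(
        aws_access_key_id=settings.aws_access_key_id,
        aws_secret_access_key=settings.aws_secret_access_key)
    # SWF records a WorkflowExecutionCancelRequested event for the execution;
    # run_id is optional and narrows the request to a single run
    out = conn.request_cancel_workflow_execution(
        "Publish", "SomeWorkflow_example", run_id=None)
    logger.info("request_cancel_workflow_execution %s" % out)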
    except NameError:
        # the module is not imported under that name; nothing to reload
        pass
    full_path = "starter." + workflow_name + "." + workflow_name + "()"
    s = eval(full_path)
    s.start(settings=settings, **workflow_data)
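# Hedged sketch: an eval()-free equivalent of the starter lookup above,
# using importlib. It assumes the same "starter.<Name>.<Name>" module/class
# naming convention; the function name is illustrative only.
def import_and_start_starter(workflow_name, settings, workflow_data):
    import importlib
    # import (or find the already-imported) module, e.g. starter.Ping
    module = importlib.import_module("starter." + workflow_name)
    # the class shares the module's short name, e.g. starter.Ping.Ping
    starter_object = getattr(module, workflow_name)()
    starter_object.start(settings=settings, **workflow_data)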
        if re.match(rule['bucket_name_pattern'], info.bucket_name) and \
                re.match(rule['file_name_pattern'], info.file_name):
            return rule['starter_name']


def reload_module(module_name):
    """
    Given a module name, attempt to reload the module
    """
    try:
        reload(eval(module_name))
    except NameError:
        # the module is not imported under that name; nothing to reload
        pass


if __name__ == "__main__":
    ENV = None

    parser = OptionParser()
    parser.add_option("-e", "--env", default="dev", action="store", type="string",
                      dest="env", help="set the environment to run, either dev or live")
    (options, args) = parser.parse_args()
    if options.env:
        ENV = options.env

    process.monitor_interrupt(lambda flag: work(ENV, flag))
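# Hedged sketch restating the rule matching above as a self-contained
# example; the rule values and the fake notification info are made up to
# show the expected shapes of `rules` and `info`.
def example_match_starter():
    import re

    class FakeInfo(object):
        bucket_name = "production-bucket"
        file_name = "elife-00353-vor-r1.zip"

    rules = [{
        'bucket_name_pattern': '.*production.*',
        'file_name_pattern': r'.*\.zip$',
        'starter_name': 'IngestArticleZip',
    }]
    info = FakeInfo()
    for rule in rules:
        # both the bucket and the file name must match the rule's patterns
        if re.match(rule['bucket_name_pattern'], info.bucket_name) and \
                re.match(rule['file_name_pattern'], info.file_name):
            return rule['starter_name']
    return None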
# "message": "invalid data", # "id": None, # "token": None, # "datetime": datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ") # } m = Message() m.set_body(json.dumps(response_message)) output_queue.write(m) if __name__ == "__main__": ENV = None parser = OptionParser() parser.add_option("-e", "--env", default="dev", action="store", type="string", dest="env", help="set the environment to run, either dev or live") (options, args) = parser.parse_args() ENV = options.env settings_lib = __import__('settings') settings = settings_lib.get_settings(ENV) lax_simulator = LaxSimulator(settings, logger) process.monitor_interrupt(lambda flag: lax_simulator.listen(flag))
# soon to be deprecated
def process_data_publishperfectarticle(workflow_name, workflow_data):
    data = {
        'info': S3NotificationInfo.from_dict(workflow_data),
        'run': str(uuid.uuid4())
    }
    return data


def process_data_ingestarticlezip(workflow_name, workflow_data):
    data = {
        'info': S3NotificationInfo.from_dict(workflow_data),
        'run': str(uuid.uuid4())
    }
    return data


def process_data_postperfectpublication(workflow_name, workflow_data):
    data = {'info': workflow_data}
    return data


workflow_data_processors = {
    'PublishPerfectArticle': process_data_publishperfectarticle,
    'IngestArticleZip': process_data_ingestarticlezip,
    'SilentCorrectionsIngest': process_data_ingestarticlezip,
    'PostPerfectPublication': process_data_postperfectpublication
}


if __name__ == "__main__":
    process.monitor_interrupt(main)
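# Hedged usage sketch for the workflow_data_processors dispatch table above:
# look up the processor by workflow name and pass the data through unchanged
# when no processor is registered. The function name is illustrative only.
def example_dispatch(workflow_name, workflow_data):
    processor = workflow_data_processors.get(workflow_name)
    if processor is not None:
        # e.g. 'IngestArticleZip' wraps the S3 notification and adds a run id
        return processor(workflow_name, workflow_data)
    return workflow_data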
    try:
        reload(eval(module_name))
    except NameError:
        pass


def get_workflow_object(workflow_name, settings, logger, conn, token, decision,
                        maximum_page_size):
    """
    Given a workflow_name, and if the module class is already imported,
    create an object and return it
    """
    full_path = "workflow." + workflow_name + "." + workflow_name
    f = eval(full_path)
    # Create the object
    workflow_object = f(settings, logger, conn, token, decision, maximum_page_size)
    return workflow_object


if __name__ == "__main__":
    ENV = None
    forks = None

    # Add options
    parser = OptionParser()
    parser.add_option("-e", "--env", default="dev", action="store", type="string",
                      dest="env", help="set the environment to run, either dev or live")
    (options, args) = parser.parse_args()
    if options.env:
        ENV = options.env

    process.monitor_interrupt(lambda flag: decide(ENV, flag))
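# Hedged sketch: the same class lookup as get_workflow_object() without
# eval(), using importlib. It assumes the "workflow.<Name>.<Name>"
# module/class convention above; the function name is illustrative only.
def get_workflow_object_importlib(workflow_name, settings, logger, conn, token,
                                  decision, maximum_page_size):
    import importlib
    # e.g. workflow.Ping is the module, workflow.Ping.Ping is the class
    module = importlib.import_module("workflow." + workflow_name)
    workflow_class = getattr(module, workflow_name)
    return workflow_class(settings, logger, conn, token, decision, maximum_page_size)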
self.logger.info("calling workflow PostPerfectPublication") return workflow_starter_message except Exception as e: self.logger.error("Error parsing Lax message. Message: " + e.message) raise @newrelic.agent.background_task(group='lax_response_adapter.py') def process_message(self, message, output_queue): message_str = str(message.get_body()) workflow_starter_message = self.parse_message(message_str) m = Message() m.set_body(json.dumps(workflow_starter_message)) output_queue.write(m) if __name__ == "__main__": ENV = None parser = OptionParser() parser.add_option("-e", "--env", default="dev", action="store", type="string", dest="env", help="set the environment to run, either dev or live") (options, args) = parser.parse_args() ENV = options.env settings_lib = __import__('settings') settings = settings_lib.get_settings(ENV) lax_response_adapter = LaxResponseAdapter(settings, logger) process.monitor_interrupt(lambda flag: lax_response_adapter.listen(flag))
    def extract_update_date(self, passthrough_json, response_json):
        return eif_provider.extract_update_date(passthrough_json, response_json)

    def slurp_eif(self, bucketname, filename):
        conn = S3Connection(self._settings.aws_access_key_id,
                            self._settings.aws_secret_access_key)
        bucket = conn.get_bucket(bucketname)
        key = Key(bucket)
        key.key = filename
        json_output = key.get_contents_as_string()
        return json_output


if __name__ == "__main__":
    ENV = None

    parser = OptionParser()
    parser.add_option("-e", "--env", default="dev", action="store", type="string",
                      dest="env", help="set the environment to run, either dev or live")
    (options, args) = parser.parse_args()
    ENV = options.env

    settings_lib = __import__('settings')
    settings = settings_lib.get_settings(ENV)

    shimmy = Shimmy(settings, logger)
    process.monitor_interrupt(lambda flag: shimmy.listen(flag))
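# Hedged usage sketch for slurp_eif() above: fetch an EIF JSON document from
# S3 and decode it. The bucket and key names are illustrative assumptions.
def example_slurp_eif(shimmy):
    import json
    raw = shimmy.slurp_eif("example-eif-bucket", "elife-00353-v1/elife-00353-v1.json")
    return json.loads(raw)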