def __init__(self):
    """Set up logging and point StompTransport at the right credentials.

    Selects the live credentials file when ``--live`` is present on the
    command line and the testing credentials otherwise, recording the
    choice in ``self.use_live_infrastructure``.
    """
    # initialize logging
    self.setup_logging()

    # "--live" on the command line switches from the testing namespace to
    # the production one.
    self.use_live_infrastructure = "--live" in sys.argv
    configuration_file = (
        "/dls_sw/apps/zocalo/secrets/credentials-live.cfg"
        if self.use_live_infrastructure
        else "/dls_sw/apps/zocalo/secrets/credentials-testing.cfg"
    )

    # The credentials file only exists on machines with access to the
    # secrets share; silently skip loading elsewhere.
    if os.path.exists(configuration_file):
        StompTransport.load_configuration_file(configuration_file)
def __init__(self):
    """Set up logging and load Stomp credentials for this process.

    Chooses the live credentials file when ``--live`` is present on the
    command line and the testing credentials otherwise; the choice is
    recorded in ``self.use_live_infrastructure``.
    """
    # initialize logging
    self.setup_logging()

    self.log.debug('Loading dlstbx workflows plugins')
    # (an unused path computation based on __file__ was removed here)
    self.log.debug('Loading dlstbx credentials')

    # change settings when in live mode
    default_configuration = '/dls_sw/apps/zocalo/secrets/credentials-testing.cfg'
    if '--live' in sys.argv:
        self.use_live_infrastructure = True
        default_configuration = '/dls_sw/apps/zocalo/secrets/credentials-live.cfg'
    else:
        # Fixed: previously left unset in the non-live case, so later reads
        # of self.use_live_infrastructure could raise AttributeError.
        self.use_live_infrastructure = False

    # override default stomp host
    from workflows.transport.stomp_transport import StompTransport
    try:
        StompTransport.load_configuration_file(default_configuration)
    except workflows.Error as e:
        # NOTE(review): relies on a module-level ``import workflows``
        # outside this excerpt -- confirm it exists.
        self.log.warning(e)
def lazy_pprint(*args, **kwargs): from pprint import pprint # pprint(*args, **kwargs) # def get_output_file(file_path): # print(os.path.dirname(get_output_file())) # return os.path.dirname(get_output_file()) if __name__ == '__main__': default_configuration = '/dls_sw/apps/zocalo/secrets/credentials-live.cfg' # override default stomp host try: StompTransport.load_configuration_file(default_configuration) except workflows.Error as e: print("Error: %s\n" % str(e)) # StompTransport.add_command_line_options(parser) # (options, args) = parser.parse_args(sys.argv[1:]) stomp = StompTransport() output_file = get_output_file(sys.argv[-1]) message = { 'recipes': [], 'parameters': {}, } # Build a custom recipe recipe = {}
# Wait for all worker processes to finish (``processes`` is defined
# earlier in this file, outside this excerpt).
for p in processes:
    p.join()


class Data(object):
    # Simple three-field container used as a pickle round-trip payload.
    def __init__(self, a, b, c):
        self.a = a
        self.b = b
        self.c = c


if __name__ == "__main__":
    # Python 2 only: cPickle is the C implementation of pickle
    # (``pickle`` in Python 3).
    import cPickle as pickle

    configuration = "/dls_sw/apps/zocalo/secrets/credentials-james.cfg"
    StompTransport.load_configuration_file(configuration)

    data = Data(1, 2, 3)
    # NOTE(review): pickled payloads are only safe between trusted peers --
    # unpickling untrusted data can execute arbitrary code.
    message = {"hello": "world", "data": pickle.dumps(data)}

    # ``Master`` is defined elsewhere in this file, outside this excerpt.
    master = Master()
    master.send(message)
    master.send(message)
    master.send(message)
    master.send(message)
    print(master.receive())
    print("\nSubmitted.")
secure=False, ) def ensure_url(url: str) -> str: """Make sure a string URL has a schema, for urllib.parse.urlparse consumption""" if "://" not in url: return f"minio://{args.host}" return url logger = logging.getLogger() parser = ArgumentParser(description="Submit an S3 bucket for PIA") StompTransport.load_configuration_file( "/dls_sw/apps/zocalo/secrets/credentials-testing.cfg") StompTransport.add_command_line_options(parser) parser.add_argument( "s3_url", metavar="S3_URL", help="The access URL for the S3 bucket", type=ensure_url, ) # parser.add_argument("images", metavar="IMAGES", help="Image numbers to submit. Index or ranges '1,10' '1-10'. Defauts to all") parser.add_argument("-v", "--verbose", help="increase output verbosity", action="store_true") args = parser.parse_args()
    help="the DB config file", default="dbconfig.cfg", metavar="FILE")
parser.add_option("-s", "--stomp-config", dest="stomp_config",
    help="the stomp config (i.e. message queue)", default="stomp.cfg", metavar="FILE")
(options, args) = parser.parse_args(sys.argv[1:])

config = ConfigParser.RawConfigParser(allow_no_value=True)
# NOTE(review): reads options.config here but options.db_config below --
# confirm both dests exist on the parser (its start is above this excerpt).
config.read(options.config)
set_logging(config)

StompTransport.load_configuration_file(options.stomp_config)
# NOTE(review): adding options after parse_args has already run has no
# effect on the values parsed above.
StompTransport.add_command_line_options(parser)

# Get a database connection
mxprocessing = None
with ispyb.open(options.db_config) as conn:
    mxprocessing = ispyb.factory.create_data_area(
        ispyb.factory.DataAreaType.MXPROCESSING, conn)


def receive_message_but_exit_on_error(*args, **kwargs):
    # Wrapper around receive_message that turns Ctrl+C into a clean exit.
    try:
        receive_message(*args, **kwargs)
    except KeyboardInterrupt:
        print("Terminating.")
        sys.exit(0)
    except Exception as e:
import os
import numpy as np
import matplotlib.pyplot as plt
import sys

# Beamline-specific helpers live outside the normal Python path.
sys.path.append('/dls_sw/i24/scripts/setup_beamline')
import pv, ca
import Queue  # Python 2 module name ("queue" in Python 3)

import workflows.recipe
from workflows.transport.stomp_transport import StompTransport

import logging
logging.basicConfig(level=logging.DEBUG)

StompTransport.load_configuration_file(
    '/dls_sw/apps/zocalo/secrets/credentials-i24.cfg')
# StompTransport.load_configuration_file('/dls_sw/apps/zocalo/secrets/credentials-live.cfg')

# Read the current visit directory from the Pilatus file-path PV.
visit_directory = ca.cagetstring(pv.pilat_filepath)
print visit_directory  # Python 2 print statement
if visit_directory.startswith('/ramdisk'):
    # Detector writes to a RAM disk; translate to the permanent data path.
    visit_directory = visit_directory.replace('ramdisk', 'dls/i24/data')
# NOTE(review): label below says 'visit_director' -- probable typo for
# 'visit_directory' in the printed text.
print 'visit_director', visit_directory

filefromdet = ca.cagetstring('BL24I-EA-PILAT-01:cam1:FileName_RBV')
# Build the per-image cbf filename pattern, e.g. <name>0001.cbf.
pattern = os.path.join(visit_directory, "%s" % (filefromdet) + "%04d.cbf")
print pattern
#pattern = "/dls/i24/data/2018/nt14493-104/had3/agouti/agouti0044_%05d.cbf"
#chip_name = 'chip_name'
#sub_directory='image_analysis'
def run():
    """Entry point for zocalo.wrap: run a wrapped task with status reporting.

    Sets up console (and graylog) logging, loads Stomp credentials,
    parses the command line, instantiates the requested wrapper --
    optionally attaching a serialized recipe wrapper for downstream
    communication -- runs it, and broadcasts status transitions.
    """
    cmdline_args = sys.argv[1:]

    # Enable logging to console
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    logging.getLogger("workflows").setLevel(logging.INFO)
    logging.getLogger("zocalo").setLevel(logging.INFO)
    logging.getLogger().setLevel(logging.WARN)
    logging.getLogger().addHandler(console)
    log = logging.getLogger("dlstbx.wrap")

    # Set up stomp defaults: live credentials unless --test is given
    default_configuration = "/dls_sw/apps/zocalo/secrets/credentials-live.cfg"
    if "--test" in cmdline_args:
        default_configuration = "/dls_sw/apps/zocalo/secrets/credentials-testing.cfg"
    StompTransport.load_configuration_file(default_configuration)

    # Discover wrapper classes registered under the zocalo.wrappers entry
    # point; values are the (not yet called) entry-point loaders.
    known_wrappers = {
        e.name: e.load for e in pkg_resources.iter_entry_points("zocalo.wrappers")
    }

    # Set up parser
    parser = OptionParser(usage="zocalo.wrap [options]")
    parser.add_option("-?", action="help", help=SUPPRESS_HELP)
    parser.add_option(
        "--wrap",
        action="store",
        dest="wrapper",
        type="choice",
        metavar="WRAP",
        default=None,
        choices=list(known_wrappers),
        help="Object to be wrapped (valid choices: %s)" % ", ".join(known_wrappers),
    )
    parser.add_option(
        "--recipewrapper",
        action="store",
        dest="recipewrapper",
        metavar="RW",
        default=None,
        help="A serialized recipe wrapper file " "for downstream communication",
    )
    parser.add_option(
        "--test", action="store_true", help="Run in ActiveMQ testing namespace (zocdev)"
    )
    parser.add_option(
        "--live",
        action="store_true",
        help="Run in ActiveMQ live namespace (zocalo, default)",
    )
    parser.add_option(
        "-t",
        "--transport",
        dest="transport",
        metavar="TRN",
        default="StompTransport",
        help="Transport mechanism. Known mechanisms: "
        + ", ".join(workflows.transport.get_known_transports())
        + " (default: %default)",
    )
    parser.add_option(
        "-v",
        "--verbose",
        dest="verbose",
        action="store_true",
        default=False,
        help="Show debug level messages",
    )
    workflows.transport.add_command_line_options(parser)

    # Parse command line arguments
    (options, args) = parser.parse_args()
    if not cmdline_args:
        parser.print_help()
        sys.exit()

    # Instantiate specific wrapper
    if not options.wrapper:
        sys.exit("A wrapper object must be specified.")

    if options.verbose:
        console.setLevel(logging.DEBUG)

    # Enable logging to graylog
    graylog_handler = enable_graylog()
    log.info(
        "Starting wrapper for %s with recipewrapper file %s",
        options.wrapper,
        options.recipewrapper,
    )

    # Connect to transport and start sending notifications
    transport = workflows.transport.lookup(options.transport)()
    transport.connect()
    st = zocalo.wrapper.StatusNotifications(transport.broadcast_status, options.wrapper)
    # Expose cluster context (if present) in the status display.
    for env in ("SGE_CELL", "JOB_ID"):
        if env in os.environ:
            st.set_static_status_field("cluster_" + env, os.getenv(env))

    # Instantiate chosen wrapper: the first call resolves the entry point
    # to a class, the second constructs an instance of that class.
    instance = known_wrappers[options.wrapper]()()
    instance.status_thread = st

    # If specified, read in a serialized recipewrapper
    if options.recipewrapper:
        with open(options.recipewrapper) as fh:
            recwrap = workflows.recipe.wrapper.RecipeWrapper(
                message=json.load(fh), transport=transport
            )
        instance.set_recipe_wrapper(recwrap)

        if recwrap.environment.get("ID"):
            # If recipe ID available then include that in all future log messages

            class ContextFilter(logging.Filter):
                def filter(self, record):
                    record.recipe_ID = recwrap.environment["ID"]
                    return True

            graylog_handler.addFilter(ContextFilter())

        if recwrap.recipe_step.get("wrapper", {}).get("task_information"):
            # If the recipe contains an extra task_information field then add
            # this to the status display
            st.taskname += (
                " (" + str(recwrap.recipe_step["wrapper"]["task_information"]) + ")"
            )

    instance.prepare("Starting processing")
    st.set_status(workflows.services.common_service.Status.PROCESSING)
    log.info("Setup complete, starting processing")
    try:
        if instance.run():
            log.info("successfully finished processing")
            instance.success("Finished processing")
        else:
            log.info("processing failed")
            instance.failure("Processing failed")
        st.set_status(workflows.services.common_service.Status.END)
    except KeyboardInterrupt:
        log.info("Shutdown via Ctrl+C")
        st.set_status(workflows.services.common_service.Status.END)
    except Exception as e:
        log.error(str(e), exc_info=True)
        instance.failure(e)
        st.set_status(workflows.services.common_service.Status.ERROR)
    instance.done("Finished processing")
    st.shutdown()
    st.join()
    log.debug("Terminating")
def run():
    """Entry point for zocalo.go: trigger processing of a recipe.

    Triggers a standard recipe, an arbitrary recipe from a local file, or
    an entry in the ISPyB processing table, sending the resulting message
    via Stomp with a drop-file fallback for infrastructure outages.
    """
    parser = OptionParser(
        usage="zocalo.go [options] dcid",
        description="Triggers processing of a standard "
        "recipe, of an arbitrary recipe from a local file, or of an entry in "
        "the ISPyB processing table.",
    )
    parser.add_option("-?", action="help", help=SUPPRESS_HELP)
    parser.add_option(
        "-r",
        "--recipe",
        dest="recipe",
        metavar="RCP",
        action="append",
        default=[],
        help="Name of a recipe to run. Can be used multiple times. Recipe names correspond to filenames (excluding .json) in /dls_sw/apps/zocalo/live/recipes",
    )
    parser.add_option(
        "-a",
        "--autoprocscalingid",
        dest="autoprocscalingid",
        metavar="APSID",
        action="store",
        type="string",
        default=None,
        help="An auto processing scaling ID for downstream processing recipes.",
    )
    parser.add_option(
        "-f",
        "--file",
        dest="recipefile",
        metavar="FILE",
        action="store",
        type="string",
        default="",
        help="Run recipe contained in this file.",
    )
    parser.add_option(
        "-n",
        "--no-dcid",
        dest="nodcid",
        action="store_true",
        default=False,
        help="Trigger recipe without specifying a data collection ID",
    )
    parser.add_option(
        "--drop",
        dest="dropfile",
        action="store_true",
        default=False,
        help=SUPPRESS_HELP,
    )  # Write directly to file, do not attempt to send via stomp
    parser.add_option(
        "-p",
        "--reprocessing",
        dest="reprocess",
        action="store_true",
        default=False,
        help="Means a reprocessing ID is given rather than a data collection ID",
    )
    parser.add_option(
        "-s",
        "--set",
        dest="parameters",
        action="append",
        default=[],
        metavar="KEY=VALUE",
        help="Set an additional variable for recipe evaluation",
    )
    parser.add_option(
        "-v",
        "--verbose",
        dest="verbose",
        action="store_true",
        default=False,
        help="Show raw message before sending",
    )
    parser.add_option(
        "--dry-run",
        dest="dryrun",
        action="store_true",
        default=False,
        help="Verify that everything is in place that the message could be sent, but don't actually send the message",
    )
    parser.add_option(
        "--test",
        action="store_true",
        dest="test",
        default=False,
        help="Run in ActiveMQ testing (zocdev) namespace",
    )

    # Live credentials by default; the drop-file fallback is only allowed
    # when the user has not explicitly selected a stomp configuration.
    default_configuration = "/dls_sw/apps/zocalo/secrets/credentials-live.cfg"
    allow_stomp_fallback = not any("stomp" in s.lower() for s in sys.argv)
    if "--test" in sys.argv:
        default_configuration = "/dls_sw/apps/zocalo/secrets/credentials-testing.cfg"
        allow_stomp_fallback = False

    # override default stomp host
    try:
        StompTransport.load_configuration_file(default_configuration)
    except workflows.Error as e:
        print("Error: %s\n" % str(e))
        allow_stomp_fallback = False

    StompTransport.add_command_line_options(parser)
    (options, args) = parser.parse_args(sys.argv[1:])

    def generate_headers():
        # Identify who sent the message and from where.
        return {
            "zocalo.go.user": getpass.getuser(),
            "zocalo.go.host": socket.gethostname(),
        }

    def write_message_to_dropfile(message, headers):
        # Persist the message to the shared drop-file directory for later
        # pickup when direct Stomp delivery is unavailable or bypassed.
        message_serialized = (
            json.dumps({"headers": headers, "message": message}, indent=2) + "\n"
        )
        fallback = os.path.join("/dls_sw/apps/zocalo/dropfiles", str(uuid.uuid4()))
        if options.dryrun:
            print("Not storing message in %s (running with --dry-run)" % fallback)
            return
        with open(fallback, "w") as fh:
            fh.write(message_serialized)
        print("Message successfully stored in %s" % fallback)

    def send_to_stomp_or_defer(message, headers=None):
        # Try to deliver via Stomp; on infrastructure errors fall back to a
        # drop-file (unless fallback has been disabled).
        if not headers:
            headers = generate_headers()
        if options.verbose:
            pprint(message)
        if allow_stomp_fallback and options.dropfile:
            return write_message_to_dropfile(message, headers)
        try:
            stomp = StompTransport()
            if options.dryrun:
                print("Not sending message (running with --dry-run)")
                return
            stomp.connect()
            stomp.send("processing_recipe", message, headers=headers)
        except (
            KeyboardInterrupt,
            SyntaxError,
            AssertionError,
            AttributeError,
            ImportError,
            TypeError,
            ValueError,
        ):
            # Programming errors and user interrupts must surface -- only
            # infrastructure failures fall through to the drop-file path.
            raise
        except Exception:
            if not allow_stomp_fallback:
                raise
            print("\n\n")
            import traceback

            traceback.print_exc()
            print("\n\nAttempting to store message in fallback location")
            write_message_to_dropfile(message, headers)

    message = {"recipes": options.recipe, "parameters": {}}
    # Fold -s KEY=VALUE settings into the message parameters.
    for kv in options.parameters:
        if "=" not in kv:
            sys.exit(f"Invalid variable specification '{kv}'")
        key, value = kv.split("=", 1)
        message["parameters"][key] = value

    if (
        not options.recipe
        and not options.recipefile
        and not options.nodcid
        and not options.reprocess
    ):
        sys.exit("No recipes specified.")

    if options.recipefile:
        # Load and validate a custom recipe from file.
        with open(options.recipefile) as fh:
            custom_recipe = workflows.recipe.Recipe(json.load(fh))
        custom_recipe.validate()
        message["custom_recipe"] = custom_recipe.recipe

    if options.nodcid:
        # Recipe runs without a data collection ID.
        if options.recipe:
            print("Running recipes", options.recipe)
        if options.recipefile:
            print("Running recipe from file", options.recipefile)
        print("without specified data collection.")
        send_to_stomp_or_defer(message)
        print("\nSubmitted.")
        sys.exit(0)

    if not args:
        sys.exit("No data collection IDs specified.")
    if len(args) > 1:
        sys.exit("Only a single data collection ID can be specified.")
    dcid = int(args[0])
    # NOTE(review): assert is stripped under "python -O"; an explicit
    # sys.exit would validate more robustly.
    assert dcid > 0, "Invalid data collection ID given."

    if options.reprocess:
        # Given ID is a reprocessing ID. Nothing else needs to be specified.
        if options.recipe:
            print("Running recipes", options.recipe)
        message["parameters"]["ispyb_process"] = dcid
        send_to_stomp_or_defer(message)
        print("\nReprocessing task submitted for ID %d." % dcid)
        sys.exit(0)

    if message["recipes"]:
        print("Running recipes", message["recipes"])
    if options.recipefile:
        print("Running recipe from file", options.recipefile)
    if not message["recipes"] and not message.get("custom_recipe"):
        sys.exit("No recipes specified.")
    print("for data collection", dcid)
    message["parameters"]["ispyb_dcid"] = dcid

    if options.autoprocscalingid:
        apsid = int(options.autoprocscalingid)
        # NOTE(review): same assert-for-validation caveat as above.
        assert apsid > 0, "Invalid auto processing scaling ID given."
        message["parameters"]["ispyb_autoprocscalingid"] = apsid

    send_to_stomp_or_defer(message)
    print("\nSubmitted.")