Example #1
    def __init__(self, bus, port, options):
        plugins.SimplePlugin.__init__(self, bus)
        self.id = None
        self.port = port
        self.master_url = options.master
        self.master_proxy = MasterProxy(self, bus, self.master_url)
        self.master_proxy.subscribe()
        if options.hostname is None:
            self.hostname = self.master_proxy.get_public_hostname()
        else:
            self.hostname = options.hostname
        self.lighty_conf_template = options.lighty_conf
        if options.blockstore is None:
            self.static_content_root = tempfile.mkdtemp(prefix=os.getenv('TEMP', default='/tmp/sw-files-'))
        else:
            self.static_content_root = options.blockstore
        block_store_dir = os.path.join(self.static_content_root, "data")
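        # The data subdirectory may already exist from an earlier run, so a
        # failing mkdir is deliberately ignored below.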
        try:
            os.mkdir(block_store_dir)
        except:
            pass
        self.block_store = BlockStore(ciel.engine, self.hostname, self.port, block_store_dir, ignore_blocks=options.ignore_blocks, aux_listen_port=options.aux_port)
        self.block_store.subscribe()
        self.block_store.build_pin_set()
        self.block_store.check_local_blocks()
        create_watcher_thread(bus, self.block_store)
        self.upload_deferred_work = DeferredWorkPlugin(bus, 'upload_work')
        self.upload_deferred_work.subscribe()
        self.upload_manager = UploadManager(self.block_store, self.upload_deferred_work)
        
        self.execution_features = ExecutionFeatures()
        
        #self.task_executor = TaskExecutorPlugin(bus, self, self.master_proxy, self.execution_features, 1)
        #self.task_executor.subscribe()
        
        self.scheduling_classes = parse_scheduling_class_option(options.scheduling_classes, options.num_threads)
        
        self.multiworker = MultiWorker(ciel.engine, self)
        self.multiworker.subscribe()
        self.process_pool = ProcessPool(bus, self)
        self.process_pool.subscribe()
        self.runnable_executors = self.execution_features.runnable_executors.keys()
        self.server_root = WorkerRoot(self)
        self.pinger = Pinger(bus, self.master_proxy, None, 30)
        self.pinger.subscribe()
        self.stopping = False
        self.event_log = []
        self.log_lock = Lock()
        self.log_condition = Condition(self.log_lock)

        self.cherrypy_conf = {}

        cherrypy.config.update({"server.thread_pool" : 20})

        if options.staticbase is not None:
            self.cherrypy_conf["/skyweb"] = { "tools.staticdir.on": True, "tools.staticdir.dir": options.staticbase }

        self.subscribe()
Example #2
def main():
    parser = OptionParser()
    parser.add_option("-m", "--master", action="store", dest="master", help="Master URI", metavar="MASTER", default=os.getenv("SW_MASTER"))
    parser.add_option("-s", "--skypy-stub", action="store", dest="skypy_stub", help="Path to Skypy stub.py", metavar="PATH", default=None)
    (options, args) = parser.parse_args()
   
    if not options.master:
        parser.print_help()
        print >> sys.stderr, "Must specify master URI with --master"
        sys.exit(1)

    master_uri = options.master

    if len(args) < 1:
        parser.print_help()
        print >> sys.stderr, "Must specify a script file to execute, as argument"
        sys.exit(1)

    script_name = args[0]
    script_args = args[1:]

    sp_package = {"skypymain": {"filename": script_name}}
    sp_args = {"pyfile_ref": {"__package__": "skypymain"}, "entry_point": "skypy_main", "entry_args": script_args}

    new_job = skywriting.runtime.util.start_job.submit_job_with_package(sp_package, "skypy", sp_args, os.getcwd(), master_uri, args)
    
    result = skywriting.runtime.util.start_job.await_job(new_job["job_id"], master_uri)

    fakeBlockStore = BlockStore(ciel.engine, None, None, "/tmp")
    reflist = fakeBlockStore.retrieve_object_for_ref(result, "json")

    return reflist[0]
Example #3
def allinone_main(options, args):
    
    ciel.log = CielLogger()
    
    script_filename = args[0]
    run_id = args[1] if len(args) > 1 else 'allinone'
    
    if options.blockstore is not None:
        base_dir = options.blockstore
    else:
        base_dir = tempfile.mkdtemp(prefix=os.getenv('TEMP', default='/tmp/sw-files-'))
        options.blockstore = base_dir
    ciel.log('Writing block store files to %s' % base_dir, 'ALLINONE', logging.INFO)
        
    block_store = BlockStore(ciel.engine, 'localhost', 8000, base_dir, True)
    
    initial_task_descriptor, cont_ref = build_initial_task_descriptor(script_filename, block_store, 'root', 'root_cont', 'root_output')
        
    initial_task_object = build_taskpool_task_from_descriptor(initial_task_descriptor, None)
    
    task_runner = TaskRunner(initial_task_object, cont_ref, block_store, options)
    
    try:
        print run_id, 'SUBMITTED_JOB', now_as_timestamp()
        result = task_runner.run()
        print run_id, 'GOT_RESULT', now_as_timestamp()
        print block_store.retrieve_object_for_ref(result, 'json')
        
    except:
        # Surface failures from the run instead of silently discarding them.
        import traceback
        traceback.print_exc()
Example #4
def allinone_main(options, args):

    ciel.log = CielLogger()

    script_filename = args[0]
    run_id = args[1] if len(args) > 1 else "allinone"

    if options.blockstore is not None:
        base_dir = options.blockstore
    else:
        base_dir = tempfile.mkdtemp(prefix=os.getenv("TEMP", default="/tmp/sw-files-"))
        options.blockstore = base_dir
    ciel.log("Writing block store files to %s" % base_dir, "ALLINONE", logging.INFO)

    block_store = BlockStore(ciel.engine, "localhost", 8000, base_dir, True)

    initial_task_descriptor, cont_ref = build_initial_task_descriptor(
        script_filename, block_store, "root", "root_cont", "root_output"
    )

    initial_task_object = build_taskpool_task_from_descriptor(initial_task_descriptor, None)

    task_runner = TaskRunner(initial_task_object, cont_ref, block_store, options)

    try:
        print run_id, "SUBMITTED_JOB", now_as_timestamp()
        result = task_runner.run()
        print run_id, "GOT_RESULT", now_as_timestamp()
        print block_store.retrieve_object_for_ref(result, "json", None)

    except:
        # Surface failures from the run instead of silently discarding them.
        import traceback
        traceback.print_exc()
Example #5
    def __init__(self, bus, hostname, port, options):
        plugins.SimplePlugin.__init__(self, bus)
        self.id = None
        self.hostname = hostname
        self.port = port
        self.master_url = options.master
        self.master_proxy = MasterProxy(self, bus, self.master_url)
        self.master_proxy.subscribe()
        if options.blockstore is None:
            block_store_dir = tempfile.mkdtemp(
                prefix=os.getenv('TEMP', default='/tmp/sw-files-'))
        else:
            block_store_dir = options.blockstore
        self.block_store = BlockStore(cherrypy.engine,
                                      self.hostname,
                                      self.port,
                                      block_store_dir,
                                      ignore_blocks=options.ignore_blocks)
        self.block_store.build_pin_set()
        self.upload_deferred_work = DeferredWorkPlugin(bus, 'upload_work')
        self.upload_deferred_work.subscribe()
        self.upload_manager = UploadManager(self.block_store,
                                            self.upload_deferred_work)
        self.execution_features = ExecutionFeatures()
        self.task_executor = TaskExecutorPlugin(bus, self.block_store,
                                                self.master_proxy,
                                                self.execution_features, 1)
        self.task_executor.subscribe()
        self.server_root = WorkerRoot(self)
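        # The Pinger keeps the master informed that this worker is alive;
        # the trailing 30 is presumably the ping interval in seconds.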
        self.pinger = Pinger(bus, self.master_proxy, None, 30)
        self.pinger.subscribe()
        self.stopping = False
        self.event_log = []
        self.log_lock = Lock()
        self.log_condition = Condition(self.log_lock)

        self.cherrypy_conf = {}

        cherrypy.config.update({"server.thread_pool": 20})

        if options.staticbase is not None:
            self.cherrypy_conf["/skyweb"] = {
                "tools.staticdir.on": True,
                "tools.staticdir.dir": options.staticbase
            }
        if options.lib is not None:
            self.cherrypy_conf["/stdlib"] = {
                "tools.staticdir.on": True,
                "tools.staticdir.dir": options.lib
            }

        self.subscribe()
Example #6
def main():

    parser = OptionParser()
    parser.add_option("-m",
                      "--master",
                      action="store",
                      dest="master",
                      help="Master URI",
                      metavar="MASTER",
                      default=os.getenv("SW_MASTER"))

    (options, args) = parser.parse_args()
    master_uri = options.master

    if master_uri is None or master_uri == "":
        raise Exception("Must specify a master with -m or SW_MASTER")

    with open(args[0], "r") as package_file:
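        # The package file is a JSON document with "package" and "start"
        # sections, plus an optional "options" section.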
        job_dict = simplejson.load(package_file)

    package_dict = job_dict["package"]
    start_dict = job_dict["start"]
    start_handler = start_dict["handler"]
    start_args = start_dict["args"]
    try:
        job_options = job_dict["options"]
    except KeyError:
        job_options = {}

    (package_path, _) = os.path.split(args[0])

    print "BEFORE_SUBMIT", now_as_timestamp()

    new_job = submit_job_with_package(package_dict, start_handler, start_args,
                                      job_options, package_path, master_uri,
                                      args[1:])

    print "SUBMITTED", now_as_timestamp()

    job_url = urlparse.urljoin(master_uri,
                               "control/browse/job/%s" % new_job['job_id'])
    print "JOB_URL", job_url

    result = await_job(new_job['job_id'], master_uri)

    fakeBlockStore = BlockStore(ciel.engine, None, None, "/tmp")
    reflist = fakeBlockStore.retrieve_object_for_ref(result, "json")

    print "GOT_RESULT", now_as_timestamp()

    return reflist
Example #7
def main():
    parser = OptionParser()
    parser.add_option("-m", "--master", action="store", dest="master", help="Master URI", metavar="MASTER", default=os.getenv("SW_MASTER"))
    parser.add_option("-r", "--refs", action="store_true", dest="refs", help="Set this option to look up reference names in the master", default=False)
    parser.add_option("-j", "--json", action="store_true", dest="json", help="Set this option to use JSON pretty printing", default=False)
    (options, args) = parser.parse_args()
    
    # Retrieves should work anyway; the arguments are mainly needed for storing
    # stuff.
    bs = BlockStore("dummy_hostname", "0", None)
    
    if options.refs:
        ref_ids = args
        
        for ref_id in ref_ids:
            
            # Fetch information about the ref from the master.
            h = httplib2.Http()
            _, content = h.request(urljoin(options.master, '/refs/%s' % ref_id), 'GET')
            ref_info = simplejson.loads(content, object_hook=json_decode_object_hook)
            ref = ref_info['ref']
            
            if options.json:
                obj = bs.retrieve_object_for_ref(ref, 'json')
                simplejson.dump(obj, sys.stdout, cls=SWReferenceJSONEncoder, indent=4)
                print
            else:
                fh = bs.retrieve_object_for_ref(ref, 'handle')
                for line in fh:
                    sys.stdout.write(line)
                fh.close()
            
    else:
        urls = args    
        
        for url in urls:
            if options.json:
                obj = bs.retrieve_object_for_ref(SWURLReference([url]), 'json')
                simplejson.dump(obj, sys.stdout, cls=SWReferenceJSONEncoder, indent=4)
                print
            else:
                fh = bs.retrieve_object_for_ref(SWURLReference([url]), 'handle')
                print fh
                for line in fh:
                    sys.stdout.write(line)
                fh.close()
Example #8
def main():

    parser = OptionParser()
    parser.add_option("-m", "--master", action="store", dest="master", help="Master URI", metavar="MASTER", default=os.getenv("SW_MASTER"))
    
    (options, args) = parser.parse_args()
    master_uri = options.master

    if master_uri is None or master_uri == "":
        raise Exception("Must specify a master with -m or SW_MASTER")
    
    with open(args[0], "r") as package_file:
        job_dict = simplejson.load(package_file)

    package_dict = job_dict["package"]
    start_dict = job_dict["start"]
    start_handler = start_dict["handler"]
    start_args = start_dict["args"]
    try:
        job_options = job_dict["options"]
    except KeyError:
        job_options = {}
    

    (package_path, _) = os.path.split(args[0])

    print "BEFORE_SUBMIT", now_as_timestamp()

    new_job = submit_job_with_package(package_dict, start_handler, start_args, job_options, package_path, master_uri, args[1:])

    print "SUBMITTED", now_as_timestamp()
    
    job_url = urlparse.urljoin(master_uri, "control/browse/job/%s" % new_job['job_id'])
    print "JOB_URL", job_url

    result = await_job(new_job['job_id'], master_uri)

    fakeBlockStore = BlockStore(ciel.engine, None, None, "/tmp")
    reflist = fakeBlockStore.retrieve_object_for_ref(result, "json")

    print "GOT_RESULT", now_as_timestamp()

    return reflist
Example #9
def main():
    parser = OptionParser()
    parser.add_option("-m", "--master", action="store", dest="master", help="Master URI", metavar="MASTER", default=os.getenv("SW_MASTER"))
    parser.add_option("-i", "--id", action="store", dest="id", help="Job ID", metavar="ID", default="default")
    parser.add_option("-e", "--env", action="store_true", dest="send_env", help="Set this flag to send the current environment with the script as _env", default=False)
    (options, args) = parser.parse_args()
   
    if not options.master:
        parser.print_help()
        print >> sys.stderr, "Must specify master URI with --master"
        sys.exit(1)

    if len(args) != 1:
        parser.print_help()
        print >> sys.stderr, "Must specify one script file to execute, as argument"
        sys.exit(1)

    script_name = args[0]
    master_uri = options.master
    id = options.id
    
    print id, "STARTED", now_as_timestamp()

    swi_package = {"swimain": {"filename": script_name}}
    swi_args = {"sw_file_ref": {"__package__": "swimain"}, "start_args": args}
    if options.send_env:
        swi_args["start_env"] = dict(os.environ)

    new_job = skywriting.runtime.util.start_job.submit_job_with_package(swi_package, "swi", swi_args, {}, os.getcwd(), master_uri, args[1:])
    
    result = skywriting.runtime.util.start_job.await_job(new_job["job_id"], master_uri)
    
    fakeBlockStore = BlockStore(ciel.engine, None, None, "/tmp")
    reflist = fakeBlockStore.retrieve_object_for_ref(result, "json")
    sw_return = fakeBlockStore.retrieve_object_for_ref(reflist[0], "json")
    #fakeBlockStore.stop_thread()
    return sw_return
Example #10
def worker_process_main(base_dir, task_queue, response_queue):

    master_proxy = QueueMasterProxy(response_queue)
    execution_features = ExecutionFeatures()
    block_store = BlockStore(ciel.engine, 'localhost', 8000, base_dir, True)

    # XXX: Broken because we now need a pseudoworker in place of a block_store.
    thread_task_executor = TaskExecutorPlugin(ciel.engine,
                                              PseudoWorker(block_store),
                                              master_proxy, execution_features,
                                              1)

    while True:
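        # Pull tasks from the queue until a ThreadTerminator sentinel arrives.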

        task = task_queue.get()
        if isinstance(task, ThreadTerminator):
            return

        task_descriptor = task.as_descriptor(False)

        thread_task_executor.handle_input(task_descriptor)
Example #11
    def __init__(self, bus, hostname, port, options):
        plugins.SimplePlugin.__init__(self, bus)
        self.id = None
        self.hostname = hostname
        self.port = port
        self.master_url = options.master
        self.master_proxy = MasterProxy(self, bus, self.master_url)
        self.master_proxy.subscribe()
        if options.blockstore is None:
            block_store_dir = tempfile.mkdtemp(prefix=os.getenv('TEMP', default='/tmp/sw-files-'))
        else:
            block_store_dir = options.blockstore
        self.block_store = BlockStore(cherrypy.engine, self.hostname, self.port, block_store_dir, ignore_blocks=options.ignore_blocks)
        self.block_store.build_pin_set()
        self.upload_deferred_work = DeferredWorkPlugin(bus, 'upload_work')
        self.upload_deferred_work.subscribe()
        self.upload_manager = UploadManager(self.block_store, self.upload_deferred_work)
        self.execution_features = ExecutionFeatures()
        self.task_executor = TaskExecutorPlugin(bus, self.block_store, self.master_proxy, self.execution_features, 1)
        self.task_executor.subscribe()
        self.server_root = WorkerRoot(self)
        self.pinger = Pinger(bus, self.master_proxy, None, 30)
        self.pinger.subscribe()
        self.stopping = False
        self.event_log = []
        self.log_lock = Lock()
        self.log_condition = Condition(self.log_lock)

        self.cherrypy_conf = {}
    
        if options.staticbase is not None:
            self.cherrypy_conf["/skyweb"] = { "tools.staticdir.on": True, "tools.staticdir.dir": options.staticbase }
        if options.lib is not None:
            self.cherrypy_conf["/stdlib"] = { "tools.staticdir.on": True, "tools.staticdir.dir": options.lib }

        self.subscribe()
Example #12
class Worker(plugins.SimplePlugin):
    def __init__(self, bus, port, options):
        plugins.SimplePlugin.__init__(self, bus)

        create_pycurl_thread(bus)
        if options.aux_port is not None:
            create_tcp_server(options.aux_port)

        self.id = None
        self.port = port
        self.master_url = options.master
        self.master_proxy = MasterProxy(self, bus, self.master_url)
        self.master_proxy.subscribe()
        if options.hostname is None:
            self.hostname = self.master_proxy.get_public_hostname()
        else:
            self.hostname = options.hostname
        self.lighty_conf_template = options.lighty_conf
        if options.blockstore is None:
            self.static_content_root = tempfile.mkdtemp(
                prefix=os.getenv('TEMP', default='/tmp/sw-files-'))
        else:
            self.static_content_root = options.blockstore
        block_store_dir = os.path.join(self.static_content_root, "data")
        try:
            os.mkdir(block_store_dir)
        except:
            pass
        self.block_store = BlockStore(
            self.hostname,
            self.port,
            block_store_dir,
            ignore_blocks=options.ignore_blocks)
        self.block_store.build_pin_set()
        self.block_store.check_local_blocks()
        create_watcher_thread(bus, self.block_store)
        self.upload_deferred_work = DeferredWorkPlugin(bus, 'upload_work')
        self.upload_deferred_work.subscribe()
        self.upload_manager = UploadManager(self.block_store,
                                            self.upload_deferred_work)
        self.execution_features = ExecutionFeatures()
        #self.task_executor = TaskExecutorPlugin(bus, self, self.master_proxy, self.execution_features, 1)
        #self.task_executor.subscribe()

        self.scheduling_classes = parse_scheduling_class_option(
            options.scheduling_classes, options.num_threads)

        self.multiworker = MultiWorker(ciel.engine, self)
        self.multiworker.subscribe()
        self.process_pool = ProcessPool(
            bus, self, self.execution_features.process_cacheing_executors)
        self.process_pool.subscribe()
        self.runnable_executors = self.execution_features.runnable_executors.keys()
        self.server_root = WorkerRoot(self)
        self.pinger = Pinger(bus, self.master_proxy, None, 30)
        self.pinger.subscribe()
        self.stopping = False
        self.event_log = []
        self.log_lock = Lock()
        self.log_condition = Condition(self.log_lock)

        self.cherrypy_conf = {}

        cherrypy.config.update({"server.thread_pool": 20})

        if options.staticbase is not None:
            self.cherrypy_conf["/skyweb"] = {
                "tools.staticdir.on": True,
                "tools.staticdir.dir": options.staticbase
            }

        self.subscribe()

    def subscribe(self):
        self.bus.subscribe('stop', self.stop, priority=10)
        self.bus.subscribe("worker_event", self.add_log_entry)

    def unsubscribe(self):
        self.bus.unsubscribe('stop', self.stop)
        self.bus.unsubscribe("worker_event", self.add_log_entry)

    def netloc(self):
        return '%s:%d' % (self.hostname, self.port)

    def as_descriptor(self):
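        # A plain-dict summary of this worker: network location, runnable
        # executors, whether it currently holds any blocks, and its
        # scheduling classes.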
        return {
            'netloc': self.netloc(),
            'features': self.runnable_executors,
            'has_blocks': not self.block_store.is_empty(),
            'scheduling_classes': self.scheduling_classes
        }

    def set_master(self, master_details):
        self.master_url = master_details['master']
        self.master_proxy.change_master(self.master_url)
        self.pinger.poke()

    def start_running(self):

        app = cherrypy.tree.mount(self.server_root, "", self.cherrypy_conf)

        if self.lighty_conf_template is not None:
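            # Serve the application over FastCGI behind lighttpd instead of
            # CherryPy's built-in HTTP server.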

            lighty = LighttpdAdapter(ciel.engine, self.lighty_conf_template,
                                     self.static_content_root, self.port)
            lighty.subscribe()
            # Zap CherryPy's original flavour server
            cherrypy.server.unsubscribe()
            server = cherrypy.process.servers.FlupFCGIServer(
                application=app, bindAddress=lighty.socket_path)
            adapter = cherrypy.process.servers.ServerAdapter(
                cherrypy.engine,
                httpserver=server,
                bind_addr=lighty.socket_path)
            # Insert a FastCGI server in its place
            adapter.subscribe()

        ciel.engine.start()
        if hasattr(ciel.engine, "signal_handler"):
            ciel.engine.signal_handler.subscribe()
        if hasattr(ciel.engine, "console_control_handler"):
            ciel.engine.console_control_handler.subscribe()
        ciel.engine.block()

    def stop(self):
        with self.log_lock:
            self.stopping = True
            self.log_condition.notify_all()

    def submit_task(self, task_descriptor):
        ciel.engine.publish("worker_event",
                            "Start task " + repr(task_descriptor["task_id"]))
        ciel.engine.publish('execute_task', task_descriptor)

    def abort_task(self, task_id):
        ciel.engine.publish("worker_event", "Abort task " + repr(task_id))
        self.task_executor.abort_task(task_id)

    def add_log_entry(self, log_string):
        with self.log_lock:
            self.event_log.append((datetime.now(), log_string))
            self.log_condition.notify_all()

    def get_log_entries(self, start_index, end_index):
        with self.log_lock:
            return self.event_log[start_index:end_index]

    def await_log_entries_after(self, index):
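        # Block until the event log grows beyond the given index; raises if
        # the worker is stopping.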
        with self.log_lock:
            while len(self.event_log) <= int(index):
                if self.stopping == True:
                    break
                self.log_condition.wait()
            if self.stopping:
                raise Exception("Worker stopping")
Example #13
class Worker(plugins.SimplePlugin):
    
    def __init__(self, bus, port, options):
        plugins.SimplePlugin.__init__(self, bus)
        self.id = None
        self.port = port
        self.master_url = options.master
        self.master_proxy = MasterProxy(self, bus, self.master_url)
        self.master_proxy.subscribe()
        if options.hostname is None:
            self.hostname = self.master_proxy.get_public_hostname()
        else:
            self.hostname = options.hostname
        self.lighty_conf_template = options.lighty_conf
        if options.blockstore is None:
            self.static_content_root = tempfile.mkdtemp(prefix=os.getenv('TEMP', default='/tmp/sw-files-'))
        else:
            self.static_content_root = options.blockstore
        block_store_dir = os.path.join(self.static_content_root, "data")
        try:
            os.mkdir(block_store_dir)
        except:
            pass
        self.block_store = BlockStore(ciel.engine, self.hostname, self.port, block_store_dir, ignore_blocks=options.ignore_blocks, aux_listen_port=options.aux_port)
        self.block_store.subscribe()
        self.block_store.build_pin_set()
        self.block_store.check_local_blocks()
        create_watcher_thread(bus, self.block_store)
        self.upload_deferred_work = DeferredWorkPlugin(bus, 'upload_work')
        self.upload_deferred_work.subscribe()
        self.upload_manager = UploadManager(self.block_store, self.upload_deferred_work)
        
        self.execution_features = ExecutionFeatures()
        
        #self.task_executor = TaskExecutorPlugin(bus, self, self.master_proxy, self.execution_features, 1)
        #self.task_executor.subscribe()
        
        self.scheduling_classes = parse_scheduling_class_option(options.scheduling_classes, options.num_threads)
        
        self.multiworker = MultiWorker(ciel.engine, self)
        self.multiworker.subscribe()
        self.process_pool = ProcessPool(bus, self)
        self.process_pool.subscribe()
        self.runnable_executors = self.execution_features.runnable_executors.keys()
        self.server_root = WorkerRoot(self)
        self.pinger = Pinger(bus, self.master_proxy, None, 30)
        self.pinger.subscribe()
        self.stopping = False
        self.event_log = []
        self.log_lock = Lock()
        self.log_condition = Condition(self.log_lock)

        self.cherrypy_conf = {}

        cherrypy.config.update({"server.thread_pool" : 20})

        if options.staticbase is not None:
            self.cherrypy_conf["/skyweb"] = { "tools.staticdir.on": True, "tools.staticdir.dir": options.staticbase }

        self.subscribe()

    def subscribe(self):
        self.bus.subscribe('stop', self.stop, priority=10)
        self.bus.subscribe("worker_event", self.add_log_entry)
        
    def unsubscribe(self):
        self.bus.unsubscribe('stop', self.stop)
        self.bus.unsubscribe("worker_event", self.add_log_entry)

    def netloc(self):
        return '%s:%d' % (self.hostname, self.port)

    def as_descriptor(self):
        return {'netloc': self.netloc(), 'features': self.runnable_executors, 'has_blocks': not self.block_store.is_empty(), 'scheduling_classes': self.scheduling_classes}

    def set_master(self, master_details):
        self.master_url = master_details['master']
        self.master_proxy.change_master(self.master_url)
        self.pinger.poke()

    def start_running(self):

        app = cherrypy.tree.mount(self.server_root, "", self.cherrypy_conf)

        if self.lighty_conf_template is not None:

            lighty = LighttpdAdapter(ciel.engine, self.lighty_conf_template, self.static_content_root, self.port)
            lighty.subscribe()
            # Zap CherryPy's original flavour server
            cherrypy.server.unsubscribe()
            server = cherrypy.process.servers.FlupFCGIServer(application=app, bindAddress=lighty.socket_path)
            adapter = cherrypy.process.servers.ServerAdapter(cherrypy.engine, httpserver=server, bind_addr=lighty.socket_path)
            # Insert a FastCGI server in its place
            adapter.subscribe()

        ciel.engine.start()
        if hasattr(ciel.engine, "signal_handler"):
            ciel.engine.signal_handler.subscribe()
        if hasattr(ciel.engine, "console_control_handler"):
            ciel.engine.console_control_handler.subscribe()
        ciel.engine.block()

    def stop(self):
        with self.log_lock:
            self.stopping = True
            self.log_condition.notify_all()
    
    def submit_task(self, task_descriptor):
        ciel.engine.publish("worker_event", "Start task " + repr(task_descriptor["task_id"]))
        ciel.engine.publish('execute_task', task_descriptor)
                
    def abort_task(self, task_id):
        ciel.engine.publish("worker_event", "Abort task " + repr(task_id))
        self.task_executor.abort_task(task_id)

    def add_log_entry(self, log_string):
        with self.log_lock:
            self.event_log.append((datetime.now(), log_string))
            self.log_condition.notify_all()

    def get_log_entries(self, start_index, end_index):
        with self.log_lock:
            return self.event_log[start_index:end_index]

    def await_log_entries_after(self, index):
        with self.log_lock:
            while len(self.event_log) <= int(index):
                if self.stopping == True:
                    break
                self.log_condition.wait()
            if self.stopping:
                raise Exception("Worker stopping")
Example #14
def main():
    parser = OptionParser()
    parser.add_option("-m",
                      "--master",
                      action="store",
                      dest="master",
                      help="Master URI",
                      metavar="MASTER",
                      default=os.getenv("SW_MASTER"))
    parser.add_option(
        "-r",
        "--refs",
        action="store_true",
        dest="refs",
        help="Set this option to look up reference names in the master",
        default=False)
    parser.add_option("-j",
                      "--json",
                      action="store_true",
                      dest="json",
                      help="Set this option to use JSON pretty printing",
                      default=False)
    (options, args) = parser.parse_args()

    # Retrieves should work anyway; the arguments are mainly needed for storing
    # stuff.
    bs = BlockStore("dummy_hostname", "0", None)

    if options.refs:
        ref_ids = args

        for ref_id in ref_ids:

            # Fetch information about the ref from the master.
            h = httplib2.Http()
            _, content = h.request(
                urljoin(options.master, '/refs/%s' % ref_id), 'GET')
            ref_info = simplejson.loads(content,
                                        object_hook=json_decode_object_hook)
            ref = ref_info['ref']

            if options.json:
                obj = bs.retrieve_object_for_ref(ref, 'json')
                simplejson.dump(obj,
                                sys.stdout,
                                cls=SWReferenceJSONEncoder,
                                indent=4)
                print
            else:
                fh = bs.retrieve_object_for_ref(ref, 'handle')
                for line in fh:
                    sys.stdout.write(line)
                fh.close()

    else:
        urls = args

        for url in urls:
            if options.json:
                obj = bs.retrieve_object_for_ref(SWURLReference([url]), 'json')
                simplejson.dump(obj,
                                sys.stdout,
                                cls=SWReferenceJSONEncoder,
                                indent=4)
                print
            else:
                fh = bs.retrieve_object_for_ref(SWURLReference([url]),
                                                'handle')
                print fh
                for line in fh:
                    sys.stdout.write(line)
                fh.close()
Example #15
class Worker(plugins.SimplePlugin):
    
    def __init__(self, bus, hostname, port, options):
        plugins.SimplePlugin.__init__(self, bus)
        self.id = None
        self.hostname = hostname
        self.port = port
        self.master_url = options.master
        self.master_proxy = MasterProxy(self, bus, self.master_url)
        self.master_proxy.subscribe()
        if options.blockstore is None:
            block_store_dir = tempfile.mkdtemp(prefix=os.getenv('TEMP', default='/tmp/sw-files-'))
        else:
            block_store_dir = options.blockstore
        self.block_store = BlockStore(cherrypy.engine, self.hostname, self.port, block_store_dir, ignore_blocks=options.ignore_blocks)
        self.block_store.build_pin_set()
        self.upload_deferred_work = DeferredWorkPlugin(bus, 'upload_work')
        self.upload_deferred_work.subscribe()
        self.upload_manager = UploadManager(self.block_store, self.upload_deferred_work)
        self.execution_features = ExecutionFeatures()
        self.task_executor = TaskExecutorPlugin(bus, self.block_store, self.master_proxy, self.execution_features, 1)
        self.task_executor.subscribe()
        self.server_root = WorkerRoot(self)
        self.pinger = Pinger(bus, self.master_proxy, None, 30)
        self.pinger.subscribe()
        self.stopping = False
        self.event_log = []
        self.log_lock = Lock()
        self.log_condition = Condition(self.log_lock)

        self.cherrypy_conf = {}
    
        if options.staticbase is not None:
            self.cherrypy_conf["/skyweb"] = { "tools.staticdir.on": True, "tools.staticdir.dir": options.staticbase }
        if options.lib is not None:
            self.cherrypy_conf["/stdlib"] = { "tools.staticdir.on": True, "tools.staticdir.dir": options.lib }

        self.subscribe()

    def subscribe(self):
        self.bus.subscribe('stop', self.stop, priority=10)
        self.bus.subscribe("worker_event", self.add_log_entry)
        
    def unsubscribe(self):
        self.bus.unsubscribe('stop', self.stop)
        self.bus.unsubscribe("worker_event", self.add_log_entry)
        
    def netloc(self):
        return '%s:%d' % (self.hostname, self.port)

    def as_descriptor(self):
        return {'netloc': self.netloc(), 'features': self.execution_features.all_features(), 'has_blocks': not self.block_store.is_empty()}

    def set_master(self, master_details):
        self.master_url = master_details['master']
        self.master_proxy.change_master(self.master_url)
        self.pinger.poke()

    def start_running(self):

        cherrypy.engine.start()
        cherrypy.tree.mount(self.server_root, "", self.cherrypy_conf)
        if hasattr(cherrypy.engine, "signal_handler"):
            cherrypy.engine.signal_handler.subscribe()
        if hasattr(cherrypy.engine, "console_control_handler"):
            cherrypy.engine.console_control_handler.subscribe()
        cherrypy.engine.block()

    def stop(self):
        with self.log_lock:
            self.stopping = True
            self.log_condition.notify_all()
    
    def submit_task(self, task_descriptor):
        cherrypy.engine.publish("worker_event", "Start task " + repr(task_descriptor["task_id"]))
        cherrypy.engine.publish('execute_task', task_descriptor)
                
    def abort_task(self, task_id):
        cherrypy.engine.publish("worker_event", "Abort task " + repr(task_id))
        self.task_executor.abort_task(task_id)

    def notify_task_streams_done(self, task_id):
        self.task_executor.notify_streams_done(task_id)

    def add_log_entry(self, log_string):
        with self.log_lock:
            self.event_log.append((datetime.now(), log_string))
            self.log_condition.notify_all()

    def get_log_entries(self, start_index, end_index):
        with self.log_lock:
            return self.event_log[start_index:end_index]

    def await_log_entries_after(self, index):
        with self.log_lock:
            while len(self.event_log) <= int(index):
                if self.stopping == True:
                    break
                self.log_condition.wait()
            if self.stopping:
                raise Exception("Worker stopping")
Example #16
def master_main(options):

    deferred_worker = DeferredWorkPlugin(ciel.engine)
    deferred_worker.subscribe()

    worker_pool = WorkerPool(ciel.engine, deferred_worker, None)
    worker_pool.subscribe()

    task_failure_investigator = TaskFailureInvestigator(worker_pool, deferred_worker)
    
    job_pool = JobPool(ciel.engine, options.journaldir, None, task_failure_investigator, deferred_worker, worker_pool)
    job_pool.subscribe()
    
    worker_pool.job_pool = job_pool

    backup_sender = BackupSender(cherrypy.engine)
    backup_sender.subscribe()

    if options.hostname is not None:
        local_hostname = options.hostname
    else:
        local_hostname = socket.getfqdn()
    local_port = cherrypy.config.get('server.socket_port')
    master_netloc = '%s:%d' % (local_hostname, local_port)
    ciel.log('Local port is %d' % local_port, 'STARTUP', logging.INFO)
    
    if options.blockstore is None:
        static_content_root = tempfile.mkdtemp(prefix=os.getenv('TEMP', default='/tmp/sw-files-'))
    else:
        static_content_root = options.blockstore
    block_store_dir = os.path.join(static_content_root, "data")
    try:
        os.mkdir(block_store_dir)
    except:
        pass

    block_store = BlockStore(ciel.engine, local_hostname, local_port, block_store_dir)
    block_store.subscribe()
    block_store.build_pin_set()
    block_store.check_local_blocks()

    if options.master is not None:
        monitor = MasterRecoveryMonitor(cherrypy.engine, 'http://%s/' % master_netloc, options.master, job_pool)
        monitor.subscribe()
    else:
        monitor = None

    recovery_manager = RecoveryManager(ciel.engine, job_pool, block_store, deferred_worker)
    recovery_manager.subscribe()
  
    root = MasterRoot(worker_pool, block_store, job_pool, backup_sender, monitor)

    cherrypy.config.update({"server.thread_pool" : 50})

    cherrypy_conf = dict()
    
    if options.staticbase is not None:
        cherrypy_conf["/skyweb"] = { "tools.staticdir.on": True, "tools.staticdir.dir": options.staticbase }

    app = cherrypy.tree.mount(root, "", cherrypy_conf)
    lighty_conf_template = options.lighty_conf
    if lighty_conf_template is not None:
        lighty = LighttpdAdapter(ciel.engine, lighty_conf_template, static_content_root, local_port)
        lighty.subscribe()
        # Zap CherryPy's original flavour server
        cherrypy.server.unsubscribe()
        server = cherrypy.process.servers.FlupFCGIServer(application=app, bindAddress=lighty.socket_path)
        adapter = cherrypy.process.servers.ServerAdapter(cherrypy.engine, httpserver=server, bind_addr=lighty.socket_path)
        # Insert a FastCGI server in its place
        adapter.subscribe()
    
    if hasattr(ciel.engine, "signal_handler"):
        ciel.engine.signal_handler.subscribe()
    if hasattr(ciel.engine, "console_control_handler"):
        ciel.engine.console_control_handler.subscribe()

    ciel.engine.start()
    
    if options.workerlist is not None:
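        # Pre-register any workers listed in the file by POSTing the master's
        # details to each one; registration then completes via a callback.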
        master_details = {'netloc': master_netloc}
        master_details_as_json = simplejson.dumps(master_details)
        with (open(options.workerlist, "r")) as f:
            for worker_url in f.readlines():
                try:
                    post_string(urllib2.urlparse.urljoin(worker_url, 'control/master/'), master_details_as_json)
                    # Worker will be created by a callback.
                except:
                    ciel.log.error("Error adding worker: %s" % (worker_url, ), "WORKER", logging.WARNING)
                    
    ciel.engine.block()
Example #17
def master_main(options):

    create_pycurl_thread(ciel.engine)

    deferred_worker = DeferredWorkPlugin(ciel.engine)
    deferred_worker.subscribe()

    worker_pool = WorkerPool(ciel.engine, deferred_worker, None)
    worker_pool.subscribe()

    task_failure_investigator = TaskFailureInvestigator(worker_pool, deferred_worker)
    
    job_pool = JobPool(ciel.engine, options.journaldir, None, task_failure_investigator, deferred_worker, worker_pool)
    job_pool.subscribe()
    
    worker_pool.job_pool = job_pool

    backup_sender = BackupSender(cherrypy.engine)
    backup_sender.subscribe()

    if options.hostname is not None:
        local_hostname = options.hostname
    else:
        local_hostname = socket.getfqdn()
    local_port = cherrypy.config.get('server.socket_port')
    master_netloc = '%s:%d' % (local_hostname, local_port)
    ciel.log('Local port is %d' % local_port, 'STARTUP', logging.INFO)
    
    if options.blockstore is None:
        static_content_root = tempfile.mkdtemp(prefix=os.getenv('TEMP', default='/tmp/sw-files-'))
    else:
        static_content_root = options.blockstore
    block_store_dir = os.path.join(static_content_root, "data")
    try:
        os.mkdir(block_store_dir)
    except:
        pass

    block_store = BlockStore(local_hostname, local_port, block_store_dir)
    block_store.build_pin_set()
    block_store.check_local_blocks()

    if options.master is not None:
        monitor = MasterRecoveryMonitor(cherrypy.engine, 'http://%s/' % master_netloc, options.master, job_pool)
        monitor.subscribe()
    else:
        monitor = None

    recovery_manager = RecoveryManager(ciel.engine, job_pool, block_store, deferred_worker)
    recovery_manager.subscribe()
  
    root = MasterRoot(worker_pool, block_store, job_pool, backup_sender, monitor)

    cherrypy.config.update({"server.thread_pool" : 50})

    cherrypy_conf = dict()
    
    if options.staticbase is not None:
        cherrypy_conf["/skyweb"] = { "tools.staticdir.on": True, "tools.staticdir.dir": options.staticbase }

    app = cherrypy.tree.mount(root, "", cherrypy_conf)
    lighty_conf_template = options.lighty_conf
    if lighty_conf_template is not None:
        lighty = LighttpdAdapter(ciel.engine, lighty_conf_template, static_content_root, local_port)
        lighty.subscribe()
        # Zap CherryPy's original flavour server
        cherrypy.server.unsubscribe()
        server = cherrypy.process.servers.FlupFCGIServer(application=app, bindAddress=lighty.socket_path)
        adapter = cherrypy.process.servers.ServerAdapter(cherrypy.engine, httpserver=server, bind_addr=lighty.socket_path)
        # Insert a FastCGI server in its place
        adapter.subscribe()
    
    if hasattr(ciel.engine, "signal_handler"):
        ciel.engine.signal_handler.subscribe()
    if hasattr(ciel.engine, "console_control_handler"):
        ciel.engine.console_control_handler.subscribe()

    ciel.engine.start()
    
    if options.workerlist is not None:
        master_details = {'netloc': master_netloc}
        master_details_as_json = simplejson.dumps(master_details)
        with (open(options.workerlist, "r")) as f:
            for worker_url in f.readlines():
                try:
                    post_string(urllib2.urlparse.urljoin(worker_url, 'control/master/'), master_details_as_json)
                    # Worker will be created by a callback.
                except:
                    ciel.log.error("Error adding worker: %s" % (worker_url, ), "WORKER", logging.WARNING)
                    
    ciel.engine.block()
Example #18
class Worker(plugins.SimplePlugin):
    def __init__(self, bus, hostname, port, options):
        plugins.SimplePlugin.__init__(self, bus)
        self.id = None
        self.hostname = hostname
        self.port = port
        self.master_url = options.master
        self.master_proxy = MasterProxy(self, bus, self.master_url)
        self.master_proxy.subscribe()
        if options.blockstore is None:
            block_store_dir = tempfile.mkdtemp(
                prefix=os.getenv('TEMP', default='/tmp/sw-files-'))
        else:
            block_store_dir = options.blockstore
        self.block_store = BlockStore(cherrypy.engine,
                                      self.hostname,
                                      self.port,
                                      block_store_dir,
                                      ignore_blocks=options.ignore_blocks)
        self.block_store.build_pin_set()
        self.upload_deferred_work = DeferredWorkPlugin(bus, 'upload_work')
        self.upload_deferred_work.subscribe()
        self.upload_manager = UploadManager(self.block_store,
                                            self.upload_deferred_work)
        self.execution_features = ExecutionFeatures()
        self.task_executor = TaskExecutorPlugin(bus, self.block_store,
                                                self.master_proxy,
                                                self.execution_features, 1)
        self.task_executor.subscribe()
        self.server_root = WorkerRoot(self)
        self.pinger = Pinger(bus, self.master_proxy, None, 30)
        self.pinger.subscribe()
        self.stopping = False
        self.event_log = []
        self.log_lock = Lock()
        self.log_condition = Condition(self.log_lock)

        self.cherrypy_conf = {}

        cherrypy.config.update({"server.thread_pool": 20})

        if options.staticbase is not None:
            self.cherrypy_conf["/skyweb"] = {
                "tools.staticdir.on": True,
                "tools.staticdir.dir": options.staticbase
            }
        if options.lib is not None:
            self.cherrypy_conf["/stdlib"] = {
                "tools.staticdir.on": True,
                "tools.staticdir.dir": options.lib
            }

        self.subscribe()

    def subscribe(self):
        self.bus.subscribe('stop', self.stop, priority=10)
        self.bus.subscribe("worker_event", self.add_log_entry)

    def unsubscribe(self):
        self.bus.unsubscribe('stop', self.stop)
        self.bus.unsubscribe("worker_event", self.add_log_entry)

    def netloc(self):
        return '%s:%d' % (self.hostname, self.port)

    def as_descriptor(self):
        return {
            'netloc': self.netloc(),
            'features': self.execution_features.all_features(),
            'has_blocks': not self.block_store.is_empty()
        }

    def set_master(self, master_details):
        self.master_url = master_details['master']
        self.master_proxy.change_master(self.master_url)
        self.pinger.poke()

    def start_running(self):

        cherrypy.engine.start()
        cherrypy.tree.mount(self.server_root, "", self.cherrypy_conf)
        if hasattr(cherrypy.engine, "signal_handler"):
            cherrypy.engine.signal_handler.subscribe()
        if hasattr(cherrypy.engine, "console_control_handler"):
            cherrypy.engine.console_control_handler.subscribe()
        cherrypy.engine.block()

    def stop(self):
        with self.log_lock:
            self.stopping = True
            self.log_condition.notify_all()

    def submit_task(self, task_descriptor):
        cherrypy.engine.publish(
            "worker_event", "Start task " + repr(task_descriptor["task_id"]))
        cherrypy.engine.publish('execute_task', task_descriptor)

    def abort_task(self, task_id):
        cherrypy.engine.publish("worker_event", "Abort task " + repr(task_id))
        self.task_executor.abort_task(task_id)

    def notify_task_streams_done(self, task_id):
        self.task_executor.notify_streams_done(task_id)

    def add_log_entry(self, log_string):
        with self.log_lock:
            self.event_log.append((datetime.now(), log_string))
            self.log_condition.notify_all()

    def get_log_entries(self, start_index, end_index):
        with self.log_lock:
            return self.event_log[start_index:end_index]

    def await_log_entries_after(self, index):
        with self.log_lock:
            while len(self.event_log) <= int(index):
                if self.stopping == True:
                    break
                self.log_condition.wait()
            if self.stopping:
                raise Exception("Worker stopping")
Example #19
def master_main(options):

    deferred_worker = DeferredWorkPlugin(cherrypy.engine)
    deferred_worker.subscribe()

    global_name_directory = GlobalNameDirectory(cherrypy.engine)
    global_name_directory.subscribe()

    worker_pool = WorkerPool(cherrypy.engine, deferred_worker)
    worker_pool.subscribe()

    lazy_task_pool = LazyTaskPool(cherrypy.engine, worker_pool)
    task_pool_adapter = LazyTaskPoolAdapter(lazy_task_pool)
    lazy_task_pool.subscribe()
    
    task_failure_investigator = TaskFailureInvestigator(lazy_task_pool, worker_pool, deferred_worker)
    
    job_pool = JobPool(cherrypy.engine, lazy_task_pool, options.journaldir, global_name_directory)
    job_pool.subscribe()

    backup_sender = BackupSender(cherrypy.engine)
    backup_sender.subscribe()

    local_hostname = socket.getfqdn()
    local_port = cherrypy.config.get('server.socket_port')
    master_netloc = '%s:%d' % (local_hostname, local_port)
    print 'Local port is', local_port
    
    if options.blockstore is None:
        block_store_dir = tempfile.mkdtemp(prefix=os.getenv('TEMP', default='/tmp/sw-files-'))
    else:
        block_store_dir = options.blockstore

    block_store = BlockStore(cherrypy.engine, local_hostname, local_port, block_store_dir)
    block_store.build_pin_set()

    if options.master is not None:
        monitor = MasterRecoveryMonitor(cherrypy.engine, 'http://%s/' % master_netloc, options.master, job_pool)
        monitor.subscribe()
    else:
        monitor = None

    recovery_manager = RecoveryManager(cherrypy.engine, job_pool, lazy_task_pool, block_store, deferred_worker)
    recovery_manager.subscribe()

    scheduler = LazyScheduler(cherrypy.engine, lazy_task_pool, worker_pool)
    scheduler.subscribe()
    
    root = MasterRoot(task_pool_adapter, worker_pool, block_store, global_name_directory, job_pool, backup_sender, monitor, task_failure_investigator)

    cherrypy.config.update({"server.thread_pool" : 50})

    cherrypy_conf = dict()
    
    if options.staticbase is not None:
        cherrypy_conf["/skyweb"] = { "tools.staticdir.on": True, "tools.staticdir.dir": options.staticbase }

    cherrypy.tree.mount(root, "", cherrypy_conf)
    
    if hasattr(cherrypy.engine, "signal_handler"):
        cherrypy.engine.signal_handler.subscribe()
    if hasattr(cherrypy.engine, "console_control_handler"):
        cherrypy.engine.console_control_handler.subscribe()

    cherrypy.engine.start()
    
    
    
    if options.workerlist is not None:
        master_details = {'netloc': master_netloc}
        master_details_as_json = simplejson.dumps(master_details)
        with (open(options.workerlist, "r")) as f:
            for worker_url in f.readlines():
                try:
                    http = httplib2.Http()
                    http.request(urllib2.urlparse.urljoin(worker_url, '/master/'), "POST", master_details_as_json)
                    # Worker will be created by a callback.
                except:
                    cherrypy.log.error("Error adding worker: %s" % (worker_url, ), "WORKER", logging.WARNING)
                    
    cherrypy.engine.block()
Example #20
    def __init__(self, bus, port, options):
        plugins.SimplePlugin.__init__(self, bus)

        create_pycurl_thread(bus)
        if options.aux_port is not None:
            create_tcp_server(options.aux_port)

        self.id = None
        self.port = port
        self.master_url = options.master
        self.master_proxy = MasterProxy(self, bus, self.master_url)
        self.master_proxy.subscribe()
        if options.hostname is None:
            self.hostname = self.master_proxy.get_public_hostname()
        else:
            self.hostname = options.hostname
        self.lighty_conf_template = options.lighty_conf
        if options.blockstore is None:
            self.static_content_root = tempfile.mkdtemp(
                prefix=os.getenv('TEMP', default='/tmp/sw-files-'))
        else:
            self.static_content_root = options.blockstore
        block_store_dir = os.path.join(self.static_content_root, "data")
        try:
            os.mkdir(block_store_dir)
        except:
            pass
        self.block_store = BlockStore(
            self.hostname,
            self.port,
            block_store_dir,
            ignore_blocks=options.ignore_blocks)
        self.block_store.build_pin_set()
        self.block_store.check_local_blocks()
        create_watcher_thread(bus, self.block_store)
        self.upload_deferred_work = DeferredWorkPlugin(bus, 'upload_work')
        self.upload_deferred_work.subscribe()
        self.upload_manager = UploadManager(self.block_store,
                                            self.upload_deferred_work)
        self.execution_features = ExecutionFeatures()
        #self.task_executor = TaskExecutorPlugin(bus, self, self.master_proxy, self.execution_features, 1)
        #self.task_executor.subscribe()

        self.scheduling_classes = parse_scheduling_class_option(
            options.scheduling_classes, options.num_threads)

        self.multiworker = MultiWorker(ciel.engine, self)
        self.multiworker.subscribe()
        self.process_pool = ProcessPool(
            bus, self, self.execution_features.process_cacheing_executors)
        self.process_pool.subscribe()
        self.runnable_executors = self.execution_features.runnable_executors.keys()
        self.server_root = WorkerRoot(self)
        self.pinger = Pinger(bus, self.master_proxy, None, 30)
        self.pinger.subscribe()
        self.stopping = False
        self.event_log = []
        self.log_lock = Lock()
        self.log_condition = Condition(self.log_lock)

        self.cherrypy_conf = {}

        cherrypy.config.update({"server.thread_pool": 20})

        if options.staticbase is not None:
            self.cherrypy_conf["/skyweb"] = {
                "tools.staticdir.on": True,
                "tools.staticdir.dir": options.staticbase
            }

        self.subscribe()
Example #21
    def __init__(self, bus, hostname, port, base_dir, lib, coreid, ignore_blocks=False):
        BlockStore.__init__(self, bus, hostname, port, base_dir, ignore_blocks)
        self.lib = lib
        self.coreid = coreid
Example #22
def master_main(options):

    deferred_worker = DeferredWorkPlugin(cherrypy.engine)
    deferred_worker.subscribe()

    global_name_directory = GlobalNameDirectory(cherrypy.engine)
    global_name_directory.subscribe()

    worker_pool = WorkerPool(cherrypy.engine, deferred_worker)
    worker_pool.subscribe()

    lazy_task_pool = LazyTaskPool(cherrypy.engine, worker_pool)
    task_pool_adapter = LazyTaskPoolAdapter(lazy_task_pool)
    lazy_task_pool.subscribe()

    task_failure_investigator = TaskFailureInvestigator(
        lazy_task_pool, worker_pool, deferred_worker)

    job_pool = JobPool(cherrypy.engine, lazy_task_pool, options.journaldir,
                       global_name_directory)
    job_pool.subscribe()

    backup_sender = BackupSender(cherrypy.engine)
    backup_sender.subscribe()

    local_hostname = socket.getfqdn()
    local_port = cherrypy.config.get('server.socket_port')
    master_netloc = '%s:%d' % (local_hostname, local_port)
    print 'Local port is', local_port

    if options.blockstore is None:
        block_store_dir = tempfile.mkdtemp(
            prefix=os.getenv('TEMP', default='/tmp/sw-files-'))
    else:
        block_store_dir = options.blockstore

    block_store = BlockStore(cherrypy.engine, local_hostname, local_port,
                             block_store_dir)
    block_store.build_pin_set()

    if options.master is not None:
        monitor = MasterRecoveryMonitor(cherrypy.engine,
                                        'http://%s/' % master_netloc,
                                        options.master, job_pool)
        monitor.subscribe()
    else:
        monitor = None

    recovery_manager = RecoveryManager(cherrypy.engine, job_pool,
                                       lazy_task_pool, block_store,
                                       deferred_worker)
    recovery_manager.subscribe()

    scheduler = LazyScheduler(cherrypy.engine, lazy_task_pool, worker_pool)
    scheduler.subscribe()

    root = MasterRoot(task_pool_adapter, worker_pool, block_store,
                      global_name_directory, job_pool, backup_sender, monitor,
                      task_failure_investigator)

    cherrypy.config.update({"server.thread_pool": 50})

    cherrypy_conf = dict()

    if options.staticbase is not None:
        cherrypy_conf["/skyweb"] = {
            "tools.staticdir.on": True,
            "tools.staticdir.dir": options.staticbase
        }

    cherrypy.tree.mount(root, "", cherrypy_conf)

    if hasattr(cherrypy.engine, "signal_handler"):
        cherrypy.engine.signal_handler.subscribe()
    if hasattr(cherrypy.engine, "console_control_handler"):
        cherrypy.engine.console_control_handler.subscribe()

    cherrypy.engine.start()

    if options.workerlist is not None:
        master_details = {'netloc': master_netloc}
        master_details_as_json = simplejson.dumps(master_details)
        with (open(options.workerlist, "r")) as f:
            for worker_url in f.readlines():
                try:
                    http = httplib2.Http()
                    http.request(
                        urllib2.urlparse.urljoin(worker_url, '/master/'),
                        "POST", master_details_as_json)
                    # Worker will be created by a callback.
                except:
                    cherrypy.log.error(
                        "Error adding worker: %s" % (worker_url, ), "WORKER",
                        logging.WARNING)

    cherrypy.engine.block()