Example #1
    def on_any_event(self, event):

        if self.ignored(event):
            return

        pipeline = Pipeline(event.src_path)
        pipeline.process()
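
The on_any_event signature here matches the handler interface of the watchdog library, so this snippet is presumably part of a FileSystemEventHandler subclass. Below is a minimal sketch of how such a handler could be wired to an observer; the PipelineHandler name, the watched path, and the placeholder ignored() are assumptions, while Pipeline(event.src_path).process() is taken from the snippet itself.

import time

from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer

# Pipeline is the project's own class used above; its import path is not shown in the snippet.


class PipelineHandler(FileSystemEventHandler):

    def ignored(self, event):
        # placeholder filter; the real ignored() is not shown in the snippet
        return event.is_directory

    def on_any_event(self, event):
        if self.ignored(event):
            return
        pipeline = Pipeline(event.src_path)
        pipeline.process()


observer = Observer()
observer.schedule(PipelineHandler(), path="/some/watched/dir", recursive=True)
observer.start()
try:
    while True:
        time.sleep(1)
finally:
    observer.stop()
    observer.join()
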
Example #2
    def __init__(self):
        '''
        Constructor
        '''
        Pipeline.__init__(self)
        self.generators = {}
        self.loadGenerators()
Example #3
    def __init__(self):
        '''
        Constructor
        '''
        Pipeline.__init__(self)
        self.tools = {}
        self.loadTools(Tool)
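
Examples #2 and #3 show the same construction pattern: a Pipeline subclass whose constructor delegates to Pipeline.__init__ and then fills a registry dictionary via a load method. A minimal sketch of that pattern follows; the ToolPipeline name and the subclass-discovery logic in loadTools are illustrative assumptions, since the real method bodies are not part of the snippets.

class ToolPipeline(Pipeline):

    def __init__(self):
        Pipeline.__init__(self)
        self.tools = {}
        self.loadTools(Tool)

    def loadTools(self, base_class):
        # assumption: register an instance of every subclass of the given base class, keyed by class name
        for cls in base_class.__subclasses__():
            self.tools[cls.__name__] = cls()
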
Example #4
    def on_book_modified(self):
        # Produce newspaper only when it was automatically triggered (by the newspaper trigger thread)
        main_event = Pipeline.get_main_event(self.book)
        if main_event != "autotriggered":
            self.utils.report.should_email = False
            self.utils.report.info(
                "Newpaper was not automatically triggered. Ignoring '{}' event."
                .format(main_event))
            if self.stopAfterNJobs > 0:
                self.stopAfterNJobs += 1
            return True

        return self.on_book()
Example #5
def init_agent():
    services, workers, session, _ = parse_old_config()
    endpoint = Service('cmd_responder', EventSetOutputConnector('cmd_responder').send,
                       StateManager.save_dialog_dict, 1, ['responder'])
    input_srv = Service('input', None, StateManager.add_human_utterance_simple_dict, 1, ['input'])
    pipeline = Pipeline(services)
    pipeline.add_responder_service(endpoint)
    pipeline.add_input_service(input_srv)
    agent = Agent(pipeline, StateManager())
    return agent, session
Example #6
def prepare_agent(services, endpoint: Service, input_serv: Service,
                  use_response_logger: bool):
    pipeline = Pipeline(services)
    pipeline.add_responder_service(endpoint)
    pipeline.add_input_service(input_serv)
    if use_response_logger:
        response_logger_callable = response_logger
    else:
        response_logger_callable = None
    agent = Agent(pipeline,
                  StateManager(),
                  response_logger_callable=response_logger_callable)
    return agent.register_msg, agent.process
Example #7
"""
main.py
"""
import sys
from components.log_server import LogServer
from core.pipeline import Pipeline

# start GUI
if __name__ == '__main__':
    LOGSERVER = LogServer()
    PIPELINE = Pipeline(LOGSERVER, sys.argv[1])
    PIPELINE.run()
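
Presumably this script is launched from the command line as something like python main.py <argument>; what sys.argv[1] should contain (a configuration path, a pipeline name, or something else) depends on this project's Pipeline constructor, which is not shown here.
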
Example #8
class PipelineTest(unittest.TestCase):
    target = os.path.normpath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'target', 'unittest'))
    os.environ["CACHE_DIR"] = os.path.join(target, 'cache')
    dir_base = {"master": target}
    dir_in = os.path.join(target, 'in')
    dir_out = os.path.join(target, 'out')
    dir_reports = os.path.join(target, 'reports')
    pipeline = None

    def setUp(self):
        print("----------------------------------------")
        print("TEST: setUp")
        #  logging.getLogger().setLevel(logging.DEBUG)
        Metadata.requests_get = lambda url, cache_timeout=30: None  # don't perform HTTP requests
        Metadata.refresh_old_books_cache_if_necessary = lambda report=None: None  # don't perform HTTP requests
        if os.path.exists(self.target):
            shutil.rmtree(self.target)
        os.makedirs(self.dir_in)
        os.makedirs(self.dir_out)
        os.makedirs(self.dir_reports)
        self.pipeline = Pipeline(
            during_working_hours=True,
            during_night_and_weekend=True,
            _title="test",
            _uid="test"
        )
        self.pipeline._inactivity_timeout = 1

    def tearDown(self):
        time.sleep(2)
        self.pipeline.stop()
        self.pipeline.join()
        time.sleep(2)
        shutil.rmtree(self.target)
        #  logging.getLogger().setLevel(logging.INFO)

    def waitUntil(self, max_time, condition_text, fn):
        start = time.time()
        while time.time() - start < max_time:
            print("status: {}, queue: {}".format(self.pipeline.get_status(), ",".join([b["name"] for b in self.pipeline._queue])))
            if fn(self):
                return
            time.sleep(0.5)
        self.assertEqual(fn(self), True, condition_text)

    def test_file(self):
        print("TEST: " + inspect.stack()[0][3])

        self.pipeline._handle_book_events_thread = lambda: None  # disable handling of book events (prevent emptying _md5 variable)
        self.pipeline.start(inactivity_timeout=2, dir_in=self.dir_in, dir_out=self.dir_out, dir_reports=self.dir_reports, dir_base=self.dir_base)
        time.sleep(2)
        self.assertEqual(len(self.pipeline.dir_in_obj._md5), 0)

        Path(os.path.join(self.dir_in, '1_foo.epub')).touch()
        time.sleep(2)
        self.assertEqual(len(self.pipeline.dir_in_obj._md5), 1)
        self.assertEqual(len([b for b in self.pipeline.dir_in_obj._md5 if b == '1_foo.epub']), 1)
        self.assertEqual(len(self.pipeline._queue), 1)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '1_foo.epub']), 1)

        with open(os.path.join(self.dir_in, '1_foo.epub'), "a") as f:
            f.write("2_bar")
        time.sleep(2)
        self.assertEqual(len(self.pipeline.dir_in_obj._md5), 1)
        self.assertEqual(len([b for b in self.pipeline.dir_in_obj._md5 if b == '1_foo.epub']), 1)
        self.assertEqual(len(self.pipeline._queue), 1)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '1_foo.epub']), 1)

        shutil.move(os.path.join(self.dir_in, '1_foo.epub'), os.path.join(self.dir_in, '2_bar.epub'))
        time.sleep(2)
        self.assertEqual(len(self.pipeline.dir_in_obj._md5), 1)
        self.assertEqual(len([b for b in self.pipeline.dir_in_obj._md5 if b == '2_bar.epub']), 1)
        self.assertEqual(len(self.pipeline._queue), 2)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '1_foo.epub']), 1)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '2_bar.epub']), 1)

        with open(os.path.join(self.dir_in, '3_baz.epub'), "a") as f:
            f.write("3_baz")
        time.sleep(2)
        self.assertEqual(len(self.pipeline.dir_in_obj._md5), 2)
        self.assertEqual(len([b for b in self.pipeline.dir_in_obj._md5 if b == '2_bar.epub']), 1)
        self.assertEqual(len([b for b in self.pipeline.dir_in_obj._md5 if b == '3_baz.epub']), 1)
        self.assertEqual(len(self.pipeline._queue), 3)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '1_foo.epub']), 1)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '2_bar.epub']), 1)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '3_baz.epub']), 1)

        os.remove(os.path.join(self.dir_in, '2_bar.epub'))
        time.sleep(2)
        self.assertEqual(len(self.pipeline.dir_in_obj._md5), 1)
        self.assertEqual(len([b for b in self.pipeline.dir_in_obj._md5 if b == '3_baz.epub']), 1)
        self.assertEqual(len(self.pipeline._queue), 3)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '1_foo.epub']), 1)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '2_bar.epub']), 1)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '3_baz.epub']), 1)

    def test_folder(self):
        print("TEST: " + inspect.stack()[0][3])

        # create three books before starting the pipeline
        os.makedirs(os.path.join(self.dir_in, '1_book'))
        Path(os.path.join(self.dir_in, '1_book/ncc.html')).touch()
        os.makedirs(os.path.join(self.dir_in, '2_book'))
        Path(os.path.join(self.dir_in, '2_book/ncc.html')).touch()
        Path(os.path.join(self.dir_in, '2_book/image.png')).touch()
        os.makedirs(os.path.join(self.dir_in, '3_book'))
        Path(os.path.join(self.dir_in, '3_book/ncc.html')).touch()
        time.sleep(1)

        # start the pipeline
        self.pipeline._handle_book_events_thread = lambda: None  # disable handling of book events (prevent emptying _md5 variable)
        self.pipeline.start(inactivity_timeout=2, dir_in=self.dir_in, dir_out=self.dir_out, dir_reports=self.dir_reports, dir_base=self.dir_base)
        time.sleep(3)

        # there should be no books in the queue, even though there is a folder in the input directory
        self.assertEqual(len(self.pipeline._queue), 0)

        # modify the book
        Path(os.path.join(self.dir_in, '1_book/audio1.mp3')).touch()
        Path(os.path.join(self.dir_in, '1_book/audio2.mp3')).touch()
        Path(os.path.join(self.dir_in, '1_book/content.html')).touch()
        Path(os.path.join(self.dir_in, '1_book/image.png')).touch()
        time.sleep(3.5)

        # there should be 1 book in the queue
        self.assertEqual(len(self.pipeline.dir_in_obj._md5), 3)
        self.assertEqual("1_book" in self.pipeline.dir_in_obj._md5, True)
        self.assertEqual("2_book" in self.pipeline.dir_in_obj._md5, True)
        self.assertEqual("3_book" in self.pipeline.dir_in_obj._md5, True)
        self.assertEqual(len(self.pipeline._queue), 1)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '1_book']), 1)

        # move a file from 2_book to 3_book
        shutil.move(os.path.join(self.dir_in, '2_book/image.png'), os.path.join(self.dir_in, '3_book/image.png'))
        time.sleep(2)

        # now there should be 3 books in the queue
        self.assertEqual(len(self.pipeline.dir_in_obj._md5), 3)
        self.assertEqual(len([b for b in self.pipeline.dir_in_obj._md5 if b == '1_book']), 1)
        self.assertEqual(len([b for b in self.pipeline.dir_in_obj._md5 if b == '2_book']), 1)
        self.assertEqual(len([b for b in self.pipeline.dir_in_obj._md5 if b == '3_book']), 1)
        self.assertEqual(len(self.pipeline._queue), 3)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '1_book']), 1)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '2_book']), 1)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '3_book']), 1)

    def test_queue_handler(self):
        print("TEST: " + inspect.stack()[0][3])

        # start the pipeline
        self.pipeline.on_book_created = lambda: time.sleep(6)  # pretend like it takes a few seconds to handle a book
        self.pipeline.on_book_modified = lambda: time.sleep(6)  # pretend like it takes a few seconds to handle a book
        self.pipeline.on_book_deleted = lambda: time.sleep(6)  # pretend like it takes a few seconds to handle a book
        self.pipeline.start(inactivity_timeout=2, dir_in=self.dir_in, dir_out=self.dir_out, dir_reports=self.dir_reports, dir_base=self.dir_base)
        time.sleep(1)

        # There should be no books in the queue to begin with
        self.assertEqual(len(self.pipeline._queue), 0)

        # Create a book
        Path(os.path.join(self.dir_in, '1_book')).touch()

        self.waitUntil(10, "1_book in queue", lambda test: len(test.pipeline._queue) == 1)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '1_book']), 1)
        self.waitUntil(10, "1_book being processed", lambda test: test.pipeline.get_status() == "1_book")
        self.assertEqual(len(self.pipeline._queue), 0)

        # Create two more books while the first one is being processed
        Path(os.path.join(self.dir_in, '3_book')).touch()
        self.waitUntil(5, "3_book in queue", lambda test: "3_book" in [b["name"] for b in test.pipeline._queue])
        Path(os.path.join(self.dir_in, '2_book')).touch()
        self.waitUntil(5, "2_book in queue", lambda test: "2_book" in [b["name"] for b in test.pipeline._queue])

        self.assertEqual(len(self.pipeline._queue), 2)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '2_book']), 1)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '3_book']), 1)

        # wait until 1_book should have been processed and 2_book have started
        self.waitUntil(10, "2_book being processed", lambda test: test.pipeline.get_status() == "2_book")
        self.assertEqual(len(self.pipeline._queue), 1)
        self.assertEqual(len([b['name'] for b in self.pipeline._queue if b['name'] == '3_book']), 1)

        # wait until 2_book should have been processed and 3_book have started
        self.waitUntil(15, "3_book being processed", lambda test: test.pipeline.get_status() == "3_book")
        self.assertEqual(len(self.pipeline._queue), 0)

        # wait until 3_book should have finished
        self.waitUntil(15, "done processing books", lambda test: test.pipeline.get_status() == "Venter")
        self.assertEqual(len(self.pipeline._queue), 0)

    def test_get_main_event(self):
        book = {}

        book["events"] = ["autotriggered", "triggered", "created", "modified", "deleted"]
        self.assertEqual(Pipeline.get_main_event(book), "triggered")

        book["events"] = ["triggered", "autotriggered", "created", "modified", "deleted"]
        self.assertEqual(Pipeline.get_main_event(book), "triggered")

        book["events"] = ["triggered", "created", "modified", "deleted"]
        self.assertEqual(Pipeline.get_main_event(book), "triggered")

        book["events"] = ["created", "modified", "deleted", "autotriggered", "triggered"]
        self.assertEqual(Pipeline.get_main_event(book), "triggered")

        book["events"] = ["created", "modified", "deleted", "triggered", "autotriggered"]
        self.assertEqual(Pipeline.get_main_event(book), "triggered")

        book["events"] = ["triggered", "autotriggered"]
        self.assertEqual(Pipeline.get_main_event(book), "triggered")

        book["events"] = ["autotriggered", "created", "modified", "deleted"]
        self.assertEqual(Pipeline.get_main_event(book), "create_before_delete")

        book["events"] = ["created", "modified", "deleted"]
        self.assertEqual(Pipeline.get_main_event(book), "create_before_delete")

        book["events"] = ["created", "deleted", "modified"]
        self.assertEqual(Pipeline.get_main_event(book), "create_before_delete")

        book["events"] = ["modified", "created", "deleted"]
        self.assertEqual(Pipeline.get_main_event(book), "create_before_delete")

        book["events"] = ["deleted", "created", "modified"]
        self.assertEqual(Pipeline.get_main_event(book), "created")

        book["events"] = ["modified", "deleted", "created"]
        self.assertEqual(Pipeline.get_main_event(book), "created")

        book["events"] = ["deleted", "modified", "created"]
        self.assertEqual(Pipeline.get_main_event(book), "created")

        book["events"] = ["modified", "created"]
        self.assertEqual(Pipeline.get_main_event(book), "created")

        book["events"] = ["created", "modified"]
        self.assertEqual(Pipeline.get_main_event(book), "created")

        book["events"] = ["modified", "deleted"]
        self.assertEqual(Pipeline.get_main_event(book), "deleted")

        book["events"] = ["deleted", "modified"]
        self.assertEqual(Pipeline.get_main_event(book), "deleted")

        book["events"] = ["modified"]
        self.assertEqual(Pipeline.get_main_event(book), "modified")

        book["events"] = ["modified", "modified", "modified"]
        self.assertEqual(Pipeline.get_main_event(book), "modified")

        book["events"] = ["modified", "autotriggered", "modified"]
        self.assertEqual(Pipeline.get_main_event(book), "modified")

        book["events"] = ["autotriggered"]
        self.assertEqual(Pipeline.get_main_event(book), "autotriggered")

        book["events"] = ["autotriggered", "autotriggered"]
        self.assertEqual(Pipeline.get_main_event(book), "autotriggered")
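
The assertions in test_get_main_event pin down the expected priority: "triggered" always wins; a "created" occurring before a "deleted" collapses to "create_before_delete"; otherwise "created" beats "deleted", which beats "modified"; and "autotriggered" is only reported when nothing else happened. The function below is a hypothetical reconstruction derived purely from those assertions; the project's actual Pipeline.get_main_event (a static method) may be implemented differently.

def get_main_event(book):
    # Hypothetical reconstruction of Pipeline.get_main_event, derived only from
    # the assertions in test_get_main_event above.
    events = book.get("events", [])
    if "triggered" in events:
        return "triggered"
    if "created" in events and "deleted" in events:
        # a creation that precedes a deletion is reported as its own event
        if events.index("created") < events.index("deleted"):
            return "create_before_delete"
        return "created"
    if "created" in events:
        return "created"
    if "deleted" in events:
        return "deleted"
    if "modified" in events:
        return "modified"
    if "autotriggered" in events:
        return "autotriggered"
    return None
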
Example #9
    def plot(self, uids, name):
        dot = Digraph(name="Produksjonssystem", format="png")
        dot.graph_attr["bgcolor"] = "transparent"

        node_ranks = {}
        for rank in Directory.dirs_ranked:
            node_ranks[rank["id"]] = []

        # remember edges so that we don't plot them twice
        edges = {}

        for uid in uids:
            pipeline = None
            for p in self.pipelines:
                if p[0].uid == uid:
                    pipeline = p
                    break
            if not pipeline:
                continue

            group_pipeline = pipeline[0].get_current_group_pipeline()

            title = group_pipeline.get_group_title()
            pipeline_id = group_pipeline.get_group_id()  # re.sub(r"[^a-z\d]", "", title.lower())

            queue = group_pipeline.get_queue()

            queue_created = len([book for book in queue if Pipeline.get_main_event(book) == "created"]) if queue else 0
            queue_deleted = len([book for book in queue if Pipeline.get_main_event(book) == "deleted"]) if queue else 0
            queue_modified = len([book for book in queue if Pipeline.get_main_event(book) == "modified"]) if queue else 0
            queue_triggered = len([book for book in queue if Pipeline.get_main_event(book) == "triggered"]) if queue else 0
            queue_autotriggered = len([book for book in queue if Pipeline.get_main_event(book) == "autotriggered"]) if queue else 0
            queue_string = []
            if queue_created:
                queue_string.append("nye:"+str(queue_created))
            if queue_modified:
                queue_string.append("endret:"+str(queue_modified))
            if queue_deleted:
                queue_string.append("slettet:"+str(queue_deleted))
            if queue_triggered:
                queue_string.append("trigget:"+str(queue_triggered))
            if queue_autotriggered:
                queue_string.append("autotrigget:"+str(queue_autotriggered))
            queue_string = ", ".join(queue_string)

            queue_size = 0
            if queue:
                queue_size = len(queue)
                if not group_pipeline.should_handle_autotriggered_books():
                    queue_size -= queue_autotriggered
            book = Metadata.pipeline_book_shortname(group_pipeline)

            relpath_in = None
            netpath_in = ""
            rank_in = None
            if pipeline[0].dir_in:
                for rank in Directory.dirs_ranked:
                    for dir in rank["dirs"]:
                        if os.path.normpath(pipeline[0].dir_in) == os.path.normpath(rank["dirs"][dir]):
                            rank_in = rank["id"]
                            break
            if pipeline[0].dir_in and not pipeline[0].dir_base:
                relpath_in = os.path.basename(os.path.dirname(pipeline[0].dir_in))
            elif pipeline[0].dir_in and pipeline[0].dir_base:
                base_path = Filesystem.get_base_path(pipeline[0].dir_in, pipeline[0].dir_base)
                relpath_in = os.path.relpath(pipeline[0].dir_in, base_path)
                if "master" in pipeline[0].dir_base and pipeline[0].dir_base["master"] == base_path:
                    pass
                else:
                    if pipeline[0].dir_in not in self.buffered_network_paths:
                        smb, file, unc = Filesystem.networkpath(pipeline[0].dir_in)
                        host = Filesystem.get_host_from_url(smb)
                        self.buffered_network_paths[pipeline[0].dir_in] = smb
                        self.buffered_network_hosts[pipeline[0].dir_in] = host
                    netpath_in = self.buffered_network_hosts[pipeline[0].dir_in]
                    if not netpath_in:
                        netpath_in = self.buffered_network_paths[pipeline[0].dir_in]
            book_count_in = self.get_book_count(pipeline[0].dir_in)
            label_in = "< <font point-size='24'>{}</font>{}{} >".format(
                relpath_in,
                "\n<br/><i><font point-size='20'>{} {}</font></i>".format(book_count_in, "bok" if book_count_in == 1 else "bøker"),
                "\n<br/><i><font point-size='20'>{}</font></i>".format(netpath_in.replace("\\", "\\\\")) if netpath_in else "")

            relpath_out = None
            netpath_out = ""
            rank_out = None
            if pipeline[0].dir_out:
                for rank in Directory.dirs_ranked:
                    for dir in rank["dirs"]:
                        if os.path.normpath(pipeline[0].dir_out) == os.path.normpath(rank["dirs"][dir]):
                            rank_out = rank["id"]
                            break
            if pipeline[0].dir_out and not pipeline[0].dir_base:
                relpath_out = os.path.basename(os.path.dirname(pipeline[0].dir_out))
            elif pipeline[0].dir_out and pipeline[0].dir_base:
                base_path = Filesystem.get_base_path(pipeline[0].dir_out, pipeline[0].dir_base)
                relpath_out = os.path.relpath(pipeline[0].dir_out, base_path)
                if "master" in pipeline[0].dir_base and pipeline[0].dir_base["master"] == base_path:
                    pass
                else:
                    if pipeline[0].dir_out not in self.buffered_network_paths:
                        smb, file, unc = Filesystem.networkpath(pipeline[0].dir_out)
                        host = Filesystem.get_host_from_url(smb)
                        self.buffered_network_paths[pipeline[0].dir_out] = unc
                        self.buffered_network_hosts[pipeline[0].dir_out] = host
                    netpath_out = self.buffered_network_hosts[pipeline[0].dir_out]
                    if not netpath_out:
                        netpath_out = self.buffered_network_paths[pipeline[0].dir_out]
            book_count_out = self.get_book_count(pipeline[0].dir_out, pipeline[0].parentdirs)
            label_out = "< <font point-size='24'>{}</font>{}{} >".format(
                relpath_out,
                "\n<br/><i><font point-size='20'>{} {}</font></i>".format(book_count_out, "bok" if book_count_out == 1 else "bøker"),
                "\n<br/><i><font point-size='20'>{}</font></i>".format(netpath_out.replace("\\", "\\\\")) if netpath_out else "")

            if rank_out:
                node_ranks[rank_out].append(pipeline_id)
            elif rank_in:
                next_rank = self.next_rank(rank_in)
                if next_rank:
                    node_ranks[next_rank].append(pipeline_id)
                else:
                    node_ranks[rank_in].append(pipeline_id)

            state = group_pipeline.get_state()
            status = group_pipeline.get_status()
            progress_text = group_pipeline.get_progress()
            pipeline_label = "< <font point-size='26'>{}</font>{} >".format(
                title,
                "".join(["\n<br/><i><font point-size='22'>{}</font></i>".format(val) for val in [queue_string, progress_text, status] if val]))

            fillcolor = "lightskyblue1"
            if book or queue_size:
                fillcolor = "lightslateblue"
            elif state == "considering":
                fillcolor = "lightskyblue3"
            elif not group_pipeline.running:
                fillcolor = "white"
            elif isinstance(group_pipeline, DummyPipeline):
                fillcolor = "snow"
            dot.attr("node", shape="box", style="filled", fillcolor=fillcolor)
            dot.node(pipeline_id, pipeline_label.replace("\\", "\\\\"))

            if relpath_in:
                fillcolor = "wheat"
                if not pipeline[0].dir_in_obj or not pipeline[0].dir_in_obj.is_available():
                    fillcolor = "white"
                dot.attr("node", shape="folder", style="filled", fillcolor=fillcolor)
                dot.node(pipeline[1], label_in)
                if pipeline[1] not in edges:
                    edges[pipeline[1]] = []
                if pipeline_id not in edges[pipeline[1]]:
                    edges[pipeline[1]].append(pipeline_id)
                    dot.edge(pipeline[1], pipeline_id)
                node_ranks[rank_in].append(pipeline[1])

            if relpath_out:
                fillcolor = "wheat"
                if not pipeline[0].dir_out_obj or not pipeline[0].dir_out_obj.is_available():
                    fillcolor = "white"
                dot.attr("node", shape="folder", style="filled", fillcolor=fillcolor)
                dot.node(pipeline[2], label_out)
                if pipeline_id not in edges:
                    edges[pipeline_id] = []
                if pipeline[2] not in edges[pipeline_id]:
                    edges[pipeline_id].append(pipeline[2])
                    dot.edge(pipeline_id, pipeline[2])
                node_ranks[rank_out].append(pipeline[2])

        for rank in node_ranks:
            subgraph = Digraph("cluster_" + rank, graph_attr={"style": "dotted"})
            subgraph.graph_attr["bgcolor"] = "#FFFFFFAA"

            if node_ranks[rank]:
                subgraph.attr("node", shape="none", style="filled", fillcolor="transparent")
                subgraph.node("_ranklabel_" + rank, "< <i><font point-size='28'>{}</font></i> >".format(" <br/>".join(str(self.rank_name(rank)).split(" "))))

            for dir in node_ranks[rank]:
                subgraph.node(dir)

            dot.subgraph(subgraph)

        dot.render(os.path.join(self.report_dir, name + "_"))

        # there seems to be some race condition when doing this across a mounted network drive,
        # so if we get an exception we retry a few times and hope that it works.
        # see: https://github.com/nlbdev/produksjonssystem/issues/81
        for t in reversed(range(10)):
            try:
                shutil.copyfile(os.path.join(self.report_dir, name + "_.png"), os.path.join(self.report_dir, name + ".png"))
                with open(os.path.join(self.report_dir, name + ".js"), "w") as javascript_file:
                    javascript_file.write("setTime(\"{}\");".format(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")))
                break
            except Exception as e:
                logging.debug(" Unable to copy plot image: {}".format(os.path.join(self.report_dir, name + "_.png")))
                time.sleep(0.5)
                if t == 0:
                    raise e

        dashboard_file = os.path.join(self.report_dir, name + ".html")
        if not os.path.isfile(dashboard_file):
            dashboard_template = os.path.normpath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../dashboard.html'))
            if not os.path.exists(self.report_dir):
                os.makedirs(self.report_dir)
            shutil.copyfile(dashboard_template, dashboard_file)
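
The plot method relies on the graphviz Python package: it declares nodes and edges on a Digraph, groups directory nodes into cluster_* subgraphs, and calls render(), which with format="png" writes both the DOT source and a .png next to it (hence the name + "_.png" file that is copied afterwards). A stripped-down sketch of that core pattern, with hypothetical node names and output path:

from graphviz import Digraph

dot = Digraph(name="Produksjonssystem", format="png")
dot.graph_attr["bgcolor"] = "transparent"

# a pipeline node plus its input and output directories
dot.attr("node", shape="box", style="filled", fillcolor="lightskyblue1")
dot.node("pipeline", "Example pipeline")
dot.attr("node", shape="folder", style="filled", fillcolor="wheat")
dot.node("dir_in", "in")
dot.node("dir_out", "out")
dot.edge("dir_in", "pipeline")
dot.edge("pipeline", "dir_out")

# one dotted cluster per rank, as in the loop over node_ranks above
cluster = Digraph("cluster_master", graph_attr={"style": "dotted"})
cluster.node("dir_in")
cluster.node("dir_out")
dot.subgraph(cluster)

dot.render("/tmp/example_")  # writes /tmp/example_ (DOT source) and /tmp/example_.png
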