def make_single_graph(dest_dir, fp, all_info, conf):
	"""Build the include graph reachable from *fp* and render it to a PNG.

	Performs a breadth-first walk over the files included (directly or
	transitively) by *fp*, bounded by conf.FILE_GRAPH_MAX_DIAMETER, then
	writes a .dot file under '<dest_dir>/.cache' and the rendered PNG under
	*dest_dir* via graph_to_png.

	Args:
		dest_dir: destination root directory for the generated files.
		fp: file-info object for the source file; provides the name
			accessors used below (get_abs_name, get_rel_dot_name, ...).
		all_info: mapping {absolute file name: file-info object} with
			get_included_files() and get_rel_name().
		conf: configuration object; FILE_GRAPH_MAX_DIAMETER <= 0 disables
			the distance bound.
	"""
	FGMD = conf.FILE_GRAPH_MAX_DIAMETER
	# dictionary: {node : set of neighbours}, keyed by *relative* name
	graph = {}
	# distance from the source to the node, keyed by *absolute* name
	dists = {fp.get_abs_name() : 0}
	
	found_files = set([])
	# BFS 'queue' of absolute file names still to be expanded
	list_files = [fp.get_abs_name()]
	while len(list_files) > 0:
		abs_name = list_files[0]
		del list_files[0]
		
		# distance from the source to this node
		D = dists[abs_name]
		
		neigh_set = set([])
		inc_files = all_info[abs_name].get_included_files()
		for f in inc_files:
			
			# distance from the source to a neighbour of node
			# (keep the shortest distance seen so far)
			if f in dists:
				dists[f] = min(D + 1, dists[f])
			else:
				dists[f] = D + 1
			
			# if that distance is greater than the allowed,
			# do not add more vertices to graph
			if FGMD <= 0 or dists[f] <= FGMD:
				
				neigh_set.add( all_info[f].get_rel_name() )
				if f not in found_files:
					# this file has yet to be processed: add to 'queue'
					list_files.append(f)
					found_files.add(f)
		
		# merge neighbour sets under the node's relative name (several
		# absolute names may share one relative name)
		rel_name = all_info[abs_name].get_rel_name()
		if rel_name not in graph:
			graph[rel_name] = neigh_set
		else:
			graph[rel_name].update(neigh_set)
	
	top_rel_path = fp.get_rel_path()  # NOTE(review): unused below
	my_short_name_pl = fp.get_short_name()  # NOTE(review): unused below
	my_rel_name_dot = fp.get_rel_dot_name()
	my_rel_name_png = fp.get_rel_png_name()
	
	# the .dot source is kept in a cache subdirectory...
	dot_abs_name = join(dest_dir, '.cache', my_rel_name_dot)
	dot_abs_name = utils.resolve_path(dot_abs_name)
	
	# ...while the PNG is written directly under dest_dir
	png_abs_name = join(dest_dir, my_rel_name_png)
	png_abs_name = utils.resolve_path(png_abs_name)
	
	graph_to_png(graph, dot_abs_name, png_abs_name, conf)
Example #2
0
def get_logger():
    """Return the shared mongoctl logger, creating and configuring it once."""
    global logger, _logging_level

    if logger:
        return logger

    logger = logging.getLogger("MongoctlLogger")

    # place the rotating log file under the configured conf root
    log_dir = utils.resolve_path(
        os.path.join(mongoctl_globals.DEFAULT_CONF_ROOT, LOG_DIR))
    utils.ensure_dir(log_dir)

    logger.setLevel(logging.DEBUG)

    file_handler = TimedRotatingFileHandler(
        os.path.join(log_dir, "mongoctl.log"),
        backupCount=50,
        when="midnight")
    file_handler.setFormatter(
        logging.Formatter("%(levelname)8s | %(asctime)s | %(message)s"))
    file_handler.setLevel(logging.DEBUG)
    # the file handler goes on the root logger so all records propagate to it
    logging.getLogger().addHandler(file_handler)

    global _log_to_stdout
    if _log_to_stdout:
        stdout_handler = logging.StreamHandler(sys.stdout)
        stdout_handler.setFormatter(logging.Formatter("%(message)s"))
        stdout_handler.setLevel(_logging_level)
        logging.getLogger().addHandler(stdout_handler)

    return logger
Example #3
0
def simple_file_logger(name, log_file_name):
    """Return a named, non-propagating logger writing to a rotating file.

    The logger is configured only on its first request; later calls return
    it untouched.
    """
    lgr = logging.getLogger(name)
    if lgr.handlers:
        # already configured on a previous call
        return lgr

    lgr.propagate = False
    lgr.setLevel(logging.INFO)

    log_dir = resolve_path(mbs_config.MBS_LOG_PATH)
    ensure_dir(log_dir)

    fmt = logging.Formatter("%(levelname)8s | %(asctime)s | %(message)s")

    file_handler = TimedRotatingFileHandler(
        os.path.join(log_dir, log_file_name),
        backupCount=10,
        when="midnight")
    file_handler.setFormatter(fmt)
    lgr.addHandler(file_handler)

    if LOG_TO_STDOUT:
        console = logging.StreamHandler(sys.stdout)
        console.setFormatter(fmt)
        lgr.addHandler(console)

    return lgr
Example #4
0
def add_sim_modules_to_project(tags, sim_dict, user_paths):
    """Copy a simulation module and its auxiliary files into the project tree.

    Args:
        tags: project tag dictionary; "BASE_DIR" is read.
        sim_dict: simulation description with "name" and "aux_files" keys.
        user_paths: extra search paths handed to the utils lookups.
    """
    # Destination directory for the sim modules
    base_dir = utils.resolve_path(tags["BASE_DIR"])
    out_dir = os.path.join(base_dir, "sim", "sim_modules")
    if not os.path.exists(out_dir):
        utils.create_dir(out_dir)

    # Locate the main module and copy it into place
    module_filename = utils.find_module_filename(sim_dict["name"], user_paths)
    module_filepath = utils.find_rtl_file_location(module_filename, user_paths)

    out_file_path = os.path.join(out_dir, module_filename)
    # BUGFIX: the destination used to be joined with out_dir a second time
    # (os.path.join(out_dir, out_file_path)); use the composed path directly.
    shutil.copy2(module_filepath, out_file_path)

    # Copy each auxiliary file next to the module
    for f in sim_dict["aux_files"]:
        # BUGFIX: pass user_paths so auxiliary files are searched in the
        # same locations as the main module (see the lookup above).
        module_path = utils.find_rtl_file_location(f, user_paths)
        shutil.copy2(module_path, os.path.join(out_dir, f))
def simple_file_logger(name, log_file_name):
    """Create (or fetch) a non-propagating logger backed by a rotating file."""
    result = logging.getLogger(name)

    # a logger that already carries handlers was set up earlier; reuse it
    if result.handlers:
        return result

    result.propagate = False

    target_dir = resolve_path(mbs_config.MBS_LOG_PATH)
    ensure_dir(target_dir)

    result.setLevel(logging.INFO)

    line_format = logging.Formatter(
        "%(levelname)8s | %(asctime)s | %(message)s")

    rotating = TimedRotatingFileHandler(
        os.path.join(target_dir, log_file_name),
        backupCount=10,
        when="midnight")
    rotating.setFormatter(line_format)
    result.addHandler(rotating)

    if LOG_TO_STDOUT:
        to_console = logging.StreamHandler(sys.stdout)
        to_console.setFormatter(line_format)
        result.addHandler(to_console)

    return result
Example #6
0
  def write_file(self, location = "", filename=""):
    """write_file

    Search through the specified location, if the location doesn't exist then
    create the location.
    then write out the specified file

    Args:
      location: the location where the file is to be written
      filename: the name of the output file to write

    Returns:
      Nothing

    Raises:
      IOError
    """
    # removed an unused local ('home') that served no purpose
    location = utils.resolve_path(location)
    if not os.path.exists(location):
      utils.create_dir(location)
    fname = os.path.join(location, filename)
    # 'with' guarantees the handle is closed even if the write raises
    with open(fname, "w") as fileout:
      fileout.write(self.buf)
    return
Example #7
0
def get_logger():
    """Lazily build the module-wide mongoctl logger and return it."""
    global logger, _logging_level

    if logger:
        return logger

    logger = logging.getLogger("MongoctlLogger")
    logger.setLevel(logging.DEBUG)

    conf_dir = mongoctl_globals.DEFAULT_CONF_ROOT
    log_dir = utils.resolve_path(os.path.join(conf_dir, LOG_DIR))
    utils.ensure_dir(log_dir)

    fh = TimedRotatingFileHandler(os.path.join(log_dir, "mongoctl.log"),
                                  backupCount=50,
                                  when="midnight")
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(
        logging.Formatter("%(levelname)8s | %(asctime)s | %(message)s"))
    # attach the file handler to the root logger so child records reach it
    logging.getLogger().addHandler(fh)

    global _log_to_stdout
    if _log_to_stdout:
        sh = logging.StreamHandler(sys.stdout)
        sh.setLevel(_logging_level)
        sh.setFormatter(logging.Formatter("%(message)s"))
        logging.getLogger().addHandler(sh)

    return logger
Example #8
0
def set_css_from_file(sheet):
    """Load a CSS file and install it screen-wide at application priority."""
    pathname = utils.resolve_path(sheet)
    provider = Gtk.CssProvider()
    # read the raw bytes and feed them to the provider
    with open(pathname, mode='rb') as css_file:
        provider.load_from_data(css_file.read())
    Gtk.StyleContext.add_provider_for_screen(
        Gdk.Screen.get_default(),
        provider,
        Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION)
    def get_log_path(self):
        """Return the full path of this task's log file."""
        task_kind = self._task.type_name.lower()
        # logs are grouped by task type, e.g. ".../backups/backup-<id>.log"
        log_dir = resolve_path(
            os.path.join(mbs_config.MBS_LOG_PATH, task_kind + "s"))
        log_file_name = "%s-%s.log" % (task_kind, str(self._task.id))
        return os.path.join(log_dir, log_file_name)
Example #10
0
def get_bounded_mask(image: np.ndarray):
    """Predict a segmentation mask for *image* and crop to its largest contour.

    Args:
        image: input image (anything read_image accepts; annotation fixed
            from ``np.array`` — a factory function — to the type ``np.ndarray``).

    Returns:
        The crop of the original-size mask around its largest contour.
    """
    image = read_image(image)
    # NOTE(review): model path and input size are hard-coded — confirm they
    # match the deployed checkpoint
    pred = Predictor(image, resolve_path('../best_model_2.pth'), (320, 320))
    mask = pred.predict_mask()
    mask = pred.rescale_mask(mask)
    mask = pred.mask_original(mask)
    output = pred.largest_countour_crop(mask)
    return output
Example #11
0
    def __init__(self, config):
        """Wire up the backup-system context from *config*.

        Heavy collaborators (db collections, engines, backup system, API
        server/client) are declared here and created lazily elsewhere.

        Args:
            config: dict-like configuration; optional keys are read via
                ``config.get(...)``.
        """
        # init config and database
        self._config = config
        self._mbs_db_client = None

        self._type_bindings = self._get_type_bindings()

        # make the maker
        self._maker =  Maker(type_bindings=self._type_bindings)

        #  notifications
        self._notifications = None

        # init object collections
        self._backup_collection = None
        self._plan_collection = None
        self._deleted_plan_collection = None
        self._audit_collection = None
        self._restore_collection = None

        # load backup system/engines lazily
        self._backup_system = None
        self._api_server = None

        # listens for backup events coming through rest
        self._backup_event_listener = None

        self._api_client = None


        self._engines = None

        # init the encryptor
        self._encryptor = self._get_encryptor()

        # builders/assistants created on demand; temp dir resolved up front
        self._backup_source_builder = None
        self._default_backup_assistant = None
        self._temp_dir = resolve_path(DEFAULT_BACKUP_TEMP_DIR_ROOT)

        # NOTE(review): 'colllection' (triple l) is a typo, kept as-is since
        # other code may reference this attribute name
        self._event_colllection = None
        self._event_listener_collection = None
        self._event_queue = None

        # allow boto debug to be configuable
        if config.get("enableBotoDebug"):
            import boto
            boto.set_stream_logger('boto')

        # read log path
        if config.get("logPath"):
            import mbs_config
            mbs_config.MBS_LOG_PATH = config.get("logPath")
Example #12
0
    def __init__(self, config):
        """Initialize the backup-system context from *config*.

        Collaborators (db collections, engines, backup system, API
        server/client) are declared here and created lazily elsewhere.

        Args:
            config: dict-like configuration; optional keys are read via
                ``config.get(...)``.
        """
        # init config and database
        self._config = config
        self._mbs_db_client = None

        self._type_bindings = self._get_type_bindings()

        # make the maker
        self._maker = Maker(type_bindings=self._type_bindings)

        #  notifications
        self._notifications = None

        # init object collections
        self._backup_collection = None
        self._plan_collection = None
        self._deleted_plan_collection = None
        self._audit_collection = None
        self._restore_collection = None

        # load backup system/engines lazily
        self._backup_system = None
        self._api_server = None

        # listens for backup events coming through rest
        self._backup_event_listener = None

        self._api_client = None

        self._engines = None

        # init the encryptor
        self._encryptor = self._get_encryptor()

        # builders/assistants created on demand; temp dir resolved up front
        self._backup_source_builder = None
        self._default_backup_assistant = None
        self._temp_dir = resolve_path(DEFAULT_BACKUP_TEMP_DIR_ROOT)

        # NOTE(review): 'colllection' (triple l) is a typo, kept as-is since
        # other code may reference this attribute name
        self._event_colllection = None
        self._event_listener_collection = None
        self._event_queue = None

        # allow boto debug to be configuable
        if config.get("enableBotoDebug"):
            import boto
            boto.set_stream_logger('boto')

        # read log path
        if config.get("logPath"):
            import mbs_config
            mbs_config.MBS_LOG_PATH = config.get("logPath")
Example #13
0
 def set_docproxy(self, docproxy):
     """Switch this view to *docproxy*, managing refcounts and repainting."""
     if self.docproxy is docproxy:
         return
     # drop our reference on the previous proxy before taking the new one
     if self.docproxy:
         self.docproxy.release()
     docproxy.obtain()
     self.docproxy = docproxy
     self._docre.docproxy = docproxy
     # fall back to the transparent background when the document has no fill
     background = docproxy.document.fill or resolve_path(
         main.Gribouillis.TRANSPARENT_BACKGROUND)
     self.set_background(background)
     self.width = self.height = None
     self.repaint_doc()
Example #14
0
        def new_entry(label, idx):
            """Add one toolswheel row: two icon popups, a binding string and
            a command poplist, all appended to the enclosing column group.

            Args:
                label: row label shown in the column group.
                idx: toolswheel slot index; the second icon bank uses 8 + idx.
            """
            icons_path = resolve_path(prefs['view-icons-path'])

            def get_icons(off):
                # 4-column grid of every context icon; pressing one notifies
                # _on_popup_icon_sel with the slot (idx) and bank offset (off)
                icons = pymui.ColGroup(4, Frame='Group')
                for name in sorted(contexts.ICONS.keys()):
                    obj = pymui.Dtpic(Name=os.path.join(
                        icons_path, name + '.png'),
                                      InputMode='RelVerify',
                                      LightenOnMouse=True)
                    icons.AddChild(obj)
                    obj.Notify('Pressed',
                               self._on_popup_icon_sel,
                               when=False,
                               name=name,
                               idx=idx,
                               off=off)
                return icons

            colg.AddChild(pymui.Label(label + ':'))

            # first icon bank (offset 0)
            icons_names = prefs['view-icons-names']
            bt = pymui.Dtpic(Name=os.path.join(icons_path,
                                               icons_names[idx] + '.png'),
                             Frame='ImageButton',
                             InputMode='RelVerify')
            popup = pymui.Popobject(Button=bt, Object=get_icons(0), Light=True)
            popup.name = icons_names[idx]
            colg.AddChild(popup)
            self._popup[idx] = popup

            # second icon bank (offset 8)
            bt = pymui.Dtpic(Name=os.path.join(icons_path,
                                               icons_names[8 + idx] + '.png'),
                             Frame='ImageButton',
                             InputMode='RelVerify')
            popup = pymui.Popobject(Button=bt, Object=get_icons(8), Light=True)
            popup.name = icons_names[8 + idx]
            colg.AddChild(popup)
            self._popup[8 + idx] = popup

            # string gadget holding the command bound to this slot
            string = pymui.String(
                Frame='String',
                CycleChain=True,
                Contents=prefs['view-toolswheel-binding'][idx] or '',
                ShortHelp=_T("Command to execute when tool region selected"))
            self._toolswheel_strings[idx] = string

            # popup list of all known commands feeding the string gadget
            popup = pymui.Poplist(Array=all_commands,
                                  String=string,
                                  Button=pymui.Image(Frame='ImageButton',
                                                     Spec=pymui.MUII_PopUp,
                                                     InputMode='RelVerify'))
            colg.AddChild(popup)
Example #15
0
 def __init__(self, dv=None):
     """Set up the double-buffered preview area and its drawing surface."""
     super(BrushPreview, self).__init__(InnerSpacing=(0,)*4,
                                        Frame='Virtual',
                                        DoubleBuffer=True)
     # off-screen ARGB surface the preview stroke is rendered into
     self._surface = BoundedPlainSurface(_pixbuf.FORMAT_ARGB8,
                                         BrushPreview.WIDTH,
                                         BrushPreview.HEIGHT)
     brush = DrawableBrush()
     brush.rgb = (0, 0, 0)
     self._brush = brush
     self._states = list(brush.gen_preview_states(BrushPreview.WIDTH,
                                                  BrushPreview.HEIGHT))
     self.Background = "5:" + utils.resolve_path(
         main.Gribouillis.TRANSPARENT_BACKGROUND)
Example #16
0
def compose_dl_ep_cmd(pl, ep):
    """Build the youtube-dl argv that downloads episode *ep* of playlist *pl*."""
    # destination template, then expanded with playlist metadata
    destination = "%s/%s" % (CONF["destination"], ep["filename"])
    destination = destination % {
        "playlist": pl["title"],
        "playlist_id": pl["id"],
        "playlist_title": pl["title"],
    }

    cmd = ["youtube-dl"]
    cmd += ["--output", resolve_path(destination)]
    cmd += ["--format", CONF["options"]["format"]]
    cmd.append("https://www.youtube.com/watch?v=%s" % ep["id"])
    return cmd
Example #17
0
def get_matching_files(dirname, patterns, rec):
    """Collect absolute names of files under *dirname* whose extension is
    one of *patterns*; descend into subdirectories when *rec* is true.
    """
    matched = []
    subdirs = []

    for entry in listdir(dirname):
        resolved = utils.resolve_path(join(dirname, entry))
        if isfile(resolved):
            # keep the file when its extension is a wanted pattern
            if splitext(entry)[1] in patterns:
                matched.append(resolved)
        else:
            subdirs.append(entry)

    if rec:
        for sub in subdirs:
            sub_path = utils.resolve_path(join(dirname, sub))
            matched += get_matching_files(sub_path, patterns, rec)

    return matched
Example #18
0
    def _init_toolsweel(self):
        """Refresh toolswheel bindings and icon images from current prefs."""
        icons_path = resolve_path(prefs['view-icons-path'])

        for idx, command in enumerate(prefs['view-toolswheel-binding']):
            self._toolswheel_strings[idx].Contents = command

        names = prefs['view-icons-names']
        # first bank of eight popups
        for idx, name in enumerate(names[:8]):
            popup = self._popup[idx]
            popup.name = name
            popup.Button.object.Name = os.path.join(icons_path, name + '.png')

        # second bank, offset by eight
        for idx, name in enumerate(names[8:]):
            popup = self._popup[8 + idx]
            popup.name = name
            popup.Button.object.Name = os.path.join(icons_path, name + '.png')
Example #19
0
    def change_image(self, image_id, pathimg, width=64, height=64):
        """
        Set the source of a GtkImage identified by its builder id.

        :param image_id: id of a GtkImage
        :type image_id: str
        :param pathimg: absolute or relative path of the image source
        :type pathimg: str
        :param width: width in pixels of the image, 64 by default
        :type width: int
        :param height: height in pixels of the image, 64 by default
        :type height: int
        """
        # load the file already scaled to the requested size, then swap it in
        image = Pixbuf.new_from_file_at_size(utils.resolve_path(pathimg),
                                             width=width,
                                             height=height)
        self.get_object(image_id).set_from_pixbuf(image)
Example #20
0
def setup_logging(log_to_stdout=False, log_file_name=None):
    """Configure the root logger: stdout when requested, rotating file otherwise."""
    global LOG_TO_STDOUT
    LOG_TO_STDOUT = log_to_stdout

    root_logger.setLevel(logging.INFO)
    formatter = logging.Formatter("%(levelname)8s | %(asctime)s | %(message)s")

    if LOG_TO_STDOUT:
        handler = logging.StreamHandler(sys.stdout)
    else:
        log_dir = resolve_path(mbs_config.MBS_LOG_PATH)
        ensure_dir(log_dir)
        # default file name when none was supplied
        handler = TimedRotatingFileHandler(
            os.path.join(log_dir, log_file_name or "mbs.log"),
            backupCount=50,
            when="midnight")

    handler.setFormatter(formatter)
    root_logger.addHandler(handler)
def setup_logging(log_to_stdout=False, log_file_name=None):
    """Install a single handler on the root logger (stdout or rotating file)."""
    global LOG_TO_STDOUT
    LOG_TO_STDOUT = log_to_stdout

    root_logger.setLevel(logging.INFO)

    fmt = logging.Formatter("%(levelname)8s | %(asctime)s | %(message)s")
    if LOG_TO_STDOUT:
        stream_handler = logging.StreamHandler(sys.stdout)
        stream_handler.setFormatter(fmt)
        root_logger.addHandler(stream_handler)
        return

    # file mode: default the name, ensure the directory, rotate at midnight
    log_file_name = log_file_name or "mbs.log"
    log_dir = resolve_path(mbs_config.MBS_LOG_PATH)
    ensure_dir(log_dir)
    file_handler = TimedRotatingFileHandler(
        os.path.join(log_dir, log_file_name),
        backupCount=50,
        when="midnight")
    file_handler.setFormatter(fmt)
    root_logger.addHandler(file_handler)
Example #22
0
    def __init__(self, root, docproxy=None, rulers=None):
        """Create the viewport widget and its render/view plumbing.

        Args:
            root: application root object.
            docproxy: optional document proxy; when given, set_docproxy()
                is called at the end of construction.
            rulers: optional (horizontal, vertical) ruler pair; when absent,
                the ruler callbacks become no-ops.
        """
        super(DocViewport, self).__init__(InnerSpacing=0,
                                          FillArea=False,
                                          DoubleBuffer=False)

        self.device = InputDevice()
        self.root = root
        self._ev = pymui.EventHandler()

        # Viewports and Renders (OpenGL render when available)
        if self.opengl:
            self._docre = DocumentOpenGLRender()
        else:
            self._docre = view.DocumentCairoRender()
        self._docvp = view.ViewPort(self._docre)
        self._toolsre = view.ToolsCairoRender()
        self._toolsvp = view.ViewPort(self._toolsre)
        self._curvp = tools.Cursor()

        # Tools
        self.line_guide = tools.LineGuide(300, 200)
        self.ellipse_guide = tools.EllipseGuide(300, 200)

        # Aliases to the renderers' methods
        self.get_view_area = self._docvp.get_view_area
        self.enable_fast_filter = self._docre.enable_fast_filter
        self.get_handler_at_pos = self._toolsre.get_handler_at_pos

        self.set_background(
            resolve_path(main.Gribouillis.TRANSPARENT_BACKGROUND))

        if rulers:
            self._hruler, self._vruler = rulers
        else:
            # no rulers attached: replace the ruler callbacks with no-ops
            self._do_rulers = utils.idle_cb
            self._update_rulers = utils.idle_cb

        if docproxy is not None:
            self.set_docproxy(docproxy)
Example #23
0
    def __init__(self, id=None, max_workers=10,
                       temp_dir=None,
                       command_port=8888):
        """Initialize the engine thread and its two task processors.

        Args:
            id: engine identifier (NOTE(review): shadows the builtin ``id``).
            max_workers: worker cap shared by both processors.
            temp_dir: working dir; defaults to DEFAULT_BACKUP_TEMP_DIR_ROOT.
            command_port: TCP port for the embedded command server.
        """
        Thread.__init__(self)
        self._id = id
        self._engine_guid = None
        self._max_workers = int(max_workers)
        self._temp_dir = resolve_path(temp_dir or DEFAULT_BACKUP_TEMP_DIR_ROOT)
        self._command_port = command_port
        self._command_server = EngineCommandServer(self)
        self._tags = None
        self._stopped = False

        # create the backup processor
        bc = get_mbs().backup_collection
        self._backup_processor = TaskQueueProcessor("Backups", bc, self,
                                                    self._max_workers)

        # create the restore processor
        rc = get_mbs().restore_collection
        self._restore_processor = TaskQueueProcessor("Restores", rc, self,
                                                     self._max_workers)
Example #24
0
    def __init__(self,
                 id=None,
                 max_workers=10,
                 temp_dir=None,
                 command_port=8888):
        """Initialize the engine thread and its two task processors.

        Args:
            id: engine identifier (NOTE(review): shadows the builtin ``id``).
            max_workers: worker cap shared by both processors.
            temp_dir: working dir; defaults to DEFAULT_BACKUP_TEMP_DIR_ROOT.
            command_port: TCP port for the embedded command server.
        """
        Thread.__init__(self)
        self._id = id
        self._engine_guid = None
        self._max_workers = int(max_workers)
        self._temp_dir = resolve_path(temp_dir or DEFAULT_BACKUP_TEMP_DIR_ROOT)
        self._command_port = command_port
        self._command_server = EngineCommandServer(self)
        self._tags = None
        self._stopped = False

        # create the backup processor
        bc = get_mbs().backup_collection
        self._backup_processor = TaskQueueProcessor("Backups", bc, self,
                                                    self._max_workers)

        # create the restore processor
        rc = get_mbs().restore_collection
        self._restore_processor = TaskQueueProcessor("Restores", rc, self,
                                                     self._max_workers)
Example #25
0
def get_output_dir(config_filename, debug = False):
    """Return the resolved BASE_DIR of the project described by *config_filename*.

    When the configuration carries no BASE_DIR, it defaults to
    <cwd>/<PROJECT_NAME> and the tag dict is updated accordingly.
    """
    tags = get_project_tags(config_filename)
    if "BASE_DIR" in tags:
        base = tags["BASE_DIR"]
    else:
        base = os.path.join(os.getcwd(), tags["PROJECT_NAME"])
        tags["BASE_DIR"] = base
    return utils.resolve_path(base)
Example #26
0
    def generate_project(self, config_filename, output_directory = None):
        """Generate the folders and files for the project

        Using the project tags and template tags this function generates all
        the directories and files of the project. It will go through the template
        structure and determine what files need to be added and call either
        a generation script (in the case of \"top.v\") or simply copy the file
        over (in the case of a peripheral or memory module.

        Args:
          config_filename: name of the JSON configuration file
            output_directory: Path to override default output directory

        Return:
          True: Success
          False: Failure

        Raises:
          TypeError
          IOError
          SapError
        """
        status = self.s
        if status: status.Debug("Openning site manager")

        sm = site_manager.SiteManager()
        path_dicts = sm.get_paths_dict()

        self.read_config_file(config_filename)
        path_dict = sm.get_paths_dict()
        if output_directory is not None:
            self.project_tags["BASE_DIR"] = output_directory

        board_dict = utils.get_board_config(self.project_tags["board"], debug = False)
        cfiles = []
        cpaths = []

        self.user_paths.extend(utils.get_local_verilog_paths())
        self.user_paths = list(set(self.user_paths))
        if "paths" in board_dict:
            self.user_paths.extend(board_dict["paths"])
            self.user_paths = list(set(self.user_paths))

        if "parent_board" in board_dict:
            self.user_paths.extend(get_parent_board_paths(board_dict))
            self.user_paths = list(set(self.user_paths))

        if self.s: self.s.Verbose("Paths:")
        for p in self.user_paths:
            if self.s: self.s.Verbose("\t%s" %p)

        # Go through the board dict and see if there is anything that needs to be
        # incorporated into the project tags
        for key in board_dict:
            if key not in self.project_tags:
                self.project_tags[key] = board_dict[key]
            elif isinstance(self.project_tags[key], OrderedDict):
                for k in board_dict[key]:
                    self.project_tags[key][k] = board_dict[key][k]
            elif isinstance(self.project_tags[key], list):
                self.project_tags[key].extend(board_dict[key])
            elif isinstance(self.project_tags[key], dict):
                for k in board_dict[key]:
                    self.project_tags[key][k] = board_dict[key][k]

        self.filegen = ModuleProcessor(user_paths = self.user_paths)

        pt = self.project_tags
        if "constraint_files" not in pt.keys():
            pt["constraint_files"] = []

        cfiles = pt["constraint_files"]
        for c in cfiles:
            board = self.project_tags["board"]
            try:
                cpaths.append(utils.get_constraint_file_path(board, c))
            except IBuilderError as e:
                if self.s: self.s.Verbose("Could not find constraint: %s in default board searching parent board..." % c)
                #path = os.path.join(utils.get_board_directory(board), board)
                board_dict = utils.get_board_config(board)

                mname = "default.json"
                if "default_project" in board_dict:
                    mname = board_dict["default_project"]

                if "parent_board" in board_dict:
                    for parent in board_dict["parent_board"]:
                        if self.s: self.s.Verbose("\tsearching: %s @ %s..." % (parent, utils.get_board_directory(parent)))
                        filepath = utils.get_constraint_file_path(parent, c)
                        if filepath is None:
                            if self.s: self.s.Verbose("Did not file file: %s in parent board" % (c))
                        else:
                            if self.s: self.s.Verbose("Found file, located at: %s" % filepath)
                            cpaths.append(utils.get_constraint_file_path(parent, c, debug = True))

        #if the user didn't specify any constraint files
        #load the default
        if len(cfiles) == 0:
            if status: status.Debug("loading default constraints for: %s" % board_dict["board_name"])
            cfiles = board_dict["default_constraint_files"]
            for c in cfiles:
                cpaths.append(utils.get_constraint_file_path(self.project_tags["board"], c))
                #cpaths.append(utils.get_constraint_file_path(c))


        #extrapolate the bus template
        clock_rate = ""
        if "clockrate" in board_dict:
            if self.s: self.s.Info("User Specified a clockrate of: %d" % board_dict["clockrate"])
            clock_rate = str(board_dict["clockrate"])
        if len(clock_rate) == 0:
            for c in cpaths:
                clock_rate = utils.read_clock_rate(c)
                if len(clock_rate) > 0:
                    #First place I can find the clock rate drop out
                    break

        if len (clock_rate) == 0:
            if self.s: self.s.Fatal("Unable to find the clock rate in any of the constraint"
                      "files: %s" % str(cpaths))
            raise PGE("Unable to find the clock rate in any of the constraint"
                      "files: %s" % str(cpaths))

        #self.project_tags["CLOCK_RATE"] = utils.read_clock_rate(cpaths[0])
        self.project_tags["CLOCK_RATE"] = clock_rate
        self.read_template_file(self.project_tags["TEMPLATE"])

        #set all the tags within the filegen structure
        if status: status.Verbose("set all tags wihin filegen structure")
        self.filegen.set_tags(self.project_tags)

        #generate the project directories and files
        utils.create_dir(self.project_tags["BASE_DIR"])
        if status: status.Verbose("generated project base direcotry: %s" %
            utils.resolve_path(self.project_tags["BASE_DIR"]))

        #generate the arbiter tags, this is important because the top
        #needs the arbiter tags
        arb_tags = arbiter.generate_arbiter_tags(self.project_tags, False)
        self.project_tags["ARBITERS"] = arb_tags

        #print "Parent dir: " + self.project_tags["BASE_DIR"]
        for key in self.template_tags["PROJECT_TEMPLATE"]["files"]:
            self.recursive_structure_generator(
                    self.template_tags["PROJECT_TEMPLATE"]["files"],
                    key,
                    self.project_tags["BASE_DIR"])

        if status: status.Verbose("finished generating project directories")

        if arbiter.is_arbiter_required(self.project_tags):
            if status: status.Verbose("generate the arbiters")
        self.generate_arbiters()

        #Generate all the slaves
        for slave in self.project_tags["SLAVES"]:
            fdict = {"location":""}
            file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "bus", "slave")
            #file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/slave"
            fn = self.project_tags["SLAVES"][slave]["filename"]
            try:
                #self.filegen.process_file(filename = fn, file_dict = fdict, directory=file_dest, debug = True)
                self.filegen.process_file(filename = fn, file_dict = fdict, directory=file_dest, debug = False)
            except ModuleFactoryError as err:
                if status: status.Error("ModuleFactoryError while generating slave: %s" % str(err))
                raise ModuleFactoryError(err)

            slave_dir = os.path.split(utils.find_rtl_file_location(fn, self.user_paths))[0]
            if "constraint_files" in self.project_tags["SLAVES"][slave]:
                temp_paths = self.user_paths
                temp_paths.append(slave_dir)

                for c in self.project_tags["SLAVES"][slave]["constraint_files"]:
                    file_location = utils.get_constraint_file_path(self.project_tags["board"], c, temp_paths)
                    dest_path = utils.resolve_path(self.project_tags["BASE_DIR"])
                    shutil.copy (file_location, os.path.join(dest_path, "constraints", c))

            if "cores" in self.project_tags["SLAVES"][slave]:
                if status: status.Verbose("User Specified an core(s) for a slave")
                for c in self.project_tags["SLAVES"][slave]["cores"]:

                    file_location = os.path.join(slave_dir, os.pardir, "cores", c)
                    if not os.path.exists(file_location):
                        raise PGE("Core: %s does not exist" % file_location)
                    dest_path = utils.resolve_path(self.project_tags["BASE_DIR"])
                    shutil.copy (file_location, os.path.join(dest_path, "cores", c))

            #each slave

        if "MEMORY" in self.project_tags:
            for mem in self.project_tags["MEMORY"]:
                fdict = {"location":""}
                file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "bus", "mem")
                #file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/mem"
                fn = self.project_tags["MEMORY"][mem]["filename"]
                try:
                    self.filegen.process_file(filename = fn, file_dict = fdict, directory = file_dest)
                except ModuleFactoryError as err:
                    if status: status.Error("ModuleFactoryError while generating memory: %s" % str(err))
                    raise ModuleFactoryError(err)

            mem_dir = os.path.split(utils.find_rtl_file_location(fn, self.user_paths))[0]
            if "constraint_files" in self.project_tags["MEMORY"][mem]:
                temp_paths = self.user_paths
                temp_paths.append(mem_dir)

                for c in self.project_tags["MEMORY"][mem]["constraint_files"]:
                    file_location = utils.get_constraint_file_path(self.project_tags["board"], c, temp_paths)
                    dest_path = utils.resolve_path(self.project_tags["BASE_DIR"])
                    shutil.copy (file_location, os.path.join(dest_path, "constraints", c))

            if "cores" in self.project_tags["MEMORY"][mem]:
                if status: status.Verbose("User Specified an core(s) for a mem")
                for c in self.project_tags["MEMORY"][mem]["cores"]:

                    file_location = os.path.join(mem_dir, os.pardir, "cores", c)
                    if not os.path.exists(file_location):
                        raise PGE("Core: %s does not exist" % file_location)
                    dest_path = utils.resolve_path(self.project_tags["BASE_DIR"])
                    shutil.copy (file_location, os.path.join(dest_path, "cores", c))



        '''
        if 'infrastructure' in self.project_tags:
            if status: status.Verbose("User Specified an infrastructure(s)")
            for entry in self.project_tags["infrastructure"]:
                name = entry.keys()[0]
                im = entry[name]
                path = utils.get_board_directory(name)
                path = os.path.join(path, name, "infrastructure", im["filename"])

                ftdict = {"location":path}
                file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "bus", "infrastructure")
                fn = im["filename"]
                self.filegen.process_file(filename = fn, file_dict = fdict, directory=file_dest)
        '''

        if "cores" in self.project_tags:
            if status: status.Verbose("User Specified an core(s)")
            for entry in self.project_tags["cores"]:
                name = entry.keys()[0]
                for core in entry[name]:
                    file_location = None
                    path = utils.get_board_directory(name)
                    path = os.path.join(path, name, "cores")
                    for root, dirs, files in os.walk(path):
                        if core in files:
                            file_location =  os.path.join(root, core)
                            break
                    if not os.path.exists(file_location):
                        raise PGE("Core: %s does not exist" % file_location)
                    dest_path = utils.resolve_path(self.project_tags["BASE_DIR"])
                    shutil.copy (file_location, os.path.join(dest_path, "cores", core))

        #Copy the user specified constraint files to the constraints directory
        for constraint_fname in cfiles:
            abs_proj_base = utils.resolve_path(self.project_tags["BASE_DIR"])
            constraint_path = utils.get_constraint_file_path(self.project_tags["board"], constraint_fname)
            if os.path.exists(constraint_fname):
                constraint_fname = os.path.split(constraint_fname)[-1]
            #constraint_path = constraint_fname
            if len(constraint_path) == 0:
                print ("Couldn't find constraint: %s, searched in the current directory and %s/hdl/%s" %
                    (constraint_fname, abs_proj_base, self.project_tags["board"]))
                continue
            shutil.copy (constraint_path, os.path.join(abs_proj_base, "constraints", constraint_fname))
            #shutil.copy (constraint_path, abs_proj_base + "/constraints/" + constraint_fname)

        #Generate the IO handler
        interface_filename = self.project_tags["INTERFACE"]["filename"]
        fdict = {"location":""}
        #file_dest = self.project_tags["BASE_DIR"] + "/rtl/bus/interface"
        file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "bus", "interface")
        result = self.filegen.process_file(filename = interface_filename, file_dict=fdict , directory=file_dest)

        if status:
            status.Verbose("verilog files: ")
            for f in self.filegen.verilog_file_list:
                status.Verbose("\t%s" % f)
                #if len(self.filegen.verilog_dependency_list) > 0:
                #    status.Verbose("\t\tdependent files: ")
        if status: status.Verbose("copy over the dependencies...")
        for d in self.filegen.verilog_dependency_list:
            fdict = {"location":""}
            file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "dependencies")
            result = self.filegen.process_file(filename = d, file_dict = fdict, directory = file_dest, debug = True)
            if status: status.Verbose("\tDependent File: %s" % d)

        if "dependencies" in self.project_tags:
            if status: status.Verbose("User Specified dependencies")
            for name in self.project_tags["dependencies"]:
                if status: status.Verbose("\tUser Dependent File: %s" % name)

                fdict = {"location":""}
                file_dest = os.path.join(self.project_tags["BASE_DIR"], "rtl", "dependencies")
                result = self.filegen.process_file(filename = name, file_dict = fdict, directory = file_dest, debug = True)



        return True
 def _get_pid_file_path(self):
     # Path of the backup system's PID file inside the MBS conf directory.
     conf_dir = mbs_config.mbs_conf_dir()
     return resolve_path(os.path.join(conf_dir, "backup_system_pid.txt"))
Example #28
0
    elif module == 'whoosh':
        return whoosh.versionstring()
    elif module == 'pygments':
        return pygments.__version__
    elif module == 'flask':
        return flask.__version__
    elif module == 'sherlock':
        import sherlock
        return sherlock.__version__
    return '0.0'


# determine actual index name
force_rebuild = False
if '--test' in sys.argv:
    # Test runs use a throwaway 'test' index and always rebuild it.
    settings.DEFAULT_INDEX_NAME = 'test'
    force_rebuild = True

# Explicit "index rebuild" on the command line also forces a rebuild.
if '--index' in sys.argv and 'rebuild' in sys.argv:
    force_rebuild = True

# build the full path (resolve ., .., and ~ in the configured locations)
FULL_INDEX_PATH = utils.resolve_path(settings.INDEX_PATH)
FULL_INDEXES_PATH = utils.resolve_path(settings.INDEXES_PATH)

# force index rebuilding
FORCE_INDEX_REBUILD = force_rebuild

# Display formats for strftime, e.g. "Monday, January 01, 2024 01:00PM"
# and "01/01/2024 13:00".
LONG_DATE_FORMAT = '%A, %B %d, %Y %I:%M%p'
SHORT_DATE_FORMAT = '%m/%d/%Y %H:%M'
Example #29
0
 def _on_popup_icon_sel(self, evt, name, idx, off):
     # An icon was chosen for the popup at slot (off + idx): remember the
     # selection, point the popup's button at the matching .png, close it.
     popup = self._popup[off + idx]
     popup.name = name
     icon_path = os.path.join(resolve_path(prefs['view-icons-path']),
                              name + '.png')
     popup.Button.object.Name = icon_path
     popup.Close(0)
Example #30
0
 def temp_dir(self, temp_dir):
     # Property setter: store the temp directory with ., .. and ~ resolved.
     self._temp_dir = resolve_path(temp_dir)
 def _get_pid_file_path(self):
     # The backup system's PID file lives in the MBS conf directory.
     return resolve_path(
         os.path.join(mbs_config.MBS_CONF_DIR, "backup_system_pid.txt"))
Example #32
0
def load_config():
    """
    Loads the configuration and parses the command line arguments.

    This function is the "main" function of this module and brings together all
    of the modules various functions.

    After this function executes, :data:`CONFIG` will contain the final
    configuration, and :data:`ARGS` will contain any left over command line
    arguments that weren't parsed (which will likely be the command that the
    user wants to execute).

    :returns: A ``dict`` containing the final configuration.

    """

    global ARGS
    options, ARGS = parse_arguments()
    # Keep only the options the user actually set, so unset options do not
    # shadow values coming from the config file or the defaults below.
    options = dict(i for i in options.__dict__.items() if i[1] is not None)

    if "verbosity" in options:
        logcontrol.set_level(options["verbosity"])

    logger.debug(
        "Command line options passed in...\n%s",
        pprint.pformat(options)
    )
    logger.debug(
        "Command line arguments passed in...\n%s",
        pprint.pformat(ARGS)
    )

    # Try and find a configuration file
    config_file_path = None
    if options.get("config") is not None:
        # An explicit --config path always wins over the search path.
        config_file_path = options["config"]
    else:
        # Figure out all of the places we should look for a configuration file.
        possible_config_paths = generate_search_path()

        # Ensure any ., .., and ~ symbols are correctly handled.
        possible_config_paths = utils.resolve_paths(possible_config_paths)

        logger.debug(
            "Searching for configuration file in...\n%s",
            pprint.pformat(possible_config_paths, width = 72)
        )

        # Use the first existing file on the search path.
        for i in possible_config_paths:
            if os.path.isfile(i):
                config_file_path = i
                break
    configuration = {}
    if config_file_path is None:
        logger.info("No configuration file found.")
    else:
        logger.info("Loading configuration file at %s.", config_file_path)

        try:
            f = open(config_file_path)
        except IOError:
            logger.critical(
                "Could not open configuration file at %s.",
                config_file_path,
                exc_info = sys.exc_info()
            )
            raise

        try:
            configuration = utils.load_yaml(f)

            # The file must parse to a mapping; anything else is a user error.
            if not isinstance(configuration, dict):
                logger.critical(
                    "Your configuration file is not properly formatted. "
                    "The top level item must be a dictionary."
                )
                sys.exit(1)
        except ValueError:
            logger.critical(
                "Could not parse configuration file at %s.",
                config_file_path,
                exc_info = sys.exc_info()
            )
            raise
        finally:
            f.close()

    # Make a dictionary with the default values in it
    default_configuration = dict(
        (i.name, i.default_value) for i in KNOWN_OPTIONS.values()
                if i.default_value is not None
    )

    # Join the various dictionaries we have together. Priority is bottom-to-top.
    # NOTE(review): concatenating .items() results is Python 2 only; under
    # Python 3 dict views cannot be added -- confirm the target version.
    final_config = dict(
        default_configuration.items() +
        configuration.items() +
        options.items()
    )

    # Fail fast if a required option is still missing after merging.
    for i in (j.name for j in KNOWN_OPTIONS.values() if j.required):
        if i not in final_config:
            logger.critical(
                "Required value %s is unspecified. This value needs to be "
                "set in either the configuration file or on the command line.",
                i
            )
            sys.exit(1)

    # Go through and resolve any paths
    for i in (j.name for j in KNOWN_OPTIONS.values() if j.data_type is Path):
        if i in final_config:
            final_config[i] = utils.resolve_path(final_config[i])

    return final_config
Example #33
0
 def _get_pid_file_path(self):
     # Engine-specific PID file inside the MBS configuration directory.
     fname = "engine_%s_pid.txt" % self.id
     return resolve_path(os.path.join(mbs_config.MBS_CONF_DIR, fname))
Example #34
0
        "output": "%(playlist)s/%(title)s.%(ext)s",
        "format": "best"
    }
}

# Start from the built-in defaults.
CONF = {}
CONF.update(DEFAULT_CONF)

# The config file lives next to this script.
conf_path = "%s/conf.json" % path.dirname(path.realpath(__file__))

# First run: write the defaults out so the user has a file to edit.
if not path.exists(conf_path):
    with open(conf_path, "w", encoding="utf-8") as fp:
        fp.write(json.dumps(CONF, ensure_ascii=False))

# User-supplied values override the defaults.
CONF.update(json.load(open(conf_path)))
CONF["ytsub_dir"] = resolve_path(CONF["ytsub_dir"])
makedirs(CONF["ytsub_dir"], exist_ok=True)


def call(verb, argv, **kwargs):
    getattr(sys.modules[__name__], "ytsub_%s" % verb)(*argv, **kwargs)


def ytsub_add(*args, overwrite=False, **kwargs):
    if not args or len(args) > 1:
        print_help(short=True)
        return

    pl_id = parse_arg_to_pl(args[0])["id"]
    (existed, pl) = load_pl(pl_id, create_new=True)
Example #35
0
  def process_file(self, filename, file_dict, directory="", debug=False):
    """process_file

    read in a file, modify it (if necessary), then write it to the location
    specified by the directory variable

    Args:
      filename: the name of the file to process
      file_dict: dictionary associated with this file
      directory: output directory

    Return:

    Raises:
      ModuleFactoryError
      IOError

    """
    verbose = False
    # NOTE(review): the debug parameter is unconditionally overridden here,
    # so callers passing debug=True get no debug output -- confirm intended.
    debug = False
    # Track every verilog source so dependencies can be deduplicated later.
    if (filename.endswith(".v")):
        self.verilog_file_list.append(filename)

    if debug:
        print "in process file"
        print "\t%s" % filename
    #maybe load a tags??

    #using the location value in the file_dict find the file and
    #pull it into a buf

    self.buf = ""
    file_location = ""
    paths = self.user_paths


    #There are two types of files
    #ones that are copied over from a location
    #ones that are generated by scripts

    #The file is specified by a location and basically needs to be copied over
    if file_dict.has_key("location"):
        #print "Location: %s" % file_dict["location"]
        #file_location = os.path.join( utils.nysa_base,
        loc = file_dict["location"].split("/")
        #print "Loc list: %s" % str(loc)
        # Expand the ${NYSA} placeholder to the installed nysa base directory.
        if loc[0] == "${NYSA}":
            loc[0]  = utils.nysa_base


        #print "Loc list: %s" % str(loc)

        # Rebuild an absolute path from the split components.
        file_location = "/"
        for d in loc:
            file_location = os.path.join(file_location, d)

        if (debug):
            print ("getting file: " + filename + " from location: " + file_location)

        found_file = False
        try:
            filein = open(os.path.join(utils.resolve_path(file_location), filename))
            self.buf = filein.read()
            filein.close()
            found_file = True
        except IOError as err:
            pass

        # Fall back to searching the user paths for the RTL file.
        if not found_file:
            if debug:
                print "searching for file...",
            try:
                absfilename = utils.find_rtl_file_location(filename, self.user_paths)
                filepath = os.path.dirname(os.path.dirname(absfilename))
                paths.insert(0, filepath)
                paths = list(set(paths))

                filein = open(absfilename)
                self.buf = filein.read()
                filein.close()

            except:
                if debug:
                    print "Failed to find file"
                raise ModuleFactoryError("File %s not found searched %s and in the HDL dir (%s)" %  (filename, \
                                          file_location, \
                                          utils.nysa_base + os.path.sep + "cbuilder" + os.path.sep + "verilog"))


        if verbose:
          print "found file!"
          print "file content: " + self.buf

    #File is generated by a script
    elif (not file_dict.has_key("gen_script")):
      raise ModuleFactoryError( "File %s does not declare a location or a \
                                  script! Check the template file" % filename)

    if verbose:
      print "Project name: " + self.tags["PROJECT_NAME"]

    #if the generation flag is set in the dictionary
    if "gen_script" in file_dict:
      if debug:
        print "found the generation script"
        print "run generation script: " + file_dict["gen_script"]
      #open up the new gen module
      # Locate the already-imported gen_scripts package by module name.
      ms = sys.modules.keys()
      gs = ""
      for m in ms:
          if m.endswith("gen_scripts"):
              gs = m
      #print "gs: %s" % gs


      cl = __import__("%s.gen" % gs, fromlist=[gs])
      #cl = importlib.import_module("gen_scripts", "gen")
      #if debug:
      #  print "cl: " + str(cl)
      # NOTE(review): 'gen' is not defined in this method; presumably a
      # module-level import -- confirm, otherwise this line would NameError.
      Gen = getattr(gen, "Gen")
      if debug:
        print "Gen: " + str(Gen)
      self.gen_module = __import__("%s.%s" % (gs, file_dict["gen_script"]), fromlist=[gs])
      gen_success_flag = False

      #find the script and dynamically add it
      for name in dir(self.gen_module):
        obj = getattr(self.gen_module, name)
  #      print "object type: " + str(obj)
#XXX: debug section start
        if verbose:
          print "name: " + name
        if isclass(obj):
          if verbose:
            print "\tobject type: " + str(obj)
            print "\tis class"
          if issubclass(obj, cl.Gen):
            if verbose:
              print "\t\tis subclass"
#XXX: debug section end
        # Instantiate each concrete Gen subclass found in the script module
        # and run its gen_script over the buffer.
        if isclass(obj) and issubclass(obj, cl.Gen) and obj is not cl.Gen:
          self.gen = obj()
          if verbose:
            print "obj = " + str(self.gen)

          self.buf = self.gen.gen_script(tags = self.tags, buf = self.buf, user_paths = self.user_paths)
          gen_success_flag = True

      if not gen_success_flag:
        raise ModuleFactoryError("Failed to execute the generation script %s" %
                                  file_dict["gen_script"])
    else:
      #no script to execute, just tags
      self.apply_tags()

    if verbose:
      print self.buf
    # Only write the output file when we actually produced content.
    if (len(self.buf) > 0):
      result = self.write_file(directory, filename)

    # Queue any dependent modules so they get processed/copied later.
    if self.has_dependencies(filename):
      deps = self.get_list_of_dependencies(filename)
      for d in deps:
        try:
          f = utils.find_module_filename(d, self.user_paths)
          if (len(f) == 0):
            print "Error: couldn't find dependency filename"
            continue
          if (f not in self.verilog_dependency_list and
            f not in self.verilog_file_list):
            if debug:
              print "found dependency: " + f
            self.verilog_dependency_list.append(f)
        except ModuleNotFound as err:
          continue
Example #36
0
 def __init__(self, f, winid):
     # Resolve the builder-definition file path, load it, and wire the
     # window's 'destroy' signal to our delete handler.
     ui_path = utils.resolve_path(f)
     Gtk.Builder.__init__(self)
     self.add_from_file(ui_path)
     self.__winid = winid
     self.add_event(self.__winid, 'destroy', self.on_delete_window)
def _get_client_config():
    """Read the mbs api client config from ~/.mbs.

    :returns: the parsed config (via read_config_json).
    :raises Exception: if the config file does not exist.
    """
    conf_path = resolve_path(os.path.join("~/.mbs", "mbs-api-client.config"))
    if os.path.exists(conf_path):
        return read_config_json("mbs-api-client", conf_path)
    else:
        # Bug fix: the original message left the '%s' placeholder
        # unformatted; interpolate the missing path so the error is usable.
        raise Exception("mbs api client conf %s does not exist" % conf_path)
Example #38
0
 def _get_pid_file_path(self):
     # Engine-specific PID file kept in the user's ~/.mbs directory.
     fname = "engine_%s_pid.txt" % self.id
     return resolve_path(os.path.join("~/.mbs", fname))
Example #39
0
def get_pl_path(pl_id):
    # Resolved path of the cached playlist JSON for the given playlist id.
    rel_path = "%s/%s.json" % (CONF["ytsub_dir"], pl_id)
    return resolve_path(rel_path)
Example #40
0
    def get_log_path(self):
        # Logs are grouped by task type: <MBS_LOG_PATH>/<type>s/<type>-<id>.log
        type_name = self._task.type_name.lower()
        log_dir = resolve_path(
            os.path.join(mbs_config.MBS_LOG_PATH, type_name + "s"))
        log_file_name = "%s-%s.log" % (type_name, str(self._task.id))
        return os.path.join(log_dir, log_file_name)
Example #41
0
 def _get_pid_file_path(self):
     # One PID file per engine instance, keyed by the engine id, stored
     # under the MBS configuration directory.
     return resolve_path(os.path.join(
         mbs_config.MBS_CONF_DIR, "engine_%s_pid.txt" % self.id))
Example #42
0
def task_log_dir(task_type_name):
    # Per-task-type log directory, e.g. "backups" for task type "backup".
    subdir = task_type_name.lower() + "s"
    return resolve_path(os.path.join(mbs_config.MBS_LOG_PATH, subdir))
Example #43
0
 def temp_dir(self, temp_dir):
     # Property setter: store the temp directory with ., .. and ~ resolved.
     self._temp_dir = resolve_path(temp_dir)
Example #44
0
    elif module == 'whoosh':
        return whoosh.versionstring()
    elif module == 'pygments':
        return pygments.__version__
    elif module == 'flask':
        return flask.__version__
    elif module == 'sherlock':
        import sherlock
        return sherlock.__version__
    return '0.0'


# determine actual index name
force_rebuild = False
if '--test' in sys.argv:
    # Test runs use a throwaway 'test' index and always rebuild it.
    settings.DEFAULT_INDEX_NAME = 'test'
    force_rebuild = True

# Explicit "index rebuild" on the command line also forces a rebuild.
if '--index' in sys.argv and 'rebuild' in sys.argv:
    force_rebuild = True

# build the full path (resolve ., .., and ~ in the configured locations)
FULL_INDEX_PATH = utils.resolve_path(settings.INDEX_PATH)
FULL_INDEXES_PATH = utils.resolve_path(settings.INDEXES_PATH)

# force index rebuilding
FORCE_INDEX_REBUILD = force_rebuild

# Display formats for strftime, e.g. "Monday, January 01, 2024 01:00PM"
# and "01/01/2024 13:00".
LONG_DATE_FORMAT = '%A, %B %d, %Y %I:%M%p'
SHORT_DATE_FORMAT = '%m/%d/%Y %H:%M'
def make_full_graph(dest_dir, all_info, conf):
	"""Render the whole project's include graph to a PNG.
	
	dest_dir: output directory; the intermediate .dot file goes under '.cache'.
	all_info: dict mapping absolute file name -> file info object.
	conf: configuration; PROJECT_GRAPH_MAX_DIAMETER (<= 0 means unlimited)
	bounds how far from the source files the rendered graph extends.
	"""
	# -------------------------------------------------
	# Build the whole graph to find the source vertices
	
	# input degree of each node in the graph
	in_degree = {}
	# dictionary: {node : set of neighbours}
	graph = {}
	
	found_files = set([])
	list_files = []
	
	# every file starts in the queue with in-degree 0
	for abs_name in all_info:
		list_files.append(abs_name)
		in_degree[abs_name] = 0
	
	# build the graph while finding the sources
	while len(list_files) > 0:
		abs_name = list_files[0]
		del list_files[0]
		
		neigh_set = set([])
		inc_files = all_info[abs_name].get_included_files()
		for f in inc_files:
			in_degree[f] += 1
			neigh_set.add( all_info[f].get_rel_name() )
			if f not in found_files:
				# this file has yet to be processed: add to 'queue'
				list_files.append(f)
				found_files.add(f)
		
		rel_name = all_info[abs_name].get_rel_name()
		if rel_name not in graph: graph[rel_name] = neigh_set
		else: graph[rel_name].update(neigh_set)
	
	# ------------------------------------------
	# Build the graph again, but only with those
	# vertices within the maximum distance
	
	PGMD = conf.PROJECT_GRAPH_MAX_DIAMETER
	graph = {}
	list_files = []
	
	# initialise distances: the sources are the files nobody includes
	dists = {}
	for f,d in in_degree.items():
		if d == 0:
			dists[f] = 0
			list_files.append(f)
	
	# multi-source BFS from the source vertices
	found_files = set([])
	while len(list_files) > 0:
		abs_name = list_files[0]
		del list_files[0]
		
		# distance from the source to this node
		D = dists[abs_name]
		
		neigh_set = set([])
		inc_files = all_info[abs_name].get_included_files()
		for f in inc_files:
			
			# distance from the source to a neighbour of node
			if f in dists:
				dists[f] = min(D + 1, dists[f])
			else:
				dists[f] = D + 1
			
			# if that distance is greater than the allowed,
			# do not add more vertices to graph
			# (PGMD <= 0 disables the limit)
			if PGMD <= 0 or dists[f] <= PGMD:
			
				neigh_set.add( all_info[f].get_rel_name() )
				if f not in found_files:
					# this file has yet to be processed: add to 'queue'
					list_files.append(f)
					found_files.add(f)
		
		rel_name = all_info[abs_name].get_rel_name()
		if rel_name not in graph:
			graph[rel_name] = neigh_set
		else:
			graph[rel_name].update(neigh_set)
	
	# write the .dot to the cache and render the PNG under dest_dir
	dot_abs_name = join(dest_dir, '.cache', 'project_graph.dot')
	dot_abs_name = utils.resolve_path(dot_abs_name)
	
	png_abs_name = join(dest_dir, 'project_graph.png')
	png_abs_name = utils.resolve_path(png_abs_name)
	
	graph_to_png(graph, dot_abs_name, png_abs_name, conf)
Example #46
0
def get_output_dir(config_filename, debug=False):
    # Resolve the project's output directory from the config file; when the
    # config gives no BASE_DIR, default to ./<PROJECT_NAME> under the CWD.
    tags = get_project_tags(config_filename)
    if "BASE_DIR" not in tags:
        default_base = os.path.join(os.getcwd(), tags["PROJECT_NAME"])
        tags["BASE_DIR"] = default_base
    return utils.resolve_path(tags["BASE_DIR"])