Example #1
    def adjust_bounds_to_absolute_limits(self, limits_low, limits_high, reference=None):
        """ change the current bounds settings according to some absolute values

        This does not change the type of this bounds instance (e.g. relative).
        @value limits_low: a tuple describing the new lower absolute boundary
        @type limits_low: (tuple|list) of float
        @value limits_high: a tuple describing the new upper absolute boundary
        @type limits_high: (tuple|list) of float
        @value reference: a reference object described by a tuple (or list) of
            three items. These three values describe only the lower boundary of
            this object (for the x, y and z axes). Each item must be a float
            value. This argument is ignored for the boundary type "TYPE_CUSTOM".
        @type reference: (tuple|list) of float
        """
        # use the default reference if none was given
        if reference is None:
            reference = self.reference
        # check if a reference is given (if necessary)
        if self.bounds_type in (Bounds.TYPE_RELATIVE_MARGIN, Bounds.TYPE_FIXED_MARGIN):
            if reference is None:
                raise ValueError, "any non-custom boundary definition " + "requires an a reference object for caluclating " + "absolute limits"
            else:
                ref_low, ref_high = reference.get_absolute_limits()
        # calculate the new settings
        if self.bounds_type == Bounds.TYPE_RELATIVE_MARGIN:
            for index in range(3):
                dim_width = ref_high[index] - ref_low[index]
                if dim_width == 0:
                    # We always lose relative margins if the specific dimension
                    # is zero. There is no way to avoid this.
                    # the escaped "%%s" placeholder is filled in later with
                    # "lower" or "upper"
                    message = ("Non-zero %%s boundary lost during conversion "
                               "to relative margins due to zero size "
                               "dimension '%s'." % "xyz"[index])
                    # Display warning messages, if we can't reach the requested
                    # absolute dimension.
                    if ref_low[index] != limits_low[index]:
                        log.info(message % "lower")
                    if ref_high[index] != limits_high[index]:
                        log.info(message % "upper")
                    self.bounds_low[index] = 0
                    self.bounds_high[index] = 0
                else:
                    self.bounds_low[index] = (ref_low[index] - limits_low[index]) / dim_width
                    self.bounds_high[index] = (limits_high[index] - ref_high[index]) / dim_width
        elif self.bounds_type == Bounds.TYPE_FIXED_MARGIN:
            for index in range(3):
                self.bounds_low[index] = ref_low[index] - limits_low[index]
                self.bounds_high[index] = limits_high[index] - ref_high[index]
        elif self.bounds_type == Bounds.TYPE_CUSTOM:
            for index in range(3):
                self.bounds_low[index] = limits_low[index]
                self.bounds_high[index] = limits_high[index]
        else:
            # this should not happen
            raise NotImplementedError, "the function " + "'adjust_bounds_to_absolute_limits' is currently not " + "implemented for the bounds_type '%s'" % str(
                self.bounds_type
            )
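The margin arithmetic above becomes clearer with concrete numbers. The following standalone sketch (plain Python, not pycam code; the reference box and limits are invented) reproduces both conversions for a single axis:

ref_low, ref_high = 0.0, 10.0        # reference object spans 0..10 on this axis
limit_low, limit_high = -1.0, 12.0   # requested absolute boundaries

dim_width = ref_high - ref_low                       # 10.0
relative_low = (ref_low - limit_low) / dim_width     # 0.1 -> 10 % margin below
relative_high = (limit_high - ref_high) / dim_width  # 0.2 -> 20 % margin above

fixed_low = ref_low - limit_low      # 1.0 -> absolute margin below (TYPE_FIXED_MARGIN)
fixed_high = limit_high - ref_high   # 2.0 -> absolute margin above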
Example #2
 def _restore_undo_state(self, widget=None, event=None):
     if len(self._undo_states) > 0:
         latest = StringIO.StringIO(self._undo_states.pop(-1))
         model = pickle.Unpickler(latest).load()
         self.load_model(model)
         self.gui.get_object("UndoButton").set_sensitive(
                 len(self._undo_states) > 0)
         log.info("Restored the previous state of the model")
         self.settings.emit_event("model-change-after")
     else:
         log.info("No previous undo state available - request ignored")
Example #3
 def _get_font_files(self):
     if self.font_dir is None:
         return []
     log.info("Font directory: %s" % self.font_dir)
     result = []
     files = os.listdir(self.font_dir)
     for fname in files:
         filename = os.path.join(self.font_dir, fname)
         if filename.lower().endswith(".cxf") and os.path.isfile(filename):
             result.append(filename)
     result.sort()
     return result
Example #4
 def load_task_settings_file(self, widget=None, filename=None):
     if callable(filename):
         filename = filename()
     if not filename:
         filename = self.settings.get("get_filename_func")("Loading settings ...",
                 mode_load=True, type_filter=FILTER_CONFIG)
         # Only update the last_task_settings attribute if the task file was
         # loaded interactively. E.g. ignore the initial task file loading.
         if filename:
             self.last_task_settings_uri = pycam.Utils.URIHandler(filename)
     if filename:
         log.info("Loading task settings file: %s" % str(filename))
         self.load_task_settings(filename)
         self.add_to_recent_file_list(filename)
Example #5
 def save_workspace_to_file(self, filename, remember_uri=True):
     from pycam.Flow.parser import dump_yaml
     if remember_uri:
         self.last_workspace_uri = pycam.Utils.URIHandler(filename)
         self.settings.get("set_last_filename")(filename)
     log.info("Storing workspace in file: %s", filename)
     try:
         with open_file_context(filename, "w", True) as out_file:
             dump_yaml(target=out_file)
         return True
     except OSError as exc:
         log.error("Failed to store workspace in file '%s': %s", filename,
                   exc)
         return False
Example #6
File: common.py Project: I--Fox--I/pycam
 def __init__(self, title, message):
     try:
         import Tkinter
     except ImportError:
         # tk is not installed
         log.warn("Failed to show error dialog due to a missing Tkinter " \
                 + "Python package.")
         return
     try:
         root = Tkinter.Tk()
     except Tkinter.TclError, err_msg:
         log.info(("Failed to create error dialog window (%s). Probably " \
                 + "you are running PyCAM from a terminal.") % err_msg)
         return
Example #7
 def __init__(self, title, message):
     try:
         import Tkinter
     except ImportError:
         # tk is not installed
         log.warn("Failed to show error dialog due to a missing Tkinter " \
                 + "Python package.")
         return
     try:
         root = Tkinter.Tk()
     except Tkinter.TclError, err_msg:
         log.info(("Failed to create error dialog window (%s). Probably " \
                 + "you are running PyCAM from a terminal.") % err_msg)
         return
Example #8
File: events.py Project: willicam/pycam
 def register_ui(self, section, name, widget, weight=0, args_dict=None):
     if section not in self.ui_sections:
         log.info("Tried to register widget for non-existing UI: %s -> %s",
                  name, section)
         self.ui_sections[section] = UISection(None, None, [])
     current_widgets = [
         item.obj for item in self.ui_sections[section].widgets
     ]
     if (widget is not None) and (widget in current_widgets):
         log.info("Tried to register widget twice: %s -> %s", section, name)
         return
     self.ui_sections[section].widgets.append(
         UIWidget(name, widget, weight, args_dict))
     self._rebuild_ui_section(section)
Example #9
 def unblock_event(self, event, disable_log=False):
     if event in self.event_handlers:
         if self.event_handlers[event].blocker_tokens:
             self.event_handlers[event].blocker_tokens.pop()
             if not disable_log:
                 log.debug2(
                     "Unblocking an event: %s (%d blockers remaining)",
                     event, len(self.event_handlers[event].blocker_tokens))
         else:
             if not disable_log:
                 log.debug("Trying to unblock non-blocked event '%s'",
                           event)
     else:
         # "disable_log" is only relevant for the debugging messages above
         log.info("Trying to unblock an unknown event: %s", event)
Example #10
 def unregister_ui(self, section, widget):
     if (section in self.ui_sections) or (None in self.ui_sections):
         if section not in self.ui_sections:
             section = None
         ui_section = self.ui_sections[section]
         removal_list = []
         for index, item in enumerate(ui_section.widgets):
             if item.obj == widget:
                 removal_list.append(index)
         removal_list.reverse()
         for index in removal_list:
             ui_section.widgets.pop(index)
         self._rebuild_ui_section(section)
     else:
         log.info("Trying to unregister unknown ui section: %s", section)
Example #11
 def load_task_settings_file(self, widget=None, filename=None):
     if callable(filename):
         filename = filename()
     if not filename:
         filename = self.settings.get("get_filename_func")(
             "Loading settings ...",
             mode_load=True,
             type_filter=FILTER_CONFIG)
         # Only update the last_task_settings attribute if the task file was
         # loaded interactively. E.g. ignore the initial task file loading.
         if filename:
             self.last_task_settings_uri = pycam.Utils.URIHandler(filename)
     if filename:
         log.info("Loading task settings file: %s" % str(filename))
         self.load_task_settings(filename)
         self.add_to_recent_file_list(filename)
Example #12
def _spawn_daemon(manager, number_of_processes, worker_uuid_list):
    """ wait for items in the 'tasks' queue to appear and then spawn workers
    """
    global __multiprocessing, __closing
    tasks = manager.tasks()
    results = manager.results()
    stats = manager.statistics()
    cache = manager.cache()
    pending_tasks = manager.pending_tasks()
    log.debug("Spawner daemon started with %d processes" % number_of_processes)
    log.debug("Registering %d worker threads: %s" \
            % (len(worker_uuid_list), worker_uuid_list))
    last_cache_update = time.time()
    # use only the hostname (for brevity) - no domain part
    hostname = platform.node().split(".", 1)[0]
    try:
        while not __closing.get():
            # check the expire timeout of the cache from time to time
            if last_cache_update + 30 < time.time():
                cache.expire_cache_items()
                last_cache_update = time.time()
            if not tasks.empty():
                workers = []
                for task_id in worker_uuid_list:
                    task_name = "%s-%s" % (hostname, task_id)
                    worker = __multiprocessing.Process(
                        name=task_name,
                        target=_handle_tasks,
                        args=(tasks, results, stats, cache, pending_tasks,
                              __closing))
                    worker.start()
                    workers.append(worker)
                # wait until all workers are finished
                for worker in workers:
                    worker.join()
            else:
                time.sleep(1.0)
    except KeyboardInterrupt:
        log.info("Spawner daemon killed by keyboard interrupt")
        # set the "closing" flag and just exit
        try:
            __closing.set(True)
        except (IOError, EOFError):
            pass
    except (IOError, EOFError):
        # the connection was closed
        log.info("Spawner daemon lost connection to server")
Example #13
def _spawn_daemon(manager, number_of_processes, worker_uuid_list):
    """ wait for items in the 'tasks' queue to appear and then spawn workers
    """
    global __multiprocessing, __closing
    tasks = manager.tasks()
    results = manager.results()
    stats = manager.statistics()
    cache = manager.cache()
    pending_tasks = manager.pending_tasks()
    log.debug("Spawner daemon started with %d processes" % number_of_processes)
    log.debug("Registering %d worker threads: %s" \
            % (len(worker_uuid_list), worker_uuid_list))
    last_cache_update = time.time()
    # use only the hostname (for brevity) - no domain part
    hostname = platform.node().split(".", 1)[0]
    try:
        while not __closing.get():
            # check the expire timeout of the cache from time to time
            if last_cache_update + 30 < time.time():
                cache.expire_cache_items()
                last_cache_update = time.time()
            if not tasks.empty():
                workers = []
                for task_id in worker_uuid_list:
                    task_name = "%s-%s" % (hostname, task_id)
                    worker = __multiprocessing.Process(
                            name=task_name, target=_handle_tasks,
                            args=(tasks, results, stats, cache,
                                    pending_tasks, __closing))
                    worker.start()
                    workers.append(worker)
                # wait until all workers are finished
                for worker in workers:
                    worker.join()
            else:
                time.sleep(1.0)
    except KeyboardInterrupt:
        log.info("Spawner daemon killed by keyboard interrupt")
        # set the "closing" flag and just exit
        try:
            __closing.set(True)
        except (IOError, EOFError):
            pass
    except (IOError, EOFError):
        # the connection was closed
        log.info("Spawner daemon lost connection to server")
Example #14
def import_font(filename, callback=None):
    try:
        infile = pycam.Utils.URIHandler(filename).open()
    except IOError as exc:
        raise LoadFileError("CXFImporter: Failed to read file ({}): {}"
                            .format(filename, exc)) from exc
    try:
        parsed_font = CXFParser(infile, callback=callback)
    except _CXFParseError as exc:
        raise LoadFileError("CFXImporter: Skipped font definition file '{}'. Reason: {}."
                            .format(filename, exc)) from exc
    charset = Charset(**parsed_font.meta)
    for key, value in parsed_font.letters.items():
        charset.add_character(key, value)
    log.info("CXFImporter: Imported CXF font from '%s': %d letters",
             filename, len(parsed_font.letters))
    infile.close()
    return charset
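The "raise ... from exc" form used above chains the low-level parser error to the higher-level exception, so both tracebacks stay visible. A tiny standalone illustration (LoadError is a made-up stand-in for LoadFileError):

class LoadError(Exception):
    pass

def parse_number(text):
    try:
        return int(text)
    except ValueError as exc:
        # the original ValueError stays attached as __cause__ of the LoadError
        raise LoadError("failed to parse {!r}".format(text)) from exc

try:
    parse_number("abc")
except LoadError as exc:
    assert isinstance(exc.__cause__, ValueError)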
Example #15
def is_multiprocessing_available():
    if (pycam.Utils.get_platform() == pycam.Utils.OSPlatform.WINDOWS) and \
            hasattr(sys, "frozen") and sys.frozen:
        return False
    try:
        import multiprocessing
        # try to initialize a semaphore - this can trigger shm access failures
        # (e.g. on Debian Lenny with Python 2.6.6)
        multiprocessing.Semaphore()
        return True
    except ImportError:
        if "missing_module" not in __issued_warnings:
            log.info("Python's multiprocessing module is missing: disabling parallel processing")
            __issued_warnings.append("missing_module")
    except OSError:
        if "shm_access_failed" not in __issued_warnings:
            log.info("Python's multiprocessing module failed to acquire read/write access to "
                     "shared memory (shm) - disabling parallel processing")
            __issued_warnings.append("shm_access_failed")
    return False
Example #16
def import_font(filename, callback=None):
    try:
        infile = pycam.Utils.URIHandler(filename).open()
    except IOError as err_msg:
        log.error("CXFImporter: Failed to read file (%s): %s", filename,
                  err_msg)
        return None
    try:
        parsed_font = CXFParser(infile, callback=callback)
    except _CXFParseError as err_msg:
        log.warn("CFXImporter: Skipped font defintion file '%s'. Reason: %s.",
                 filename, err_msg)
        return None
    charset = Charset(**parsed_font.meta)
    for key, value in parsed_font.letters.iteritems():
        charset.add_character(key, value)
    log.info("CXFImporter: Imported CXF font from '%s': %d letters", filename,
             len(parsed_font.letters))
    infile.close()
    return charset
Example #17
 def get_barycenter(self):
     area = self.get_area()
     if not area:
         return None
     # see: http://stackoverflow.com/questions/2355931/foo/2360507
     # first: calculate cx and y
     cxy, cxz, cyx, cyz, czx, czy = (0, 0, 0, 0, 0, 0)
     for index in range(len(self._points)):
         p1 = self._points[index]
         p2 = self._points[(index + 1) % len(self._points)]
         cxy += (p1[0] + p2[0]) * (p1[0] * p2[1] - p1[1] * p2[0])
         cxz += (p1[0] + p2[0]) * (p1[0] * p2[2] - p1[2] * p2[0])
         cyx += (p1[1] + p2[1]) * (p1[0] * p2[1] - p1[1] * p2[0])
         cyz += (p1[1] + p2[1]) * (p1[1] * p2[2] - p1[2] * p2[1])
         czx += (p1[2] + p2[2]) * (p1[2] * p2[0] - p1[0] * p2[2])
         czy += (p1[2] + p2[2]) * (p1[1] * p2[2] - p1[2] * p2[1])
     if abs(self.maxz - self.minz) < epsilon:
         return (cxy / (6 * area), cyx / (6 * area), self.minz)
     elif abs(self.maxy - self.miny) < epsilon:
         return (cxz / (6 * area), self.miny, czx / (6 * area))
     elif abs(self.maxx - self.minx) < epsilon:
         return (self.minx, cyz / (6 * area), czy / (6 * area))
     else:
         # calculate area of xy projection
         poly_xy = self.get_plane_projection(Plane((0, 0, 0), (0, 0, 1)))
         poly_xz = self.get_plane_projection(Plane((0, 0, 0), (0, 1, 0)))
         poly_yz = self.get_plane_projection(Plane((0, 0, 0), (1, 0, 0)))
         if (poly_xy is None) or (poly_xz is None) or (poly_yz is None):
             log.warn("Invalid polygon projection for barycenter: %s",
                      str(self))
             return None
         area_xy = poly_xy.get_area()
         area_xz = poly_xz.get_area()
         area_yz = poly_yz.get_area()
         if 0 in (area_xy, area_xz, area_yz):
             log.info(
                 "Failed assumtion: zero-sized projected area - %s / %s / %s",
                 area_xy, area_xz, area_yz)
             return None
         if abs(cxy / area_xy - cxz / area_xz) > epsilon:
             log.info("Failed assumption: barycenter xy/xz - %s / %s",
                      cxy / area_xy, cxz / area_xz)
         if abs(cyx / area_xy - cyz / area_yz) > epsilon:
             log.info("Failed assumption: barycenter yx/yz - %s / %s",
                      cyx / area_xy, cyz / area_yz)
         if abs(czx / area_xz - czy / area_yz) > epsilon:
             log.info("Failed assumption: barycenter zx/zy - %s / %s",
                       czx / area_xz, czy / area_yz)
         return (cxy / (6 * area_xy), cyx / (6 * area_xy),
                 czx / (6 * area_xz))
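The sums above implement the standard area-weighted centroid formula, cx = sum((x1 + x2) * (x1*y2 - x2*y1)) / (6*A), once per pair of axes. A standalone 2D check (not pycam code) against a square whose centroid is known:

def centroid_2d(points):
    # area-weighted centroid of a simple 2D polygon (shoelace-based)
    area = 0.0
    cx = cy = 0.0
    for index in range(len(points)):
        x1, y1 = points[index]
        x2, y2 = points[(index + 1) % len(points)]
        cross = x1 * y2 - x2 * y1
        area += cross / 2.0
        cx += (x1 + x2) * cross
        cy += (y1 + y2) * cross
    return cx / (6 * area), cy / (6 * area)

assert centroid_2d([(0, 0), (2, 0), (2, 2), (0, 2)]) == (1.0, 1.0)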
Example #18
def is_multiprocessing_available():
    if (pycam.Utils.get_platform() == pycam.Utils.PLATFORM_WINDOWS) and \
            hasattr(sys, "frozen") and sys.frozen:
        return False
    try:
        import multiprocessing
        # try to initialize a semaphore - this can trigger shm access failures
        # (e.g. on Debian Lenny with Python 2.6.6)
        multiprocessing.Semaphore()
        return True
    except ImportError:
        if not "missing_module" in __issued_warnings:
            log.info("Python's multiprocessing module is missing: " + \
                    "disabling parallel processing")
            __issued_warnings.append("missing_module")
    except OSError:
        if not "shm_access_failed" in __issued_warnings:
            log.info("Python's multiprocessing module failed to acquire " + \
                    "read/write access to shared memory (shm) - disabling " + \
                    "parallel processing")
            __issued_warnings.append("shm_access_failed")
    return False
Example #19
 def load_preferences(self):
     """ load all settings (see Preferences window) from a file in the user's home directory """
     config = ConfigParser()
     try:
         with pycam.Gui.Settings.open_preferences_file() as in_file:
             config.read_file(in_file)
     except FileNotFoundError as exc:
         log.info(
             "No preferences file found (%s). Starting with default preferences.",
             exc)
     except OSError as exc:
         log.error("Failed to read preferences: %s", exc)
         return
     # report any ignored (obsolete) preference keys present in the file
     for item, value in config.items("DEFAULT"):
         if item not in PREFERENCES_DEFAULTS.keys():
             log.warn("Skipping obsolete preference item: %s", str(item))
     for item in PREFERENCES_DEFAULTS:
         if not config.has_option("DEFAULT", item):
             # a new preference setting is missing in the (old) file
             continue
         value_json = config.get("DEFAULT", item)
         try:
             value = json.loads(value_json)
         except ValueError as exc:
             log.warning("Failed to parse configuration setting '%s': %s",
                         item, exc)
             value = PREFERENCES_DEFAULTS[item]
         wanted_type = type(PREFERENCES_DEFAULTS[item])
         if wanted_type is float:
             # int is accepted for floats, too
             wanted_type = (float, int)
         if not isinstance(value, wanted_type):
             log.warning(
                 "Falling back to default configuration setting for '%s' due to "
                 "an invalid value type being parsed: %s != %s", item,
                 type(value), wanted_type)
             value = PREFERENCES_DEFAULTS[item]
         self.settings.set(item, value)
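The loader above expects an INI-style file whose DEFAULT section holds JSON-encoded values. A standalone sketch of that round-trip (the keys shown are invented examples, not an official list of pycam preferences):

import configparser
import io
import json

text = """[DEFAULT]
show_grid = true
default_margin = 2.5
"""

config = configparser.ConfigParser()
config.read_file(io.StringIO(text))
# each raw string value is decoded with json.loads, giving typed results
assert json.loads(config.get("DEFAULT", "show_grid")) is True
assert json.loads(config.get("DEFAULT", "default_margin")) == 2.5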
Example #20
 def save_task_settings_file(self, widget=None, filename=None):
     if callable(filename):
         filename = filename()
     if not isinstance(filename, (basestring, pycam.Utils.URIHandler)):
         # we open a dialog
         filename = self.settings.get("get_filename_func")("Save settings to ...",
                 mode_load=False, type_filter=FILTER_CONFIG,
                 filename_templates=(self.last_task_settings_uri, self.last_model_uri))
         if filename:
             self.last_task_settings_uri = pycam.Utils.URIHandler(filename)
     # no filename given -> exit
     if not filename:
         return
     settings = self.settings.dump_state()
     try:
         out_file = open(filename, "w")
         out_file.write(settings)
         out_file.close()
         log.info("Task settings written to %s" % filename)
         self.add_to_recent_file_list(filename)
     except IOError:
         log.error("Failed to save settings file")
Example #21
def retrieve_cached_download(storage_filename, download_url):
    """ retrieve the full filename of a locally cached download

    @throws OSError in case of any problems (download or data storage)
    @returns absolute filename
    """
    # this may raise an OSError
    cache_dir = get_cache_directory()
    full_filename = os.path.join(cache_dir, storage_filename)
    if os.path.exists(full_filename):
        log.debug("Use cached file (%s) instead of downloading '%s'", full_filename, download_url)
    else:
        log.info("Downloading '%s' to '%s'", download_url, full_filename)
        # download the file
        temporary_filename = full_filename + ".part"
        # remove the file if it was left there in a previous attempt
        try:
            os.remove(temporary_filename)
        except OSError:
            pass
        # this may raise an HTTP-related error (inherited from OSError)
        urllib.request.urlretrieve(download_url, temporary_filename)
        os.rename(temporary_filename, full_filename)
    return full_filename
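The helper above relies on the download-then-rename idiom so a partial download never appears under the final cache name. A reduced standalone sketch of just that idiom (URL handling and the cache directory are omitted; names are illustrative):

import os
import urllib.request

def fetch_atomically(url, target_path):
    # download into a temporary ".part" file first, then rename, so readers
    # only ever see a complete file under the final name
    temporary_path = target_path + ".part"
    try:
        os.remove(temporary_path)     # drop leftovers from a failed attempt
    except OSError:
        pass
    urllib.request.urlretrieve(url, temporary_path)
    os.rename(temporary_path, target_path)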
Example #22
                        raise _CXFParseError("Failed to read item coordinates" \
                                + " in line %d" % feeder.get_index())
                self.letters[character] = char_definition
            else:
                # unknown line format
                raise _CXFParseError("Failed to parse unknown content in " \
                        + "line %d" % feeder.get_index())


def import_font(filename, callback=None):
    try:
        infile = pycam.Utils.URIHandler(filename).open()
    except IOError, err_msg:
        log.error("CXFImporter: Failed to read file (%s): %s" \
                % (filename, err_msg))
        return None
    try:
        parsed_font = CXFParser(infile, callback=callback)
    except _CXFParseError, err_msg:
        log.warn("CFXImporter: Skipped font defintion file '%s'. Reason: %s." \
                % (filename, err_msg))
        return None
    charset = Charset(**parsed_font.meta)
    for key, value in parsed_font.letters.iteritems():
        charset.add_character(key, value)
    log.info("CXFImporter: Imported CXF font from '%s': %d letters" \
            % (filename, len(parsed_font.letters)))
    infile.close()
    return charset

Example #23
    if callback and callback():
        log.warn("DXFImporter: load model operation was cancelled")
        return None

    # 3D models are preferred over 2D models
    if triangles:
        if lines:
            log.warn("DXFImporter: Ignoring 2D elements in DXF file: " + \
                    "%d lines" % len(lines))
        model = pycam.Geometry.Model.Model()
        for index, triangle in enumerate(triangles):
            model.append(triangle)
            # keep the GUI smooth
            if callback and (index % 50 == 0):
                callback()
        log.info("DXFImporter: Imported DXF model (3D): %d triangles" % \
                len(model.triangles()))
        return model
    elif lines:
        model = pycam.Geometry.Model.ContourModel()
        for index, line in enumerate(lines):
            model.append(line)
            # keep the GUI smooth
            if callback and (index % 50 == 0):
                callback()
        # z scaling is always targeted at the 0..1 range
        if color_as_height and (model.minz != model.maxz):
            # scale z to 1
            scale_z = 1.0 / (model.maxz - model.minz)
            if callback:
                callback(text="Scaling height for multi-layered 2D model")
            log.info("DXFImporter: scaling height for multi-layered 2D model")
Example #24
def execute(parser, opts, args, pycam):
    # try to change the process name
    pycam.Utils.setproctitle("pycam")

    if len(args) > 0:
        inputfile = pycam.Utils.URIHandler(args[0])
    else:
        inputfile = None

    if opts.debug:
        log.setLevel(logging.DEBUG)
    elif opts.quiet:
        log.setLevel(logging.WARNING)
        # disable the progress bar
        opts.progress = "none"
        # silence all warnings
        warnings.filterwarnings("ignore")
    else:
        # silence gtk warnings
        try:
            import gtk
            warnings.filterwarnings("ignore", category=gtk.Warning)
        except ImportError:
            pass

    # show version and exit
    if opts.show_version:
        if opts.quiet:
            # print only the bare version number
            print VERSION
        else:
            text = '''PyCAM %s
Copyright (C) 2008-2010 Lode Leroy
Copyright (C) 2010-2011 Lars Kruse

License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>.
This is free software: you are free to change and redistribute it.
There is NO WARRANTY, to the extent permitted by law.''' % VERSION
            print text
        return EXIT_CODES["ok"]

    if not opts.disable_psyco:
        try:
            import psyco
            psyco.full()
            log.info("Psyco enabled")
        except ImportError:
            log.info("Psyco is not available (performance will probably " \
                    + "suffer slightly)")
    else:
        log.info("Psyco was disabled via the commandline")

    # check if server-auth-key is given -> this is mandatory for server mode
    if (opts.enable_server or opts.start_server) and not opts.server_authkey:
        parser.error("You need to supply a shared secret for server mode. " \
                + "This is supposed to prevent you from exposing your host " \
                + "to remote access without authentication.\n" \
                + "Please add the '--server-auth-key' argument followed by " \
                + "a shared secret password.")
        return EXIT_CODES["server_without_password"]

    # initialize multiprocessing
    try:
        if opts.start_server:
            pycam.Utils.threading.init_threading(opts.parallel_processes,
                    remote=opts.remote_server, run_server=True,
                    server_credentials=opts.server_authkey)
            pycam.Utils.threading.cleanup()
            return EXIT_CODES["ok"]
        else:
            pycam.Utils.threading.init_threading(opts.parallel_processes,
                    enable_server=opts.enable_server, remote=opts.remote_server,
                    server_credentials=opts.server_authkey)
    except socket.error, err_msg:
        log.error("Failed to connect to remote server: %s" % err_msg)
        return EXIT_CODES["connection_error"]
Example #25
File: Model.py Project: zancas/pycam
 def append(self, item, unify_overlaps=False, allow_reverse=False):
     super(ContourModel, self).append(item)
     if isinstance(item, Line):
         item_list = [item]
         if allow_reverse:
             item_list.append(Line(item.p2, item.p1))
         found = False
         # Going back from the end to start. The last line_group always has
         # the highest chance of being suitable for the next line.
         line_group_indexes = range(len(self._line_groups) - 1, -1, -1)
         for line_group_index in line_group_indexes:
             line_group = self._line_groups[line_group_index]
             for candidate in item_list:
                 if line_group.is_connectable(candidate):
                     line_group.append(candidate)
                     self._merge_polygon_if_possible(
                         line_group, allow_reverse=allow_reverse)
                     found = True
                     break
             if found:
                 break
         else:
             # add a single line as part of a new group
             new_line_group = Polygon(plane=self._plane)
             new_line_group.append(item)
             self._line_groups.append(new_line_group)
     elif isinstance(item, Polygon):
         if not unify_overlaps or (len(self._line_groups) == 0):
             self._line_groups.append(item)
              for subitem in item.next():
                 self._update_limits(subitem)
         else:
             # go through all polygons and check if they can be combined
             is_outer = item.is_outer()
             new_queue = [item]
             processed_polygons = []
             queue = self.get_polygons()
             while len(queue) > 0:
                 polygon = queue.pop()
                 if polygon.is_outer() != is_outer:
                     processed_polygons.append(polygon)
                 else:
                     processed = []
                     while len(new_queue) > 0:
                         new = new_queue.pop()
                         if new.is_polygon_inside(polygon):
                             # "polygon" is obsoleted by "new"
                             processed.extend(new_queue)
                             break
                         elif polygon.is_polygon_inside(new):
                             # "new" is obsoleted by "polygon"
                             continue
                         elif not new.is_overlap(polygon):
                             processed.append(new)
                             continue
                         else:
                             union = polygon.union(new)
                             if union:
                                 for p in union:
                                     if p.is_outer() == is_outer:
                                         new_queue.append(p)
                                     else:
                                         processed_polygons.append(p)
                             else:
                                 processed.append(new)
                             break
                     else:
                         processed_polygons.append(polygon)
                     new_queue = processed
             while len(self._line_groups) > 0:
                 self._line_groups.pop()
             log.info("Processed polygons: %s",
                      [len(p.get_lines()) for p in processed_polygons])
             log.info("New queue: %s",
                      [len(p.get_lines()) for p in new_queue])
             for processed_polygon in processed_polygons + new_queue:
                 self._line_groups.append(processed_polygon)
             # TODO: this is quite expensive - can we do it differently?
             self.reset_cache()
     else:
         # ignore any non-supported items (they are probably handled by a
         # parent class)
         pass
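The append() logic above scans existing line groups from newest to oldest and either extends a connectable group or starts a new one (the for/else handles the "nothing matched" case). A stripped-down standalone sketch of that pattern for plain 2D segments (not pycam geometry):

def group_segments(segments):
    groups = []          # each group is an ordered list of connected segments
    for seg in segments:
        for group in reversed(groups):   # the newest group is the most likely match
            if group[-1][1] == seg[0]:   # connectable: previous end meets new start
                group.append(seg)
                break
        else:
            groups.append([seg])         # no connectable group -> start a new one
    return groups

chains = group_segments([((0, 0), (1, 0)), ((1, 0), (1, 1)), ((5, 5), (6, 5))])
assert len(chains) == 2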
Example #26
     offset = (0, 0, 0)
 elif opts.boundary_mode == "inside":
     offset = (-0.5 * opts.tool_diameter, -0.5 * opts.tool_diameter, 0)
 else:
     # "outside"
     offset = (0.5 * opts.tool_diameter, 0.5 * opts.tool_diameter, 0)
 process_bounds = Bounds(Bounds.TYPE_FIXED_MARGIN, offset, offset)
 process_bounds.set_reference(bounds)
 tps.set_bounds(process_bounds)
 if opts.export_gcode:
     # generate the toolpath
     start_time = time.time()
     toolpath = pycam.Toolpath.Generator.generate_toolpath_from_settings(
         model, tps, callback=progress_bar.update)
     progress_bar.finish()
     log.info("Toolpath generation time: %f" \
             % (time.time() - start_time))
     # write result
     if isinstance(toolpath, basestring):
          # an error occurred
         log.error(toolpath)
     else:
         description = "Toolpath generated via PyCAM v%s" % VERSION
         tp_obj = Toolpath(toolpath, description, tps)
         handler, closer = get_output_handler(opts.export_gcode)
         if handler is None:
             return EXIT_CODES["write_output_failed"]
         generator = pycam.Exporters.GCodeExporter.GCodeGenerator(
             handler,
             metric_units=(opts.unit_size == "mm"),
             safety_height=opts.safety_height,
             toggle_spindle_status=opts.gcode_no_start_stop_spindle,
示例#27
0
 def __getitem__(self, key):
     try:
         return self.__getitem_orig(key)[self.GET_INDEX]()
     except TypeError, err_msg:
         log.info("Failed to retrieve setting '%s': %s" % (key, err_msg))
         return None
Example #28
 def unregister_namespace(self, name):
     if name not in self.namespace:
         log.info("Tried to unregister an unknown name from namespace: %s",
                  name)
Example #29
 def unregister_namespace(self, name):
      if name not in self.namespace:
         log.info("Tried to unregister an unknown name from namespace: " + \
                   str(name))
Example #30
                    t = Triangle(p1, p2, p3, n)
                else:
                    # The three points are in a line - or two points are
                    # identical. Usually this is caused by points that are too
                    # close together. Check the tolerance value in
                    # pycam/Geometry/PointKdtree.py.
                    log.warn("Skipping invalid triangle: %s / %s / %s " \
                            % (p1, p2, p3) + "(maybe the resolution of the " \
                            + "model is too high?)")
                    n, p1, p2, p3 = (None, None, None, None)
                    continue
                n, p1, p2, p3 = (None, None, None, None)
                model.append(t)
                continue
            m = endsolid.match(line)
            if m:
                continue

    log.info("Imported STL model: %d vertices, %d edges, %d triangles" \
            % (vertices, edges, len(model.triangles())))
    vertices = 0
    edges = 0
    kdtree = None

    if not model:
        # no valid items added to the model
        return None
    else:
        return model
    
Example #31
        except EOFError:
            __manager = None
            return "Failed to bind to socket for unknown reasons"
        # create the spawning process
        __closing = __manager.Value("b", False)
        if __num_of_processes > 0:
            # only start the spawner, if we want to use local workers
            spawner = __multiprocessing.Process(name="spawn",
                    target=_spawn_daemon, args=(__manager, __num_of_processes,
                    worker_uuid_list))
            spawner.start()
        else:
            spawner = None
        # wait forever - in case of a server
        if run_server:
            log.info("Running a local server and waiting for remote " + \
                    "connections.")
            # the server can be stopped via CTRL-C - it is caught later
            if spawner is not None:
                spawner.join()

def cleanup():
    global __multiprocessing, __manager, __closing
    if __multiprocessing and __closing:
        log.debug("Shutting down process handler")
        try:
            __closing.set(True)
        except (IOError, EOFError):
            log.debug("Connection to manager lost during cleanup")
        # Only managers that were started via ".start()" implement a "shutdown".
        # Managers started via ".connect" may skip this.
        if hasattr(__manager, "shutdown"):
Example #32
 def register_namespace(self, name, value):
     if name in self.namespace:
         log.info("Trying to register the same key in namespace twice: " + \
                 str(name))
     self.namespace[name] = value
Example #33
def init_threading(number_of_processes=None, enable_server=False, remote=None,
        run_server=False, server_credentials="", local_port=DEFAULT_PORT):
    global __multiprocessing, __num_of_processes, __manager, __closing, \
            __task_source_uuid
    if __multiprocessing:
        # kill the manager and clean everything up for a re-initialization
        cleanup()
    if (not is_server_mode_available()) and (enable_server or run_server):
        # server mode is disabled for the Windows pyinstaller standalone
        # due to "pickle errors". How to reproduce: run the standalone binary
        # with "--enable-server --server-auth-key foo".
        feature_matrix_text = "Take a look at the wiki for a matrix of " + \
                "platforms and available features: " + \
                "http://sf.net/apps/mediawiki/pycam/?title=" + \
                "Parallel_Processing_on_different_Platforms"
        if enable_server:
            log.warn("Unable to enable server mode with your current " + \
                    "setup.\n" + feature_matrix_text)
        elif run_server:
            log.warn("Unable to run in server-only mode with the Windows " + \
                    "standalone executable.\n" + feature_matrix_text)
        else:
            # no further warnings required
            pass
        enable_server = False
        run_server = False
    # only local -> no server settings allowed
    if (not enable_server) and (not run_server):
        remote = None
        run_server = None
        server_credentials = ""
    try:
        import multiprocessing
        mp_is_available = True
    except ImportError:
        mp_is_available = False
    if not mp_is_available:
        __multiprocessing = False
        # Maybe a multiprocessing feature was explicitly requested?
        # Issue some warnings if necessary.
        multiprocessing_missing_text = "Failed to enable server mode due to " \
                + "a lack of 'multiprocessing' capabilities. Please use " \
                + "Python2.6 or install the 'python-multiprocessing' package."
        if enable_server:
            log.warn("Failed to enable server mode due to a lack of " \
                    + "'multiprocessing' capabilities. " \
                    + multiprocessing_missing_text)
        elif run_server:
            log.warn("Failed to run in server-only mode due to a lack of " \
                    + "'multiprocessing' capabilities. " \
                    + multiprocessing_missing_text)
        else:
            # no further warnings required
            pass
    else:
        if number_of_processes is None:
            # use defaults
            # don't enable threading for a single cpu
            if (multiprocessing.cpu_count() > 1) or remote or run_server or \
                    enable_server:
                __multiprocessing = multiprocessing
                __num_of_processes = multiprocessing.cpu_count()
            else:
                __multiprocessing = False
        elif (number_of_processes < 1) and (remote is None) and \
                (enable_server is None):
            # Zero processes are allowed if we use a remote server or offer a
            # server.
            __multiprocessing = False
        else:
            __multiprocessing = multiprocessing
            __num_of_processes = number_of_processes
    # initialize the manager
    if not __multiprocessing:
        __manager = None
        log.info("Disabled parallel processing")
    elif not enable_server and not run_server:
        __manager = None
        log.info("Enabled %d parallel local processes" % __num_of_processes)
    else:
        # with multiprocessing
        log.info("Enabled %d parallel local processes" % __num_of_processes)
        log.info("Allow remote processing")
        # initialize the uuid list for all workers
        worker_uuid_list = [str(uuid.uuid1())
                for index in range(__num_of_processes)]
        __task_source_uuid = str(uuid.uuid1())
        if remote is None:
            # try to guess an appropriate interface for binding
            if pycam.Utils.get_platform() == pycam.Utils.PLATFORM_WINDOWS:
                # Windows does not support a wildcard interface listener
                all_ips = pycam.Utils.get_all_ips()
                if all_ips:
                    address = (all_ips[0], local_port)
                    log.info("Binding to local interface with IP %s" % \
                            str(all_ips[0]))
                else:
                    return "Failed to find any local IP"
            else:
                # empty hostname -> wildcard interface
                # (this does not work with Windows - see above)
                address = ('', local_port)
        else:
            if ":" in remote:
                host, port = remote.split(":", 1)
                try:
                    port = int(port)
                except ValueError:
                    log.warning(("Invalid port specified: '%s' - using " + \
                            "default port (%d) instead") % \
                            (port, DEFAULT_PORT))
                    port = DEFAULT_PORT
            else:
                host = remote
                port = DEFAULT_PORT
            address = (host, port)
        if remote is None:
            tasks_queue = multiprocessing.Queue()
            results_queue = multiprocessing.Queue()
            statistics = ProcessStatistics()
            cache = ProcessDataCache()
            pending_tasks = PendingTasks()
            info = ManagerInfo(tasks_queue, results_queue, statistics, cache,
                    pending_tasks)
            TaskManager.register("tasks", callable=info.get_tasks_queue)
            TaskManager.register("results", callable=info.get_results_queue)
            TaskManager.register("statistics", callable=info.get_statistics)
            TaskManager.register("cache", callable=info.get_cache)
            TaskManager.register("pending_tasks",
                    callable=info.get_pending_tasks)
        else:
            TaskManager.register("tasks")
            TaskManager.register("results")
            TaskManager.register("statistics")
            TaskManager.register("cache")
            TaskManager.register("pending_tasks")
        __manager = TaskManager(address=address, authkey=server_credentials)
        # run the local server, connect to a remote one or begin serving
        try:
            if remote is None:
                __manager.start()
                log.info("Started a local server.")
            else:
                __manager.connect()
                log.info("Connected to a remote task server.")
        except (multiprocessing.AuthenticationError, socket.error), err_msg:
            __manager = None
            return err_msg
        except EOFError:
            __manager = None
            return "Failed to bind to socket for unknown reasons"
Example #34
def init_threading(number_of_processes=None,
                   enable_server=False,
                   remote=None,
                   run_server=False,
                   server_credentials="",
                   local_port=DEFAULT_PORT):
    global __multiprocessing, __num_of_processes, __manager, __closing, \
            __task_source_uuid
    if __multiprocessing:
        # kill the manager and clean everything up for a re-initialization
        cleanup()
    if (not is_server_mode_available()) and (enable_server or run_server):
        # server mode is disabled for the Windows pyinstaller standalone
        # due to "pickle errors". How to reproduce: run the standalone binary
        # with "--enable-server --server-auth-key foo".
        feature_matrix_text = "Take a look at the wiki for a matrix of " + \
                "platforms and available features: " + \
                "http://sf.net/apps/mediawiki/pycam/?title=" + \
                "Parallel_Processing_on_different_Platforms"
        if enable_server:
            log.warn("Unable to enable server mode with your current " + \
                    "setup.\n" + feature_matrix_text)
        elif run_server:
            log.warn("Unable to run in server-only mode with the Windows " + \
                    "standalone executable.\n" + feature_matrix_text)
        else:
            # no further warnings required
            pass
        enable_server = False
        run_server = False
    # only local -> no server settings allowed
    if (not enable_server) and (not run_server):
        remote = None
        run_server = None
        server_credentials = ""
    try:
        import multiprocessing
        mp_is_available = True
    except ImportError:
        mp_is_available = False
    if not mp_is_available:
        __multiprocessing = False
        # Maybe a multiprocessing feature was explicitly requested?
        # Issue some warnings if necessary.
        multiprocessing_missing_text = "Failed to enable server mode due to " \
                + "a lack of 'multiprocessing' capabilities. Please use " \
                + "Python2.6 or install the 'python-multiprocessing' package."
        if enable_server:
            log.warn("Failed to enable server mode due to a lack of " \
                    + "'multiprocessing' capabilities. " \
                    + multiprocessing_missing_text)
        elif run_server:
            log.warn("Failed to run in server-only mode due to a lack of " \
                    + "'multiprocessing' capabilities. " \
                    + multiprocessing_missing_text)
        else:
            # no further warnings required
            pass
    else:
        if number_of_processes is None:
            # use defaults
            # don't enable threading for a single cpu
            if (multiprocessing.cpu_count() > 1) or remote or run_server or \
                    enable_server:
                __multiprocessing = multiprocessing
                __num_of_processes = multiprocessing.cpu_count()
            else:
                __multiprocessing = False
        elif (number_of_processes < 1) and (remote is None) and \
                (enable_server is None):
            # Zero processes are allowed if we use a remote server or offer a
            # server.
            __multiprocessing = False
        else:
            __multiprocessing = multiprocessing
            __num_of_processes = number_of_processes
    # initialize the manager
    if not __multiprocessing:
        __manager = None
        log.info("Disabled parallel processing")
    elif not enable_server and not run_server:
        __manager = None
        log.info("Enabled %d parallel local processes" % __num_of_processes)
    else:
        # with multiprocessing
        log.info("Enabled %d parallel local processes" % __num_of_processes)
        log.info("Allow remote processing")
        # initialize the uuid list for all workers
        worker_uuid_list = [
            str(uuid.uuid1()) for index in range(__num_of_processes)
        ]
        __task_source_uuid = str(uuid.uuid1())
        if remote is None:
            # try to guess an appropriate interface for binding
            if pycam.Utils.get_platform() == pycam.Utils.PLATFORM_WINDOWS:
                # Windows does not support a wildcard interface listener
                all_ips = pycam.Utils.get_all_ips()
                if all_ips:
                    address = (all_ips[0], local_port)
                    log.info("Binding to local interface with IP %s" % \
                            str(all_ips[0]))
                else:
                    return "Failed to find any local IP"
            else:
                # empty hostname -> wildcard interface
                # (this does not work with Windows - see above)
                address = ('', local_port)
        else:
            if ":" in remote:
                host, port = remote.split(":", 1)
                try:
                    port = int(port)
                except ValueError:
                    log.warning(("Invalid port specified: '%s' - using " + \
                            "default port (%d) instead") % \
                            (port, DEFAULT_PORT))
                    port = DEFAULT_PORT
            else:
                host = remote
                port = DEFAULT_PORT
            address = (host, port)
        if remote is None:
            tasks_queue = multiprocessing.Queue()
            results_queue = multiprocessing.Queue()
            statistics = ProcessStatistics()
            cache = ProcessDataCache()
            pending_tasks = PendingTasks()
            info = ManagerInfo(tasks_queue, results_queue, statistics, cache,
                               pending_tasks)
            TaskManager.register("tasks", callable=info.get_tasks_queue)
            TaskManager.register("results", callable=info.get_results_queue)
            TaskManager.register("statistics", callable=info.get_statistics)
            TaskManager.register("cache", callable=info.get_cache)
            TaskManager.register("pending_tasks",
                                 callable=info.get_pending_tasks)
        else:
            TaskManager.register("tasks")
            TaskManager.register("results")
            TaskManager.register("statistics")
            TaskManager.register("cache")
            TaskManager.register("pending_tasks")
        __manager = TaskManager(address=address, authkey=server_credentials)
        # run the local server, connect to a remote one or begin serving
        try:
            if remote is None:
                __manager.start()
                log.info("Started a local server.")
            else:
                __manager.connect()
                log.info("Connected to a remote task server.")
        except (multiprocessing.AuthenticationError, socket.error), err_msg:
            __manager = None
            return err_msg
        except EOFError:
            __manager = None
            return "Failed to bind to socket for unknown reasons"
Example #35
 def revise_directions(self, callback=None):
     """ Go through all open polygons and try to merge them regardless of
     their direction. Afterwards all closed polygons are analyzed regarding
     their inside/outside relationships.
     Beware: never use this function if the direction of lines may not
     change.
     """
     number_of_initial_closed_polygons = len([poly
             for poly in self.get_polygons() if poly.is_closed])
     open_polygons = [poly for poly in self.get_polygons()
             if not poly.is_closed]
     if callback:
         progress_callback = pycam.Utils.ProgressCounter(
                 2 * number_of_initial_closed_polygons + len(open_polygons),
                 callback).increment
     else:
         progress_callback = None
     # try to connect all open polygons
     for poly in open_polygons:
         self._line_groups.remove(poly)
     poly_open_before = len(open_polygons)
     for poly in open_polygons:
         for line in poly.get_lines():
             self.append(line, allow_reverse=True)
         if progress_callback and progress_callback():
             return
     poly_open_after = len([poly for poly in self.get_polygons()
             if not poly.is_closed])
     if poly_open_before != poly_open_after:
         log.info("Reduced the number of open polygons from " + \
                 "%d down to %d" % (poly_open_before, poly_open_after))
     else:
         log.debug("No combineable open polygons found")
     # auto-detect directions of closed polygons: inside and outside
     finished = []
     remaining_polys = [poly for poly in self.get_polygons()
             if poly.is_closed]
     if progress_callback:
         # shift the counter back by the number of new closed polygons
         progress_callback(2 * (number_of_initial_closed_polygons - \
                 len(remaining_polys)))
     remaining_polys.sort(key=lambda poly: abs(poly.get_area()))
     while remaining_polys:
         # pick the largest polygon
         current = remaining_polys.pop()
         # start with the smallest finished polygon
         for comp, is_outer in finished:
             if comp.is_polygon_inside(current):
                 finished.insert(0, (current, not is_outer))
                 break
         else:
             # no enclosing polygon was found
             finished.insert(0, (current, True))
         if progress_callback and progress_callback():
             return
     # Adjust the directions of all polygons according to the result
     # of the previous analysis.
     change_counter = 0
     for polygon, is_outer in finished:
         if polygon.is_outer() != is_outer:
             polygon.reverse_direction()
             change_counter += 1
         if progress_callback and progress_callback():
             self.reset_cache()
             return
     log.info("The winding of %d polygon(s) was fixed." % change_counter)
     self.reset_cache()
Example #36
def import_model(filename,
                 color_as_height=False,
                 fonts_cache=None,
                 callback=None,
                 **kwargs):
    if hasattr(filename, "read"):
        infile = filename
    else:
        try:
            infile = pycam.Utils.URIHandler(filename).open()
        except IOError as exc:
            raise LoadFileError(
                "DXFImporter: Failed to read file ({}): {}".format(
                    filename, exc))

    result = DXFParser(infile,
                       color_as_height=color_as_height,
                       fonts_cache=fonts_cache,
                       callback=callback)

    model_data = result.get_model()
    lines = model_data["lines"]
    triangles = model_data["triangles"]

    if callback and callback():
        raise AbortOperationException(
            "DXFImporter: load model operation was cancelled")

    # 3D models are preferred over 2D models
    if triangles:
        if lines:
            log.warn("DXFImporter: Ignoring 2D elements in DXF file: %d lines",
                     len(lines))
        model = pycam.Geometry.Model.Model()
        for index, triangle in enumerate(triangles):
            model.append(triangle)
            # keep the GUI smooth
            if callback and (index % 50 == 0):
                callback()
        log.info("DXFImporter: Imported DXF model (3D): %d triangles",
                 len(model.triangles()))
        return model
    elif lines:
        model = pycam.Geometry.Model.ContourModel()
        for index, line in enumerate(lines):
            model.append(line)
            # keep the GUI smooth
            if callback and (index % 50 == 0):
                callback()
        # z scaling is always targeted at the 0..1 range
        if color_as_height and (model.minz != model.maxz):
            # scale z to 1
            scale_z = 1.0 / (model.maxz - model.minz)
            if callback:
                callback(text="Scaling height for multi-layered 2D model")
            log.info("DXFImporter: scaling height for multi-layered 2D model")
            model.scale(scale_x=1.0,
                        scale_y=1.0,
                        scale_z=scale_z,
                        callback=callback)
        # shift the model down to z=0
        if model.minz != 0:
            if callback:
                callback(text="Shifting 2D model down to to z=0")
            model.shift(0, 0, -model.minz, callback=callback)
        log.info(
            "DXFImporter: Imported DXF model (2D): %d lines / %d polygons",
            len(lines), len(model.get_polygons()))
        return model
    else:
        link = "http://pycam.sourceforge.net/supported-formats"
        raise LoadFileError(
            'DXFImporter: No supported elements found in DXF file!\n'
            '<a href="%s">Read PyCAM\'s modeling hints.</a>'.format(link))
Example #37
0
    if callback and callback():
        log.warn("DXFImporter: load model operation was cancelled")
        return None

    # 3D models are preferred over 2D models
    if triangles:
        if lines:
            log.warn("DXFImporter: Ignoring 2D elements in DXF file: " + \
                    "%d lines" % len(lines))
        model = pycam.Geometry.Model.Model()
        for index, triangle in enumerate(triangles):
            model.append(triangle)
            # keep the GUI smooth
            if callback and (index % 50 == 0):
                callback()
        log.info("DXFImporter: Imported DXF model (3D): %d triangles" % \
                len(model.triangles()))
        return model
    elif lines:
        model = pycam.Geometry.Model.ContourModel()
        for index, line in enumerate(lines):
            model.append(line)
            # keep the GUI smooth
            if callback and (index % 50 == 0):
                callback()
        # z scaling is always targeted at the 0..1 range
        if color_as_height and (model.minz != model.maxz):
            # scale z to 1
            scale_z = 1.0 / (model.maxz - model.minz)
            if callback:
                callback(text="Scaling height for multi-layered 2D model")
            log.info("DXFImporter: scaling height for multi-layered 2D model")
Example #38
0
     offset = (0, 0, 0)
 elif opts.boundary_mode == "inside":
     offset = (-0.5 * opts.tool_diameter, -0.5 * opts.tool_diameter, 0)
 else:
     # "outside"
     offset = (0.5 * opts.tool_diameter, 0.5 * opts.tool_diameter, 0)
 process_bounds = Bounds(Bounds.TYPE_FIXED_MARGIN, offset, offset)
 process_bounds.set_reference(bounds)
 tps.set_bounds(process_bounds)
 if opts.export_gcode:
     # generate the toolpath
     start_time = time.time()
     toolpath = pycam.Toolpath.Generator.generate_toolpath_from_settings(
             model, tps, callback=progress_bar.update)
     progress_bar.finish()
     log.info("Toolpath generation time: %f" \
             % (time.time() - start_time))
     # write result
     if isinstance(toolpath, basestring):
         # an error occurred
         log.error(toolpath)
     else:
         description = "Toolpath generated via PyCAM v%s" % VERSION
         tp_obj = Toolpath(toolpath, description, tps)
         handler, closer = get_output_handler(opts.export_gcode)
         if handler is None:
             return EXIT_CODES["write_output_failed"]
         generator = pycam.Exporters.GCodeExporter.GCodeGenerator(
                 handler, metric_units=(opts.unit_size == "mm"),
                 safety_height=opts.safety_height,
                 toggle_spindle_status=opts.gcode_no_start_stop_spindle,
                 minimum_steps=[opts.gcode_minimum_step])
Example #39
0
 def unregister_namespace(self, name):
     if name not in self.namespace:
         log.info("Tried to unregister an unknown name from namespace: " + \
                   str(name))
Example #40
0
 def register_namespace(self, name, value):
     if name in self.namespace:
         log.info("Trying to register the same key in namespace twice: %s",
                  name)
     self.namespace[name] = value
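
Taken together, Examples #39 and #40 implement a guarded dictionary: double registration and unknown unregistration are only logged, never raised. Below is a self-contained toy version of the same pattern (not PyCAM code; the deletion branch is an assumption, since Example #39 is cut off after the log call).

import logging

log = logging.getLogger(__name__)

class ToyNamespace:
    """Toy registry mirroring the register/unregister semantics above."""

    def __init__(self):
        self.namespace = {}

    def register_namespace(self, name, value):
        if name in self.namespace:
            log.info("Trying to register the same key in namespace twice: %s", name)
        self.namespace[name] = value

    def unregister_namespace(self, name):
        if name not in self.namespace:
            log.info("Tried to unregister an unknown name from namespace: %s", name)
        else:
            # assumed behaviour: remove the entry if it exists
            del self.namespace[name]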
Example #41
0
def main_func():
    # The PyInstaller standalone executable requires this "freeze_support" call. Otherwise we will
    # see a warning regarding an invalid argument called "--multiprocessing-fork". This problem can
    # be triggered on single-core systems with these arguments:
    #    "--enable-server --server-auth-key foo".
    if hasattr(multiprocessing, "freeze_support"):
        multiprocessing.freeze_support()
    parser = OptionParser(
        prog="PyCAM",
        usage=(
            "usage: pycam [options]\n\n"
            "Start the PyCAM toolpath generator. Supplying one of the "
            "'--export-?' parameters will cause PyCAM to start in batch mode. "
            "Most parameters are useful only for batch mode."),
        epilog="PyCAM website: https://github.com/SebKuzminsky/pycam")
    group_general = parser.add_option_group("General options")
    # general options
    group_general.add_option(
        "",
        "--unit",
        dest="unit_size",
        default="mm",
        action="store",
        type="choice",
        choices=["mm", "inch"],
        help=
        "choose 'mm' or 'inch' for all numbers. By default 'mm' is assumed.")
    group_general.add_option(
        "",
        "--collision-engine",
        dest="collision_engine",
        default="triangles",
        action="store",
        type="choice",
        choices=["triangles"],
        help=
        ("choose a specific collision detection engine. The default is 'triangles'. "
         "Use 'help' to get a list of possible engines."))
    group_general.add_option(
        "",
        "--number-of-processes",
        dest="parallel_processes",
        default=None,
        type="int",
        action="store",
        help=
        ("override the default detection of multiple CPU cores. Parallel processing only "
         "works with Python 2.6 (or later) or with the additional 'multiprocessing' module."
         ))
    group_general.add_option(
        "",
        "--enable-server",
        dest="enable_server",
        default=False,
        action="store_true",
        help="enable a local server and (optionally) remote worker servers.")
    group_general.add_option(
        "",
        "--remote-server",
        dest="remote_server",
        default=None,
        action="store",
        type="string",
        help=
        ("Connect to a remote task server to distribute the processing load. "
         "The server is given as an IP or a hostname with an optional port (default: 1250) "
         "separated by a colon."))
    group_general.add_option(
        "",
        "--start-server-only",
        dest="start_server",
        default=False,
        action="store_true",
        help="Start only a local server for handling remote requests.")
    group_general.add_option(
        "",
        "--server-auth-key",
        dest="server_authkey",
        default="",
        action="store",
        type="string",
        help=
        ("Secret used for connecting to a remote server or for granting access to remote "
         "clients."))
    group_general.add_option("-q",
                             "--quiet",
                             dest="quiet",
                             default=False,
                             action="store_true",
                             help="output only warnings and errors.")
    group_general.add_option("-d",
                             "--debug",
                             dest="debug",
                             default=False,
                             action="store_true",
                             help="enable output of debug messages.")
    group_general.add_option("",
                             "--trace",
                             dest="trace",
                             default=False,
                             action="store_true",
                             help="enable more verbose debug messages.")
    group_general.add_option(
        "",
        "--progress",
        dest="progress",
        default="text",
        action="store",
        type="choice",
        choices=["none", "text", "bar", "dot"],
        help=
        ("specify the type of progress bar used in non-GUI mode. The following options are "
         "available: text, none, bar, dot."))
    group_general.add_option(
        "",
        "--profiling",
        dest="profile_destination",
        action="store",
        type="string",
        help="store profiling statistics in a file (only for debugging)")
    group_general.add_option(
        "-v",
        "--version",
        dest="show_version",
        default=False,
        action="store_true",
        help="output the current version of PyCAM and exit")
    (opts, args) = parser.parse_args()
    try:
        if opts.profile_destination:
            import cProfile
            exit_code = cProfile.run('execute(parser, opts, args, pycam)',
                                     opts.profile_destination)
        else:
            # We need to add the parameter "pycam" to avoid weeeeird namespace
            # issues. Any idea how to fix this?
            exit_code = execute(parser, opts, args, pycam)
    except KeyboardInterrupt:
        log.info("Quit requested")
        exit_code = None
    pycam.Utils.threading.cleanup()
    if exit_code is not None:
        sys.exit(exit_code)
    else:
        sys.exit(EXIT_CODES["ok"])
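
When --profiling is given, the branch above writes raw cProfile statistics to the chosen file. A short sketch for inspecting such a file afterwards with the standard-library pstats module (the file name pycam.prof is only an example):

import pstats

# Load the dump written by cProfile.run(...) and show the 20 functions
# with the largest cumulative runtime.
stats = pstats.Stats("pycam.prof")
stats.sort_stats("cumulative").print_stats(20)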
Example #42
0
 def stop(self):
     if self._is_running:
         log.debug("Stopping main loop")
         self._gtk.main_quit()
     else:
         log.info("Main loop was stopped before")
Example #43
0
    def adjust_bounds_to_absolute_limits(self,
                                         limits_low,
                                         limits_high,
                                         reference=None):
        """ change the current bounds settings according to some absolute values

        This does not change the type of this bounds instance (e.g. relative).
        @value limits_low: a tuple describing the new lower absolute boundary
        @type limits_low: (tuple|list) of float
        @value limits_high: a tuple describing the new upper absolute boundary
        @type limits_high: (tuple|list) of float
        @value reference: a reference object described by a tuple (or list) of
            three items. These three values describe only the lower boundary of
            this object (for the x, y and z axes). Each item must be a float
            value. This argument is ignored for the boundary type "TYPE_CUSTOM".
        @type reference: (tuple|list) of float
        """
        # use the default reference if none was given
        if reference is None:
            reference = self.reference
        # check if a reference is given (if necessary)
        if self.bounds_type \
                in (Bounds.TYPE_RELATIVE_MARGIN, Bounds.TYPE_FIXED_MARGIN):
            if reference is None:
                raise ValueError, "any non-custom boundary definition " \
                        + "requires an a reference object for caluclating " \
                        + "absolute limits"
            else:
                ref_low, ref_high = reference.get_absolute_limits()
        # calculate the new settings
        if self.bounds_type == Bounds.TYPE_RELATIVE_MARGIN:
            for index in range(3):
                dim_width = ref_high[index] - ref_low[index]
                if dim_width == 0:
                    # We always lose relative margins if the specific dimension
                    # is zero. There is no way to avoid this.
                    message = "Non-zero %s boundary lost during conversion " \
                            + "to relative margins due to zero size " \
                            + "dimension '%s'." % "xyz"[index]
                    # Display warning messages, if we can't reach the requested
                    # absolute dimension.
                    if ref_low[index] != limits_low[index]:
                        log.info(message % "lower")
                    if ref_high[index] != limits_high[index]:
                        log.info(message % "upper")
                    self.bounds_low[index] = 0
                    self.bounds_high[index] = 0
                else:
                    self.bounds_low[index] = \
                            (ref_low[index] - limits_low[index]) / dim_width
                    self.bounds_high[index] = \
                            (limits_high[index] - ref_high[index]) / dim_width
        elif self.bounds_type == Bounds.TYPE_FIXED_MARGIN:
            for index in range(3):
                self.bounds_low[index] = ref_low[index] - limits_low[index]
                self.bounds_high[index] = limits_high[index] - ref_high[index]
        elif self.bounds_type == Bounds.TYPE_CUSTOM:
            for index in range(3):
                self.bounds_low[index] = limits_low[index]
                self.bounds_high[index] = limits_high[index]
        else:
            # this should not happen
            raise NotImplementedError, "the function " \
                    + "'adjust_bounds_to_absolute_limits' is currently not " \
                    + "implemented for the bounds_type '%s'" \
                    % str(self.bounds_type)
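
A worked example of the TYPE_RELATIVE_MARGIN arithmetic above, using plain numbers instead of PyCAM objects: for a reference x-range of 0..10 and requested absolute limits of -1..12, the stored margins become 0.1 (10% below the reference) and 0.2 (20% above it).

ref_low, ref_high = 0.0, 10.0        # reference extent along one axis
limit_low, limit_high = -1.0, 12.0   # requested absolute limits
dim_width = ref_high - ref_low                       # 10.0
margin_low = (ref_low - limit_low) / dim_width       # (0 - -1) / 10 = 0.1
margin_high = (limit_high - ref_high) / dim_width    # (12 - 10) / 10 = 0.2
print(margin_low, margin_high)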
Example #44
0
File: Model.py  Project: zancas/pycam
 def revise_directions(self, callback=None):
     """ Go through all open polygons and try to merge them regardless of
     their direction. Afterwards all closed polygons are analyzed regarding
     their inside/outside relationships.
     Beware: never use this function if the direction of lines may not
     change.
     """
     number_of_initial_closed_polygons = len(
         [poly for poly in self.get_polygons() if poly.is_closed])
     open_polygons = [
         poly for poly in self.get_polygons() if not poly.is_closed
     ]
     if callback:
         progress_callback = pycam.Utils.ProgressCounter(
             2 * number_of_initial_closed_polygons + len(open_polygons),
             callback).increment
     else:
         progress_callback = None
     # try to connect all open polygons
     for poly in open_polygons:
         self._line_groups.remove(poly)
     poly_open_before = len(open_polygons)
     for poly in open_polygons:
         for line in poly.get_lines():
             self.append(line, allow_reverse=True)
         if progress_callback and progress_callback():
             return
     poly_open_after = len(
         [poly for poly in self.get_polygons() if not poly.is_closed])
     if poly_open_before != poly_open_after:
         log.info("Reduced the number of open polygons from %d down to %d",
                  poly_open_before, poly_open_after)
     else:
         log.debug("No combineable open polygons found")
     # auto-detect directions of closed polygons: inside and outside
     finished = []
     remaining_polys = [
         poly for poly in self.get_polygons() if poly.is_closed
     ]
     if progress_callback:
         # shift the counter back by the number of new closed polygons
         progress_callback(
             2 * (number_of_initial_closed_polygons - len(remaining_polys)))
     remaining_polys.sort(key=lambda poly: abs(poly.get_area()))
     while remaining_polys:
         # pick the largest polygon
         current = remaining_polys.pop()
         # start with the smallest finished polygon
         for comp, is_outer in finished:
             if comp.is_polygon_inside(current):
                 finished.insert(0, (current, not is_outer))
                 break
         else:
             # no enclosing polygon was found
             finished.insert(0, (current, True))
         if progress_callback and progress_callback():
             return
     # Adjust the directions of all polygons according to the result
     # of the previous analysis.
     change_counter = 0
     for polygon, is_outer in finished:
         if polygon.is_outer() != is_outer:
             polygon.reverse_direction()
             change_counter += 1
         if progress_callback and progress_callback():
             self.reset_cache()
             return
     log.info("The winding of %d polygon(s) was fixed.", change_counter)
     self.reset_cache()
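
The classification loop in revise_directions relies on processing closed polygons from largest to smallest area, so the first already classified polygon that contains the current one determines its winding (a child of an outer polygon is inner, and vice versa). Here is a self-contained toy of the same loop, using axis-aligned rectangles and a bounding-box containment test in place of PyCAM's is_polygon_inside.

def box_contains(outer, inner):
    # Boxes are (xmin, ymin, xmax, ymax); toy stand-in for is_polygon_inside.
    return (outer[0] <= inner[0] and outer[1] <= inner[1]
            and inner[2] <= outer[2] and inner[3] <= outer[3])

def box_area(box):
    return (box[2] - box[0]) * (box[3] - box[1])

boxes = [(0, 0, 10, 10), (2, 2, 8, 8), (3, 3, 4, 4), (20, 20, 25, 25)]
remaining = sorted(boxes, key=box_area)
finished = []  # (box, is_outer), smallest classified box first
while remaining:
    current = remaining.pop()  # largest remaining box
    for comp, is_outer in finished:
        if box_contains(comp, current):
            finished.insert(0, (current, not is_outer))
            break
    else:
        # no enclosing box found -> treat it as an outer contour
        finished.insert(0, (current, True))
for box, is_outer in finished:
    print(box, "outer" if is_outer else "inner")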
Example #45
0
File: Settings.py  Project: zancas/pycam
 def __getitem__(self, key):
     try:
         return self.__getitem_orig(key)[self.GET_INDEX]()
     except TypeError as err_msg:
         log.info("Failed to retrieve setting '%s': %s", key, err_msg)
         return None
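
Example #45 indexes the stored entry with GET_INDEX and calls the result, which suggests each setting is stored as a pair of accessor callables; a TypeError (for instance a non-callable getter) is downgraded to a log message and None. A self-contained toy of that pattern follows (not PyCAM's Settings class; add_item, the tuple layout and the GET_INDEX value are assumptions for illustration).

import logging

log = logging.getLogger(__name__)

class ToySettings(dict):
    GET_INDEX = 0
    SET_INDEX = 1

    def add_item(self, key, getter, setter=None):
        # store a (getter, setter) pair; __getitem__ calls the getter
        dict.__setitem__(self, key, (getter, setter))

    def __getitem__(self, key):
        try:
            return dict.__getitem__(self, key)[self.GET_INDEX]()
        except TypeError as err_msg:
            log.info("Failed to retrieve setting '%s': %s", key, err_msg)
            return None

settings = ToySettings()
settings.add_item("unit", lambda: "mm")
print(settings["unit"])            # -> "mm"
settings.add_item("broken", None)  # non-callable getter
print(settings["broken"])          # -> None (TypeError swallowed and logged)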
Example #46
0
def execute(parser, opts, args, pycam):
    # try to change the process name
    pycam.Utils.setproctitle("pycam")

    if len(args) > 0:
        inputfile = pycam.Utils.URIHandler(args[0])
    else:
        inputfile = None

    if opts.debug:
        log.setLevel(logging.DEBUG)
    elif opts.quiet:
        log.setLevel(logging.WARNING)
        # disable the progress bar
        opts.progress = "none"
        # silence all warnings
        warnings.filterwarnings("ignore")
    else:
        # silence gtk warnings
        try:
            import gtk
            warnings.filterwarnings("ignore", category=gtk.Warning)
        except ImportError:
            pass

    # show version and exit
    if opts.show_version:
        if opts.quiet:
            # print only the bare version number
            print VERSION
        else:
            text = '''PyCAM %s
Copyright (C) 2008-2010 Lode Leroy
Copyright (C) 2010-2011 Lars Kruse

License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>.
This is free software: you are free to change and redistribute it.
There is NO WARRANTY, to the extent permitted by law.''' % VERSION
            print text
        return EXIT_CODES["ok"]

    if not opts.disable_psyco:
        try:
            import psyco
            psyco.full()
            log.info("Psyco enabled")
        except ImportError:
            log.info("Psyco is not available (performance will probably " \
                    + "suffer slightly)")
    else:
        log.info("Psyco was disabled via the commandline")

    # check if server-auth-key is given -> this is mandatory for server mode
    if (opts.enable_server or opts.start_server) and not opts.server_authkey:
        parser.error("You need to supply a shared secret for server mode. " \
                + "This is supposed to prevent you from exposing your host " \
                + "to remote access without authentication.\n" \
                + "Please add the '--server-auth-key' argument followed by " \
                + "a shared secret password.")
        return EXIT_CODES["server_without_password"]

    # initialize multiprocessing
    try:
        if opts.start_server:
            pycam.Utils.threading.init_threading(
                opts.parallel_processes,
                remote=opts.remote_server,
                run_server=True,
                server_credentials=opts.server_authkey)
            pycam.Utils.threading.cleanup()
            return EXIT_CODES["ok"]
        else:
            pycam.Utils.threading.init_threading(
                opts.parallel_processes,
                enable_server=opts.enable_server,
                remote=opts.remote_server,
                server_credentials=opts.server_authkey)
    except socket.error, err_msg:
        log.error("Failed to connect to remote server: %s" % err_msg)
        return EXIT_CODES["connection_error"]
Example #47
0
            return "Failed to bind to socket for unknown reasons"
        # create the spawning process
        __closing = __manager.Value("b", False)
        if __num_of_processes > 0:
            # only start the spawner, if we want to use local workers
            spawner = __multiprocessing.Process(name="spawn",
                                                target=_spawn_daemon,
                                                args=(__manager,
                                                      __num_of_processes,
                                                      worker_uuid_list))
            spawner.start()
        else:
            spawner = None
        # wait forever - in case of a server
        if run_server:
            log.info("Running a local server and waiting for remote " + \
                    "connections.")
            # the server can be stopped via CTRL-C - it is caught later
            if spawner is not None:
                spawner.join()


def cleanup():
    global __multiprocessing, __manager, __closing
    if __multiprocessing and __closing:
        log.debug("Shutting down process handler")
        try:
            __closing.set(True)
        except (IOError, EOFError):
            log.debug("Connection to manager lost during cleanup")
        # Only managers that were started via ".start()" implement a "shutdown".
        # Managers started via ".connect" may skip this.