Example #1
0
 def report_log_result(self):
     """Prints fails if their was or that they did not happen"""
     if self.has_fails:
         warning(f'During import next fails has happened:')
         print(self.fail_message)
     else:
         info(f'Import done with no fails')
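
# Context sketch (an assumption, not part of the original importer): the two
# attributes this method relies on, ``has_fails`` and ``fail_message``, could
# be maintained by the owning importer roughly like this.
class FailLogSketch:
    def __init__(self):
        self._fails = []

    def add_fail(self, node_name, error):
        # remember one failure per node so report_log_result can print them all
        self._fails.append(f'{node_name}: {error}')

    @property
    def has_fails(self):
        return bool(self._fails)

    @property
    def fail_message(self):
        return '\n'.join(self._fails)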
Example #2
0
def make_tree_from_nodes(node_names, tree, down=True):
    """
    Create a partial update list from a sub-tree, node_names is a list of nodes that
    drives change for the tree
    """
    ng = tree
    nodes = ng.nodes
    if not node_names:
        warning("No nodes!")
        return make_update_list(ng)

    out_set = set(node_names)

    out_stack = collections.deque(node_names)
    current_node = out_stack.pop()

    # build downwards links, this should be cached perhaps
    node_links = make_dep_dict(ng, down)
    while current_node:
        for node in node_links[current_node]:
            if node not in out_set:
                out_set.add(node)
                out_stack.append(node)
        if out_stack:
            current_node = out_stack.pop()
        else:
            current_node = ''

    if len(out_set) == 1:
        return list(out_set)
    else:
        return make_update_list(ng, out_set)
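
# Usage sketch (hypothetical node names, not part of the original module):
# rebuild the partial update list for a single changed node and let every
# affected node recompute in dependency order.
def process_changed_node(tree, changed_node_name):
    for name in make_tree_from_nodes([changed_node_name], tree, down=True):
        tree.nodes[name].process()  # Sverchok nodes recompute in process()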
Example #4
0
 @classmethod
 def init_from_path(cls, path: str) -> JSONImporter:
     """Decode JSON from the given path and initialize the importer"""
     if path.endswith('.zip'):
         structure = get_file_obj_from_zip(path)
         return cls(structure)
     elif path.endswith('.json'):
         with open(path) as fp:
             structure = json.load(fp)
             return cls(structure)
     else:
         warning(f'File should have .zip or .json extension, got ".{path.rsplit(".")[-1]}" instead')
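
# Hedged sketch of the helper referenced above (an assumption about its
# behaviour, not the original get_file_obj_from_zip): return the decoded
# structure of the first .json member found inside the archive.
import json
import zipfile

def get_file_obj_from_zip_sketch(path):
    with zipfile.ZipFile(path) as archive:
        for member in archive.namelist():
            if member.endswith('.json'):
                with archive.open(member) as fp:
                    return json.loads(fp.read().decode('utf-8'))
    raise FileNotFoundError(f'no .json file found inside {path}')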
Example #5
0
    def execute(self, context):
        ng = bpy.data.node_groups[self.id_tree]

        is_tree_exportable, msg = self.can_be_exported(ng)
        if not is_tree_exportable:
            self.report({'ERROR'}, msg)
            return {'CANCELLED'}

        destination_path = self.filepath
        if not destination_path.lower().endswith('.json'):
            destination_path += '.json'

        # future: should check if filepath is a folder or ends in \

        if self.selected_only:
            layout_dict = JSONExporter.get_nodes_structure(
                [node for node in ng.nodes if node.select])
        else:
            layout_dict = JSONExporter.get_tree_structure(ng)

        if not layout_dict:
            msg = 'no update list found - didn\'t export'
            self.report({"WARNING"}, msg)
            warning(msg)
            return {'CANCELLED'}

        with open(destination_path, 'w') as fp:
            json.dump(layout_dict, fp, sort_keys=True, indent=2)
        msg = 'exported to: ' + destination_path
        self.report({"INFO"}, msg)
        info(msg)

        if self.compress:
            comp_mode = zipfile.ZIP_DEFLATED

            # destination path = /a../b../c../somename.json
            base = basename(destination_path)  # somename.json
            basedir = dirname(destination_path)  # /a../b../c../

            # somename.zip
            final_archivename = base.replace('.json', '') + '.zip'

            # /a../b../c../somename.zip
            fullpath = os.path.join(basedir, final_archivename)

            with zipfile.ZipFile(fullpath, 'w',
                                 compression=comp_mode) as myzip:
                myzip.write(destination_path, arcname=base)
                info('wrote: %s', final_archivename)

        return {'FINISHED'}
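
# Distilled sketch of the export-then-compress pattern used above, as a
# hypothetical standalone helper (not part of the operator):
import json
import os
import zipfile

def export_layout_sketch(layout_dict, destination_path, compress=False):
    if not destination_path.lower().endswith('.json'):
        destination_path += '.json'
    with open(destination_path, 'w') as fp:
        json.dump(layout_dict, fp, sort_keys=True, indent=2)
    if compress:
        # write somename.zip next to somename.json, storing only the base name inside
        zip_path = destination_path[:-len('.json')] + '.zip'
        with zipfile.ZipFile(zip_path, 'w', compression=zipfile.ZIP_DEFLATED) as myzip:
            myzip.write(destination_path, arcname=os.path.basename(destination_path))
    return destination_path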
Example #6
0
def SvForgetSocket(socket):
    """deletes socket data from cache"""
    global socket_data_cache
    if data_structure.DEBUG_MODE:
        if not socket.is_output:
            warning(
                f"{socket.node.name} forgetting input socket: {socket.name}")
    s_id = socket.socket_id
    s_ng = socket.id_data.tree_id
    try:
        socket_data_cache[s_ng].pop(s_id, None)
    except KeyError:
        debug("it was never there")
Example #7
0
def SvSetSocket(socket, out):
    """sets socket data for socket"""
    global socket_data_cache
    if data_structure.DEBUG_MODE:
        if not socket.is_output:
            warning("{} setting input socket: {}".format(socket.node.name, socket.name))
        if not socket.is_linked:
            warning("{} setting unconncted socket: {}".format(socket.node.name, socket.name))
    s_id = socket.socket_id
    s_ng = socket.id_data.name
    if s_ng not in socket_data_cache:
        socket_data_cache[s_ng] = {}
    socket_data_cache[s_ng][s_id] = out
Example #8
0
def SvSetSocket(socket, out):
    """sets socket data for socket"""
    global socket_data_cache
    if data_structure.DEBUG_MODE:
        if not socket.is_output:
            warning(f"{socket.node.name} setting input socket: {socket.name}")
        if not socket.is_linked:
            warning(
                f"{socket.node.name} setting unconnected socket: {socket.name}")
    s_id = socket.socket_id
    s_ng = socket.id_data.tree_id
    if s_ng not in socket_data_cache:
        socket_data_cache[s_ng] = {}
    socket_data_cache[s_ng][s_id] = out
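
# Hedged sketch of the matching read path (an assumption that mirrors
# SvSetSocket above; not the module's actual getter):
def sv_get_socket_sketch(socket, default=None):
    s_id = socket.socket_id
    s_ng = socket.id_data.tree_id
    try:
        return socket_data_cache[s_ng][s_id]
    except KeyError:
        return default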
Example #9
0
def make_dep_dict(node_tree, down=False):
    """
    Create a dependency dictionary for node group.
    """
    ng = node_tree

    deps = collections.defaultdict(set)

    # create wifi out dependencies, process if needed

    wifi_out_nodes = [(name, node.var_name) for name, node in ng.nodes.items()
                      if node.bl_idname == 'WifiOutNode' and node.outputs]
    if wifi_out_nodes:
        wifi_dict = {
            node.var_name: name
            for name, node in ng.nodes.items()
            if node.bl_idname == 'WifiInNode'
        }

    for i, link in enumerate(list(ng.links)):
        #  this protects against a rare occurrence where
        #  a link is considered valid without a to_socket
        #  or a from_socket. protects against a blender crash
        #  see https://github.com/nortikin/sverchok/issues/493

        if not (link.to_socket and link.from_socket):
            ng.links.remove(link)
            raise ValueError("Invalid link found, please report this file")
        # it seems to work even with invalid links, maybe because sverchok update is independent from blender update
        # if not link.is_valid:
        # return collections.defaultdict(set)  # this happens more often than one might think
        if link.is_hidden:
            continue
        key, value = (link.from_node.name,
                      link.to_node.name) if down else (link.to_node.name,
                                                       link.from_node.name)
        deps[key].add(value)

    for name, var_name in wifi_out_nodes:
        other = wifi_dict.get(var_name)
        if not other:
            warning("Unsatisifed Wifi dependency: node, %s var,%s", name,
                    var_name)
            return collections.defaultdict(set)
        if down:
            deps[other].add(name)
        else:
            deps[name].add(other)

    return deps
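
# Hedged sketch of how the dependency dict could be consumed with down=True
# (an assumption, not the original make_update_list): order node names so that
# every node comes after the nodes it depends on, using Kahn's algorithm.
import collections

def topo_order_sketch(deps):
    indegree = collections.defaultdict(int)
    for node, children in deps.items():
        indegree.setdefault(node, 0)
        for child in children:
            indegree[child] += 1
    queue = collections.deque(n for n, d in indegree.items() if d == 0)
    order = []
    while queue:
        node = queue.popleft()
        order.append(node)
        for child in deps.get(node, ()):
            indegree[child] -= 1
            if indegree[child] == 0:
                queue.append(child)
    return order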
Example #10
0
    def execute(self, context):
        if len(context.space_data.path) > 1:
            self.report({"WARNING"},
                        "Export is not supported inside node groups")
            return {'CANCELLED'}

        ng = bpy.data.node_groups[self.id_tree]

        destination_path = self.filepath
        if not destination_path.lower().endswith('.json'):
            destination_path += '.json'

        # future: should check if filepath is a folder or ends in \

        layout_dict = JSONExporter.get_tree_structure(ng, self.selected_only)

        if not layout_dict:
            msg = 'no update list found - didn\'t export'
            self.report({"WARNING"}, msg)
            warning(msg)
            return {'CANCELLED'}

        indent = None if self.compact else 2
        with open(destination_path, 'w') as fp:
            json.dump(layout_dict, fp, indent=indent)  # json_struct doesn't expect sort_keys=True
        msg = 'exported to: ' + destination_path
        self.report({"INFO"}, msg)
        info(msg)

        if self.compress:
            comp_mode = zipfile.ZIP_DEFLATED

            # destination path = /a../b../c../somename.json
            base = basename(destination_path)  # somename.json
            basedir = dirname(destination_path)  # /a../b../c../

            # somename.zip
            final_archivename = base.replace('.json', '') + '.zip'

            # /a../b../c../somename.zip
            fullpath = os.path.join(basedir, final_archivename)

            with zipfile.ZipFile(fullpath, 'w',
                                 compression=comp_mode) as myzip:
                myzip.write(destination_path, arcname=base)
                info('wrote: %s', final_archivename)

        return {'FINISHED'}
Example #11
0
 def update_objects_number(self):
     """
     Should be called each time after process method of the socket owner
     It will update number of objects to show in socket labels
     """
     try:
         if self.is_output:
             objects_info = SvGetSocketInfo(self)
             self.objects_number = int(objects_info) if objects_info else 0
         else:
             data = self.sv_get(deepcopy=False, default=[])
             self.objects_number = len(data) if data else 0
     except Exception as e:
         warning(
             f"Socket='{self.name}' of node='{self.node.name}' can't update number of objects on the label. "
             f"Cause is '{e}'")
         self.objects_number = 0
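
# Usage sketch (hypothetical draw code, not the original): show the cached
# count next to the socket name in the node editor label.
def draw_socket_label_sketch(socket):
    if socket.objects_number:
        return f'{socket.name}. {socket.objects_number}'
    return socket.name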
Example #12
0
def make_dep_dict(node_tree, down=False):
    """
    Create a dependency dictionary for node group.
    """
    ng = node_tree

    deps = collections.defaultdict(set)

    # create wifi out dependencies, process if needed

    wifi_out_nodes = [(name, node.var_name)
                      for name, node in ng.nodes.items()
                      if node.bl_idname == 'WifiOutNode' and node.outputs]
    if wifi_out_nodes:
        wifi_dict = {node.var_name: name
                     for name, node in ng.nodes.items()
                     if node.bl_idname == 'WifiInNode'}

    for i, link in enumerate(list(ng.links)):
        #  this protects against a rare occurrence where
        #  a link is considered valid without a to_socket
        #  or a from_socket. protects against a blender crash
        #  see https://github.com/nortikin/sverchok/issues/493
        if not (link.to_socket and link.from_socket):
            ng.links.remove(link)
            raise ValueError("Invalid link found, please report this file")
        if not link.is_valid:
            return collections.defaultdict(set)  # this happens more often than one might think
        if link.is_hidden:
            continue
        key, value = (link.from_node.name, link.to_node.name) if down else (link.to_node.name, link.from_node.name)
        deps[key].add(value)

    for name, var_name in wifi_out_nodes:
        other = wifi_dict.get(var_name)
        if not other:
            warning("Unsatisifed Wifi dependency: node, %s var,%s", name, var_name)
            return collections.defaultdict(set)
        if down:
            deps[other].add(name)
        else:
            deps[name].add(other)

    return deps
Example #13
0
    def execute(self, context):
        ng = bpy.data.node_groups[self.id_tree]

        destination_path = self.filepath
        if not destination_path.lower().endswith('.json'):
            destination_path += '.json'

        # future: should check if filepath is a folder or ends in \

        layout_dict = create_dict_of_tree(ng)
        if not layout_dict:
            msg = 'no update list found - didn\'t export'
            self.report({"WARNING"}, msg)
            warning(msg)
            return {'CANCELLED'}

        write_json(layout_dict, destination_path)
        msg = 'exported to: ' + destination_path
        self.report({"INFO"}, msg)
        info(msg)

        if self.compress:
            comp_mode = zipfile.ZIP_DEFLATED

            # destination path = /a../b../c../somename.json
            base = basename(destination_path)  # somename.json
            basedir = dirname(destination_path)  # /a../b../c../

            # somename.zip
            final_archivename = base.replace('.json', '') + '.zip'

            # /a../b../c../somename.zip
            fullpath = os.path.join(basedir, final_archivename)

            with zipfile.ZipFile(fullpath, 'w',
                                 compression=comp_mode) as myzip:
                myzip.write(destination_path, arcname=base)
                info('wrote: %s', final_archivename)

        return {'FINISHED'}
Example #14
0
    def execute(self, context):
        ng = bpy.data.node_groups[self.id_tree]

        destination_path = self.filepath
        if not destination_path.lower().endswith('.json'):
            destination_path += '.json'

        # future: should check if filepath is a folder or ends in \

        layout_dict = create_dict_of_tree(ng)
        if not layout_dict:
            msg = 'no update list found - didn\'t export'
            self.report({"WARNING"}, msg)
            warning(msg)
            return {'CANCELLED'}

        write_json(layout_dict, destination_path)
        msg = 'exported to: ' + destination_path
        self.report({"INFO"}, msg)
        info(msg)

        if self.compress:
            comp_mode = zipfile.ZIP_DEFLATED

            # destination path = /a../b../c../somename.json
            base = basename(destination_path)  # somename.json
            basedir = dirname(destination_path)  # /a../b../c../

            # somename.zip
            final_archivename = base.replace('.json', '') + '.zip'

            # /a../b../c../somename.zip
            fullpath = os.path.join(basedir, final_archivename)

            with zipfile.ZipFile(fullpath, 'w', compression=comp_mode) as myzip:
                myzip.write(destination_path, arcname=base)
                info('wrote: %s', final_archivename)

        return {'FINISHED'}
Example #15
0
    def execute(self, context):
        node = bpy.data.node_groups[self.treename].nodes[self.nodename]
        precision = node.precision
        subdivisions = node.curve_points_count
        if not bpy.context.selected_objects:
            warning('Profiler: select a curve!')
            self.report({'INFO'}, 'Select a CURVE first')
            return {'CANCELLED'}
        if not bpy.context.selected_objects[0].type == 'CURVE':
            warning('Profiler: the selected object is not a curve')
            self.report({'INFO'}, 'The selected object is not a curve')
            return {'CANCELLED'}

        objs = bpy.context.selected_objects
        names = str([o.name for o in objs])[1:-2]

        # test for POLY or NURBS curve types, these are not yet supported
        spline_type = objs[0].data.splines[0].type
        if spline_type in {'POLY', 'NURBS'}:
            msg = 'Profiler: does not support {0} curve type yet'.format(
                spline_type)
            warning(msg)
            self.report({'INFO'}, msg)
            return {'CANCELLED'}

        # collect paths
        op = []
        clos = []
        for obj in objs:
            for spl in obj.data.splines:
                op.append(spl.bezier_points)
                clos.append(spl.use_cyclic_u)

        # build the text block contents
        values = '# These are autogenerated values, \n# Please rename the text block to avoid data loss.\n'
        values += '# Objects are: \n# %a' % (names) + '.\n'
        values += '# Object origin should be at 0,0,0. \n'
        values += '# Property panel has precision %a \n# and curve subdivision %s.\n\n' % (
            precision, subdivisions)
        # also future output for viewer indices
        out_points = []
        out_names = []
        ss = 0
        for ob_points, clo in zip(op, clos):
            values += '# Spline %a\n' % (ss)
            ss += 1
            # handles preparation
            curves_left = [i.handle_left_type for i in ob_points]
            curves_right = ['v'] + [i.handle_right_type
                                    for i in ob_points][:-1]
            # first collect C,L values to compile them later per point
            types = ['FREE', 'ALIGNED', 'AUTO']
            curves = [
                'C ' if x in types or c in types else 'L '
                for x, c in zip(curves_left, curves_right)
            ]
            # track whether the previous segment was a line or a curve
            line = False
            curve = False

            for i, c in zip(range(len(ob_points)), curves):
                co = ob_points[i].co
                if not i:
                    # initial value
                    values += '\n'
                    values += 'M '
                    co = ob_points[0].co[:]
                    values += self.stringadd(co,
                                             ob_points[0].select_control_point)
                    values += '\n'
                    out_points.append(co)
                    out_names.append(['M.0'])
                    # skip the first point; the 'M' above already consumed it
                    continue

                elif c == 'C ':
                    values += '\n'
                    values += '#C.' + str(i) + '\n'
                    values += c
                    hr = ob_points[i - 1].handle_right[:]
                    hl = ob_points[i].handle_left[:]
                    # hr[0]hr[1]hl[0]hl[1]co[0]co[1] 20 0
                    values += self.stringadd(
                        hr, ob_points[i - 1].select_right_handle)
                    values += self.stringadd(hl,
                                             ob_points[i].select_left_handle)
                    values += self.stringadd(co,
                                             ob_points[i].select_control_point)
                    if curve:
                        values += '\n'
                    out_points.append(hr[:])
                    out_points.append(hl[:])
                    out_points.append(co[:])
                    #namecur = ['C.'+str(i)]
                    out_names.extend([['C.' + str(i) + 'h1'],
                                      ['C.' + str(i) + 'h2'],
                                      ['C.' + str(i) + 'k']])
                    line = False
                    curve = True

                elif c == 'L ' and not line:
                    if curve:
                        values += '\n'
                    values += '#L.' + str(i) + '...' + '\n'
                    values += c
                    values += self.stringadd(co,
                                             ob_points[i].select_control_point)
                    out_points.append(co[:])
                    out_names.append(['L.' + str(i)])
                    line = True
                    curve = False

                elif c == 'L ' and line:
                    values += self.stringadd(co,
                                             ob_points[i].select_control_point)
                    out_points.append(co[:])
                    out_names.append(['L.' + str(i)])

            if clo:
                if ob_points[0].handle_left_type in types or ob_points[
                        -1].handle_right_type in types:
                    line = False
                    values += '\n'
                    values += '#C.' + str(i + 1) + '\n'
                    values += 'C '
                    hr = ob_points[-1].handle_right[:]
                    hl = ob_points[0].handle_left[:]
                    # hr[0]hr[1]hl[0]hl[1]co[0]co[1] 20 0
                    values += self.stringadd(hr,
                                             ob_points[-1].select_right_handle)
                    values += self.stringadd(hl,
                                             ob_points[0].select_left_handle)
                    values += self.stringadd(ob_points[0].co,
                                             ob_points[0].select_control_point)
                    values += ' 0 '
                    values += '\n'
                    out_points.append(hr[:])
                    out_points.append(hl[:])
                    out_names.extend([['C.' + str(i + 1) + 'h1'],
                                      ['C.' + str(i + 1) + 'h2']])
                    # preserving overlapping
                    #out_points.append(ob_points[0].co[:])
                    #out_names.append(['C'])
                if not line:
                    # hacky way until the 'x' close is fixed for curves, not only for lines
                    values += '# hacky way until the x close is fixed\n# for curves, not only for lines'
                    values += '\nL ' + self.stringadd(
                        ob_points[0].co, ob_points[0].select_control_point)
                    values += '\nx \n\n'
                else:
                    values += '\nx \n\n'

        if self.knotselected:
            values += '# the expression (#+a) was added because \n# you selected knots in the curve'
        self.write_values(self.nodename, values)
        #print(values)
        node.filename = self.nodename
        #print([out_points], [out_names])
        # future: share out_points / out_names with the node
        return {'FINISHED'}
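
# Shape sketch of the text this operator generates (illustrative values only;
# an assumption about the profile syntax, since self.stringadd is not shown):
example_profile_text = (
    'M 0.0,0.0\n'
    '#C.1\n'
    'C 0.5,0.0 1.0,0.5 1.0,1.0\n'
    '#L.2...\n'
    'L 2.0,1.0 3.0,1.0\n'
    'x \n'
)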