Example #1
0
def test_serialize_function_node():
    """Serialization also stores the location of the function."""
    original = function_for_testing()
    # Round-trip through an actual json string, not just the dict.
    payload = json.dumps(original.serialize())
    restored = INode.deserialize(json.loads(payload))
    # The restored node must match the original on every observable aspect.
    assert restored.__doc__ == original.__doc__
    assert restored.name == original.name
    assert restored.inputs.keys() == original.inputs.keys()
    assert restored.outputs.keys() == original.outputs.keys()
    assert restored.evaluate() == original.evaluate()
Example #2
0
def test_deserialize_from_json():
    """De-serialize the node from json."""
    source = SquareNode('Node1ToSerialize')
    target = SquareNode('Node2ToSerialize')
    source.inputs['in1'].value = 1
    # Connect the two nodes so the serialized data includes a connection.
    source.outputs['out'] >> target.inputs['in1']

    restored = INode.deserialize(source.serialize())

    # Identity and input values survive the round trip.
    assert restored.name == source.name
    assert restored.identifier == source.identifier
    assert restored.inputs['in1'].value == source.inputs['in1'].value
def evaluate_on_farm(serialized_json, frames=None):
    """Evaluate the node behind the given json file.

    1. Deserialize the node
    2. Collect any input values from any upstream dependencies
        For implicit batching, the given frames are assigned to the node,
        overriding whatever might be stored in the json file, because all
        batches share the same json file.
    3. Evaluate the node
    4. Serialize the node back into its original file
        For implicit farm conversion, the serialization only happens once,
        for the 'last' batch, knowing that the last batch in numbers might
        not be the 'last' batch actually executed.

    Args:
        serialized_json (str): Path to the json file holding the node.
        frames (list or None): Batch frames to evaluate; None evaluates
            whatever frames are stored in the json file.
    """
    # Debug logs might be useful on the farm
    flowpipe.logger.setLevel(logging.DEBUG)

    # Deserialize the node from the serialized json
    with open(serialized_json, 'r') as f:
        data = json.load(f)
    node = INode.deserialize(data)

    # Retrieve the upstream output data
    for name, input_plug in data['inputs'].items():
        for identifier, output_plug in input_plug['connections'].items():
            upstream_node = JsonDatabase.get(identifier)
            node.inputs[name].value = upstream_node.outputs[output_plug].value

    # Specifically assign the batch frames here if applicable.
    # BUGFIX: access the plug's .value (as done for the upstream data above)
    # instead of the plug object itself -- the original code replaced the
    # plug with a raw list and later indexed the plug object.
    if frames is not None:
        all_frames = node.inputs['frames'].value
        node.inputs['frames'].value = frames

    # Actually evaluate the node
    node.evaluate()

    # Store the result back into the same file ONLY once
    # ALL batch processes access the same json file so the result is only stored
    # for the last batch, knowing that the last batch in numbers might not be
    # the last batch actually executed
    if frames is not None and frames[-1] != all_frames[-1]:
        return

    with open(serialized_json, 'w') as f:
        json.dump(node.serialize(), f, indent=2)
Example #4
0
def test_deserialize_from_json(mock_inspect):
    """De-serialize the node from json."""
    node1 = SquareNode('Node1')
    node2 = SquareNode('Node2')
    node1.inputs['in1'].value = 1
    node1.outputs['out'] >> node2.inputs['in1']
    node1.outputs['out'] >> node2.inputs['compound_in']['key']
    node1.outputs['out'] >> node2.inputs['compound_in']['1']

    def plug(name, value=None, sub_plugs=None):
        # Expected serialized form of a top-level plug.
        return {
            'connections': {},
            'name': name,
            'value': value,
            'sub_plugs': sub_plugs if sub_plugs is not None else {},
        }

    def sub_plug(name):
        # Sub-plugs serialize without a 'sub_plugs' entry of their own.
        return {'connections': {}, 'name': name, 'value': None}

    # Node 1: a plain input carrying a value, untouched compound plugs.
    assert INode.deserialize(node1.serialize()).serialize() == {
        'inputs': {
            'compound_in': plug('compound_in'),
            'in1': plug('in1', value=1),
        },
        'name': 'Node1',
        'outputs': {
            'compound_out': plug('compound_out'),
            'out': plug('out'),
        },
        'metadata': {},
        'module': 'test_node',
        'file_location': '/path/to/node/implementation.py',
        'identifier': node1.identifier,
        'cls': 'SquareNode',
    }

    # Node 2: the compound input gained two sub-plugs via the connections.
    assert INode.deserialize(node2.serialize()).serialize() == {
        'inputs': {
            'compound_in': plug('compound_in', sub_plugs={
                '1': sub_plug('compound_in.1'),
                'key': sub_plug('compound_in.key'),
            }),
            'in1': plug('in1'),
        },
        'name': 'Node2',
        'outputs': {
            'compound_out': plug('compound_out'),
            'out': plug('out'),
        },
        'metadata': {},
        'module': 'test_node',
        'file_location': '/path/to/node/implementation.py',
        'identifier': node2.identifier,
        'cls': 'SquareNode',
    }
 def get(identifier):
     """Retrieve the node behind the given identifier."""
     # The database maps an identifier to a json file on disk.
     json_file = JsonDatabase.PATH.format(identifier=identifier)
     with open(json_file, 'r') as handle:
         return INode.deserialize(json.load(handle))