from collections import OrderedDict


def pushSomething(lua, something):
    # Lua has a single number type, so ints and floats both go
    # through pushNumber.
    if isinstance(something, (int, float)):
        lua.pushNumber(something)
        return
    if isinstance(something, str):
        lua.pushString(something)
        return
    if isinstance(something, dict):
        pushTable(lua, something)
        return
    # Lists and tuples become Lua tables keyed 1..n (Lua arrays are 1-based).
    if isinstance(something, (list, tuple)):
        pushTable(lua, OrderedDict(zip(range(1, len(something) + 1), something)))
        return
    # Push handlers registered for specific Python classes (populated
    # elsewhere in the module).
    for pythonClass in pushFunctionByPythonClass:
        if isinstance(something, pythonClass):
            pushFunctionByPythonClass[pythonClass](something)
            return
    # Instances of wrapped Lua classes are pushed back as Lua objects.
    if type(something) in luaClassesReverse:
        pushObject(lua, something)
        return
    # numpy arrays are converted to the matching torch tensor type and
    # pushed recursively. The check is done on the type's string form, so
    # numpy stays an optional dependency ("<type ...>" is the Python 2
    # spelling).
    typestring = str(type(something))
    if typestring in ["<class 'numpy.ndarray'>", "<type 'numpy.ndarray'>"]:
        dtypestr = str(something.dtype)
        if dtypestr == 'float32':
            pushSomething(lua, PyTorch._asFloatTensor(something))
            return
        if dtypestr == 'float64':
            pushSomething(lua, PyTorch._asDoubleTensor(something))
            return
        if dtypestr == 'uint8':
            pushSomething(lua, PyTorch._asByteTensor(something))
            return
        raise Exception('pushing numpy array with elements of type ' + dtypestr + ' is not currently implemented')
    raise Exception('pushing type ' + str(type(something)) + ' not implemented, value ' + str(something))
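# Hedged usage sketch: exercises pushSomething's scalar dispatch with a
# minimal stand-in for the Lua state wrapper. The real `lua` object comes
# from the library's Lua runtime; _FakeLua, _demoPushSomething, and the
# sample values below are illustrative assumptions, not part of the library.
class _FakeLua(object):
    def pushNumber(self, value):
        print('pushNumber', value)

    def pushString(self, value):
        print('pushString', value)


def _demoPushSomething():
    lua = _FakeLua()
    pushSomething(lua, 3)        # int -> pushNumber
    pushSomething(lua, 1.5)      # float -> pushNumber
    pushSomething(lua, 'hello')  # str -> pushString
    # A list such as ['a', 'b'] would become OrderedDict([(1, 'a'), (2, 'b')])
    # and be routed through pushTable, which needs the real Lua state.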
def asByteTensor(myarray):
    # Convert the numpy array via the C layer, then wrap the raw handle
    # in the Python-side ByteTensor class.
    f1 = PyTorch._asByteTensor(myarray)
    return ByteTensor(f1)
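# Hedged usage sketch: wrapping a numpy uint8 array as a ByteTensor.
# Assumes numpy is available and that PyTorch._asByteTensor accepts a
# uint8 ndarray, as the uint8 branch in pushSomething above suggests;
# _demoAsByteTensor and the array shape are illustrative.
def _demoAsByteTensor():
    import numpy as np
    img = np.zeros((4, 4), dtype=np.uint8)  # any uint8 ndarray would do
    return asByteTensor(img)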