Example #1
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------

from __future__ import print_function
from __future__ import division
import numpy as np

from ngraph.transformers.base import UnsupportedTransformerException
from ngraph.transformers.passes.flexfusion import FlexFusion

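# Note (added comment): the flex GPU transformer depends on the optional autoflex
# package; fail early with UnsupportedTransformerException if it is not installed.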
try:
    from ngraph.flex import GPUFlexManager, GPUFlex
except ImportError:
    raise UnsupportedTransformerException("autoflex package not installed")

from ngraph.op_graph.op_graph import Op, Fill, RngOp, TensorSizeOp, AssignOp
from ngraph.op_graph.pooling import PoolingOp, BpropPoolOp
from ngraph.op_graph.convolution import ConvolutionOp, bprop_conv, update_conv
from ngraph.transformers.gputransform import GPUTransformer, GPUKernelGroup
from ngraph.transformers.gputransform import GPUDeviceTensor, GPUDeviceBufferStorage
from ngraph.transformers.gputransform import ElementWiseKernel
from ngraph.transformers.gpu.flex_conv import FlexConvFpropKernel, FlexConvBpropKernel, \
    FlexConvUpdateKernel
from ngraph.transformers.gpu.flex_pool import FlexPoolFpropKernel, FlexPoolBpropKernel
from ngraph.transformers.gpu.tensor_ops import FlexFillKernel, FlexRngFillKernel, FlexAssignKernel
from ngraph.transformers.passes.flexpass import FlexDtypePass, FlexPropagateEntryPass, \
    ClearTensorDescriptions
from ngraph.transformers.gpu.float_ew2 import CudaSourceFile, FlexScaleDescription, \
    FlexPtrDescription
Example #2
# ----------------------------------------------------------------------------

from builtins import range
import atexit
import sys
from six import itervalues
from weakref import WeakSet

from ngraph.transformers.base import UnsupportedTransformerException

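# Note (added comment): the GPU transformer requires PyCUDA (driver bindings, GPUArray,
# and the MRG32k3a random number generator); fail early with
# UnsupportedTransformerException when no usable CUDA stack is available.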
try:
    import pycuda.driver as drv
    from pycuda.gpuarray import GPUArray
    from pycuda.curandom import MRG32k3aRandomNumberGenerator as rng_mrg
except ImportError:
    raise UnsupportedTransformerException("No GPU")

from ngraph.transformers.base import ComputationGraphTransformer, \
    DeviceBufferStorage, DeviceTensor, \
    PYCUDA_LOGIC_ERROR_CODE
from ngraph.op_graph.op_graph import Argmax, Argmin, Op, \
    Max, Min, OneHotOp, \
    Power, RngOp, Sum, TensorSizeOp, Fill, TensorDescription, \
    AbsoluteOp, Add, AssignOp, CosOp, Divide, FloorDivide, Mod, Equal, \
    ExpOp, Greater, GreaterEqual, Less, LessEqual, LogOp, Maximum, Minimum, \
    Multiply, NegativeOp, NotEqual, ReciprocalOp, SignOp, SinOp, SqrtOp, SquareOp, \
    Subtract, TanhOp, Prod, DotOp, TensorOp, SigmoidAtomicOp
from ngraph.op_graph.comm_nodes import GPUQueueSendOp, GPUQueueRecvOp, \
    GPUCudaScatterSendOp, GPUCudaScatterRecvOp, \
    GPUCudaGatherSendOp, GPUCudaGatherRecvOp, GPUCudaAllReduceOp
from ngraph.op_graph.convolution import ConvolutionOp, bprop_conv, update_conv, \