Example #1
def dispatch_func(func, *args, **kwargs):
    """The wrapped dispatch function"""
    tgt = _target.current_target()
    context = DispatchContext.current
    if context is None:
        raise RuntimeError("DispatchContext is not initialized")
    workload = func(*args, **kwargs)
    cfg = context.query(tgt, workload)
    return dispatch_dict[cfg.template_key](cfg, *args, **kwargs)
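
For context, here is a minimal self-contained sketch of the decorator pattern behind this wrapper. The names dispatcher and register, and the convention of passing the template key explicitly, are illustrative assumptions, not TVM's actual API:

import functools

def dispatcher(workload_func):
    dispatch_dict = {}  # template_key -> registered implementation

    @functools.wraps(workload_func)
    def dispatch_func(template_key, *args, **kwargs):
        # in TVM the key comes from the queried config; here it is passed in
        return dispatch_dict[template_key](*args, **kwargs)

    def register(template_key):
        def _register(impl):
            dispatch_dict[template_key] = impl
            return impl
        return _register

    dispatch_func.register = register
    return dispatch_func

@dispatcher
def conv2d_workload(*args):
    return ('conv2d',) + args

@conv2d_workload.register('direct')
def _conv2d_direct(x):
    return x * 2  # placeholder implementation

print(conv2d_workload('direct', 21))  # -> 42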
Example #2
def _get_schedule(wkl, target=None):
    """ Get the platform specific schedule. """
    if target is None:
        target = _target.current_target()
    else:
        target = _target.Target(target)
    assert target in _CONV_SCHEDULE, \
        "no schedule registered for target: {}".format(target)
    return _CONV_SCHEDULE[target](wkl)
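
A hypothetical registration into a target-to-schedule registry like _CONV_SCHEDULE could look as follows; the decorator name and the target string are assumptions for illustration, not code from the snippet above:

_CONV_SCHEDULE = {}

def register_conv_schedule(target):
    def _register(schedule_func):
        _CONV_SCHEDULE[target] = schedule_func
        return schedule_func
    return _register

@register_conv_schedule('cuda')
def schedule_conv_cuda(wkl):
    # a real implementation would build and return a schedule object
    return 'cuda schedule for {}'.format(wkl)

print(_CONV_SCHEDULE['cuda']('conv_3x3'))  # -> cuda schedule for conv_3x3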
Example #3
def dispatch_func(func, *args, **kwargs):
    """The wrapped dispatch function"""
    tgt = _target.current_target()
    context = DispatchContext.current
    if context is None:
        raise RuntimeError("DispatchContext is not initialized")
    workload = func(*args, **kwargs)
    cfg = context.query(tgt, workload)
    if cfg.template_key:
        return dispatch_dict[cfg.template_key](cfg, *args, **kwargs)
    else:
        assert dispatch_dict, "No func registered for this dispatcher"
        # no template key: fall back to the first registered implementation
        for v in dispatch_dict.values():
            return v(cfg, *args, **kwargs)
Example #4
def dispatch_func(func, *args, **kwargs):
    """The wrapped dispatch function"""
    tgt = _target.current_target()
    workload = func(*args, **kwargs)
    cfg = DispatchContext.current.query(tgt, workload)
    if cfg.is_fallback and not cfg.template_key:
        # first try the 'direct' template
        if 'direct' in dispatch_dict:
            return dispatch_dict['direct'](cfg, *args, **kwargs)
        # otherwise fall back to the first registered template
        for v in dispatch_dict.values():
            return v(cfg, *args, **kwargs)
    else:
        return dispatch_dict[cfg.template_key](cfg, *args, **kwargs)
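
The branch above hinges on cfg.is_fallback and cfg.template_key. Here is a runnable stand-in showing both selection paths; FakeConfig and pick are illustrative inventions, not TVM's ConfigSpace API:

class FakeConfig:
    """Stand-in for the queried config; only the two attributes used above."""
    def __init__(self, is_fallback, template_key=''):
        self.is_fallback = is_fallback
        self.template_key = template_key

dispatch_dict = {
    'direct': lambda cfg, x: ('direct', x),
    'winograd': lambda cfg, x: ('winograd', x),
}

def pick(cfg, x):
    # same selection logic as dispatch_func above
    if cfg.is_fallback and not cfg.template_key:
        if 'direct' in dispatch_dict:
            return dispatch_dict['direct'](cfg, x)
        for v in dispatch_dict.values():
            return v(cfg, x)
    return dispatch_dict[cfg.template_key](cfg, x)

print(pick(FakeConfig(is_fallback=True), 42))    # -> ('direct', 42)
print(pick(FakeConfig(False, 'winograd'), 42))   # -> ('winograd', 42)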
Example #5
def dispatch_func(func, *args, **kwargs):
    """The wrapped dispatch function"""
    tgt = _target.current_target()
    workload = func(*args, **kwargs)
    cfg = DispatchContext.current.query(tgt, workload)
    if cfg.is_fallback and not cfg.template_key:
        # first try the 'direct' template
        if 'direct' in dispatch_dict:
            if tgt.device_type == 14:  # DPU device
                # look up a tuned config in the DPU auto-tuning library
                cfg = _load_dpu_cfg(workload)
                if cfg is None:
                    # TODO(wjq): not found; search for a schedule, which is
                    # expected to persist the result into the library file
                    searchDPUOptSch(workload)
                    cfg = _load_dpu_cfg(workload)
                    if cfg is None:
                        raise ValueError("add cfg fail")
            return dispatch_dict['direct'](cfg, *args, **kwargs)
        # otherwise fall back to the first registered template
        for v in dispatch_dict.values():
            return v(cfg, *args, **kwargs)
    else:
        return dispatch_dict[cfg.template_key](cfg, *args, **kwargs)

def _load_dpu_cfg(workload):
    """Return the tuned config for `workload` from the DPU auto-tuning
    library, or None if the library is empty or has no entry for it."""
    # requires `import json` at module level; `with` closes the file for us
    with open(Dp.DPUAutoTuningLibraryPath, 'r') as f:
        if len(f.read()) <= 10:  # empty or truncated library file
            return None
        f.seek(0)
        data = json.load(f)
    return data.get(transferIntoStr(workload))
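
The DPU branch assumes a JSON library file mapping a stringified workload to a tuned config. A hypothetical example of that file's shape and the lookup; the key format and field names are invented for illustration:

import json
import tempfile

library = {
    "conv2d/1x224x224x3/k3s1p1": {"tile_c": 16, "tile_h": 4},
}

with tempfile.NamedTemporaryFile('w', suffix='.json', delete=False) as f:
    json.dump(library, f)
    path = f.name

with open(path, 'r') as f:
    data = json.load(f)

print(data.get("conv2d/1x224x224x3/k3s1p1"))  # -> {'tile_c': 16, 'tile_h': 4}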
Example #6
def conv2d(data, kernel, stride, padding, layout='NCHW'):
    """Conv2D operator.

    Parameters
    ----------
    data : tvm.Tensor
        4-D with shape [batch, in_channel, in_height, in_width]

    kernel : tvm.Tensor
        4-D with shape [num_filter, in_channel, filter_height, filter_width]

    stride : int or a list/tuple of two ints
        stride size, or [stride_height, stride_width]

    padding : int or a list/tuple of two ints
        padding size, or [pad_height, pad_width]

    layout : str
        layout of data

    Returns
    -------
    output : tvm.Tensor
        4-D with shape [batch, out_channel, out_height, out_width]
    """
    # search platform specific declaration first
    target = _target.current_target()
    if target in _CONV_DECLARATION:
        return _CONV_DECLARATION[target](data, kernel, stride, padding, layout)

    # default declaration
    if layout == 'NCHW':
        return conv2d_nchw(data, kernel, stride, padding)
    elif layout == 'HWCN':
        return conv2d_hwcn(data, kernel, stride, padding)
    else:
        raise ValueError("not support this layout {} yet".format(layout))