Example #1
 def get_related_objects(self, flatten=False):
     """
     Return all objects related to the current instance.
     """
     collector = NestedObjects(using='default')
     collector.collect([self])
     if flatten:
         return list(utils.flatten(collector.nested()))
     return collector.nested()
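
All the examples on this page lean on some flatten helper. As a reference point, here is a minimal sketch of what the utils.flatten used above is assumed to do with the nested lists that NestedObjects.nested() returns (an assumption, not the project's actual implementation):

def flatten(nested):
    # Yield leaf objects from an arbitrarily nested list structure,
    # e.g. the one returned by NestedObjects.nested().
    for item in nested:
        if isinstance(item, list):
            for leaf in flatten(item):
                yield leaf
        else:
            yield item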
Example #2
        def nan_check(i, node, fn):
            """
            Runs `fn` while checking its inputs and outputs for NaNs / Infs

            Parameters
            ----------
            i : currently ignored (TODO: determine why it is here or remove)
            node : theano.gof.Apply
                The Apply node currently being executed
            fn : callable
                The thunk to execute for this Apply node
            """
            inputs = fn.inputs
            # TODO: figure out why individual inputs are themselves lists sometimes
            for x in flatten(inputs):
                do_check_on(x, node, fn, True)

            fn()
            outputs = fn.outputs
            for x in flatten(outputs):
                do_check_on(x, node, fn, False)
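
For context, do_check_on is assumed to validate one value at a time. A hedged sketch in the same spirit (the name, signature, and behavior here are assumptions, not Theano's actual helper):

import numpy as np

def do_check_on(value, node, fn, is_input):
    # Raise if a numeric array flowing through `node` contains NaN or Inf.
    # `fn` is accepted only to mirror the call sites above.
    if isinstance(value, np.ndarray) and not np.all(np.isfinite(value)):
        kind = 'input' if is_input else 'output'
        raise AssertionError('Non-finite {0} detected at node {1}'.format(kind, node))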
Example #3
    def aggregate_data(self):
        """根据供应商的订货单分组
        1, 入仓的订货单根据预测单继续聚合分组;
        2, 未入仓订货单则单独统一分组; 
        """
        if hasattr(self, '_aggregate_data_'):
            return self._aggregate_data_

        aggregate_dict_list = []
        order_group_keyset = self.get_group_keyset()
        order_keylist = self.flatten_group_keyset(order_group_keyset)
        order_keyset  = set(order_keylist)

        supplier_ids = [order['supplier_id'] for order in self.aggregate_orders_dict.values()]
        supplier_values = SaleSupplier.objects.filter(id__in=supplier_ids).values(
            'id', 'supplier_name', 'supplier_code')
        supplier_dict_data = dict([(s['id'], s) for s in supplier_values])

        logger.info('aggregate key len: list=%s, set=%s' % (len(order_keylist), len(order_keyset)))
        forecast_inbounds = ForecastInbound.objects.filter(relate_order_set__in=order_keyset)\
            .exclude(status__in=(ForecastInbound.ST_CANCELED,ForecastInbound.ST_TIMEOUT))
        forecast_values = forecast_inbounds.values(
            'id', 'relate_order_set','supplier_id', 'express_code', 'express_no', 'forecast_arrive_time',
            'total_forecast_num', 'total_arrival_num', 'purchaser', 'status',
            'memo', 'has_lack', 'has_defact', 'has_overhead', 'has_wrong'
        )
        forecast_status_map = dict(ForecastInbound.STATUS_CHOICES)
        aggregate_forecast_dict = defaultdict(list)
        for value in forecast_values:
            value['status_name'] = forecast_status_map.get(value['status'])
            aggregate_forecast_dict[value['relate_order_set']].append(value)

        real_inbounds = RealInbound.objects.filter(relate_order_set__in=order_keyset)\
            .exclude(status=RealInbound.CANCELED)
        realinbound_values = real_inbounds.values(
            'id', 'relate_order_set','supplier_id', 'wave_no', 'ware_house', 'express_code', 'express_no',
            'creator', 'inspector', 'total_inbound_num', 'total_inferior_num', 'created', 'memo', 'status'
        )
        realinbound_status_map = dict(RealInbound.STATUS_CHOICES)
        aggregate_realinbound_dict = defaultdict(list)
        for value in realinbound_values:
            value['status_name'] = realinbound_status_map.get(value['status'])
            aggregate_realinbound_dict[value['relate_order_set']].append(value)

        # TODO: list out all forecast and real inbound orders via values()
        for group_key in order_group_keyset:
            aggregate_id_set = self.flatten_group_keyset([group_key])
            if not aggregate_id_set:
                continue
            aggregate_orders = []
            for order_id in aggregate_id_set:
                if order_id in self.aggregate_orders_dict:
                    order_dict = self.aggregate_orders_dict[order_id]
                else:
                    order_dict = get_purchaseorder_data(order_id)
                aggregate_orders.append(order_dict)

            is_unarrive_intime = False
            is_unrecord_logistic = False
            is_billingable = True
            is_arrivalexcept = False
            forecast_orders = flatten([aggregate_forecast_dict.get(key) for key in aggregate_id_set
                                       if key in aggregate_forecast_dict])
            distinct_forecast_orders = dict([(fo['id'], fo) for fo in forecast_orders]).values()
            for forecast_data in distinct_forecast_orders:
                forecast_data['is_unarrive_intime']   = self.is_arrival_timeout(forecast_data)
                forecast_data['is_unrecord_logistic'] = self.is_unrecord_logistic(forecast_data)
                is_unarrive_intime |= forecast_data['is_unarrive_intime']
                is_unrecord_logistic |= forecast_data['is_unrecord_logistic']
                is_billingable &= not self.is_inthedelivery(forecast_data)
                is_arrivalexcept |= self.is_arrival_except(forecast_data)

            realinbound_orders = flatten([aggregate_realinbound_dict.get(key) for key in aggregate_id_set
                                          if key in aggregate_realinbound_dict])
            distinct_realinbound_orders = dict([(fo['id'], fo) for fo in realinbound_orders]).values()

            aggregate_dict_list.append({
                'order_group_key': group_key,
                'purchase_orders': aggregate_orders,
                'forecast_inbounds': distinct_forecast_orders,
                'real_inbounds': distinct_realinbound_orders,
                'is_unarrive_intime': is_unarrive_intime,
                'is_unrecord_logistic': is_unrecord_logistic,
                'is_billingable': is_billingable,
                'is_arrivalexcept': is_arrivalexcept,
                'supplier': supplier_dict_data.get(aggregate_orders[0]['supplier_id'])
            })
        self._aggregate_data_ = aggregate_dict_list
        return self._aggregate_data_
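
The hasattr check at the top of aggregate_data is a hand-rolled memoized property. On Python 3.8+ the same caching can be written with functools.cached_property; a minimal sketch (not part of the original code, and the builder name is hypothetical):

from functools import cached_property

class Report:
    @cached_property
    def aggregate_data(self):
        # Computed on first access, then cached on the instance.
        return self._build_aggregate()

    def _build_aggregate(self):
        # Hypothetical stand-in for the aggregation above.
        return []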
Example #4
 def flatten_group_keyset(self, group_keyset):
     key_set = flatten([key.strip('-').split('-') for key in group_keyset if key.strip('-')])
     return [int(key) for key in key_set]
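
Usage sketch, assuming the group keys are '-'-joined order-id strings as produced by get_group_keyset:

    flatten_group_keyset(['12-34-', '-56'])  # -> [12, 34, 56]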
Example #5
BENCHMARKS: FlatIntermediateDefinition = flatten({
    "Empty": {
        "no allocation":
        GroupedStmts(
            r"torch.empty(())",
            r"torch::empty({0});",
        ),
        "with allocation":
        GroupedStmts(
            r"torch.empty((1,))",
            r"torch::empty({1});",
        ),
        "overloads":
        GroupedVariants(cpp_block=r"""
                // @Setup
                auto options_empty = c10::TensorOptions();
                auto options_full = c10::TensorOptions().dtype(at::kFloat).device(at::kCPU);
                auto optional_float = c10::make_optional(at::kFloat);

                // @TensorOptions overload
                at::empty({0}, options_empty);
                at::empty({0}, options_full);
                at::empty({0}, at::kFloat); // implicit conversion

                // @Faithful overload
                at::empty({0}, c10::nullopt, c10::nullopt, c10::nullopt, c10::nullopt, c10::nullopt);
                at::empty({0}, at::kFloat, c10::nullopt, c10::nullopt, c10::nullopt, c10::nullopt);
                at::empty({0}, optional_float, c10::nullopt, c10::nullopt, c10::nullopt, c10::nullopt);
            """),
    },
    "Pointwise": {
        "Math": {
            "add": {
                "Tensor-Scalar":
                GroupedStmts(
                    r"x += 1.0",
                    r"x += 1.0;",
                    setup=Setup.GENERIC.value,
                ),
            },
        },
    },
    "Indexing":
    GroupedVariants(*parse_stmts(r"""
        Python                                   | C++
        ---------------------------------------- | ----------------------------------------
        # @setup                                 | // @setup
                                                 | using namespace torch::indexing;
        torch.manual_seed(6626_10_34)            | torch::manual_seed(66261034);
                                                 |
        x = torch.randn(1, 1, 1)                 | auto x = torch::randn({1, 1, 1});
        y = torch.randn(1, 1, 1)                 | auto y = torch::randn({1, 1, 1});
                                                 |
        # @Tensor-Scalar                         | // @Tensor-Scalar
        x[0] = 1                                 | x.index_put_({0}, 1);
        x[0, 0] = 1                              | x.index_put_({0, 0}, 1);
        x[0, 0, 0] = 1                           | x.index_put_({0, 0, 0}, 1);
                                                 |
        # @Tensor-Scalar (Advanced)              | // @Tensor-Scalar (Advanced)
        x[...] = 1                               | x.index_put_({"..."}, 1);
        x[:] = 1                                 | x.index_put_({Slice(None, None, None)}, 1);
        x[None] = 1                              | x.index_put_({None}, 1);
        x[False] = 1                             | x.index_put_({false}, 1);
        x[True] = 1                              | x.index_put_({true}, 1);
                                                 |
        # @Tensor-Tensor                         | // @Tensor-Tensor
        x[0] = y[0]                              | x.index_put_({0}, y.index({0}));
        x[0, 0] = y[0, 0]                        | x.index_put_({0, 0}, y.index({0, 0}));
        x[0, 0, 0] = y[0, 0, 0]                  | x.index_put_({0, 0, 0}, y.index({0, 0, 0}));
                                                 |
        # @Tensor-Tensor (Advanced)              | // @Tensor-Tensor (Advanced)
        x[...] = y[...]                          | x.index_put_({"..."}, y.index({"..."}));
        x[:] = y[:]                              | x.index_put_({Slice(None, None, None)}, y.index({Slice(None, None, None)}));
        x[None] = y[None]                        | x.index_put_({None}, y.index({None}));
        x[False] = y[False]                      | x.index_put_({false}, y.index({false}));
        x[True] = y[True]                        | x.index_put_({true}, y.index({true}));
    """)),
    "nn Modules": {
        "Linear":
        GroupedModules(
            "model = torch.nn.Linear(4, 2)",
            "auto model = torch::nn::Linear(4, 2);",
            setup=Setup.TRIVIAL_4D.value,
            signature="f(x) -> y",
            torchscript=True,
        ),
    },
    "training": {
        "simple":
        GroupedStmts(
            *parse_stmts(r"""
                Python                                   | C++
                ---------------------------------------- | ----------------------------------------
                a0 = torch.nn.functional.relu(x * w0)    | auto a0 = torch::nn::functional::relu(x * w0);
                y = a0 * w1                              | auto y = a0 * w1;
            """),
            Setup.TRAINING.value,
            num_threads=(1, 2),
            signature=r"f(x, w0, w1) -> y",
            torchscript=True,
            autograd=True,
        ),
        "ensemble":
        GroupedStmts(
            *parse_stmts(r"""
                Python                                   | C++
                ---------------------------------------- | ----------------------------------------
                a0 = torch.nn.functional.gelu(x * w0)    | auto a0 = torch::nn::functional::gelu(x * w0);
                a1 = torch.nn.functional.prelu(y, w1)    | auto a1 = torch::nn::functional::prelu(y, w1);
                z = torch.nn.functional.normalize(       | auto z = torch::nn::functional::normalize(
                    torch.cat([a0, a1]),                 |     torch::cat({a0, a1}),
                    p=2.0, dim=0,                        |     torch::nn::functional::NormalizeFuncOptions().p(2).dim(0)
                ).dot(w2)                                | ).dot(w2);
            """),
            Setup.TRAINING.value,
            num_threads=(1, 2),
            signature=r"f(x, y, w0, w1, w2) -> z",
            torchscript=True,
            autograd=True,
        ),
    },
})
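
Here flatten collapses the nested label hierarchy into a flat mapping keyed by label tuples, matching the FlatIntermediateDefinition annotation. A minimal behavioral sketch (the real helper in PyTorch's benchmark utilities is assumed to also validate its inputs):

def flatten(nested, prefix=()):
    # Map {'a': {'b': spec}} to {('a', 'b'): spec}.
    flat = {}
    for key, value in nested.items():
        label = prefix + (key,)
        if isinstance(value, dict):
            flat.update(flatten(value, label))
        else:
            flat[label] = value
    return flat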
Example #6
def process(section,
            dir_name,
            input_name=None,
            failed=False,
            client_agent='manual',
            download_id=None,
            input_category=None,
            failure_link=None):

    cfg = dict(core.CFG[section][input_category])

    host = cfg['host']
    port = cfg['port']
    ssl = int(cfg.get('ssl', 0))
    web_root = cfg.get('web_root', '')
    protocol = 'https://' if ssl else 'http://'
    username = cfg.get('username', '')
    password = cfg.get('password', '')
    apikey = cfg.get('apikey', '')

    if server_responding('{0}{1}:{2}{3}'.format(protocol, host, port,
                                                web_root)):
        # auto-detect correct fork
        fork, fork_params = auto_fork(section, input_category)
    elif not username and not apikey:
        logger.info(
            'No SickBeard username or Sonarr apikey entered. Performing transcoder functions only'
        )
        fork, fork_params = 'None', {}
    else:
        logger.error('Server did not respond. Exiting', section)
        return ProcessResult(
            status_code=1,
            message='{0}: Failed to post-process - {0} did not respond.'.
            format(section),
        )

    delete_failed = int(cfg.get('delete_failed', 0))
    nzb_extraction_by = cfg.get('nzbExtractionBy', 'Downloader')
    process_method = cfg.get('process_method')
    if client_agent == core.TORRENT_CLIENT_AGENT and core.USE_LINK == 'move-sym':
        process_method = 'symlink'
    remote_path = int(cfg.get('remote_path', 0))
    wait_for = int(cfg.get('wait_for', 2))
    force = int(cfg.get('force', 0))
    delete_on = int(cfg.get('delete_on', 0))
    ignore_subs = int(cfg.get('ignore_subs', 0))
    status = int(failed)
    if status > 0 and core.NOEXTRACTFAILED:
        extract = 0
    else:
        extract = int(cfg.get('extract', 0))
    # get importmode, default to 'Move' for consistency with legacy
    import_mode = cfg.get('importMode', 'Move')

    if not os.path.isdir(dir_name) and os.path.isfile(
            dir_name
    ):  # If the input directory is a file, assume single file download and split dir/name.
        dir_name = os.path.split(os.path.normpath(dir_name))[0]

    specific_path = os.path.join(dir_name, str(input_name))
    clean_name = os.path.splitext(specific_path)
    if clean_name[1] == '.nzb':
        specific_path = clean_name[0]
    if os.path.isdir(specific_path):
        dir_name = specific_path

    # Attempt to create the directory if it doesn't exist and ignore any
    # error stating that it already exists. This fixes a bug where SickRage
    # won't process the directory because it doesn't exist.
    if dir_name:
        try:
            os.makedirs(dir_name)  # Attempt to create the directory
        except OSError as e:
            # Re-raise the error if it wasn't about the directory not existing
            if e.errno != errno.EEXIST:
                raise

    if 'process_method' not in fork_params or (
            client_agent in ['nzbget', 'sabnzbd']
            and nzb_extraction_by != 'Destination'):
        if input_name:
            process_all_exceptions(input_name, dir_name)
            input_name, dir_name = convert_to_ascii(input_name, dir_name)

        # Now check if tv files exist in destination.
        if not list_media_files(
                dir_name, media=True, audio=False, meta=False, archives=False):
            if list_media_files(
                    dir_name, media=False, audio=False, meta=False,
                    archives=True) and extract:
                logger.debug(
                    'Checking for archives to extract in directory: {0}'.
                    format(dir_name))
                core.extract_files(dir_name)
                input_name, dir_name = convert_to_ascii(input_name, dir_name)

        if list_media_files(
                dir_name, media=True, audio=False, meta=False, archives=False
        ):  # Check that a video exists. if not, assume failed.
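            # Note: this flatten() appears to be nzbToMedia's directory
            # flattener, which moves media files from nested subfolders up
            # into dir_name (an assumption from usage; it is not the
            # list-flatten helper seen in the other examples).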
            flatten(dir_name)

    # Check video files for corruption
    good_files = 0
    num_files = 0
    for video in list_media_files(dir_name,
                                  media=True,
                                  audio=False,
                                  meta=False,
                                  archives=False):
        num_files += 1
        if transcoder.is_video_good(video, status):
            good_files += 1
            import_subs(video)
    if num_files > 0:
        if good_files == num_files and status != 0:
            logger.info('Found Valid Videos. Setting status Success')
            status = 0
            failed = 0
        if good_files < num_files and status == 0:
            logger.info('Found corrupt videos. Setting status Failed')
            status = 1
            failed = 1
            if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][
                    0:5] >= '14.0':
                print('[NZB] MARK=BAD')
            if failure_link:
                failure_link += '&corrupt=true'
    elif client_agent == 'manual':
        logger.warning(
            'No media files found in directory {0} to manually process.'.
            format(dir_name), section)
        return ProcessResult(
            message='',
            status_code=0,  # Success (as far as this script is concerned)
        )
    elif nzb_extraction_by == 'Destination':
        logger.info(
            'Check for media files ignored because nzbExtractionBy is set to Destination.'
        )
        if int(failed) == 0:
            logger.info('Setting Status Success.')
            status = 0
            failed = 0
        else:
            logger.info(
                'Downloader reported an error during download or verification. Processing this as a failed download.'
            )
            status = 1
            failed = 1
    else:
        logger.warning(
            'No media files found in directory {0}. Processing this as a failed download'
            .format(dir_name), section)
        status = 1
        failed = 1
        if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][
                0:5] >= '14.0':
            print('[NZB] MARK=BAD')

    if status == 0 and core.TRANSCODE == 1:  # only transcode successful downloads
        result, new_dir_name = transcoder.transcode_directory(dir_name)
        if result == 0:
            logger.debug(
                'SUCCESS: Transcoding succeeded for files in {0}'.format(
                    dir_name), section)
            dir_name = new_dir_name

            chmod_directory = int(str(cfg.get('chmodDirectory', '0')), 8)
            logger.debug(
                'Config setting \'chmodDirectory\' currently set to {0}'.
                format(oct(chmod_directory)), section)
            if chmod_directory:
                logger.info(
                    'Attempting to set the octal permission of \'{0}\' on directory \'{1}\''
                    .format(oct(chmod_directory), dir_name), section)
                core.rchmod(dir_name, chmod_directory)
        else:
            logger.error(
                'FAILED: Transcoding failed for files in {0}'.format(dir_name),
                section)
            return ProcessResult(
                message='{0}: Failed to post-process - Transcoding failed'.
                format(section),
                status_code=1,
            )

    # configure SB params to pass
    fork_params['quiet'] = 1
    fork_params['proc_type'] = 'manual'
    if input_name is not None:
        fork_params['nzbName'] = input_name

    for param in copy.copy(fork_params):
        if param == 'failed':
            fork_params[param] = failed
            if 'proc_type' in fork_params:
                del fork_params['proc_type']
            if 'type' in fork_params:
                del fork_params['type']

        if param == 'return_data':
            fork_params[param] = 0
            if 'quiet' in fork_params:
                del fork_params['quiet']

        if param == 'type':
            fork_params[param] = 'manual'
            if 'proc_type' in fork_params:
                del fork_params['proc_type']

        if param in [
                'dir_name', 'dir', 'proc_dir', 'process_directory', 'path'
        ]:
            fork_params[param] = dir_name
            if remote_path:
                fork_params[param] = remote_dir(dir_name)

        if param == 'process_method':
            if process_method:
                fork_params[param] = process_method
            else:
                del fork_params[param]

        if param in ['force', 'force_replace']:
            if force:
                fork_params[param] = force
            else:
                del fork_params[param]

        if param in ['delete_on', 'delete']:
            if delete_on:
                fork_params[param] = delete_on
            else:
                del fork_params[param]

        if param == 'ignore_subs':
            if ignore_subs:
                fork_params[param] = ignore_subs
            else:
                del fork_params[param]

        if param == 'force_next':
            fork_params[param] = 1

    # delete any unused params so we don't pass them to SB by mistake
    for k, v in list(fork_params.items()):
        if v is None:
            del fork_params[k]

    if status == 0:
        if section == 'NzbDrone' and not apikey:
            logger.info('No Sonarr apikey entered. Processing completed.')
            return ProcessResult(
                message='{0}: Successfully post-processed {1}'.format(
                    section, input_name),
                status_code=0,
            )
        logger.postprocess(
            'SUCCESS: The download succeeded, sending a post-process request',
            section)
    else:
        core.FAILED = True
        if failure_link:
            report_nzb(failure_link, client_agent)
        if 'failed' in fork_params:
            logger.postprocess(
                'FAILED: The download failed. Sending \'failed\' process request to {0} branch'
                .format(fork), section)
        elif section == 'NzbDrone':
            logger.postprocess(
                'FAILED: The download failed. Sending failed download to {0} for CDH processing'
                .format(fork), section)
            return ProcessResult(
                message='{0}: Download Failed. Sending back to {0}'.format(
                    section),
                status_code=
                1,  # Return as failed to flag this in the downloader.
            )
        else:
            logger.postprocess(
                'FAILED: The download failed. {0} branch does not handle failed downloads. Nothing to process'
                .format(fork), section)
            if delete_failed and os.path.isdir(
                    dir_name) and not os.path.dirname(dir_name) == dir_name:
                logger.postprocess(
                    'Deleting failed files and folder {0}'.format(dir_name),
                    section)
                remove_dir(dir_name)
            return ProcessResult(
                message=
                '{0}: Failed to post-process. {0} does not support failed downloads'
                .format(section),
                status_code=
                1,  # Return as failed to flag this in the downloader.
            )

    url = None
    if section == 'SickBeard':
        if apikey:
            url = '{0}{1}:{2}{3}/api/{4}/'.format(protocol, host, port,
                                                  web_root, apikey)
            if 'cmd' not in fork_params:
                if 'SickGear' in fork:
                    fork_params['cmd'] = 'sg.postprocess'
                else:
                    fork_params['cmd'] = 'postprocess'
        elif fork == 'Stheno':
            url = '{0}{1}:{2}{3}/home/postprocess/process_episode'.format(
                protocol, host, port, web_root)
        else:
            url = '{0}{1}:{2}{3}/home/postprocess/processEpisode'.format(
                protocol, host, port, web_root)
    elif section == 'NzbDrone':
        url = '{0}{1}:{2}{3}/api/command'.format(protocol, host, port,
                                                 web_root)
        url2 = '{0}{1}:{2}{3}/api/config/downloadClient'.format(
            protocol, host, port, web_root)
        headers = {'X-Api-Key': apikey}
        # params = {'sortKey': 'series.title', 'page': 1, 'pageSize': 1, 'sortDir': 'asc'}
        if remote_path:
            logger.debug('remote_path: {0}'.format(remote_dir(dir_name)),
                         section)
            data = {
                'name': 'DownloadedEpisodesScan',
                'path': remote_dir(dir_name),
                'downloadClientId': download_id,
                'importMode': import_mode
            }
        else:
            logger.debug('path: {0}'.format(dir_name), section)
            data = {
                'name': 'DownloadedEpisodesScan',
                'path': dir_name,
                'downloadClientId': download_id,
                'importMode': import_mode
            }
        if not download_id:
            data.pop('downloadClientId')
        data = json.dumps(data)

    try:
        if section == 'SickBeard':
            logger.debug(
                'Opening URL: {0} with params: {1}'.format(url, fork_params),
                section)
            s = requests.Session()
            if not apikey and username and password:
                login = '******'.format(protocol, host, port,
                                                     web_root)
                login_params = {'username': username, 'password': password}
                r = s.get(login, verify=False, timeout=(30, 60))
                if r.status_code in [401, 403] and r.cookies.get('_xsrf'):
                    login_params['_xsrf'] = r.cookies.get('_xsrf')
                s.post(login,
                       data=login_params,
                       stream=True,
                       verify=False,
                       timeout=(30, 60))
            r = s.get(url,
                      auth=(username, password),
                      params=fork_params,
                      stream=True,
                      verify=False,
                      timeout=(30, 1800))
        elif section == 'NzbDrone':
            logger.debug('Opening URL: {0} with data: {1}'.format(url, data),
                         section)
            r = requests.post(url,
                              data=data,
                              headers=headers,
                              stream=True,
                              verify=False,
                              timeout=(30, 1800))
    except requests.ConnectionError:
        logger.error('Unable to open URL: {0}'.format(url), section)
        return ProcessResult(
            message='{0}: Failed to post-process - Unable to connect to {0}'.
            format(section),
            status_code=1,
        )

    if r.status_code not in [
            requests.codes.ok, requests.codes.created, requests.codes.accepted
    ]:
        logger.error('Server returned status {0}'.format(r.status_code),
                     section)
        return ProcessResult(
            message='{0}: Failed to post-process - Server returned status {1}'.
            format(section, r.status_code),
            status_code=1,
        )

    success = False
    queued = False
    started = False
    if section == 'SickBeard':
        if apikey:
            if r.json()['result'] == 'success':
                success = True
        else:
            for line in r.iter_lines():
                if line:
                    line = line.decode('utf-8')
                    logger.postprocess('{0}'.format(line), section)
                    if 'Moving file from' in line:
                        input_name = os.path.split(line)[1]
                    if 'added to the queue' in line:
                        queued = True
                    if 'Processing succeeded' in line or 'Successfully processed' in line:
                        success = True

        if queued:
            time.sleep(60)
    elif section == 'NzbDrone':
        try:
            res = r.json()
            scan_id = int(res['id'])
            logger.debug('Scan started with id: {0}'.format(scan_id), section)
            started = True
        except Exception as e:
            logger.warning('No scan id was returned due to: {0}'.format(e),
                           section)
            scan_id = None
            started = False

    if status != 0 and delete_failed and not os.path.dirname(
            dir_name) == dir_name:
        logger.postprocess(
            'Deleting failed files and folder {0}'.format(dir_name), section)
        remove_dir(dir_name)

    if success:
        return ProcessResult(
            message='{0}: Successfully post-processed {1}'.format(
                section, input_name),
            status_code=0,
        )
    elif section == 'NzbDrone' and started:
        n = 0
        params = {}
        url = '{0}/{1}'.format(url, scan_id)
        while n < 6:  # poll up to 6 times, 10 * wait_for seconds apart (wait_for minutes total)
            time.sleep(10 * wait_for)
            command_status = command_complete(url, params, headers, section)
            if command_status and command_status in ['completed', 'failed']:
                break
            n += 1
        if command_status:
            logger.debug(
                'The Scan command returned status: {0}'.format(command_status),
                section)
        if not os.path.exists(dir_name):
            logger.debug(
                'The directory {0} has been removed. Renaming was successful.'.
                format(dir_name), section)
            return ProcessResult(
                message='{0}: Successfully post-processed {1}'.format(
                    section, input_name),
                status_code=0,
            )
        elif command_status and command_status in ['completed']:
            logger.debug(
                'The Scan command has completed successfully. Renaming was successful.',
                section)
            return ProcessResult(
                message='{0}: Successfully post-processed {1}'.format(
                    section, input_name),
                status_code=0,
            )
        elif command_status and command_status in ['failed']:
            logger.debug(
                'The Scan command has failed. Renaming was not successful.',
                section)
            # return ProcessResult(
            #     message='{0}: Failed to post-process {1}'.format(section, input_name),
            #     status_code=1,
            # )
        if completed_download_handling(url2, headers, section=section):
            logger.debug(
                'The Scan command did not return status completed, but Completed Download Handling is enabled. Passing back to {0}.'
                .format(section), section)
            return ProcessResult(
                message=
                '{0}: Completed Download Handling is enabled. Passing back to {0}'
                .format(section),
                status_code=status,
            )
        else:
            logger.warning(
                'The Scan command did not return a valid status. Renaming was not successful.',
                section)
            return ProcessResult(
                message='{0}: Failed to post-process {1}'.format(
                    section, input_name),
                status_code=1,
            )
    else:
        return ProcessResult(
            message=
            '{0}: Failed to post-process - Returned log from {0} was not as expected.'
            .format(section),
            status_code=1,  # We did not receive Success confirmation.
        )
Example #7
    def create_or_split_multiforecast(self, request, *args, **kwargs):

        datas = request.data
        orderlist_ids = [
            int(s) for s in datas.get('order_group_key').split('-')
            if s.isdigit()
        ]
        forecast_data_list = datas.get('forecast_orders')
        for data in forecast_data_list:
            for k, v in data.items():
                data[k] = int(v)

        forecast_inbounds = ForecastInbound.objects.filter(
            relate_order_set__in=orderlist_ids)
        forecast_inbounds_orderlist = forecast_inbounds.values_list(
            'id', 'relate_order_set')
        forecast_obj = forecast_inbounds.first()
        if not forecast_obj:
            raise exceptions.APIException('no forecast inbound found')

        if not forecast_data_list:
            return Response({
                'redirect_url':
                reverse('admin:forecast_forecastinbound_changelist') +
                '?supplier_id=%s' % forecast_obj.supplier_id
            })

        forecast_order_skuids = set([o['sku_id'] for o in forecast_data_list])
        forecast_detail_values = ForecastInboundDetail.objects.filter(
            forecast_inbound__in=forecast_inbounds,
            sku_id__in=forecast_order_skuids,
            forecast_inbound__status__in=(ForecastInbound.ST_ARRIVED, ForecastInbound.ST_FINISHED),
            status=ForecastInboundDetail.NORMAL,
        ).values('sku_id', 'product_name', 'product_img', 'forecast_inbound_id', 'forecast_arrive_num')
        forecast_details_dict = defaultdict(list)
        for forecast_detail in forecast_detail_values:
            forecast_details_dict[forecast_detail['sku_id']].append(
                forecast_detail)
        forecast_ids = set([
            fo['forecast_inbound_id']
            for fo in flatten(forecast_details_dict.values())
        ])

        with transaction.atomic():
            forecast_arrive_time = datetime.datetime.now() + datetime.timedelta(days=3)
            forecast_newobj = ForecastInbound(supplier=forecast_obj.supplier)
            forecast_objdict = model_to_dict(forecast_obj)
            for name, value in forecast_objdict.items():
                if name not in ('purchaser', 'ware_house'):
                    continue
                setattr(forecast_newobj, name, value)
            forecast_newobj.supplier = forecast_obj.supplier
            forecast_newobj.forecast_arrive_time = forecast_arrive_time
            forecast_newobj.save()

            relate_orderlist_ids = set([
                s[1] for s in forecast_inbounds_orderlist
                if s[0] in forecast_ids
            ])
            for orderlist_id in relate_orderlist_ids:
                forecast_newobj.relate_order_set.add(orderlist_id)

            # TODO@meron
            for obj in forecast_data_list:
                forecast_details_list = forecast_details_dict.get(
                    obj['sku_id'], [])
                total_forecast_num = sum(
                    [s['forecast_arrive_num'] for s in forecast_details_list])
                if total_forecast_num < obj['num']:
                    raise exceptions.APIException(u'The new quantity cannot exceed the total forecast quantity')
                detail = forecast_details_list[0]
                forecast_detail = ForecastInboundDetail()
                forecast_detail.forecast_inbound = forecast_newobj
                forecast_detail.product_id = obj['product_id']
                forecast_detail.sku_id = obj['sku_id']
                forecast_detail.forecast_arrive_num = obj['num']
                forecast_detail.product_name = detail['product_name']
                forecast_detail.product_img = detail['product_img']
                forecast_detail.save()
            forecast_newobj.save()

        # serializer_data = self.get_serializer(forecast_newobj).data
        return Response({
            'redirect_url':
            reverse('admin:forecast_forecastinbound_changelist') +
            '?supplier_id=%s' % forecast_newobj.supplier_id
        })
Example #8
def process(section, dir_name, input_name=None, failed=False, client_agent='manual', download_id=None, input_category=None, failure_link=None):

    cfg = dict(core.CFG[section][input_category])

    host = cfg['host']
    port = cfg['port']
    ssl = int(cfg.get('ssl', 0))
    web_root = cfg.get('web_root', '')
    protocol = 'https://' if ssl else 'http://'
    username = cfg.get('username', '')
    password = cfg.get('password', '')
    apikey = cfg.get('apikey', '')

    if server_responding('{0}{1}:{2}{3}'.format(protocol, host, port, web_root)):
        # auto-detect correct fork
        fork, fork_params = auto_fork(section, input_category)
    elif not username and not apikey:
        logger.info('No SickBeard username or Sonarr apikey entered. Performing transcoder functions only')
        fork, fork_params = 'None', {}
    else:
        logger.error('Server did not respond. Exiting', section)
        return ProcessResult(
            status_code=1,
            message='{0}: Failed to post-process - {0} did not respond.'.format(section),
        )

    delete_failed = int(cfg.get('delete_failed', 0))
    nzb_extraction_by = cfg.get('nzbExtractionBy', 'Downloader')
    process_method = cfg.get('process_method')
    if client_agent == core.TORRENT_CLIENT_AGENT and core.USE_LINK == 'move-sym':
        process_method = 'symlink'
    remote_path = int(cfg.get('remote_path', 0))
    wait_for = int(cfg.get('wait_for', 2))
    force = int(cfg.get('force', 0))
    delete_on = int(cfg.get('delete_on', 0))
    ignore_subs = int(cfg.get('ignore_subs', 0))
    status = int(failed)
    if status > 0 and core.NOEXTRACTFAILED:
        extract = 0
    else:
        extract = int(cfg.get('extract', 0))
    # get importmode, default to 'Move' for consistency with legacy
    import_mode = cfg.get('importMode', 'Move')

    if not os.path.isdir(dir_name) and os.path.isfile(dir_name):  # If the input directory is a file, assume single file download and split dir/name.
        dir_name = os.path.split(os.path.normpath(dir_name))[0]

    specific_path = os.path.join(dir_name, str(input_name))
    clean_name = os.path.splitext(specific_path)
    if clean_name[1] == '.nzb':
        specific_path = clean_name[0]
    if os.path.isdir(specific_path):
        dir_name = specific_path

    # Attempt to create the directory if it doesn't exist and ignore any
    # error stating that it already exists. This fixes a bug where SickRage
    # won't process the directory because it doesn't exist.
    try:
        os.makedirs(dir_name)  # Attempt to create the directory
    except OSError as e:
        # Re-raise the error if it wasn't about the directory not existing
        if e.errno != errno.EEXIST:
            raise

    if 'process_method' not in fork_params or (client_agent in ['nzbget', 'sabnzbd'] and nzb_extraction_by != 'Destination'):
        if input_name:
            process_all_exceptions(input_name, dir_name)
            input_name, dir_name = convert_to_ascii(input_name, dir_name)

        # Now check if tv files exist in destination.
        if not list_media_files(dir_name, media=True, audio=False, meta=False, archives=False):
            if list_media_files(dir_name, media=False, audio=False, meta=False, archives=True) and extract:
                logger.debug('Checking for archives to extract in directory: {0}'.format(dir_name))
                core.extract_files(dir_name)
                input_name, dir_name = convert_to_ascii(input_name, dir_name)

        if list_media_files(dir_name, media=True, audio=False, meta=False, archives=False):  # Check that a video exists. if not, assume failed.
            flatten(dir_name)

    # Check video files for corruption
    good_files = 0
    num_files = 0
    for video in list_media_files(dir_name, media=True, audio=False, meta=False, archives=False):
        num_files += 1
        if transcoder.is_video_good(video, status):
            good_files += 1
            import_subs(video)
    if num_files > 0:
        if good_files == num_files and status != 0:
            logger.info('Found Valid Videos. Setting status Success')
            status = 0
            failed = 0
        if good_files < num_files and status == 0:
            logger.info('Found corrupt videos. Setting status Failed')
            status = 1
            failed = 1
            if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
                print('[NZB] MARK=BAD')
            if failure_link:
                failure_link += '&corrupt=true'
    elif client_agent == 'manual':
        logger.warning('No media files found in directory {0} to manually process.'.format(dir_name), section)
        return ProcessResult(
            message='',
            status_code=0,  # Success (as far as this script is concerned)
        )
    elif nzb_extraction_by == 'Destination':
        logger.info('Check for media files ignored because nzbExtractionBy is set to Destination.')
        if int(failed) == 0:
            logger.info('Setting Status Success.')
            status = 0
            failed = 0
        else:
            logger.info('Downloader reported an error during download or verification. Processing this as a failed download.')
            status = 1
            failed = 1
    else:
        logger.warning('No media files found in directory {0}. Processing this as a failed download'.format(dir_name), section)
        status = 1
        failed = 1
        if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
            print('[NZB] MARK=BAD')

    if status == 0 and core.TRANSCODE == 1:  # only transcode successful downloads
        result, new_dir_name = transcoder.transcode_directory(dir_name)
        if result == 0:
            logger.debug('SUCCESS: Transcoding succeeded for files in {0}'.format(dir_name), section)
            dir_name = new_dir_name

            chmod_directory = int(str(cfg.get('chmodDirectory', '0')), 8)
            logger.debug('Config setting \'chmodDirectory\' currently set to {0}'.format(oct(chmod_directory)), section)
            if chmod_directory:
                logger.info('Attempting to set the octal permission of \'{0}\' on directory \'{1}\''.format(oct(chmod_directory), dir_name), section)
                core.rchmod(dir_name, chmod_directory)
        else:
            logger.error('FAILED: Transcoding failed for files in {0}'.format(dir_name), section)
            return ProcessResult(
                message='{0}: Failed to post-process - Transcoding failed'.format(section),
                status_code=1,
            )

    # configure SB params to pass
    fork_params['quiet'] = 1
    fork_params['proc_type'] = 'manual'
    if input_name is not None:
        fork_params['nzbName'] = input_name

    for param in copy.copy(fork_params):
        if param == 'failed':
            fork_params[param] = failed
            if 'proc_type' in fork_params:
                del fork_params['proc_type']
            if 'type' in fork_params:
                del fork_params['type']

        if param == 'return_data':
            fork_params[param] = 0
            if 'quiet' in fork_params:
                del fork_params['quiet']

        if param == 'type':
            fork_params[param] = 'manual'
            if 'proc_type' in fork_params:
                del fork_params['proc_type']

        if param in ['dir_name', 'dir', 'proc_dir', 'process_directory', 'path']:
            fork_params[param] = dir_name
            if remote_path:
                fork_params[param] = remote_dir(dir_name)

        if param == 'process_method':
            if process_method:
                fork_params[param] = process_method
            else:
                del fork_params[param]

        if param in ['force', 'force_replace']:
            if force:
                fork_params[param] = force
            else:
                del fork_params[param]

        if param in ['delete_on', 'delete']:
            if delete_on:
                fork_params[param] = delete_on
            else:
                del fork_params[param]

        if param == 'ignore_subs':
            if ignore_subs:
                fork_params[param] = ignore_subs
            else:
                del fork_params[param]

        if param == 'force_next':
            fork_params[param] = 1

    # delete any unused params so we don't pass them to SB by mistake
    for k, v in list(fork_params.items()):
        if v is None:
            del fork_params[k]

    if status == 0:
        if section == 'NzbDrone' and not apikey:
            logger.info('No Sonarr apikey entered. Processing completed.')
            return ProcessResult(
                message='{0}: Successfully post-processed {1}'.format(section, input_name),
                status_code=0,
            )
        logger.postprocess('SUCCESS: The download succeeded, sending a post-process request', section)
    else:
        core.FAILED = True
        if failure_link:
            report_nzb(failure_link, client_agent)
        if 'failed' in fork_params:
            logger.postprocess('FAILED: The download failed. Sending \'failed\' process request to {0} branch'.format(fork), section)
        elif section == 'NzbDrone':
            logger.postprocess('FAILED: The download failed. Sending failed download to {0} for CDH processing'.format(fork), section)
            return ProcessResult(
                message='{0}: Download Failed. Sending back to {0}'.format(section),
                status_code=1,  # Return as failed to flag this in the downloader.
            )
        else:
            logger.postprocess('FAILED: The download failed. {0} branch does not handle failed downloads. Nothing to process'.format(fork), section)
            if delete_failed and os.path.isdir(dir_name) and not os.path.dirname(dir_name) == dir_name:
                logger.postprocess('Deleting failed files and folder {0}'.format(dir_name), section)
                remove_dir(dir_name)
            return ProcessResult(
                message='{0}: Failed to post-process. {0} does not support failed downloads'.format(section),
                status_code=1,  # Return as failed to flag this in the downloader.
            )

    url = None
    if section == 'SickBeard':
        if apikey:
            url = '{0}{1}:{2}{3}/api/{4}/?cmd=postprocess'.format(protocol, host, port, web_root, apikey)
        elif fork == 'Stheno':
            url = "{0}{1}:{2}{3}/home/postprocess/process_episode".format(protocol, host, port, web_root)
        else:
            url = '{0}{1}:{2}{3}/home/postprocess/processEpisode'.format(protocol, host, port, web_root)
    elif section == 'NzbDrone':
        url = '{0}{1}:{2}{3}/api/command'.format(protocol, host, port, web_root)
        url2 = '{0}{1}:{2}{3}/api/config/downloadClient'.format(protocol, host, port, web_root)
        headers = {'X-Api-Key': apikey}
        # params = {'sortKey': 'series.title', 'page': 1, 'pageSize': 1, 'sortDir': 'asc'}
        if remote_path:
            logger.debug('remote_path: {0}'.format(remote_dir(dir_name)), section)
            data = {'name': 'DownloadedEpisodesScan', 'path': remote_dir(dir_name), 'downloadClientId': download_id, 'importMode': import_mode}
        else:
            logger.debug('path: {0}'.format(dir_name), section)
            data = {'name': 'DownloadedEpisodesScan', 'path': dir_name, 'downloadClientId': download_id, 'importMode': import_mode}
        if not download_id:
            data.pop('downloadClientId')
        data = json.dumps(data)

    try:
        if section == 'SickBeard':
            logger.debug('Opening URL: {0} with params: {1}'.format(url, fork_params), section)
            s = requests.Session()
            if not apikey and username and password:
                login = '******'.format(protocol, host, port, web_root)
                login_params = {'username': username, 'password': password}
                r = s.get(login, verify=False, timeout=(30, 60))
                if r.status_code == 401 and r.cookies.get('_xsrf'):
                    login_params['_xsrf'] = r.cookies.get('_xsrf')
                s.post(login, data=login_params, stream=True, verify=False, timeout=(30, 60))
            r = s.get(url, auth=(username, password), params=fork_params, stream=True, verify=False, timeout=(30, 1800))
        elif section == 'NzbDrone':
            logger.debug('Opening URL: {0} with data: {1}'.format(url, data), section)
            r = requests.post(url, data=data, headers=headers, stream=True, verify=False, timeout=(30, 1800))
    except requests.ConnectionError:
        logger.error('Unable to open URL: {0}'.format(url), section)
        return ProcessResult(
            message='{0}: Failed to post-process - Unable to connect to {0}'.format(section),
            status_code=1,
        )

    if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
        logger.error('Server returned status {0}'.format(r.status_code), section)
        return ProcessResult(
            message='{0}: Failed to post-process - Server returned status {1}'.format(section, r.status_code),
            status_code=1,
        )

    success = False
    queued = False
    started = False
    if section == 'SickBeard':
        if apikey:
            if r.json()['result'] == 'success':
                success = True
        else:
            for line in r.iter_lines():
                if line:
                    line = line.decode('utf-8')
                    logger.postprocess('{0}'.format(line), section)
                    if 'Moving file from' in line:
                        input_name = os.path.split(line)[1]
                    if 'added to the queue' in line:
                        queued = True
                    if 'Processing succeeded' in line or 'Successfully processed' in line:
                        success = True

        if queued:
            time.sleep(60)
    elif section == 'NzbDrone':
        try:
            res = json.loads(r.content)
            scan_id = int(res['id'])
            logger.debug('Scan started with id: {0}'.format(scan_id), section)
            started = True
        except Exception as e:
            logger.warning('No scan id was returned due to: {0}'.format(e), section)
            scan_id = None
            started = False

    if status != 0 and delete_failed and not os.path.dirname(dir_name) == dir_name:
        logger.postprocess('Deleting failed files and folder {0}'.format(dir_name), section)
        remove_dir(dir_name)

    if success:
        return ProcessResult(
            message='{0}: Successfully post-processed {1}'.format(section, input_name),
            status_code=0,
        )
    elif section == 'NzbDrone' and started:
        n = 0
        params = {}
        url = '{0}/{1}'.format(url, scan_id)
        while n < 6:  # poll up to 6 times, 10 * wait_for seconds apart (wait_for minutes total)
            time.sleep(10 * wait_for)
            command_status = command_complete(url, params, headers, section)
            if command_status and command_status in ['completed', 'failed']:
                break
            n += 1
        if command_status:
            logger.debug('The Scan command returned status: {0}'.format(command_status), section)
        if not os.path.exists(dir_name):
            logger.debug('The directory {0} has been removed. Renaming was successful.'.format(dir_name), section)
            return ProcessResult(
                message='{0}: Successfully post-processed {1}'.format(section, input_name),
                status_code=0,
            )
        elif command_status and command_status in ['completed']:
            logger.debug('The Scan command has completed successfully. Renaming was successful.', section)
            return ProcessResult(
                message='{0}: Successfully post-processed {1}'.format(section, input_name),
                status_code=0,
            )
        elif command_status and command_status in ['failed']:
            logger.debug('The Scan command has failed. Renaming was not successful.', section)
            # return ProcessResult(
            #     message='{0}: Failed to post-process {1}'.format(section, input_name),
            #     status_code=1,
            # )
        if completed_download_handling(url2, headers, section=section):
            logger.debug('The Scan command did not return status completed, but Completed Download Handling is enabled. Passing back to {0}.'.format(section), section)
            return ProcessResult(
                message='{0}: Completed Download Handling is enabled. Passing back to {0}'.format(section),
                status_code=status,
            )
        else:
            logger.warning('The Scan command did not return a valid status. Renaming was not successful.', section)
            return ProcessResult(
                message='{0}: Failed to post-process {1}'.format(section, input_name),
                status_code=1,
            )
    else:
        return ProcessResult(
            message='{0}: Failed to post-process - Returned log from {0} was not as expected.'.format(section),
            status_code=1,  # We did not receive Success confirmation.
        )
Example #9
        s = self.surface
        rc = s.get_rect()
        pygame.draw.rect(s, black, rc)
        rc.inflate_ip(-5, -5)
        pygame.draw.rect(s, white, rc)

        r = int(rc.width // 2.5)
        if self.state == 1:
            pygame.draw.circle(s, red, rc.center, r)
        elif self.state == 2:
            pygame.draw.circle(s, blue, rc.center, r)


for p in t.product(range(5), repeat=2):
    pan[p[0]].append((0, qp(p)))
qps = list(t.filter(lambda x: not isinstance(x, int), t.flatten(pan)))

turn = 1

while True:
    engine.frame_begin()
    screen = engine.screen

    screen.fill((0, 0, 0))
    fps = engine.clock.get_fps()

    fnt = pygame.font.Font(pygame.font.match_font("arial"), 16)
    fpsinfo = fnt.render("%.2f fps" % fps, True, white)

    for he in engine.hl_events_list:
        if he[0] == "clickboard":
Example #10
 def get_scores(self, samples):
     return normalize(
         flatten(self.classifier.decision_function(samples).tolist()),
         absolute=True)
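
The normalize helper is assumed to rescale the flattened decision scores; a hypothetical version consistent with this call, where absolute=True scales by the largest magnitude:

def normalize(scores, absolute=False):
    # Hypothetical sketch: rescale so the largest (absolute) score is 1.0.
    if not scores:
        return []
    denom = max(abs(s) for s in scores) if absolute else max(scores)
    return [float(s) / denom for s in scores] if denom else list(scores)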
Example #11
BENCHMARKS: FlatIntermediateDefinition = flatten({
    "Empty": {
        "no allocation": GroupedStmts(
            r"torch.empty(())",
            r"torch::empty({0});",
        ),

        "with allocation": GroupedStmts(
            r"torch.empty((1,))",
            r"torch::empty({1});",
        ),

        "overloads": GroupedVariants(
            cpp_block=r"""
                // @Setup
                auto options_empty = c10::TensorOptions();
                auto options_full = c10::TensorOptions().dtype(at::kFloat).device(at::kCPU);
                auto optional_float = c10::make_optional(at::kFloat);

                // @TensorOptions overload
                at::empty({0}, options_empty);
                at::empty({0}, options_full);
                at::empty({0}, at::kFloat); // implicit conversion

                // @Faithful overload
                at::empty({0}, c10::nullopt, c10::nullopt, c10::nullopt, c10::nullopt, c10::nullopt);
                at::empty({0}, at::kFloat, c10::nullopt, c10::nullopt, c10::nullopt, c10::nullopt);
                at::empty({0}, optional_float, c10::nullopt, c10::nullopt, c10::nullopt, c10::nullopt);
            """
        ),
    },

    "Pointwise": {
        "Math": GroupedVariants(*parse_stmts(r"""
            Python                                   | C++
            ---------------------------------------- | ----------------------------------------
            # @setup                                 | // @setup
            torch.manual_seed(138_10_23)             | torch::manual_seed(1381023);
            x = torch.rand((4, 4))                   | auto x = torch::rand({4, 4});
            y_float = torch.ones((4, 4))             | auto y_float = torch::ones({4, 4});
            y_vector = torch.ones((4, 1))            | auto y_vector = torch::ones({4, 1});
            y_int = torch.ones(                      | auto y_int = torch::ones({4, 4}, at::kInt);
                (4, 4), dtype=torch.int32)           |
                                                     |
            # @add                                   | // @add
            x += 1.0                                 | x += 1;
            x += y_float                             | x += y_float;
            x += y_vector                            | x += y_vector;
            x += y_int                               | x += y_int;
            x + y_float                              | x + y_float;
            torch.add(x, y_float)                    | torch::add(x, y_float);
            torch.add(x, y_float, out=x)             | torch::add_out(/*out=*/x, x, y_float);
                                                     |
            # @multiply                              | // @multiply
            x *= 1.0                                 | x *= 1;
            x *= y_float                             | x *= y_float;
            x *= y_vector                            | x *= y_vector;
            x *= y_int                               | x *= y_int;
            x * y_float                              | x * y_float;
            torch.mul(x, y_float)                    | torch::mul(x, y_float);
            torch.mul(x, y_float, out=x)             | torch::mul_out(/*out=*/x, x, y_float);
                                                     |
            # @equality                              | // @equality
            x == y_float                             | x == y_float;
            x == 1.0                                 | x == 1.0;
        """)),

        "Data movement": GroupedVariants(*parse_stmts(r"""
            Python                                   | C++
            ---------------------------------------- | ----------------------------------------
            # @setup                                 | // @setup
            x = torch.ones((4, 4))                   | auto x = torch::ones({4, 4});
            y = torch.ones((4, 4))                   | auto y = torch::ones({4, 4});
            x_t = x.t()                              | auto x_t = x.t();
                                                     |
            # @contiguous (trivial)                  | // @contiguous (trivial)
            x.contiguous()                           | x.contiguous();
                                                     |
            # @contiguous (non-trivial)              | // @contiguous (non-trivial)
            x_t.contiguous()                         | x_t.contiguous();
                                                     |
            # @clone                                 | // @clone
            x.clone()                                | x.clone();
                                                     |
            # @copy_                                 | // @copy_
            x.copy_(y)                               | x.copy_(y);
                                                     |
            # @zero_                                 | // @zero_
            x.zero_()                                | x.zero_();
                                                     |
            # @RNG                                   | // @RNG
            x.uniform_()                             | x.uniform_();
        """)),
    },

    "Reduction": GroupedVariants(*parse_stmts(r"""
        Python                                   | C++
        ---------------------------------------- | ----------------------------------------
        # @setup                                 | // @setup
        x = torch.ones((4, 4))                   | auto x = torch::ones({4, 4});
                                                 |
        # @max                                   | // @max
        x.max()                                  | x.max();
                                                 |
        # @sum                                   | // @sum
        x.sum()                                  | x.sum();
                                                 |
        # @variance                              | // @variance
        x.var(0)                                 | x.var(0);
    """)),

    "Indexing": GroupedVariants(*parse_stmts(r"""
        Python                                   | C++
        ---------------------------------------- | ----------------------------------------
        # @setup                                 | // @setup
                                                 | using namespace torch::indexing;
        torch.manual_seed(6626_10_34)            | torch::manual_seed(66261034);
                                                 |
        x = torch.randn(1, 1, 1)                 | auto x = torch::randn({1, 1, 1});
        y = torch.randn(1, 1, 1)                 | auto y = torch::randn({1, 1, 1});
                                                 |
        # @Tensor-Scalar                         | // @Tensor-Scalar
        x[0] = 1                                 | x.index_put_({0}, 1);
        x[0, 0] = 1                              | x.index_put_({0, 0}, 1);
        x[0, 0, 0] = 1                           | x.index_put_({0, 0, 0}, 1);
                                                 |
        # @Tensor-Scalar (Advanced)              | // @Tensor-Scalar (Advanced)
        x[...] = 1                               | x.index_put_({"..."}, 1);
        x[:] = 1                                 | x.index_put_({Slice(None, None, None)}, 1);
        x[None] = 1                              | x.index_put_({None}, 1);
        x[False] = 1                             | x.index_put_({false}, 1);
        x[True] = 1                              | x.index_put_({true}, 1);
                                                 |
        # @Tensor-Tensor                         | // @Tensor-Tensor
        x[0] = y[0]                              | x.index_put_({0}, y.index({0}));
        x[0, 0] = y[0, 0]                        | x.index_put_({0, 0}, y.index({0, 0}));
        x[0, 0, 0] = y[0, 0, 0]                  | x.index_put_({0, 0, 0}, y.index({0, 0, 0}));
                                                 |
        # @Tensor-Tensor (Advanced)              | // @Tensor-Tensor (Advanced)
        x[...] = y[...]                          | x.index_put_({"..."}, y.index({"..."}));
        x[:] = y[:]                              | x.index_put_({Slice(None, None, None)}, y.index({Slice(None, None, None)}));
        x[None] = y[None]                        | x.index_put_({None}, y.index({None}));
        x[False] = y[False]                      | x.index_put_({false}, y.index({false}));
        x[True] = y[True]                        | x.index_put_({true}, y.index({true}));
    """)),

    "Metadata and views": GroupedVariants(*parse_stmts(r"""
        Python                                   | C++
        ---------------------------------------- | ----------------------------------------
        # @setup                                 | // @setup
        x = torch.ones((4, 4))                   | auto x = torch::ones({4, 4});
                                                 |
        # @size                                  | // @size
        x.size()[0]                              | x.sizes()[0];
                                                 |
        # @stride                                | // @stride
        x.stride(0)                              | x.stride(0);
                                                 |
        # @as_strided                            | // @as_strided
        torch.as_strided(x, (2, 3), (4, 1), 2)   | torch::as_strided(x, {2, 3}, {4, 1}, 2);
                                                 |
        # @select                                | // @select
        x.select(1, 1)                           | x.select(1, 1);
                                                 |
        # @unsqueeze                             | // @unsqueeze
        x.unsqueeze(0)                           | x.unsqueeze(0);
                                                 |
        # @view                                  | // @view
        x.view(-1, 1)                            | x.view({-1, 1});
                                                 |
        # @transpose                             | // @transpose
        x.t()                                    | x.t();
                                                 |
        # @reshape                               | // @reshape
        x.reshape((16, 1))                       | x.reshape({16, 1});
    """)),

    "nn Modules": {
        py_constructor.split("(")[0]: GroupedModules(
            f"model = torch.nn.{py_constructor}",
            f"auto model = torch::nn::{cpp_constructor};",
            setup=setup.value,
            signature="f(x) -> y",
            torchscript=torchscript,
        )

        for setup, torchscript, (py_constructor, cpp_constructor) in (
            (Setup.TRIVIAL_4D, True, ("BatchNorm2d(4)",) * 2),
            (Setup.TRIVIAL_4D, True, ("GroupNorm(2, 4)",) * 2),
            (Setup.TRIVIAL_4D, True, (
                "LayerNorm(4)",
                "LayerNorm(torch::nn::LayerNormOptions({4}))"
            )),
            (Setup.TRIVIAL_3D, True, ("Conv1d(4, 4, 1)",) * 2),
            (Setup.TRIVIAL_4D, True, ("Conv2d(4, 4, 1)",) * 2),
            (Setup.TRIVIAL_4D, True, ("MaxPool2d(2)",) * 2),
            (Setup.TRIVIAL_2D, True, ("ReLU()",) * 2),
            (Setup.TRIVIAL_2D, True, ("Sigmoid()",) * 2),
            (Setup.TRIVIAL_4D, True, ("Linear(4, 2)",) * 2),

            # TODO: LSTM can't be TorchScript'd
            (Setup.TRIVIAL_3D, False, ("LSTM(4, 2)",) * 2),
        )
    },

    "training": {
        "simple": GroupedStmts(
            *parse_stmts(r"""
                Python                                   | C++
                ---------------------------------------- | ----------------------------------------
                a0 = torch.nn.functional.relu(x * w0)    | auto a0 = torch::nn::functional::relu(x * w0);
                y = a0 * w1                              | auto y = a0 * w1;
            """),
            Setup.TRAINING.value,
            num_threads=(1, 2),
            signature=r"f(x, w0, w1) -> y",
            torchscript=True,
            autograd=True,
        ),

        "ensemble": GroupedStmts(
            *parse_stmts(r"""
                Python                                   | C++
                ---------------------------------------- | ----------------------------------------
                a0 = torch.nn.functional.gelu(x * w0)    | auto a0 = torch::nn::functional::gelu(x * w0);
                a1 = torch.nn.functional.prelu(y, w1)    | auto a1 = torch::nn::functional::prelu(y, w1);
                z = torch.nn.functional.normalize(       | auto z = torch::nn::functional::normalize(
                    torch.cat([a0, a1]),                 |     torch::cat({a0, a1}),
                    p=2.0, dim=0,                        |     torch::nn::functional::NormalizeFuncOptions().p(2).dim(0)
                ).dot(w2)                                | ).dot(w2);
            """),
            Setup.TRAINING.value,
            num_threads=(1, 2),
            signature=r"f(x, y, w0, w1, w2) -> z",
            torchscript=True,
            autograd=True,
        ),
    },

    "InferenceMode": GroupedVariants(
        # In general, the mixed-input scenario is less common, so its
        # performance matters less than the pure inference-tensor cases.
        cpp_block=r"""
            // @Setup
            auto s = torch::ones({3, 3});  // Normal Tensor
            c10::InferenceMode guard;
            auto x = torch::ones({3, 3});  // Inference Tensor

            // @View
            torch::Tensor y = x.view({9});

            // @Inplace
            torch::Tensor y = x.mul_(x);

            // @Mixed
            torch::Tensor y = x + s;
        """
    ),
})
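Since the whole definition above is wrapped in flatten, here is a minimal sketch of what such a nested-dict flattener could look like, assuming (as the FlatIntermediateDefinition annotation suggests) that it keys each leaf by the tuple of labels on the path to it; the real helper in the PyTorch benchmark suite may differ in details:

def flatten(nested, prefix=()):
    # Walk the nested benchmark definition and return a flat dict keyed
    # by label tuples, e.g. ("Pointwise", "Math") -> GroupedVariants(...).
    flat = {}
    for key, value in nested.items():
        label = prefix + (key,)
        if isinstance(value, dict):
            flat.update(flatten(value, label))
        else:
            flat[label] = value
    return flat

Under that assumption, top-level leaves such as "Reduction" get one-element labels like ("Reduction",), while nested entries such as the "Empty" overloads become ("Empty", "overloads"), giving every benchmark a unique, insertion-ordered label.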