Code Example #1
# Inferred preamble for this truncated snippet: the listing begins
# mid-function, so the imports and the helper's name/signature below are
# assumptions (the suanpan import paths follow the other examples on this page).
import torch
from torch.nn import DataParallel

from suanpan.arguments import Float, Int, String
from suanpan.docker import DockerComponent as dc
from suanpan.docker.arguments import Checkpoint, Folder, HiveTable


def save(net, path, **kwargs):
    # Unwrap DataParallel so checkpoint keys are saved without the "module." prefix.
    if isinstance(net, DataParallel):
        state_dict = net.module.state_dict()
    else:
        state_dict = net.state_dict()
    # Move every tensor to CPU so the checkpoint loads on GPU-less machines.
    for key in state_dict.keys():
        state_dict[key] = state_dict[key].cpu()

    # Store any extra metadata (epoch, hyperparameters, ...) alongside the weights.
    kwargs.update(state_dict=state_dict)
    torch.save(kwargs, path)
    return path


@dc.input(
    HiveTable(
        key="inputTrainData",
        table="inputTrainDataTable",
        partition="inputTrainDataPartition",
    ))
@dc.input(
    HiveTable(
        key="inputValidateData",
        table="inputValidateDataTable",
        partition="inputValidateDataPartition",
    ))
@dc.input(Folder(key="inputDataFolder", required=True))
@dc.input(Checkpoint(key="inputCheckpoint"))
@dc.output(Checkpoint(key="outputCheckpoint", required=True))
@dc.column(String(key="idColumn", default="id"))
@dc.param(Int(key="epochs", default=100))
@dc.param(Int(key="batchSize", default=16))
@dc.param(Float(key="learningRate", default=0.01))
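
A minimal usage sketch for the save helper above, assuming the inferred
signature; the model, path, and keyword metadata are illustrative:

import torch

net = torch.nn.Linear(4, 2)
ckpt_path = save(net, "/tmp/model.ckpt", epoch=10, learningRate=0.01)

# The checkpoint is a plain dict: extra metadata plus a CPU "state_dict".
loaded = torch.load(ckpt_path)
net.load_state_dict(loaded["state_dict"])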
Code Example #2
# coding: utf-8
from __future__ import print_function

import os

import pandas as pd

from suanpan import asyncio, path, utils
from suanpan.arguments import String
from suanpan.docker import DockerComponent as dc
from suanpan.docker.arguments import Folder, HiveTable


@dc.input(
    HiveTable(key="inputData",
              table="inputDataTable",
              partition="inputDataPartition"))
@dc.input(Folder(key="inputDataFolder", required=True))
@dc.output(Folder(key="outputImagesFolder", required=True))
@dc.column(String(key="idColumn", default="id"))
@dc.column(String(key="dataColumn", default="data_path"))
def SPData2Images(context):
    args = context.args

    # For each row in the input table, load the referenced .npy array and
    # dump it as image files under a per-row prefix, with the image writes
    # dispatched to a thread pool.
    with asyncio.multiThread() as pool:
        for _, row in args.inputData.iterrows():
            image = utils.loadFromNpy(
                os.path.join(args.inputDataFolder, row[args.dataColumn]))
            prefix = os.path.join(args.outputImagesFolder, row[args.idColumn])
            utils.saveAllAsImages(prefix, image, pool=pool)
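
suanpan's asyncio.multiThread() presumably wraps a thread pool. A rough
standard-library sketch of the same loop, assuming utils.saveAllAsImages
writes each 2D slice of a 3D volume as an image; the table, folders, and
helper below are illustrative, not from the source:

import os
from concurrent.futures import ThreadPoolExecutor

import numpy as np
import pandas as pd
from PIL import Image


def save_slices(prefix, volume):
    # One PNG per slice, e.g. case1_0.png, case1_1.png, ...
    for i, slice_ in enumerate(volume):
        Image.fromarray(slice_.astype(np.uint8)).save("%s_%d.png" % (prefix, i))


table = pd.DataFrame({"id": ["case1"], "data_path": ["case1.npy"]})
os.makedirs("out", exist_ok=True)
with ThreadPoolExecutor() as pool:
    tasks = [
        pool.submit(save_slices,
                    os.path.join("out", row["id"]),
                    np.load(os.path.join("in", row["data_path"])))
        for _, row in table.iterrows()
    ]
    for task in tasks:
        task.result()  # surface any worker exceptions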
Code Example #3
from torch.utils.data import DataLoader

import dsb.net_detector as nodmodel
from dsb import preprocessing
from dsb.data_detector import DataBowl3Detector, collate
from dsb.split_combine import SplitComb
from dsb.test_detect import test_detect
from suanpan import asyncio, path, utils
from suanpan.arguments import Bool, Int, String
from suanpan.docker import DockerComponent as dc
from suanpan.docker.arguments import Checkpoint, Folder, HiveTable


@dc.input(
    HiveTable(key="inputData",
              table="inputDataTable",
              partition="inputDataPartition"))
@dc.input(
    Folder(
        key="inputDataFolder",
        required=True,
        help="Directory to save preprocessed npy files to.",
    ))
@dc.input(
    Checkpoint(key="inputCheckpoint", required=True, help="Ckpt model file."))
@dc.output(
    HiveTable(
        key="outputBboxData",
        table="outputBboxDataTable",
        partition="outputBboxDataPartition",
    ))
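
The listing is truncated before the component body, but the dsb imports match
the grt123 DSB2017 nodule-detection pipeline. A hedged sketch of how those
pieces are typically wired, following that reference code; every name, value,
and signature here is an assumption, not taken from this listing (checkpoint
loading and GPU setup omitted):

data_dir = "preprocessed"   # folder of preprocessed .npy volumes (illustrative)
file_list = ["case1"]       # case ids to detect on (illustrative)
save_dir = "bbox"           # where detection results are written (illustrative)

config, nod_net, loss, get_pbb = nodmodel.get_model()

# Split each CT volume into overlapping patches, run the detector on them,
# then recombine per-patch outputs into whole-volume bounding boxes.
split_comber = SplitComb(144, config["max_stride"], config["stride"], 32,
                         config["pad_value"])
dataset = DataBowl3Detector(data_dir, file_list, config, phase="test",
                            split_comber=split_comber)
loader = DataLoader(dataset, batch_size=1, shuffle=False,
                    collate_fn=collate, pin_memory=False)
test_detect(loader, nod_net, get_pbb, save_dir, config, n_gpu=1)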