# coding=utf-8 from __future__ import absolute_import, print_function import os from suanpan.app import app from suanpan.docker.arguments import Folder, String from suanpan.storage import StorageProxy @app.param(String(key="storageType", default="oss")) @app.param( String(key="folder", default="man_face_25k", help="girl_face_50k man_face_25k")) @app.output(Folder(key="modelDir")) def SPModels(context): args = context.args storage = StorageProxy(None, None) storage.setBackend(type=args.storageType) storage.download(os.path.join("common/model/facelab", args.folder), args.modelDir) return args.modelDir if __name__ == "__main__": SPModels() # pylint: disable=no-value-for-parameter
"""Convert per-row .npy data files into image files named by row id."""
import os

import pandas as pd

from suanpan import asyncio, path, utils
from suanpan.arguments import String
from suanpan.docker import DockerComponent as dc
from suanpan.docker.arguments import Folder, HiveTable


@dc.input(
    HiveTable(key="inputData", table="inputDataTable",
              partition="inputDataPartition"))
@dc.input(Folder(key="inputDataFolder", required=True))
@dc.output(Folder(key="outputImagesFolder", required=True))
@dc.column(String(key="idColumn", default="id"))
@dc.column(String(key="dataColumn", default="data_path"))
def SPData2Images(context):
    """For each input row, load its npy array and save it as images under the row id."""
    args = context.args
    # Image writing is fanned out to a thread pool; the pool is closed on exit.
    with asyncio.multiThread() as pool:
        for _, record in args.inputData.iterrows():
            npy_path = os.path.join(args.inputDataFolder, record[args.dataColumn])
            array = utils.loadFromNpy(npy_path)
            dest_prefix = os.path.join(args.outputImagesFolder,
                                       record[args.idColumn])
            utils.saveAllAsImages(dest_prefix, array, pool=pool)
    return args.outputImagesFolder
# coding=utf-8
"""Notebook demo: declare one folder input, run custom code, send one folder output."""
from __future__ import absolute_import, print_function

import utils  # sibling demo helper providing utils.hello(); was missing before

from suanpan.docker import DockerComponent
from suanpan.docker.arguments import Folder
from suanpan.notebook import Notebook

notebook = Notebook(DockerComponent, "Demo")
# Declare inputs
notebook.input(Folder(key="inputData1", required=True))
# Declare outputs
notebook.output(Folder(key="outputData1", required=True))

context = notebook.init()
args = context.args

# Inspect the data sent by the upstream node.
print(args.inputData1)

# Custom user code goes here.
utils.hello()

# Send args.outputData1 to the next node.
# Fix: original read `arsgs.outputData1`, a misspelling that raised NameError.
notebook.save(context, args.outputData1)
# coding: utf-8
"""Combine two input folders into one output folder."""
from __future__ import print_function

from suanpan import path
from suanpan.arguments import String
from suanpan.docker import DockerComponent as dc
from suanpan.docker.arguments import Folder


@dc.input(Folder(key="inputFolder1", required=True))
@dc.input(Folder(key="inputFolder2", required=True))
@dc.output(Folder(key="outputFolder", required=True))
# @dc.param(String(key="mode", default="replace"))
def SPFolderCombine(context):
    """Merge the contents of both input folders into the output folder and return it."""
    args = context.args
    sources = [args.inputFolder1, args.inputFolder2]
    return path.merge(sources, dist=args.outputFolder)


if __name__ == "__main__":
    SPFolderCombine()  # pylint: disable=no-value-for-parameter
# NOTE(review): this chunk starts mid-file — the bare `return path` below is
# the tail of a function defined above the visible region; its indentation
# here is assumed.
    return path


@dc.input(
    HiveTable(
        key="inputTrainData",
        table="inputTrainDataTable",
        partition="inputTrainDataPartition",
    ))
@dc.input(
    HiveTable(
        key="inputValidateData",
        table="inputValidateDataTable",
        partition="inputValidateDataPartition",
    ))
@dc.input(Folder(key="inputDataFolder", required=True))
@dc.input(Checkpoint(key="inputCheckpoint"))
@dc.output(Checkpoint(key="outputCheckpoint", required=True))
@dc.column(String(key="idColumn", default="id"))
@dc.param(Int(key="epochs", default=100))
@dc.param(Int(key="batchSize", default=16))
@dc.param(Float(key="learningRate", default=0.01))
@dc.param(Float(key="momentum", default=0.9))
@dc.param(Float(key="weightDecay", default=1e-4))
@dc.param(Bool(key="distributed", default=False))
@dc.param(String(key="ckptFolder"))
# NOTE(review): saveFreq is declared Bool but defaults to 1 and its name
# suggests an integer save frequency — confirm whether Int was intended.
@dc.param(Bool(key="saveFreq", default=1))
def SPNNetTrain(context):
    # Seed torch so training runs are reproducible.
    torch.manual_seed(0)
    args = context.args
    # ... function body continues beyond this chunk.
from suanpan.docker.arguments import Folder, HiveTable


def scan_prep_results(folder, id_column, image_column):
    # Build a table of preprocessed results: one row per "*_clean.npy" file in
    # `folder`, with the id taken from the filename minus that suffix.
    data_suffix = "_clean.npy"
    return pd.DataFrame(
        [(file[:-len(data_suffix)], file)
         for file in os.listdir(folder)
         if file.endswith(data_suffix)],
        columns=[id_column, image_column],
    )


@dc.input(
    Folder(
        key="inputDataFolder",
        required=True,
        help="DSB stage1/2 or similar directory path.",
    ))
@dc.output(
    HiveTable(key="outputData", table="outputDataTable",
              partition="outputDataPartition"))
@dc.output(
    Folder(
        key="outputDataFolder",
        required=True,
        help="Directory to save preprocessed npy files to.",
    ))
# NOTE(review): sibling components declare column-name strings with
# @dc.column(...) rather than @dc.output(...) — confirm these two decorators.
@dc.output(String(key="idColumn", default="patient"))
@dc.output(String(key="imageColumn", default="image_path"))
# Function body continues beyond this chunk.
def SPPredictPreprocess(context):
# coding=utf-8 from __future__ import absolute_import, print_function import os from suanpan.app import app from suanpan.docker.arguments import Folder, String from suanpan.storage import StorageProxy @app.param(String(key="storageType", default="oss")) @app.param( String(key="folder", default="man_1", help="girl_0 man_0 girl_1 man_1")) @app.output(Folder(key="outputData")) def SPMaterial(context): args = context.args storage = StorageProxy(None, None) storage.setBackend(type=args.storageType) storage.download( os.path.join("common/data/facelab_material", args.folder, "data.mp4"), os.path.join(args.outputData, "data.mp4"), ) return args.outputData if __name__ == "__main__": SPMaterial() # pylint: disable=no-value-for-parameter
"""Download one of the classic ML datasets from shared storage."""
from __future__ import absolute_import, print_function

from suanpan.docker import DockerComponent as dc
from suanpan.docker.arguments import Folder, String
from suanpan.storage import storage

# NOTE(review): name misspells "DATASET"; kept as-is because renaming a
# module-level constant could break external references.
DATESET_PATH_PREFIX = "common/data"


@dc.param(
    String(
        key="dataset",
        required=True,
        help=
        "allowed values: ['boston_housing', 'breast_cancer', 'california_housing', "
        "'covertype', 'diabetes', 'digits', 'iris', 'kddcup', 'linnerud', 'wine', 'titanic'"
        ", 'sun_spots', 'macrodata']",
    ))
@dc.output(Folder(key="outputDir"))
def SPClassicDatasets(context):
    """Download the requested dataset folder locally and return its path."""
    args = context.args
    remote = storage.storagePathJoin(DATESET_PATH_PREFIX, args.dataset)
    storage.download(remote, args.outputDir)
    return args.outputDir


if __name__ == "__main__":
    SPClassicDatasets()  # pylint: disable=no-value-for-parameter
from dsb.split_combine import SplitComb
from dsb.test_detect import test_detect

from suanpan import asyncio, path, utils
from suanpan.arguments import Bool, Int, String
from suanpan.docker import DockerComponent as dc
from suanpan.docker.arguments import Checkpoint, Folder, HiveTable


# Detection component header: takes preprocessed npy volumes plus a trained
# checkpoint and emits bounding-box outputs. The chunk is cut off mid-way
# through the outputBboxFolder argument; the decorated function follows
# beyond this view.
@dc.input(
    HiveTable(key="inputData", table="inputDataTable",
              partition="inputDataPartition"))
@dc.input(
    Folder(
        key="inputDataFolder",
        required=True,
        help="Directory to save preprocessed npy files to.",
    ))
@dc.input(
    Checkpoint(key="inputCheckpoint", required=True, help="Ckpt model file."))
@dc.output(
    HiveTable(
        key="outputBboxData",
        table="outputBboxDataTable",
        partition="outputBboxDataPartition",
    ))
@dc.output(
    Folder(
        key="outputBboxFolder",
        required=True,
        help="Directory to save bbox npy files to.",
# coding=utf-8
"""Minimal DockerComponent demo: one folder input, one param, one folder output."""
from __future__ import absolute_import, print_function

import suanpan
import utils

from suanpan.docker import DockerComponent as dc
from suanpan.docker.arguments import Folder, Int


# Declare inputs
@dc.input(Folder(key="inputData1", alias="inputFolder", required=True))
# Fix: dc.param takes an argument instance (every sibling component passes
# String/Int/Bool objects), not bare keyword arguments.
@dc.param(Int(key="param1", alias="p", default=1))
# Declare outputs
@dc.output(Folder(key="outputData1", alias="outputFolder", required=True))
def Demo(context):
    """Print the received input and param, run the demo helper, return the output folder."""
    # Pull parsed arguments off the context.
    args = context.args
    # Inspect the data sent by the upstream node.
    print(args.inputFolder)
    print(args.p)
    # Custom user code goes here.
    utils.hello()
    # Send args.outputData1 to the next node.
    return args.outputFolder


if __name__ == "__main__":
    suanpan.run(Demo)
def scan_prep_results(folder, id_column, image_column, label_column):
    # Build a table with one row per "*_clean.npy" file in `folder`: the id is
    # the filename minus the data suffix, and the label path swaps the data
    # suffix for "_label.npy".
    data_suffix = "_clean.npy"
    label_suffix = "_label.npy"
    return pd.DataFrame(
        [(file[:-len(data_suffix)], file,
          file.replace(data_suffix, label_suffix))
         for file in os.listdir(folder) if file.endswith(data_suffix)],
        columns=[id_column, image_column, label_column],
    )


# Decorator stack for the component that follows; the chunk is cut off mid-way
# through the inputLabels File argument.
@dc.input(
    Folder(key="inputRawFolder",
           required=True,
           help="Luna raw or similar directory path."))
@dc.input(
    Folder(
        key="inputSegmentFolder",
        required=True,
        help="Luna segment or similar directory path.",
    ))
@dc.input(
    File(key="inputAbbr",
         name="data",
         type="csv",
         required=True,
         help="Luna abbr path."))
@dc.input(
    File(key="inputLabels",