def parse(path, extra=None) -> GenericPipeline:
    """Load a generic pipeline configuration.

    ``path`` may be a direct path to a YAML config file, a directory
    containing ``config.yaml``, or an experiment name resolved under the
    current project's ``experiments`` folder.  When the project has a
    ``common.yaml`` and no explicit ``extra`` is given, it is used as the
    extra config.  Python files found in the project's ``modules``
    directory are registered as extra imports, with that directory put
    on ``sys.path``.

    :param path: config file path, directory, or experiment name
    :param extra: optional path to an additional (shared) config file
    :return: the parsed :class:`GenericPipeline` configuration
    """
    extra_imports = []
    if isinstance(path, str):
        if not os.path.exists(path) or os.path.isdir(path):
            cur_project_pth = context.get_current_project_path()
            # Resolve a directory (or bare experiment name) to its config.yaml.
            config_path = os.path.join(path, "config.yaml")
            if os.path.exists(config_path):
                path = config_path
            else:
                config_path = os.path.join(cur_project_pth, "experiments", path, "config.yaml")
                if os.path.exists(config_path):
                    path = config_path
            common_path = os.path.join(cur_project_pth, "common.yaml")
            if os.path.exists(common_path) and extra is None:
                extra = common_path
            modules_path = os.path.join(cur_project_pth, 'modules')
            if os.path.exists(modules_path):
                # Register the modules dir exactly once; the original
                # re-inserted it for every directory entry, piling up
                # duplicate sys.path entries.
                sys.path.insert(0, modules_path)
                for m in os.listdir(modules_path):
                    # Only genuine .py files: the previous substring test
                    # (".py" in m) also matched .pyc/.pyx and names merely
                    # containing ".py".
                    if m.endswith(".py"):
                        extra_imports.append(m[:-3])
    cfg = configloader.parse("generic", path, extra)
    cfg.path = path
    for e in extra_imports:
        cfg.imports.append(e)
    return cfg
def perform(self, server, reporter: ProgressMonitor):
    """Build an augmentation-preview dataset for the experiment.

    Parses the task's augmentation config into an imgaug pipeline, wraps
    the experiment's dataset, and attaches a visualizer that renders each
    item after augmentation to a cached PNG.

    :param server: provides ``experiment(path)`` lookup
    :param reporter: progress monitor (currently unused in this body)
    :return: AnalizeResults holding the single wrapped dataset
    """
    ms=ModelSpec(**self.spec)
    # NOTE(review): yaml.load without an explicit Loader is deprecated and
    # unsafe on untrusted input — consider yaml.safe_load; confirm the
    # config source is trusted.
    settings = yaml.load(self.augmentationConfig)
    augmenters_lst = configloader.parse("augmenters", settings)
    if len(augmenters_lst) > 0:
        augmenter = iaa.Sequential(augmenters_lst)
    # NOTE(review): if augmenters_lst is empty, `augmenter` is never bound
    # and the visualizer below raises NameError on first use — verify the
    # config always yields at least one augmenter.
    exp:Experiment=server.experiment(self.experimentPath)
    cf=exp.parse_config({"augmentation": settings})
    ds = _cache.get_dataset(exp, self.datasetName)
    # NOTE(review): wrappedModel and targets are computed but never used in
    # this method — possibly kept for their caching side effects; confirm.
    wrappedModel = ms.wrap(cf, exp)
    targets=_cache.get_targets(exp,self.datasetName)
    def augmented_image_visializer(predictionItem:PredictionItem):
        # Renders the augmented view of one item, caching by item id.
        cache_path=visualization_ctx().path
        path = cache_path + str(predictionItem.id) + ".png"
        if os.path.exists(path):
            # Cache hit: reuse the previously rendered image.
            return path
        if len(predictionItem.y.shape) > 1:
            # Should be a segmentation mask in this case, better to refactor
            # this later: augment image and mask together so they stay
            # aligned, then overlay the mask onto the augmented image.
            batch = imgaug.Batch(images=[predictionItem.x], segmentation_maps=[imgaug.SegmentationMapsOnImage(predictionItem.y, shape=predictionItem.y.shape)])
            aug_batch = augmenter.augment_batch(batch)
            img = aug_batch.segmentation_maps_aug[0].draw_on_image(aug_batch.images_aug[0], alpha=0.5)[0]
        else:
            # Plain image target: augment the image only.
            img = augmenter.augment_image(predictionItem.x)
        imageio.imwrite(path,img)
        return path
    wrapped_dataset=WrappedDS(ds,list(range(len(ds))),"sample",None,None)
    sig=inspect.signature(augmented_image_visializer)
    visualizerFunc = projects.WrappedVisualizer("augmented", augmented_image_visializer, sig)
    # Attach the visualizer with a throwaway temp dir as its cache root.
    wrapped_dataset._visualizer=visualizerFunc.create(wrapped_dataset,tempfile.mkdtemp())
    return AnalizeResults([wrapped_dataset],None)
def parse(path, extra=None) -> GenericPipeline:
    """Load a generic pipeline configuration.

    ``path`` may be a direct config file path or an experiment name that
    resolves to ``<project>/experiments/<path>/config.yaml``.  A project
    ``common.yaml`` is used as the extra config when ``extra`` is not
    given.  Python files in the project's ``modules`` directory are
    registered as extra imports, with that directory put on ``sys.path``.

    :param path: config file path or experiment name
    :param extra: optional path to an additional (shared) config file
    :return: the parsed :class:`GenericPipeline` configuration
    """
    extra_imports = []
    if isinstance(path, str):
        if not os.path.exists(path) or os.path.isdir(path):
            pth = context.get_current_project_path()
            experiment_cfg = os.path.join(pth, "experiments", path, "config.yaml")
            if os.path.exists(experiment_cfg):
                path = experiment_cfg
            common_cfg = os.path.join(pth, "common.yaml")
            if os.path.exists(common_cfg) and extra is None:
                extra = common_cfg
            modules_dir = os.path.join(pth, "modules")
            if os.path.exists(modules_dir):
                # Register the modules dir exactly once; the original
                # re-inserted it for every directory entry, piling up
                # duplicate sys.path entries.
                sys.path.insert(0, modules_dir)
                for m in os.listdir(modules_dir):
                    # Only genuine .py files: the previous substring test
                    # (".py" in m) also matched .pyc/.pyx and names merely
                    # containing ".py".
                    if m.endswith(".py"):
                        extra_imports.append(m[:-3])
    cfg = configloader.parse("generic", path, extra)
    cfg.path = path
    for e in extra_imports:
        cfg.imports.append(e)
    return cfg
def __init__(self, **args):
    """Configure a periodically-triggered callback module.

    Keyword args:
        relSize: relative size setting (mutually exclusive with ``absSize``)
        absSize: absolute size setting (mutually exclusive with ``relSize``)
        periodEpochs: trigger period in epochs (mutually exclusive with
            ``periodSteps``)
        periodSteps: trigger period in steps (mutually exclusive with
            ``periodEpochs``)
        then: optional follow-up callback config; the first callback parsed
            from it is stored in ``self.then``

    Raises:
        ValueError: if neither or both of the size options are given, or
            both period options are given.
    """
    super(CallbackModule, self).__init__()
    # dict.get returns None for missing keys — same semantics as the
    # previous `args[k] if k in args else None` expressions.
    self.relSize = args.get('relSize')
    self.absSize = args.get('absSize')
    self.periodEpochs = args.get('periodEpochs')
    self.periodSteps = args.get('periodSteps')
    if self.absSize is None and self.relSize is None:
        raise ValueError("'absSize' or 'relSize' must be specified for CallbackModule")
    if self.absSize is not None and self.relSize is not None:
        raise ValueError("'absSize' and 'relSize' are mutually exclusive for CallbackModule")
    if self.periodEpochs is not None and self.periodSteps is not None:
        raise ValueError("'periodEpochs' and 'periodSteps' are mutually exclusive for CallbackModule")
    self.then = None
    if 'then' in args:
        parsedThen = configloader.parse("callbacks", args['then'])
        if len(parsedThen) > 0:
            self.then = parsedThen[0]
def parse(path) -> PipelineConfig:
    """Parse a segmentation pipeline config file and record its origin.

    :param path: path to the segmentation config file
    :return: the parsed :class:`PipelineConfig` with ``path`` set
    """
    config = configloader.parse("segmentation", path)
    config.path = path
    return config
def execute(self, kf: datasets.DefaultKFoldedDataSet, model: keras.Model, ec: ExecutionConfig, callbacks=None):
    """Run this training stage on one fold of the k-folded dataset.

    Assembles the callback list (config callbacks, reporter, loggers,
    checkpointing), optionally resumes from a previous run of the same
    stage/fold, loads initial weights if configured, and finally trains.

    :param kf: the k-folded dataset to train on
    :param ec: execution config carrying stage/fold and output paths
    :param callbacks: explicit callback list; when None, the config's
        callbacks are used (and may be replaced by ``self.dict['callbacks']``)
    """
    # Freeze/unfreeze the encoder according to the stage config.
    if 'unfreeze_encoder' in self.dict and self.dict['unfreeze_encoder']:
        self.unfreeze(model)
    if 'unfreeze_encoder' in self.dict and not self.dict[
        'unfreeze_encoder']:
        self.freeze(model)
    if callbacks is None:
        # Copy so appends below don't mutate the shared config list.
        cb = [] + self.cfg.callbacks
    else:
        cb = callbacks
    if self.cfg._reporter is not None:
        # Bail out early if the user cancelled the run.
        if self.cfg._reporter.isCanceled():
            return
        cb.append(ReporterCallback(self.cfg._reporter))
        pass
    prevInfo = None
    if self.cfg.resume:
        # Resume: pick up the best-run record for this exact stage/fold.
        allBest = self.cfg.info()
        filtered = list(
            filter(lambda x: x.stage == ec.stage and x.fold == ec.fold, allBest))
        if len(filtered) > 0:
            prevInfo = filtered[0]
            self.lr = prevInfo.lr
    if self.loss or self.lr:
        self.cfg.compile(model, self.cfg.createOptimizer(self.lr), self.loss)
    if self.initial_weights is not None:
        try:
            model.load_weights(self.initial_weights)
        except:
            # NOTE(review): bare except — presumably catches a shape
            # mismatch on the final layer; temporarily rename it so a
            # by-name load skips it, then restore. Confirm intent.
            z = model.layers[-1].name
            model.layers[-1].name = "tmpName12312"
            model.load_weights(self.initial_weights, by_name=True)
            model.layers[-1].name = z
    # Stage-level 'callbacks' REPLACES the list built so far;
    # 'extra_callbacks' appends to it.
    if 'callbacks' in self.dict:
        cb = configloader.parse("callbacks", self.dict['callbacks'])
    if 'extra_callbacks' in self.dict:
        cb = cb + configloader.parse("callbacks", self.dict['extra_callbacks'])
    # kepoch: last completed epoch from the CSV logger; -1 means fresh run.
    kepoch = -1
    if "logAll" in self.dict and self.dict["logAll"]:
        cb = cb + [AllLogger(ec.metricsPath() + "all.csv")]
    cb.append(KFoldCallback(kf))
    kepoch = self._addLogger(model, ec, cb, kepoch)
    md = self.cfg.primary_metric_mode
    if self.cfg.gpus == 1:
        # Single-GPU: checkpoint the best weights on the primary metric.
        # NOTE(review): no checkpoint callback is added for multi-GPU —
        # presumably handled elsewhere; confirm.
        mcp = keras.callbacks.ModelCheckpoint(
            ec.weightsPath(),
            save_best_only=True,
            monitor=self.cfg.primary_metric,
            mode=md,
            verbose=1)
        if prevInfo != None:
            # Seed the checkpoint's best value so resuming doesn't
            # overwrite a better previous result.
            mcp.best = prevInfo.best
        cb.append(mcp)
    self.add_visualization_callbacks(cb, ec, kf)
    # Nothing left to train for this stage — all epochs already logged.
    if self.epochs - kepoch == 0:
        return
    self.loadBestWeightsFromPrevStageIfExists(ec, model)
    self._doTrain(kf, model, ec, cb, kepoch)
    print('saved')
    pass
def parse(path) -> ClassificationPipeline:
    """Parse a classification pipeline config file and record its origin.

    :param path: path to the classification config file
    :return: the parsed :class:`ClassificationPipeline` with ``path`` set
    """
    pipeline = configloader.parse("classification", path)
    pipeline.path = path
    return pipeline