def add_watermark(id):
    """Watermark a stored vehicle image in place.

    Loads the ``vehicle.image`` model instance with primary key ``id``,
    pastes ``settings.WATERMARK_IMAGE`` into the bottom-right corner at
    0.4 opacity, re-encodes the result as JPEG and overwrites the
    original stored file under the same name.

    NOTE: the parameter name ``id`` shadows the builtin, but it is part
    of the public signature and is kept for backward compatibility.

    :param id: primary key of the ``vehicle.image`` record to watermark.
    :returns: ``True`` on success.
    """
    obj = get_model('vehicle', 'image').objects.get(pk=id)
    img_file = obj.image
    im = StringIO(img_file.read())
    image = ImageObj.open(im)
    # JPEG cannot represent palette/alpha modes; convert anything that is
    # not greyscale ('L') or plain 'RGB' before saving.
    if image.mode not in ('L', 'RGB'):
        image = image.convert('RGB')
    overlay = ImageObj.open(getattr(settings, 'WATERMARK_IMAGE'))
    image = watermark(image, overlay, 'bottomright', 0.4)
    temp_handle = StringIO()
    image.save(temp_handle, 'jpeg')
    temp_handle.seek(0)
    # BUG FIX: the buffer holds JPEG data (saved with format 'jpeg'
    # above), so the upload must be declared 'image/jpeg' — it was
    # previously mislabelled as 'image/png'.
    suf = SimpleUploadedFile(obj.image.name, temp_handle.read(),
                             content_type='image/jpeg')
    obj.image.save(obj.image.name, suf)
    obj.save()
    return True
def create_watermark(self, model_instance):
    """Render the configured watermark onto ``model_instance``'s image.

    Looks up the active ``Watermark`` record named ``self.watermark``,
    composites it onto the image referenced by ``self.populate_from``
    using the field's scale/rotation/position/opacity/tile/greyscale
    settings, and returns the result wrapped in a Django ``File``.

    :param model_instance: model instance carrying the source image field.
    :returns: ``File`` with the watermarked image, or ``None`` when the
        watermark record does not exist or the source/mark image cannot
        be opened.
    """
    # get the watermark
    name = self.watermark
    try:
        watermark = Watermark.objects.get(name=name, is_active=True)
    except Watermark.DoesNotExist:
        logger.error('Watermark "%s" does not exist. Unable to watermark image %s' % (name, model_instance.pk))
        return

    # get the image to watermark
    try:
        image_field = getattr(model_instance, self.populate_from)
        target = Image.open(image_field.path)
        mark = Image.open(watermark.image.path)
    except (IOError, ValueError):
        # best-effort: missing/unreadable files silently yield no watermark
        return

    # determine the actual value that the parameters provided will render
    scale = utils.determine_scale(self.scale, target, mark, self.dimension_ratio)
    rotation = utils.determine_rotation(self.rotation, mark)
    pos = utils.determine_position(self.position, target, mark)
    params = {
        'position': pos,
        'opacity': self.opacity,
        'scale': scale,
        'tile': self.tile,
        'greyscale': self.greyscale,
        'rotation': rotation,
    }
    logger.debug('Params: %s' % params)
    im = utils.watermark(target, mark, **params)

    image_io = StringIO.StringIO()
    image_io.name = os.path.basename(image_field.name)
    # BUG FIX: os.path.splitext keeps the leading dot ('.jpg'), so the
    # old comparison against ('jpg', 'jpeg', 'png') never matched and
    # EVERY file — including valid .png/.jpeg — was renamed to
    # '<stem>.jpg'. Compare against dotted extensions instead so only
    # genuinely unsupported extensions fall back to .jpg.
    name, ext = os.path.splitext(image_io.name)
    if not ext or ext.lower() not in ('.jpg', '.jpeg', '.png'):
        image_io.name = "%s%s" % (name, '.jpg')
    im.save(image_io, quality=self.quality)
    image_io.seek(0)
    return File(image_io)
def main(hparams):
    """
    Main training routine specific for this project

    Sets up an optional Neptune experiment tracker, builds the Lightning
    model and trainer from ``hparams``, and runs training. On any
    exception (including KeyboardInterrupt) the Neptune experiment is
    stopped with the error message before the exception is re-raised.

    :param hparams: hydra/omegaconf-style config; sections used here are
        ``tracker`` (optional, with ``tracker.neptune``), ``PL`` (kwargs
        forwarded to ``pl.Trainer``) and ``train.max_epochs``.
    """
    # ------------------------
    # 0 INIT TRACKER
    # ------------------------
    # https://docs.neptune.ai/integrations/pytorch_lightning.html
    try:
        import neptune  # presence check only; NeptuneLogger does the work
        NEPTUNE_AVAILABLE = True
    except ImportError:  # pragma: no-cover
        NEPTUNE_AVAILABLE = False

    USE_NEPTUNE = False
    if getattr(hparams, 'tracker', None) is not None:
        if getattr(hparams.tracker, 'neptune', None) is not None:
            USE_NEPTUNE = True

    if USE_NEPTUNE and not NEPTUNE_AVAILABLE:
        warnings.warn(
            'You want to use `neptune` logger which is not installed yet,'
            ' install it with `pip install neptune-client`.', UserWarning)
        # give the user a chance to notice the warning before training starts
        time.sleep(5)

    tracker = None
    if NEPTUNE_AVAILABLE and USE_NEPTUNE:
        neptune_params = hparams.tracker.neptune
        # Optionally read the API token from a file and export it so the
        # Neptune client picks it up from the environment.
        fn_token = getattr(neptune_params, 'fn_token', None)
        if fn_token is not None:
            p = Path(neptune_params.fn_token).expanduser()
            if p.exists():
                with open(p, 'r') as f:
                    # first line of the file, newline stripped
                    token = f.readline().splitlines()[0]
                os.environ['NEPTUNE_API_TOKEN'] = token
        hparams_flatten = dict_flatten(hparams, sep='.')
        experiment_name = hparams.tracker.get('experiment_name', None)
        tags = list(hparams.tracker.get('tags', []))
        offline_mode = hparams.tracker.get('offline', False)
        tracker = NeptuneLogger(
            project_name=neptune_params.project_name,
            experiment_name=experiment_name,
            params=hparams_flatten,
            tags=tags,
            offline_mode=offline_mode,
            upload_source_files=["../../../*.py"],  # because hydra change current dir
        )

    try:
        # log environment/run metadata as experiment artifacts
        if tracker is not None:
            watermark_s = watermark(packages=[
                'python', 'nvidia', 'cudnn', 'hostname', 'torch',
                'sparseconvnet', 'pytorch-lightning', 'hydra-core',
                'numpy', 'plyfile'
            ])
            log_text_as_artifact(tracker, watermark_s, "versions.txt")
            # arguments_of_script
            sysargs_s = str(sys.argv[1:])
            log_text_as_artifact(tracker, sysargs_s, "arguments_of_script.txt")
            # hydra's resolved config and CLI overrides, when present
            for key in ['overrides.yaml', 'config.yaml']:
                p = Path.cwd() / '.hydra' / key
                if p.exists():
                    tracker.log_artifact(str(p), f'hydra_{key}')

        callbacks = []
        if tracker is not None:
            lr_logger = LearningRateLogger()
            callbacks.append(lr_logger)

        # ------------------------
        # 1 INIT LIGHTNING MODEL
        # ------------------------
        model = LightningTemplateModel(hparams)
        if tracker is not None:
            s = str(model)
            log_text_as_artifact(tracker, s, "model_summary.txt")

        # ------------------------
        # 2 INIT TRAINER
        # ------------------------
        cfg = hparams.PL
        if tracker is None:
            tracker = cfg.logger  # True by default in PL
        kwargs = dict(cfg)
        kwargs.pop('logger')  # passed explicitly below, avoid duplicate kwarg
        trainer = pl.Trainer(
            max_epochs=hparams.train.max_epochs,
            callbacks=callbacks,
            logger=tracker,
            **kwargs,
        )

        # ------------------------
        # 3 START TRAINING
        # ------------------------
        print()
        print("Start training")
        trainer.fit(model)
    except (Exception, KeyboardInterrupt) as ex:
        # BUG FIX: after "tracker = cfg.logger" the tracker may be a
        # plain bool (PL's default logger flag), which has no
        # `.experiment`; the old `tracker is not None` guard then raised
        # AttributeError here and masked the original exception. Only
        # touch `.experiment` when it actually exists (NeptuneLogger).
        if tracker is not None and hasattr(tracker, 'experiment'):
            print_exc()
            tracker.experiment.stop(str(ex))
        raise