def register_descriptors(self, descriptor_schemas, user, force=False, verbosity=1):
    """Read and register descriptors.

    :param descriptor_schemas: iterable of descriptor-schema dicts to register
    :param user: contributor assigned to newly created schema objects
    :param force: when True, overwrite an already-installed schema of the
        same version instead of skipping it
    :param verbosity: suppress informational stdout output when 0
        (error messages still go to stderr)
    """
    log_descriptors = []

    for descriptor_schema in descriptor_schemas:
        # Normalize every field's type string so it is colon-terminated.
        for field in ['var', 'schema']:
            for schema, _, _ in iterate_schema({}, descriptor_schema.get(field, {})):
                if not schema['type'][-1].endswith(':'):
                    schema['type'] += ':'

        # support backward compatibility
        # TODO: update .yml files and remove
        if 'slug' not in descriptor_schema:
            # Old-style schemas carry 'name'/'label' instead of 'slug'/'name'.
            descriptor_schema['slug'] = slugify(descriptor_schema.pop('name').replace(':', '-'))
            descriptor_schema['name'] = descriptor_schema.pop('label')

        if 'schema' not in descriptor_schema:
            descriptor_schema['schema'] = []

        # Legacy 'static' and 'var' sections are merged into 'schema'.
        if 'static' in descriptor_schema:
            descriptor_schema['schema'].extend(descriptor_schema.pop('static'))
        if 'var' in descriptor_schema:
            descriptor_schema['schema'].extend(descriptor_schema.pop('var'))

        if not self.valid(descriptor_schema, DESCRIPTOR_SCHEMA):
            continue

        slug = descriptor_schema['slug']
        version = descriptor_schema.get('version', '0.0.0')
        int_version = convert_version_string_to_int(version, VERSION_NUMBER_BITS)

        # `latest version` is returned as `int` so it has to be compared to `int_version`
        latest_version = DescriptorSchema.objects.filter(slug=slug).aggregate(Max('version'))['version__max']
        if latest_version is not None and latest_version > int_version:
            self.stderr.write("Skip descriptor schema {}: newer version installed".format(slug))
            continue

        descriptor_query = DescriptorSchema.objects.filter(slug=slug, version=version)
        if descriptor_query.exists():
            if not force:
                if verbosity > 0:
                    self.stdout.write("Skip descriptor schema {}: same version installed".format(slug))
                continue

            # Same slug+version already installed and --force given: overwrite in place.
            descriptor_query.update(**descriptor_schema)
            log_descriptors.append("Updated {}".format(slug))
        else:
            DescriptorSchema.objects.create(contributor=user, **descriptor_schema)
            log_descriptors.append("Inserted {}".format(slug))

    if len(log_descriptors) > 0 and verbosity > 0:
        self.stdout.write("Descriptor schemas Updates:")
        for log in log_descriptors:
            self.stdout.write("  {}".format(log))
def register_descriptors(self, descriptor_schemas, user, force=False):
    """Read and register descriptors.

    New schemas are created with ``user`` as contributor; an existing
    schema of the same slug and version is overwritten only when
    ``force`` is set.
    """
    log_descriptors = []

    for descriptor_schema in descriptor_schemas:
        # Make sure every type string in the schema is colon-terminated.
        for field_name in ('var', 'schema'):
            for field_schema, _, _ in iterate_schema({}, descriptor_schema.get(field_name, {})):
                if not field_schema['type'][-1].endswith(':'):
                    field_schema['type'] += ':'

        # support backward compatibility
        # TODO: update .yml files and remove
        if 'slug' not in descriptor_schema:
            descriptor_schema['slug'] = slugify(descriptor_schema.pop('name').replace(':', '-'))
            descriptor_schema['name'] = descriptor_schema.pop('label')

        # Fold the legacy 'static' and 'var' sections into 'schema'.
        descriptor_schema.setdefault('schema', [])
        for legacy_field in ('static', 'var'):
            if legacy_field in descriptor_schema:
                descriptor_schema['schema'].extend(descriptor_schema.pop(legacy_field))

        if not self.valid(descriptor_schema, DESCRIPTOR_SCHEMA):
            continue

        slug = descriptor_schema['slug']
        version = descriptor_schema.get('version', '0.0.0')
        int_version = convert_version_string_to_int(version, VERSION_NUMBER_BITS)

        # `latest version` is returned as `int` so it has to be compared to `int_version`
        newest_installed = DescriptorSchema.objects.filter(slug=slug).aggregate(Max('version'))['version__max']
        if newest_installed is not None and newest_installed > int_version:
            self.stderr.write("Skip descriptor schema {}: newer version installed".format(slug))
            continue

        same_version_qs = DescriptorSchema.objects.filter(slug=slug, version=version)
        if not same_version_qs.exists():
            DescriptorSchema.objects.create(contributor=user, **descriptor_schema)
            log_descriptors.append("Inserted {}".format(slug))
        elif force:
            same_version_qs.update(**descriptor_schema)
            log_descriptors.append("Updated {}".format(slug))
        else:
            self.stdout.write("Skip descriptor schema {}: same version installed".format(slug))

    if log_descriptors:
        self.stdout.write("Descriptor schemas Updates:")
        for entry in log_descriptors:
            self.stdout.write("  {}".format(entry))
def register_descriptors(self, descriptor_schemas, user, force=False, verbosity=1):
    """Read and register descriptors.

    :param descriptor_schemas: iterable of descriptor-schema dicts to register
    :param user: contributor assigned to newly created schema objects
    :param force: when True, overwrite an already-installed schema of the
        same version instead of skipping it
    :param verbosity: suppress informational stdout output when 0
        (error messages still go to stderr)
    """
    log_descriptors = []

    for descriptor_schema in descriptor_schemas:
        # Normalize every field's type string so it is colon-terminated.
        for schema, _, _ in iterate_schema({}, descriptor_schema.get('schema', {})):
            if not schema['type'][-1].endswith(':'):
                schema['type'] += ':'

        if 'schema' not in descriptor_schema:
            descriptor_schema['schema'] = []

        if not self.valid(descriptor_schema, DESCRIPTOR_SCHEMA):
            continue

        slug = descriptor_schema['slug']
        version = descriptor_schema.get('version', '0.0.0')
        int_version = convert_version_string_to_int(version, VERSION_NUMBER_BITS)

        # `latest version` is returned as `int` so it has to be compared to `int_version`
        latest_version = DescriptorSchema.objects.filter(slug=slug).aggregate(Max('version'))['version__max']
        if latest_version is not None and latest_version > int_version:
            self.stderr.write("Skip descriptor schema {}: newer version installed".format(slug))
            continue

        # Remember the latest previously-installed schema with this slug so
        # its permissions can be copied onto a newly created version.
        previous_descriptor_qs = DescriptorSchema.objects.filter(slug=slug)
        if previous_descriptor_qs.exists():
            previous_descriptor = previous_descriptor_qs.latest()
        else:
            previous_descriptor = None

        descriptor_query = DescriptorSchema.objects.filter(slug=slug, version=version)
        if descriptor_query.exists():
            if not force:
                if verbosity > 0:
                    self.stdout.write("Skip descriptor schema {}: same version installed".format(slug))
                continue

            # Same slug+version already installed and --force given: overwrite in place.
            descriptor_query.update(**descriptor_schema)
            log_descriptors.append("Updated {}".format(slug))
        else:
            descriptor = DescriptorSchema.objects.create(contributor=user, **descriptor_schema)
            # Grant the contributor permissions on the new object, then
            # carry over permissions from the previous version (if any).
            assign_contributor_permissions(descriptor)
            if previous_descriptor:
                copy_permissions(previous_descriptor, descriptor)
            log_descriptors.append("Inserted {}".format(slug))

    if log_descriptors and verbosity > 0:
        self.stdout.write("Descriptor schemas Updates:")
        for log in log_descriptors:
            self.stdout.write("  {}".format(log))
def register_descriptors(self, descriptor_schemas, user, force=False, verbosity=1):
    """Read and register descriptors.

    :param descriptor_schemas: iterable of descriptor-schema dicts to register
    :param user: contributor assigned to newly created schema objects
    :param force: when True, overwrite an already-installed schema of the
        same version instead of skipping it
    :param verbosity: suppress informational stdout output when 0
        (error messages still go to stderr)
    """
    log_descriptors = []

    for descriptor_schema in descriptor_schemas:
        # Normalize every field's type string so it is colon-terminated.
        for schema, _, _ in iterate_schema({}, descriptor_schema.get("schema", {})):
            if not schema["type"][-1].endswith(":"):
                schema["type"] += ":"

        if "schema" not in descriptor_schema:
            descriptor_schema["schema"] = []

        if not self.valid(descriptor_schema, DESCRIPTOR_SCHEMA):
            continue

        slug = descriptor_schema["slug"]
        version = descriptor_schema.get("version", "0.0.0")
        int_version = convert_version_string_to_int(version, VERSION_NUMBER_BITS)

        # `latest version` is returned as `int` so it has to be compared to `int_version`
        latest_version = DescriptorSchema.objects.filter(slug=slug).aggregate(Max("version"))["version__max"]
        if latest_version is not None and latest_version > int_version:
            self.stderr.write("Skip descriptor schema {}: newer version installed".format(slug))
            continue

        # Remember the latest previously-installed schema with this slug so
        # its permissions can be copied onto a newly created version.
        previous_descriptor_qs = DescriptorSchema.objects.filter(slug=slug)
        if previous_descriptor_qs.exists():
            previous_descriptor = previous_descriptor_qs.latest()
        else:
            previous_descriptor = None

        descriptor_query = DescriptorSchema.objects.filter(slug=slug, version=version)
        if descriptor_query.exists():
            if not force:
                if verbosity > 0:
                    self.stdout.write("Skip descriptor schema {}: same version installed".format(slug))
                continue

            # Same slug+version already installed and --force given: overwrite in place.
            descriptor_query.update(**descriptor_schema)
            log_descriptors.append("Updated {}".format(slug))
        else:
            descriptor = DescriptorSchema.objects.create(contributor=user, **descriptor_schema)
            # Grant the contributor permissions on the new object, then
            # carry over permissions from the previous version (if any).
            assign_contributor_permissions(descriptor)
            if previous_descriptor:
                copy_permissions(previous_descriptor, descriptor)
            log_descriptors.append("Inserted {}".format(slug))

    if log_descriptors and verbosity > 0:
        self.stdout.write("Descriptor schemas Updates:")
        for log in log_descriptors:
            self.stdout.write("  {}".format(log))
def register_processes(self, process_schemas, user, force=False, verbosity=1):
    """Read and register processors.

    :param process_schemas: iterable of process-schema dicts to register
    :param user: contributor assigned to newly created processes
    :param force: when True, overwrite an already-installed process of the
        same version instead of skipping it
    :param verbosity: suppress informational stdout output when 0
        (error messages still go to stderr)
    """
    log_processors = []
    log_templates = []

    for p in process_schemas:
        # TODO: Remove this when all processes are migrated to the
        # new syntax.
        if "flow_collection" in p:
            if "entity" in p:
                self.stderr.write(
                    "Skip processor {}: only one of 'flow_collection' and 'entity' fields "
                    "allowed".format(p["slug"]))
                continue

            # Translate the legacy 'flow_collection' field to the new form.
            p["entity"] = {"type": p.pop("flow_collection")}

        # Normalize 'type' and 'category' so they are colon-terminated.
        if p["type"][-1] != ":":
            p["type"] += ":"

        if "category" in p and not p["category"].endswith(":"):
            p["category"] += ":"

        # Normalize every input/output field's type string as well.
        for field in ["input", "output"]:
            for schema, _, _ in iterate_schema({}, p[field] if field in p else {}):
                if not schema["type"][-1].endswith(":"):
                    schema["type"] += ":"

        # TODO: Check if schemas validate with our JSON meta schema and Processor model docs.
        if not self.valid(p, PROCESSOR_SCHEMA):
            continue

        if "entity" in p:
            if "type" not in p["entity"]:
                self.stderr.write(
                    "Skip process {}: 'entity.type' required if 'entity' defined".format(p["slug"]))
                continue
            if "input" in p["entity"] and p["entity"].get("always_create", False):
                self.stderr.write(
                    "Skip process {}: 'entity.input' will not be considered if 'entity.always_create' "
                    "is set to true.".format(p["slug"]))
                continue

            # Flatten the nested 'entity' dict into the flat model fields.
            p["entity_type"] = p["entity"]["type"]
            p["entity_descriptor_schema"] = p["entity"].get("descriptor_schema", p["entity_type"])
            p["entity_input"] = p["entity"].get("input", None)
            p["entity_always_create"] = p["entity"].get("always_create", False)
            p.pop("entity")

            if not DescriptorSchema.objects.filter(slug=p["entity_descriptor_schema"]).exists():
                self.stderr.write(
                    "Skip processor {}: Unknown descriptor schema '{}' used in 'entity' "
                    "field.".format(p["slug"], p["entity_descriptor_schema"]))
                continue

        # Map symbolic persistence names to model constants.
        if "persistence" in p:
            persistence_mapping = {
                "RAW": Process.PERSISTENCE_RAW,
                "CACHED": Process.PERSISTENCE_CACHED,
                "TEMP": Process.PERSISTENCE_TEMP,
            }
            p["persistence"] = persistence_mapping[p["persistence"]]

        # Map symbolic scheduling-class names to model constants.
        if "scheduling_class" in p:
            scheduling_class_mapping = {
                "interactive": Process.SCHEDULING_CLASS_INTERACTIVE,
                "batch": Process.SCHEDULING_CLASS_BATCH,
            }
            p["scheduling_class"] = scheduling_class_mapping[p["scheduling_class"]]

        # Rename schema keys to match the model field names.
        if "input" in p:
            p["input_schema"] = p.pop("input")
        if "output" in p:
            p["output_schema"] = p.pop("output")

        slug = p["slug"]

        if "run" in p:
            # Set default language to 'bash' if not set.
            p["run"].setdefault("language", "bash")

            # Transform output schema using the execution engine.
            try:
                execution_engine = manager.get_execution_engine(p["run"]["language"])
                extra_output_schema = execution_engine.get_output_schema(p)
                if extra_output_schema:
                    p.setdefault("output_schema", []).extend(extra_output_schema)
            except InvalidEngineError:
                self.stderr.write(
                    "Skip processor {}: execution engine '{}' not supported".format(
                        slug, p["run"]["language"]))
                continue

        # Validate if container image is allowed based on the configured pattern.
        # NOTE: This validation happens here and is not deferred to executors because the idea
        # is that this will be moved to a "container" requirement independent of the
        # executor.
        if hasattr(settings, "FLOW_CONTAINER_VALIDATE_IMAGE"):
            try:
                container_image = dict_dot(p, "requirements.executor.docker.image")
                if not re.match(settings.FLOW_CONTAINER_VALIDATE_IMAGE, container_image):
                    self.stderr.write(
                        "Skip processor {}: container image does not match '{}'".format(
                            slug,
                            settings.FLOW_CONTAINER_VALIDATE_IMAGE,
                        ))
                    continue
            except KeyError:
                # No docker image requirement declared; nothing to validate.
                pass

        version = p["version"]
        int_version = convert_version_string_to_int(version, VERSION_NUMBER_BITS)

        # `latest version` is returned as `int` so it has to be compared to `int_version`
        latest_version = Process.objects.filter(slug=slug).aggregate(Max("version"))["version__max"]
        if latest_version is not None and latest_version > int_version:
            self.stderr.write("Skip processor {}: newer version installed".format(slug))
            continue

        # Remember the latest previously-installed process with this slug so
        # its permissions can be copied onto a newly created version.
        previous_process_qs = Process.objects.filter(slug=slug)
        if previous_process_qs.exists():
            previous_process = previous_process_qs.latest()
        else:
            previous_process = None

        process_query = Process.objects.filter(slug=slug, version=version)
        if process_query.exists():
            if not force:
                if verbosity > 0:
                    self.stdout.write("Skip processor {}: same version installed".format(slug))
                continue

            # Same slug+version already installed and --force given: overwrite in place.
            process_query.update(**p)
            log_processors.append("Updated {}".format(slug))
        else:
            process = Process.objects.create(contributor=user, **p)
            # Grant the contributor permissions on the new object, then
            # carry over permissions from the previous version (if any).
            assign_contributor_permissions(process)
            if previous_process:
                copy_permissions(previous_process, process)
            log_processors.append("Inserted {}".format(slug))

    if verbosity > 0:
        if log_processors:
            self.stdout.write("Processor Updates:")
            for log in log_processors:
                self.stdout.write("  {}".format(log))

        if log_templates:
            self.stdout.write("Default Template Updates:")
            for log in log_templates:
                self.stdout.write("  {}".format(log))
def register_processes(self, process_schemas, user, force=False, verbosity=1):
    """Read and register processors.

    :param process_schemas: iterable of process-schema dicts to register
    :param user: contributor assigned to newly created processes
    :param force: when True, overwrite an already-installed process of the
        same version instead of skipping it
    :param verbosity: suppress informational stdout output when 0
        (error messages still go to stderr)
    """
    log_processors = []
    log_templates = []

    for p in process_schemas:
        # Normalize 'type' and 'category' so they are colon-terminated.
        if p['type'][-1] != ':':
            p['type'] += ':'

        if 'category' in p and not p['category'].endswith(':'):
            p['category'] += ':'

        # get `data_name` from `static`
        if 'static' in p:
            for schema, _, _ in iterate_schema({}, p['static']):
                if schema['name'] == 'name' and 'default' in schema:
                    p['data_name'] = schema['default']

        # support backward compatibility
        # TODO: update .yml files and remove
        if 'slug' not in p:
            # Old-style schemas carry 'name'/'label' instead of 'slug'/'name'.
            p['slug'] = slugify(p.pop('name').replace(':', '-'))
            p['name'] = p.pop('label')

            # Drop legacy sections that are not part of the new schema.
            p.pop('var', None)
            p.pop('static', None)

        # Normalize every field's type string as well.
        for field in ['input', 'output', 'var', 'static']:
            for schema, _, _ in iterate_schema({}, p[field] if field in p else {}):
                if not schema['type'][-1].endswith(':'):
                    schema['type'] += ':'

        # TODO: Check if schemas validate with our JSON meta schema and Processor model docs.
        if not self.valid(p, PROCESSOR_SCHEMA):
            continue

        # Map symbolic persistence names to model constants.
        if 'persistence' in p:
            persistence_mapping = {
                'RAW': Process.PERSISTENCE_RAW,
                'CACHED': Process.PERSISTENCE_CACHED,
                'TEMP': Process.PERSISTENCE_TEMP,
            }
            p['persistence'] = persistence_mapping[p['persistence']]

        # Rename schema keys to match the model field names.
        if 'input' in p:
            p['input_schema'] = p.pop('input')
        if 'output' in p:
            p['output_schema'] = p.pop('output')

        slug = p['slug']

        if 'run' in p:
            # Set default language to 'bash' if not set.
            p['run'].setdefault('language', 'bash')

            # Transform output schema using the execution engine.
            try:
                execution_engine = manager.get_execution_engine(p['run']['language'])
                extra_output_schema = execution_engine.get_output_schema(p)
                if extra_output_schema:
                    p.setdefault('output_schema', []).extend(extra_output_schema)
            except InvalidEngineError:
                self.stderr.write(
                    "Skip processor {}: execution engine '{}' not supported".format(
                        slug, p['run']['language']))
                continue

        version = p['version']
        int_version = convert_version_string_to_int(version, VERSION_NUMBER_BITS)

        # `latest version` is returned as `int` so it has to be compared to `int_version`
        latest_version = Process.objects.filter(slug=slug).aggregate(Max('version'))['version__max']
        if latest_version is not None and latest_version > int_version:
            self.stderr.write("Skip processor {}: newer version installed".format(slug))
            continue

        # Remember the latest previously-installed process with this slug so
        # its permissions can be copied onto a newly created version.
        previous_process_qs = Process.objects.filter(slug=slug)
        if previous_process_qs.exists():
            previous_process = previous_process_qs.latest()
        else:
            previous_process = None

        process_query = Process.objects.filter(slug=slug, version=version)
        if process_query.exists():
            if not force:
                if verbosity > 0:
                    self.stdout.write("Skip processor {}: same version installed".format(slug))
                continue

            # Same slug+version already installed and --force given: overwrite in place.
            process_query.update(**p)
            log_processors.append("Updated {}".format(slug))
        else:
            process = Process.objects.create(contributor=user, **p)
            # Carry over permissions from the previous version (if any).
            if previous_process:
                copy_permissions(previous_process, process)
            log_processors.append("Inserted {}".format(slug))

    if verbosity > 0:
        if len(log_processors) > 0:
            self.stdout.write("Processor Updates:")
            for log in log_processors:
                self.stdout.write("  {}".format(log))

        if len(log_templates) > 0:
            self.stdout.write("Default Template Updates:")
            for log in log_templates:
                self.stdout.write("  {}".format(log))
def register_processes(self, process_schemas, user, force=False):
    """Read and register processors.

    :param process_schemas: iterable of process-schema dicts to register
    :param user: contributor assigned to newly created processes
    :param force: when True, overwrite an already-installed process of the
        same version instead of skipping it
    """
    log_processors = []
    log_templates = []

    for p in process_schemas:
        # Normalize 'type' and 'category' so they are colon-terminated.
        if p['type'][-1] != ':':
            p['type'] += ':'

        if 'category' in p and not p['category'].endswith(':'):
            p['category'] += ':'

        # get `data_name` from `static`
        if 'static' in p:
            for schema, _, _ in iterate_schema({}, p['static']):
                if schema['name'] == 'name' and 'default' in schema:
                    p['data_name'] = schema['default']

        # support backward compatibility
        # TODO: update .yml files and remove
        if 'slug' not in p:
            # Old-style schemas carry 'name'/'label' instead of 'slug'/'name'.
            p['slug'] = slugify(p.pop('name').replace(':', '-'))
            p['name'] = p.pop('label')

            # Drop legacy sections that are not part of the new schema.
            p.pop('var', None)
            p.pop('static', None)

        # Normalize every field's type string as well.
        for field in ['input', 'output', 'var', 'static']:
            for schema, _, _ in iterate_schema({}, p[field] if field in p else {}):
                if not schema['type'][-1].endswith(':'):
                    schema['type'] += ':'

        # TODO: Check if schemas validate with our JSON meta schema and Processor model docs.
        if not self.valid(p, PROCESSOR_SCHEMA):
            continue

        # Map symbolic persistence names to model constants.
        if 'persistence' in p:
            persistence_mapping = {
                'RAW': Process.PERSISTENCE_RAW,
                'CACHED': Process.PERSISTENCE_CACHED,
                'TEMP': Process.PERSISTENCE_TEMP,
            }
            p['persistence'] = persistence_mapping[p['persistence']]

        # Rename schema keys to match the model field names.
        if 'input' in p:
            p['input_schema'] = p.pop('input')
        if 'output' in p:
            p['output_schema'] = p.pop('output')

        slug = p['slug']
        version = p['version']
        int_version = convert_version_string_to_int(version, VERSION_NUMBER_BITS)

        # `latest version` is returned as `int` so it has to be compared to `int_version`
        latest_version = Process.objects.filter(slug=slug).aggregate(Max('version'))['version__max']
        if latest_version is not None and latest_version > int_version:
            self.stderr.write("Skip processor {}: newer version installed".format(slug))
            continue

        process_query = Process.objects.filter(slug=slug, version=version)
        if process_query.exists():
            if not force:
                self.stdout.write("Skip processor {}: same version installed".format(slug))
                continue

            # Same slug+version already installed and --force given: overwrite in place.
            process_query.update(**p)
            log_processors.append("Updated {}".format(slug))
        else:
            Process.objects.create(contributor=user, **p)
            log_processors.append("Inserted {}".format(slug))

    if len(log_processors) > 0:
        self.stdout.write("Processor Updates:")
        for log in log_processors:
            self.stdout.write("  {}".format(log))

    if len(log_templates) > 0:
        self.stdout.write("Default Template Updates:")
        for log in log_templates:
            self.stdout.write("  {}".format(log))
def test_to_python_numeric(self):
    """An integer value is converted back to the equivalent Version."""
    version_field = forms.VersionField()
    raw_value = convert_version_string_to_int("1.2.3", DEFAULT_NUMBER_BITS)
    expected = Version("1.2.3", DEFAULT_NUMBER_BITS)
    self.assertEqual(expected, version_field.to_python(raw_value))
def register_processes(self, process_schemas, user, force=False, verbosity=1):
    """Read and register processors.

    :param process_schemas: iterable of process-schema dicts to register
    :param user: contributor assigned to newly created processes
    :param force: when True, overwrite an already-installed process of the
        same version instead of skipping it
    :param verbosity: suppress informational stdout output when 0
        (error messages still go to stderr)
    """
    log_processors = []
    log_templates = []

    for p in process_schemas:
        # Normalize 'type' and 'category' so they are colon-terminated.
        if p['type'][-1] != ':':
            p['type'] += ':'

        if 'category' in p and not p['category'].endswith(':'):
            p['category'] += ':'

        # Normalize every input/output field's type string as well.
        for field in ['input', 'output']:
            for schema, _, _ in iterate_schema({}, p[field] if field in p else {}):
                if not schema['type'][-1].endswith(':'):
                    schema['type'] += ':'

        # TODO: Check if schemas validate with our JSON meta schema and Processor model docs.
        if not self.valid(p, PROCESSOR_SCHEMA):
            continue

        # Map symbolic persistence names to model constants.
        if 'persistence' in p:
            persistence_mapping = {
                'RAW': Process.PERSISTENCE_RAW,
                'CACHED': Process.PERSISTENCE_CACHED,
                'TEMP': Process.PERSISTENCE_TEMP,
            }
            p['persistence'] = persistence_mapping[p['persistence']]

        # Map symbolic scheduling-class names to model constants.
        if 'scheduling_class' in p:
            scheduling_class_mapping = {
                'interactive': Process.SCHEDULING_CLASS_INTERACTIVE,
                'batch': Process.SCHEDULING_CLASS_BATCH
            }
            p['scheduling_class'] = scheduling_class_mapping[p['scheduling_class']]

        # Rename schema keys to match the model field names.
        if 'input' in p:
            p['input_schema'] = p.pop('input')
        if 'output' in p:
            p['output_schema'] = p.pop('output')

        slug = p['slug']

        if 'run' in p:
            # Set default language to 'bash' if not set.
            p['run'].setdefault('language', 'bash')

            # Transform output schema using the execution engine.
            try:
                execution_engine = manager.get_execution_engine(p['run']['language'])
                extra_output_schema = execution_engine.get_output_schema(p)
                if extra_output_schema:
                    p.setdefault('output_schema', []).extend(extra_output_schema)
            except InvalidEngineError:
                self.stderr.write(
                    "Skip processor {}: execution engine '{}' not supported".format(
                        slug, p['run']['language']))
                continue

        # Validate if container image is allowed based on the configured pattern.
        # NOTE: This validation happens here and is not deferred to executors because the idea
        # is that this will be moved to a "container" requirement independent of the
        # executor.
        if hasattr(settings, 'FLOW_CONTAINER_VALIDATE_IMAGE'):
            try:
                container_image = dict_dot(p, 'requirements.executor.docker.image')
                if not re.match(settings.FLOW_CONTAINER_VALIDATE_IMAGE, container_image):
                    self.stderr.write(
                        "Skip processor {}: container image does not match '{}'".format(
                            slug,
                            settings.FLOW_CONTAINER_VALIDATE_IMAGE,
                        ))
                    continue
            except KeyError:
                # No docker image requirement declared; nothing to validate.
                pass

        version = p['version']
        int_version = convert_version_string_to_int(version, VERSION_NUMBER_BITS)

        # `latest version` is returned as `int` so it has to be compared to `int_version`
        latest_version = Process.objects.filter(slug=slug).aggregate(Max('version'))['version__max']
        if latest_version is not None and latest_version > int_version:
            self.stderr.write("Skip processor {}: newer version installed".format(slug))
            continue

        # Remember the latest previously-installed process with this slug so
        # its permissions can be copied onto a newly created version.
        previous_process_qs = Process.objects.filter(slug=slug)
        if previous_process_qs.exists():
            previous_process = previous_process_qs.latest()
        else:
            previous_process = None

        process_query = Process.objects.filter(slug=slug, version=version)
        if process_query.exists():
            if not force:
                if verbosity > 0:
                    self.stdout.write("Skip processor {}: same version installed".format(slug))
                continue

            # Same slug+version already installed and --force given: overwrite in place.
            process_query.update(**p)
            log_processors.append("Updated {}".format(slug))
        else:
            process = Process.objects.create(contributor=user, **p)
            # Grant the contributor permissions on the new object, then
            # carry over permissions from the previous version (if any).
            assign_contributor_permissions(process)
            if previous_process:
                copy_permissions(previous_process, process)
            log_processors.append("Inserted {}".format(slug))

    if verbosity > 0:
        if log_processors:
            self.stdout.write("Processor Updates:")
            for log in log_processors:
                self.stdout.write("  {}".format(log))

        if log_templates:
            self.stdout.write("Default Template Updates:")
            for log in log_templates:
                self.stdout.write("  {}".format(log))
def get_assets_query(appslug=None, app=None, cslug=None, component=None,
                     tslug=None, tags=None, follower=None, verstring=None,
                     version=None, asset_version=None, license=None,
                     asset_title=None, author=None, original_author=None,
                     order_by='-pub_date', **kwargs):
    """Build a Q filter for assets plus a human-readable title.

    Each provided filter argument both narrows the query and appends a
    translated description fragment to ``auto_title``.

    :returns: tuple ``(qry, auto_title)`` where ``qry`` is a combined
        ``Q`` object and ``auto_title`` is a list of description strings.
    :raises Http404: via ``get_object_or_404`` for unknown slugs, or when
        ``verstring`` cannot be parsed as a version.

    NOTE(review): ``order_by`` and ``**kwargs`` are accepted but never used
    in this function — presumably consumed by callers; confirm.
    """
    qry = Q()
    auto_title = []

    # Slug arguments are resolved to objects; the object arguments may
    # also be passed in directly.
    if appslug is not None:
        app = get_object_or_404(Application, slug=appslug)
    if app is not None:
        auto_title.append(_("application {}").format(app.title))
        qry = qry & Q(application=app)
    if cslug is not None and app is not None:
        component = get_object_or_404(Component, application=app, slug=cslug)
    if component is not None:
        auto_title.append(_("component {}").format(component.title))
        qry = qry & Q(component=component)
    if tslug is not None:
        tag = get_object_or_404(Tag, slug=tslug)
        auto_title.append(_("tag {}").format(tag.name))
        qry = qry & Q(tags=tag)
    if tags is not None:
        tags_title = ", ".join([tag.name for tag in tags])
        auto_title.append(_("tags {}").format(tags_title))
        qry = qry & Q(tags__in=tags)
    if asset_title:
        auto_title.append(_("title contains `{}'").format(asset_title))
        qry = qry & Q(title__icontains=asset_title)
    if author is not None:
        auto_title.append(_("author is {}").format(author.get_full_name()))
        qry = qry & Q(author=author)
    if original_author:
        auto_title.append(_("original author {}").format(original_author))
        qry = qry & Q(original_author__icontains=original_author)
    if license is not None:
        auto_title.append(_("license {}").format(license))
        qry = qry & Q(license=license)
    if follower is not None:
        qry = qry & Q(author__follower=follower.profile)
        auto_title.append(
            _("from users followed by {}").format(follower.get_full_name()))

    # A version string only takes effect together with an application.
    if verstring is not None and app is not None:
        try:
            version = convert_version_string_to_int(verstring, [8, 8, 8, 8])
        except (ValueError, NotImplementedError):
            raise Http404
    if version is not None and app is not None:
        if not verstring:
            verstring = str(version)
        # NOTE(review): the title shows the integer `version` while the
        # query compares against the string `verstring` — looks like the
        # title was meant to use the readable version string; confirm.
        auto_title.append(
            _("compatible with application version {}").format(version))
        qry = qry & (
            Q(application=app) &
            (Q(app_version_min__lte=verstring) | Q(app_version_min=None)) &
            (Q(app_version_max__gte=verstring) | Q(app_version_max=None)))
    if asset_version is not None:
        auto_title.append(_("version is {}").format(asset_version))
        qry = qry & Q(version=asset_version)

    return qry, auto_title
def register_processes(self, process_schemas, user, force=False, verbosity=1):
    """Read and register processors.

    :param process_schemas: iterable of process-schema dicts to register
    :param user: contributor assigned to newly created processes
    :param force: when True, overwrite an already-installed process of the
        same version instead of skipping it
    :param verbosity: suppress informational stdout output when 0
        (error messages still go to stderr)
    """
    log_processors = []
    log_templates = []

    for p in process_schemas:
        # TODO: Remove this when all processes are migrated to the
        # new syntax.
        if 'flow_collection' in p:
            if 'entity' in p:
                self.stderr.write(
                    "Skip processor {}: only one of 'flow_collection' and 'entity' fields "
                    "allowed".format(p['slug'])
                )
                continue

            # Translate the legacy 'flow_collection' field to the new form.
            p['entity'] = {'type': p.pop('flow_collection')}

        # Normalize 'type' and 'category' so they are colon-terminated.
        if p['type'][-1] != ':':
            p['type'] += ':'

        if 'category' in p and not p['category'].endswith(':'):
            p['category'] += ':'

        # Normalize every input/output field's type string as well.
        for field in ['input', 'output']:
            for schema, _, _ in iterate_schema({}, p[field] if field in p else {}):
                if not schema['type'][-1].endswith(':'):
                    schema['type'] += ':'

        # TODO: Check if schemas validate with our JSON meta schema and Processor model docs.
        if not self.valid(p, PROCESSOR_SCHEMA):
            continue

        if 'entity' in p:
            if 'type' not in p['entity']:
                self.stderr.write(
                    "Skip process {}: 'entity.type' required if 'entity' defined".format(p['slug'])
                )
                continue

            # Flatten the nested 'entity' dict into the flat model fields.
            p['entity_type'] = p['entity']['type']
            p['entity_descriptor_schema'] = p['entity'].get('descriptor_schema', p['entity_type'])
            p['entity_input'] = p['entity'].get('input', None)
            p.pop('entity')

            if not DescriptorSchema.objects.filter(slug=p['entity_descriptor_schema']).exists():
                self.stderr.write(
                    "Skip processor {}: Unknown descriptor schema '{}' used in 'entity' "
                    "field.".format(p['slug'], p['entity_descriptor_schema'])
                )
                continue

        # Map symbolic persistence names to model constants.
        if 'persistence' in p:
            persistence_mapping = {
                'RAW': Process.PERSISTENCE_RAW,
                'CACHED': Process.PERSISTENCE_CACHED,
                'TEMP': Process.PERSISTENCE_TEMP,
            }
            p['persistence'] = persistence_mapping[p['persistence']]

        # Map symbolic scheduling-class names to model constants.
        if 'scheduling_class' in p:
            scheduling_class_mapping = {
                'interactive': Process.SCHEDULING_CLASS_INTERACTIVE,
                'batch': Process.SCHEDULING_CLASS_BATCH
            }
            p['scheduling_class'] = scheduling_class_mapping[p['scheduling_class']]

        # Rename schema keys to match the model field names.
        if 'input' in p:
            p['input_schema'] = p.pop('input')
        if 'output' in p:
            p['output_schema'] = p.pop('output')

        slug = p['slug']

        if 'run' in p:
            # Set default language to 'bash' if not set.
            p['run'].setdefault('language', 'bash')

            # Transform output schema using the execution engine.
            try:
                execution_engine = manager.get_execution_engine(p['run']['language'])
                extra_output_schema = execution_engine.get_output_schema(p)
                if extra_output_schema:
                    p.setdefault('output_schema', []).extend(extra_output_schema)
            except InvalidEngineError:
                self.stderr.write("Skip processor {}: execution engine '{}' not supported".format(
                    slug, p['run']['language']
                ))
                continue

        # Validate if container image is allowed based on the configured pattern.
        # NOTE: This validation happens here and is not deferred to executors because the idea
        # is that this will be moved to a "container" requirement independent of the
        # executor.
        if hasattr(settings, 'FLOW_CONTAINER_VALIDATE_IMAGE'):
            try:
                container_image = dict_dot(p, 'requirements.executor.docker.image')
                if not re.match(settings.FLOW_CONTAINER_VALIDATE_IMAGE, container_image):
                    self.stderr.write("Skip processor {}: container image does not match '{}'".format(
                        slug,
                        settings.FLOW_CONTAINER_VALIDATE_IMAGE,
                    ))
                    continue
            except KeyError:
                # No docker image requirement declared; nothing to validate.
                pass

        version = p['version']
        int_version = convert_version_string_to_int(version, VERSION_NUMBER_BITS)

        # `latest version` is returned as `int` so it has to be compared to `int_version`
        latest_version = Process.objects.filter(slug=slug).aggregate(Max('version'))['version__max']
        if latest_version is not None and latest_version > int_version:
            self.stderr.write("Skip processor {}: newer version installed".format(slug))
            continue

        # Remember the latest previously-installed process with this slug so
        # its permissions can be copied onto a newly created version.
        previous_process_qs = Process.objects.filter(slug=slug)
        if previous_process_qs.exists():
            previous_process = previous_process_qs.latest()
        else:
            previous_process = None

        process_query = Process.objects.filter(slug=slug, version=version)
        if process_query.exists():
            if not force:
                if verbosity > 0:
                    self.stdout.write("Skip processor {}: same version installed".format(slug))
                continue

            # Same slug+version already installed and --force given: overwrite in place.
            process_query.update(**p)
            log_processors.append("Updated {}".format(slug))
        else:
            process = Process.objects.create(contributor=user, **p)
            # Grant the contributor permissions on the new object, then
            # carry over permissions from the previous version (if any).
            assign_contributor_permissions(process)
            if previous_process:
                copy_permissions(previous_process, process)
            log_processors.append("Inserted {}".format(slug))

    if verbosity > 0:
        if log_processors:
            self.stdout.write("Processor Updates:")
            for log in log_processors:
                self.stdout.write("  {}".format(log))

        if log_templates:
            self.stdout.write("Default Template Updates:")
            for log in log_templates:
                self.stdout.write("  {}".format(log))