def validate(self, attrs):
    # Paid apps only: reject add-ons whose premium_type is one of the free types.
    if attrs['addon'].premium_type in amo.ADDON_FREES:
        raise ValidationError('App must be a premium app.')
    return attrs

def validate_league(self, league):
    if league in self.context['request'].user.leagues.accepted():
        return league
    raise ValidationError(
        'League can only be specified on creation and must be a league the user owns.')

def validate_email(self, value):
    # Reject emails that already belong to an existing driver account.
    if DriverUser.objects.filter(email=value).exists():
        raise ValidationError("Email already registered")
    return value

def __call__(self, attrs: dict):
    archiefprocedure = attrs.get(self.archiefprocedure_field)
    if archiefprocedure is None:
        return

    afleidingswijze = archiefprocedure["afleidingswijze"]

    # For each afleidingswijze, which sub-fields must be empty (False)
    # or filled in (True).
    mapping = {
        Afleidingswijze.afgehandeld: {
            "procestermijn": False,
            "datumkenmerk": False,
            "einddatum_bekend": False,
            "objecttype": False,
            "registratie": False,
        },
        Afleidingswijze.ander_datumkenmerk: {
            "procestermijn": False,
            "datumkenmerk": True,
            "objecttype": True,
            "registratie": True,
        },
        Afleidingswijze.eigenschap: {
            "procestermijn": False,
            "datumkenmerk": True,
            "objecttype": False,
            "registratie": False,
        },
        Afleidingswijze.gerelateerde_zaak: {
            "procestermijn": False,
            "datumkenmerk": False,
            "objecttype": False,
            "registratie": False,
        },
        Afleidingswijze.hoofdzaak: {
            "procestermijn": False,
            "datumkenmerk": False,
            "objecttype": False,
            "registratie": False,
        },
        Afleidingswijze.ingangsdatum_besluit: {
            "procestermijn": False,
            "datumkenmerk": False,
            "objecttype": False,
            "registratie": False,
        },
        Afleidingswijze.termijn: {
            "procestermijn": True,
            "datumkenmerk": False,
            "einddatum_bekend": False,
            "objecttype": False,
            "registratie": False,
        },
        Afleidingswijze.vervaldatum_besluit: {
            "procestermijn": False,
            "datumkenmerk": False,
            "objecttype": False,
            "registratie": False,
        },
        Afleidingswijze.zaakobject: {
            "procestermijn": False,
            "datumkenmerk": True,
            "objecttype": True,
            "registratie": False,
        },
    }

    error, empty, required = validate_brondatumarchiefprocedure(
        archiefprocedure, mapping[afleidingswijze]
    )

    if error:
        error_dict = {}
        for fieldname in empty:
            error_dict[f"{self.archiefprocedure_field}.{fieldname}"] = ErrorDetail(
                self.empty_message.format(afleidingswijze), self.empty_code
            )
        for fieldname in required:
            error_dict[f"{self.archiefprocedure_field}.{fieldname}"] = ErrorDetail(
                self.required_message.format(afleidingswijze), self.required_code
            )
        raise ValidationError(error_dict)

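# Hedged sketch: a __call__-based validator like the one above is typically
# attached via the serializer's Meta.validators, so DRF passes it the full
# attrs dict and cross-field checks become possible. The validator class
# name, its constructor argument, and the serializer/model below are
# assumptions for illustration; only the __call__(self, attrs) contract
# comes from the code above.
from rest_framework import serializers

class ResultaatTypeSerializer(serializers.ModelSerializer):
    class Meta:
        model = ResultaatType  # hypothetical model
        fields = "__all__"
        validators = [
            # DRF invokes each entry with the complete validated data.
            BrondatumArchiefprocedureValidator("brondatum_archiefprocedure"),
        ]
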
def validate(self, data):
    if 'options' not in data and 'items' not in data:
        raise ValidationError(
            "Either the 'options' field or the 'items' field is required.")
    return super(MenuScreenSerializer, self).validate(data)

def validate_newPassword(self, password):
    if len(password) < 6:
        raise ValidationError('Password must be at least 6 characters')
    return password

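# Hedged sketch of how DRF discovers field-level hooks like the one above:
# during is_valid(), a method named validate_<field_name> is called with that
# field's deserialized value. The serializer below is hypothetical and
# assumes a configured Django/DRF environment.
from rest_framework import serializers

class PasswordChangeSerializer(serializers.Serializer):
    newPassword = serializers.CharField()

    def validate_newPassword(self, password):
        if len(password) < 6:
            raise serializers.ValidationError(
                'Password must be at least 6 characters')
        return password

serializer = PasswordChangeSerializer(data={'newPassword': 'abc'})
serializer.is_valid()  # False
serializer.errors      # {'newPassword': ['Password must be at least 6 characters']}
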
def _parse_links(self, links):
    """
    Parse link information supplied by the user.

    We expect links to come to us in a format that looks like:

        "links": {
            "algebra_problem_bank": {
                "bundle_uuid": "408d549c-2ebf-4bae-9350-d72109a54163",
                "version": 1
            },
            "link_to_delete": None
        }

    Once we have this information, we need to verify that the linked
    Bundles actually exist, and then return a dict of link names to
    direct Dependencies.
    """
    names_to_dependencies = {}
    for name, bv_info in links.items():
        # If bv_info is None, we want to delete this Link (set to None).
        if name and bv_info is None:
            names_to_dependencies[name] = None
            continue

        # Check that our fields exist.
        if 'bundle_uuid' not in bv_info:
            raise ValidationError(
                f"Link {name} has no 'bundle_uuid' specified.")
        if 'version' not in bv_info:
            raise ValidationError(
                f"Link {name} has no 'version' specified.")

        # Check that our field values make sense (proper types).
        if not isinstance(name, str):
            raise ValidationError(f"{name} is not a valid Link name.")
        version = bv_info['version']
        # Python's bool is a subclass of int, so reject it explicitly.
        if (not isinstance(version, int)) or isinstance(version, bool):
            raise ValidationError(
                f"Link {name}: {version} must be an integer.")
        try:
            bundle_uuid_str = bv_info['bundle_uuid']
            bundle_uuid = uuid.UUID(bundle_uuid_str)
        except ValueError as err:
            raise ValidationError(
                f"Link {name}: {bundle_uuid_str} is not a valid UUID."
            ) from err

        # At this point it's syntactically correct, but it might be pointing
        # to a BundleVersion that doesn't really exist.
        bundle_version = BundleVersion.get_bundle_version(
            bundle_uuid=bundle_uuid,
            version_num=version,
        )
        if not bundle_version:
            raise ValidationError(
                "BundleVersion ({}, {}) referenced in Link {} does not exist."
                .format(bundle_uuid, version, name))

        # If everything checks out, create a Dependency. We can't make a
        # Link yet because we don't know the indirect Dependencies (i.e.
        # this Dependency's dependencies).
        names_to_dependencies[name] = Dependency(
            bundle_uuid=bundle_uuid,
            version=version,
            snapshot_digest=bundle_version.snapshot_digest_bytes,
        )
    return names_to_dependencies

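# Hedged usage sketch for _parse_links, using the payload shape from its
# docstring. `serializer` stands in for whatever object defines the method,
# and the UUID/version must reference an existing BundleVersion or a
# ValidationError is raised.
links = {
    "algebra_problem_bank": {
        "bundle_uuid": "408d549c-2ebf-4bae-9350-d72109a54163",
        "version": 1,
    },
    "link_to_delete": None,  # None requests deletion of that Link
}
names_to_dependencies = serializer._parse_links(links)
# -> {"algebra_problem_bank": Dependency(...), "link_to_delete": None}
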
def validate_with_items(self, value, loop_value='with_dict'):
    # Cross-field check: at least one of with_items / with_dict must be set.
    if value is None and self.custom_data.get(loop_value, None) is None:
        raise ValidationError("with_items or with_dict field is required")
    return value

def validate(self, data):
    # Prevent duplicate registrations for the same email address.
    if User.objects.filter(email=data['email']).exists():
        raise ValidationError("This user has already registered")
    return data

def validate(self, val):
    if "|" in val["site"]:
        raise ValidationError("Site name cannot contain the | character")
    return val

def validate_email(self, value):
    if User.objects.filter(email=value).exists():
        raise ValidationError("This email is already registered.")
    return value

def deve_ter_extensao_valida(nome: str):
    # Validate the file extension (the text after the final dot).
    # Note: the comparison is case-sensitive, so 'PDF' would be rejected.
    extensao = nome.split('.')[-1]
    if extensao not in ['doc', 'docx', 'pdf', 'png', 'jpg', 'jpeg']:
        raise ValidationError('Extensão inválida')  # "Invalid extension"
    return nome

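# Hedged usage sketch; these calls are illustrative and not from the source.
deve_ter_extensao_valida('relatorio.pdf')  # returns 'relatorio.pdf'
deve_ter_extensao_valida('script.exe')     # raises ValidationError('Extensão inválida')
deve_ter_extensao_valida('arquivo.PDF')    # also raises: the check is case-sensitive
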
def import_repository_version(importer_pk, destination_repo_pk, source_repo_name, tar_path):
    """
    Import a repository version from a Pulp export.

    Args:
        importer_pk (str): Importer we are working with
        destination_repo_pk (str): Primary key of Repository to import into.
        source_repo_name (str): Name of the Repository in the export.
        tar_path (str): A path to export tar.
    """
    dest_repo = Repository.objects.get(pk=destination_repo_pk)
    importer = PulpImporter.objects.get(pk=importer_pk)

    pb = ProgressReport(
        message=f"Importing content for {dest_repo.name}",
        code="import.repo.version.content",
        state=TASK_STATES.RUNNING,
    )
    pb.save()

    with tempfile.TemporaryDirectory() as temp_dir:
        # Extract the repo file for the repo info
        with tarfile.open(tar_path, "r:gz") as tar:
            tar.extract(REPO_FILE, path=temp_dir)

        with open(os.path.join(temp_dir, REPO_FILE), "r") as repo_data_file:
            data = json.load(repo_data_file)
            src_repo = next(repo for repo in data if repo["name"] == source_repo_name)

        if dest_repo.pulp_type != src_repo["pulp_type"]:
            raise ValidationError(
                _(
                    "Repository type mismatch: {src_repo} ({src_type}) vs {dest_repo} "
                    "({dest_type})."
                ).format(
                    src_repo=src_repo["name"],
                    src_type=src_repo["pulp_type"],
                    dest_repo=dest_repo.name,
                    dest_type=dest_repo.pulp_type,
                )
            )

        rv_name = ""
        # Extract the repo version files
        with tarfile.open(tar_path, "r:gz") as tar:
            for mem in tar.getmembers():
                match = re.search(fr"(^repository-{source_repo_name}_[0-9]+)/.+", mem.name)
                if match:
                    rv_name = match.group(1)
                    tar.extract(mem, path=temp_dir)

        if not rv_name:
            raise ValidationError(
                _("No RepositoryVersion found for {}").format(source_repo_name))

        rv_path = os.path.join(temp_dir, rv_name)
        # Content
        plugin_name = src_repo["pulp_type"].split(".")[0]
        cfg = get_plugin_config(plugin_name)

        resulting_content_ids = []
        for res_class in cfg.exportable_classes:
            filename = f"{res_class.__module__}.{res_class.__name__}.json"
            a_result = _import_file(os.path.join(rv_path, filename), res_class, do_raise=False)
            # django import-export can have a problem with concurrent-imports that are
            # importing the same 'thing' (e.g., a Package that exists in two different
            # repo-versions that are being imported at the same time). We will try an import
            # that will simply record errors as they happen (rather than failing with an
            # exception) first. If errors happen, we'll do one retry before we give up on
            # this repo-version's import.
            if a_result.has_errors():
                log.info(
                    _("...{} import-errors encountered importing {} from {}, retrying").format(
                        a_result.totals["error"], filename, rv_name
                    )
                )
                # Second attempt, we allow to raise an exception on any problem.
                # This will either succeed, or log a fatal error and fail.
                try:
                    a_result = _import_file(os.path.join(rv_path, filename), res_class)
                except Exception:  # noqa log on ANY exception and then re-raise
                    log.error(
                        _("FATAL import-failure importing {} from {}").format(filename, rv_name)
                    )
                    raise

            resulting_content_ids.extend(
                row.object_id for row in a_result.rows if row.import_type in ("new", "update")
            )

        # Once all content exists, create the ContentArtifact links
        ca_path = os.path.join(rv_path, CA_FILE)
        _import_file(ca_path, ContentArtifactResource)

        # See if we have a content mapping
        mapping_path = f"{rv_name}/{CONTENT_MAPPING_FILE}"
        mapping = {}
        with tarfile.open(tar_path, "r:gz") as tar:
            if mapping_path in tar.getnames():
                tar.extract(mapping_path, path=temp_dir)
                with open(os.path.join(temp_dir, mapping_path), "r") as mapping_file:
                    mapping = json.load(mapping_file)

        if mapping:
            # Use the content mapping to map content to repos
            for repo_name, content_ids in mapping.items():
                repo = _destination_repo(importer, repo_name)
                content = Content.objects.filter(upstream_id__in=content_ids)
                with repo.new_version() as new_version:
                    new_version.set_content(content)
        else:
            # Just map all the content to our destination repo
            content = Content.objects.filter(pk__in=resulting_content_ids)
            with dest_repo.new_version() as new_version:
                new_version.set_content(content)

        content_count = content.count()
        pb.total = content_count
        pb.done = content_count
        pb.state = TASK_STATES.COMPLETED
        pb.save()

    gpr = TaskGroup.current().group_progress_reports.filter(code="import.repo.versions")
    gpr.update(done=F("done") + 1)
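
# Hypothetical CONTENT_MAPPING_FILE contents, inferred only from the
# `for repo_name, content_ids in mapping.items()` loop above: repository
# names mapped to lists of upstream content ids. The names and ids here
# are made up for illustration.
mapping = {
    "repo-a": ["f1e2d3c4-0000-0000-0000-000000000001"],
    "repo-b": ["f1e2d3c4-0000-0000-0000-000000000002",
               "f1e2d3c4-0000-0000-0000-000000000003"],
}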