def clean_deck_order(self):
    """
    Clean and validate the JSON POSTed in the deck_order field.

    This field describes how decks should be sorted in the collection.
    Errors are manually added to the errorlist because this is a custom
    field.

    Raises:
        forms.ValidationError: if the JSON is malformed or any deck
            entry is missing keys or has a non-integer sort value.
    """
    field = 'deck_order'
    deck_data = []
    errors = ErrorList()
    if field in self.data:
        try:
            deck_order = json.loads(self.data[field])
        except ValueError:
            # BUG FIX: malformed JSON previously escaped as an uncaught
            # exception (server error); report it as a validation error.
            deck_order = {}
            errors.append("deck_order is not valid JSON")
        # Guard against payloads that parse to a non-dict (e.g. a bare
        # int), which would make the membership test raise TypeError.
        if isinstance(deck_order, dict) and 'data' in deck_order:
            deck_data = deck_order['data']
        for d in deck_data:
            if isinstance(d, dict) and 'deck_id' in d and 'sort_order' in d:
                try:
                    int(d['sort_order'])
                except (ValueError, TypeError):
                    # TypeError covers non-scalar sort values (None,
                    # lists, ...) which previously crashed the form.
                    errstr = "deck %s has invalid sort value: %s" % (
                        d['deck_id'], d['sort_order'])
                    errors.append(errstr)
            else:
                errors.append("deck_id and sort_order required")
                break
    if errors:
        self._errors.setdefault(field, errors)
        raise forms.ValidationError("Deck order field has errors")
    self.cleaned_data['deck_order'] = deck_data
def clean_deck_order(self):
    """
    Clean and validate the JSON POSTed in the deck_order field.

    This field describes how decks should be sorted in the collection.
    Errors are manually added to the errorlist because this is a custom
    field.

    Raises:
        forms.ValidationError: if the JSON is malformed or any deck
            entry is missing keys or has a non-integer sort value.
    """
    field = 'deck_order'
    deck_data = []
    errors = ErrorList()
    if field in self.data:
        try:
            deck_order = json.loads(self.data[field])
        except ValueError:
            # BUG FIX: malformed JSON previously escaped as an uncaught
            # exception (server error); report it as a validation error.
            deck_order = {}
            errors.append("deck_order is not valid JSON")
        # Guard against payloads that parse to a non-dict (e.g. a bare
        # int), which would make the membership test raise TypeError.
        if isinstance(deck_order, dict) and 'data' in deck_order:
            deck_data = deck_order['data']
        for d in deck_data:
            if isinstance(d, dict) and 'deck_id' in d and 'sort_order' in d:
                try:
                    int(d['sort_order'])
                except (ValueError, TypeError):
                    # TypeError covers non-scalar sort values (None,
                    # lists, ...) which previously crashed the form.
                    errstr = "deck %s has invalid sort value: %s" % (
                        d['deck_id'], d['sort_order'])
                    errors.append(errstr)
            else:
                errors.append("deck_id and sort_order required")
                break
    if errors:
        self._errors.setdefault(field, errors)
        raise forms.ValidationError("Deck order field has errors")
    self.cleaned_data['deck_order'] = deck_data
def account_login(request):
    """
    Log a user in by e-mail address.

    Looks up the account case-insensitively (e-mail is lowercased),
    authenticates, and redirects to the POSTed 'next' URL on success.
    Non-field errors are attached to the form for display on failure.
    """
    if request.user.is_authenticated():
        return redirect('home')
    if request.POST:
        login_form = LoginForm(request.POST)
        if login_form.is_valid():
            users = UserAccount.objects.filter(
                email=login_form.cleaned_data['email'].lower())
            if len(users) > 0:
                user = authenticate(
                    email=users[0].email,
                    password=login_form.cleaned_data['password'])
                if user is not None:
                    if user.is_active:
                        login(request, user)
                        # BUG FIX: request.POST['next'] raised KeyError
                        # when the hidden 'next' field was missing; fall
                        # back to the home page instead.
                        # NOTE(review): 'next' is user-supplied and not
                        # validated -- possible open redirect; consider
                        # restricting it to local URLs.
                        next_url = request.POST.get('next')
                        if next_url:
                            return HttpResponseRedirect(next_url)
                        return redirect('home')
                    else:
                        # BUG FIX: inactive accounts previously fell
                        # through silently with no message.
                        errors = login_form._errors.setdefault(
                            forms.NON_FIELD_ERRORS, ErrorList())
                        errors.append('This account is inactive.')
                else:
                    # user does not authenticate
                    errors = login_form._errors.setdefault(
                        forms.NON_FIELD_ERRORS, ErrorList())
                    errors.append('The password for this account is incorrect.')
            else:
                # user doesn't exist
                errors = login_form._errors.setdefault(
                    forms.NON_FIELD_ERRORS, ErrorList())
                errors.append('There is no account registered with this e-mail address.')
    else:
        login_form = LoginForm()
    return render(request, 'general/login.html',
                  {'form': login_form,
                   # .get() is equivalent to the old explicit membership
                   # test (returns None when 'next' is absent).
                   'next': request.GET.get('next')})
def clean(self, value):
    """
    Run the child block's clean() over every item and enforce the
    min_num/max_num limits from meta.

    Per-child failures are collected positionally (None marks children
    that cleaned successfully); list-level problems go into a separate
    ErrorList. Raises ListBlockValidationError if anything failed.
    """
    cleaned_items = []
    per_child_errors = []
    list_level_errors = ErrorList()

    for raw_item in value:
        try:
            cleaned_items.append(self.child_block.clean(raw_item))
        except ValidationError as err:
            per_child_errors.append(ErrorList([err]))
        else:
            per_child_errors.append(None)

    item_count = len(value)
    min_allowed = self.meta.min_num
    max_allowed = self.meta.max_num
    if min_allowed is not None and item_count < min_allowed:
        list_level_errors.append(ValidationError(
            _('The minimum number of items is %d') % min_allowed))
    if max_allowed is not None and item_count > max_allowed:
        list_level_errors.append(ValidationError(
            _('The maximum number of items is %d') % max_allowed))

    if any(per_child_errors) or list_level_errors:
        raise ListBlockValidationError(block_errors=per_child_errors,
                                       non_block_errors=list_level_errors)
    return cleaned_items
def clean(self, value):
    """
    Clean every child block and validate stream-level constraints.

    Collects per-child ValidationErrors keyed by position, plus
    non-block errors for the required/min_num/max_num/block_counts
    rules. Raises StreamBlockValidationError if anything failed;
    otherwise returns a new StreamValue of the cleaned children.
    """
    cleaned_data = []
    errors = {}
    non_block_errors = ErrorList()
    for i, child in enumerate(value):  # child is a StreamChild instance
        try:
            cleaned_data.append(
                (child.block.name, child.block.clean(child.value), child.id))
        except ValidationError as e:
            # Key the error by position so it lines up with the stream item.
            errors[i] = ErrorList([e])

    # An explicit min_num takes precedence over the generic "required"
    # check, so the more specific message wins.
    if self.meta.min_num is not None and self.meta.min_num > len(value):
        non_block_errors.append(
            ValidationError(
                _("The minimum number of items is %d") % self.meta.min_num))
    elif self.required and len(value) == 0:
        non_block_errors.append(
            ValidationError(_("This field is required.")))

    if self.meta.max_num is not None and self.meta.max_num < len(value):
        non_block_errors.append(
            ValidationError(
                _("The maximum number of items is %d") % self.meta.max_num))

    # Per-block-type count limits: meta.block_counts maps a block name to
    # a dict with optional 'min_num' / 'max_num' entries.
    if self.meta.block_counts:
        block_counts = defaultdict(int)
        for item in value:
            block_counts[item.block_type] += 1
        for block_name, min_max in self.meta.block_counts.items():
            block = self.child_blocks[block_name]
            max_num = min_max.get("max_num", None)
            min_num = min_max.get("min_num", None)
            block_count = block_counts[block_name]
            if min_num is not None and min_num > block_count:
                non_block_errors.append(
                    ValidationError("{}: {}".format(
                        block.label,
                        _("The minimum number of items is %d") % min_num,
                    )))
            if max_num is not None and max_num < block_count:
                non_block_errors.append(
                    ValidationError("{}: {}".format(
                        block.label,
                        _("The maximum number of items is %d") % max_num,
                    )))

    if errors or non_block_errors:
        # The message here is arbitrary - outputting error messages is delegated to the child blocks,
        # which only involves the 'params' list
        raise StreamBlockValidationError(block_errors=errors,
                                         non_block_errors=non_block_errors)

    return StreamValue(self, cleaned_data)
def clean(self, value):
    """
    Validates every value in the given list. A value is validated against
    the corresponding Field in self.fields.

    For example, if this MultiValueField was instantiated with
    fields=(DateField(), TimeField()), clean() would call
    DateField.clean(value[0]) and TimeField.clean(value[1]).
    """
    clean_data = []
    errors = ErrorList()
    if not value or isinstance(value, (list, tuple)):
        # The field counts as empty when no value was supplied at all or
        # every sub-value is one of this field's empty_values.
        if not value or not [
                v for v in value if v not in self.empty_values
        ]:
            if self.required:
                raise ValidationError(self.error_messages['required'],
                                      code='required')
            else:
                return self.compress([])
    else:
        # Anything that isn't a list/tuple can't be split into sub-values.
        raise ValidationError(self.error_messages['invalid'], code='invalid')
    for i, field in enumerate(self.fields):
        try:
            field_value = value[i]
        except IndexError:
            # Fewer sub-values than fields: treat the missing ones as empty.
            field_value = None
        if field_value in self.empty_values:
            if self.require_all_fields:
                # Raise a 'required' error if the MultiValueField is
                # required and any field is empty.
                if self.required:
                    raise ValidationError(self.error_messages['required'],
                                          code='required')
            elif field.required:
                # Otherwise, add an 'incomplete' error to the list of
                # collected errors and skip field cleaning, if a required
                # field is empty.
                if field.error_messages['incomplete'] not in errors:
                    errors.append(field.error_messages['incomplete'])
                continue
        try:
            clean_data.append(field.clean(field_value))
        except ValidationError as e:
            # Collect all validation errors in a single list, which we'll
            # raise at the end of clean(), rather than raising a single
            # exception for the first error we encounter. Skip duplicates.
            errors.extend(m for m in e.error_list if m not in errors)
    if errors:
        raise ValidationError(errors)

    out = self.compress(clean_data)
    self.validate(out)
    self.run_validators(out)
    return out
def form_valid(self, form):
    """
    Require the Django test cookie before accepting the form.

    If cookies are disabled, attach a non-field error and re-render;
    otherwise delegate to the parent and persist the form data cookie
    on the outgoing response.
    """
    session = self.request.session
    if not session.test_cookie_worked():
        cookie_error = ErrorList()
        cookie_error.append("You must allow cookies from python.org to proceed.")
        form._errors.setdefault("__all__", cookie_error)
        return self.form_invalid(form)

    response = super().form_valid(form)
    self._set_form_data_cookie(form, response)
    return response
def clean(self):
    """
    Ensure the two e-mail fields match; a mismatch is reported as an
    error attached to the email2 field.
    """
    cleaned = super(FeedBackForm, self).clean()
    data = self.cleaned_data
    both_present = 'email1' in data and 'email2' in data
    if both_present and data['email1'] != data['email2']:
        # setdefault reuses an existing email2 error list or installs a
        # fresh one, then the mismatch message is appended either way.
        error_list = self._errors.setdefault("email2", ErrorList())
        error_list.append(u"The Emails did not match")
    return cleaned
def login_view(request):
    """
    Standard Django login, with additions:
        Lowercase the login email (username)
        Check user has accepted ToS, if any.
    """
    if request.method == "POST":
        # 'next' may arrive in the POST body or the query string.
        # NOTE(review): the redirect target is user-supplied and is not
        # validated -- possible open redirect; confirm and consider
        # restricting it to local URLs.
        redirect_to = request.POST.get('next', request.GET.get('next', False))
        if not redirect_to:
            redirect_to = reverse('seed:home')
        form = LoginForm(request.POST)
        if form.is_valid():
            new_user = authenticate(
                username=form.cleaned_data['email'].lower(),
                password=form.cleaned_data['password']
            )
            if new_user and new_user.is_active:
                # determine if user has accepted ToS, if one exists
                try:
                    user_accepted_tos = has_user_agreed_latest_tos(new_user)
                except NoActiveTermsOfService:
                    # there's no active ToS, skip interstitial
                    user_accepted_tos = True
                if user_accepted_tos:
                    login(request, new_user)
                    return HttpResponseRedirect(redirect_to)
                else:
                    # store login info for django-tos to handle
                    request.session['tos_user'] = new_user.pk
                    request.session['tos_backend'] = new_user.backend
                    context = RequestContext(request)
                    context.update({
                        'next': redirect_to,
                        'tos': TermsOfService.objects.get_current_tos()
                    })
                    return render_to_response(
                        'tos/tos_check.html',
                        context_instance=context
                    )
            else:
                # Wrong credentials or inactive account: attach a
                # non-field error to the form for display.
                errors = ErrorList()
                errors = form._errors.setdefault(NON_FIELD_ERRORS, errors)
                errors.append('Username and/or password were invalid.')
    else:
        form = LoginForm()
    # NOTE(review): locals() is passed as the template context, so every
    # local name above is exposed to the template; renaming locals here
    # would silently change the rendered context.
    return render_to_response(
        'landing/login.html',
        locals(),
        context_instance=RequestContext(request),
    )
def login_view(request):
    """
    Standard Django login, with additions:
        Lowercase the login email (username)
        Check user has accepted ToS, if any.
    """
    if request.method == "POST":
        # 'next' may arrive in the POST body or the query string.
        # NOTE(review): the redirect target is user-supplied and is not
        # validated -- possible open redirect; confirm and consider
        # restricting it to local URLs.
        redirect_to = request.POST.get('next', request.GET.get('next', False))
        if not redirect_to:
            redirect_to = reverse('seed:home')
        form = LoginForm(request.POST)
        if form.is_valid():
            new_user = authenticate(
                username=form.cleaned_data['email'].lower(),
                password=form.cleaned_data['password'])
            if new_user and new_user.is_active:
                # determine if user has accepted ToS, if one exists
                try:
                    user_accepted_tos = has_user_agreed_latest_tos(new_user)
                except NoActiveTermsOfService:
                    # there's no active ToS, skip interstitial
                    user_accepted_tos = True
                if user_accepted_tos:
                    login(request, new_user)
                    return HttpResponseRedirect(redirect_to)
                else:
                    # store login info for django-tos to handle
                    request.session['tos_user'] = new_user.pk
                    request.session['tos_backend'] = new_user.backend
                    context = RequestContext(request)
                    context.update({
                        'next': redirect_to,
                        'tos': TermsOfService.objects.get_current_tos()
                    })
                    return render_to_response('tos/tos_check.html',
                                              context_instance=context)
            else:
                # Wrong credentials or inactive account: attach a
                # non-field error to the form for display.
                errors = ErrorList()
                errors = form._errors.setdefault(NON_FIELD_ERRORS, errors)
                errors.append('Username and/or password were invalid.')
    else:
        form = LoginForm()
    # NOTE(review): locals() is passed as the template context, so every
    # local name above is exposed to the template; renaming locals here
    # would silently change the rendered context.
    return render_to_response(
        'landing/login.html',
        locals(),
        context_instance=RequestContext(request),
    )
def clean(self, value):
    """
    Validates every value in the given list. A value is validated against
    the corresponding Field in self.fields.

    For example, if this MultiValueField was instantiated with
    fields=(DateField(), TimeField()), clean() would call
    DateField.clean(value[0]) and TimeField.clean(value[1]).
    """
    clean_data = []
    errors = ErrorList()
    if not value or isinstance(value, (list, tuple)):
        # The field counts as empty when no value was supplied at all or
        # every sub-value is one of this field's empty_values.
        if not value or not [v for v in value if v not in self.empty_values]:
            if self.required:
                raise ValidationError(self.error_messages['required'],
                                      code='required')
            else:
                return self.compress([])
    else:
        # Anything that isn't a list/tuple can't be split into sub-values.
        raise ValidationError(self.error_messages['invalid'], code='invalid')
    for i, field in enumerate(self.fields):
        try:
            field_value = value[i]
        except IndexError:
            # Fewer sub-values than fields: treat the missing ones as empty.
            field_value = None
        if field_value in self.empty_values:
            if self.require_all_fields:
                # Raise a 'required' error if the MultiValueField is
                # required and any field is empty.
                if self.required:
                    raise ValidationError(self.error_messages['required'],
                                          code='required')
            elif field.required:
                # Otherwise, add an 'incomplete' error to the list of
                # collected errors and skip field cleaning, if a required
                # field is empty.
                if field.error_messages['incomplete'] not in errors:
                    errors.append(field.error_messages['incomplete'])
                continue
        try:
            clean_data.append(field.clean(field_value))
        except ValidationError as e:
            # Collect all validation errors in a single list, which we'll
            # raise at the end of clean(), rather than raising a single
            # exception for the first error we encounter. Skip duplicates.
            errors.extend(m for m in e.error_list if m not in errors)
    if errors:
        raise ValidationError(errors)

    out = self.compress(clean_data)
    self.validate(out)
    self.run_validators(out)
    return out
def read_and_save_data(self, file_contents, site):
    """
    Import redirects from the first sheet of an uploaded spreadsheet.

    Each row holds (old_path, redirect_link); both must be present and
    start with '/'. Rows whose normalised old path duplicates an
    existing redirect for the site are skipped.

    Returns:
        ErrorList describing every skipped or invalid row (empty on
        full success).
    """
    errors = ErrorList()
    try:
        book = xlrd.open_workbook(file_contents=file_contents.read())
    except (IOError, xlrd.XLRDError):
        # BUG FIX: xlrd reports corrupt/unsupported files with XLRDError,
        # not IOError; previously that escaped as an unhandled exception.
        errors.append(_("Something went wrong while reading the file."))
        return errors
    sheet = book.sheets()[0]
    with transaction.atomic():
        for row_id in range(0, sheet.nrows):
            data = sheet.row_values(row_id)
            old_path, redirect_link = data
            if old_path and redirect_link:
                if old_path.startswith('/') and redirect_link.startswith('/'):
                    # Based on the wagtail.contrib.redirects form validation
                    # https://github.com/wagtail/wagtail/blob/master/wagtail/contrib/redirects/forms.py#L34
                    _old_path = Redirect.normalise_path(old_path)
                    duplicates = Redirect.objects.filter(old_path=_old_path,
                                                         site=site)
                    if duplicates:
                        errors.append(
                            _(
                                "Row: {} - Skipped import: the old path is "
                                "a duplicate of an earlier record.".format(row_id + 1)
                            )
                        )
                    else:
                        Redirect.objects.create(old_path=old_path,
                                                redirect_link=redirect_link,
                                                site=site)
                else:
                    errors.append(
                        _("Row: {} - The old path and new path, must both start with /".format(row_id + 1))
                    )
            else:
                errors.append(_("Row: {} - The old path and new path, must both be filled in.".format(row_id + 1)))
    return errors
def perform_action(self):
    """Create the requested folder inside the form's working directory."""
    error_list = ErrorList()
    folder_name = self.cleaned_data['folder_name']
    target_path = os.path.join(self.file_dir, folder_name)
    try:
        os.mkdir(target_path)
    except FileExistsError:
        error_list.append(
            format_html('Item named <i>{}</i> already exists', folder_name))
    except OSError:
        error_list.append(
            format_html('Unable to create <i>{}</i>', folder_name))
    return 'Your folder has been created', error_list
def clean(self, value):
    """
    Clean every child block and validate stream-level constraints.

    Collects per-child ValidationErrors keyed by position, plus
    non-block errors for the required/min_num/max_num/block_counts
    rules. Raises StreamBlockValidationError if anything failed;
    otherwise returns a new StreamValue of the cleaned children.
    """
    cleaned_data = []
    errors = {}
    non_block_errors = ErrorList()
    for i, child in enumerate(value):  # child is a StreamChild instance
        try:
            cleaned_data.append(
                (child.block.name, child.block.clean(child.value), child.id)
            )
        except ValidationError as e:
            # Key the error by position so it lines up with the stream item.
            errors[i] = ErrorList([e])

    # An explicit min_num takes precedence over the generic "required"
    # check, so the more specific message wins.
    if self.meta.min_num is not None and self.meta.min_num > len(value):
        non_block_errors.append(ValidationError(
            _('The minimum number of items is %d') % self.meta.min_num
        ))
    elif self.required and len(value) == 0:
        non_block_errors.append(ValidationError(_('This field is required.')))

    if self.meta.max_num is not None and self.meta.max_num < len(value):
        non_block_errors.append(ValidationError(
            _('The maximum number of items is %d') % self.meta.max_num
        ))

    # Per-block-type count limits: meta.block_counts maps a block name to
    # a dict with optional 'min_num' / 'max_num' entries.
    if self.meta.block_counts:
        block_counts = collections.defaultdict(int)
        for item in value:
            block_counts[item.block_type] += 1
        for block_name, min_max in self.meta.block_counts.items():
            block = self.child_blocks[block_name]
            max_num = min_max.get('max_num', None)
            min_num = min_max.get('min_num', None)
            block_count = block_counts[block_name]
            if min_num is not None and min_num > block_count:
                non_block_errors.append(ValidationError(
                    '{}: {}'.format(block.label, _('The minimum number of items is %d') % min_num)
                ))
            if max_num is not None and max_num < block_count:
                non_block_errors.append(ValidationError(
                    '{}: {}'.format(block.label, _('The maximum number of items is %d') % max_num)
                ))

    if errors or non_block_errors:
        # The message here is arbitrary - outputting error messages is delegated to the child blocks,
        # which only involves the 'params' list
        raise StreamBlockValidationError(block_errors=errors,
                                         non_block_errors=non_block_errors)

    return StreamValue(self, cleaned_data)
def perform_action(self):
    """Upload every file submitted in the 'file_field' input."""
    error_list = ErrorList()
    for upload in self.files.getlist('file_field'):
        try:
            ProjectFiles().fput(self.file_dir, upload)
        except FileExistsError:
            error_list.append(
                format_html('Item named <i>{}</i> already exists',
                            upload.name))
        except OSError:
            error_list.append(
                format_html('Unable to upload <i>{}</i>', upload.name))
    return 'Your files have been uploaded', error_list
def validate(self, value, errors=None):
    '''Performs validation of the StreamBlock.

    'errors' holds per-child block errors collected by the caller; they
    are surfaced together with the stream-level errors computed here via
    StreamBlockValidationError. Nothing is returned on success.
    '''
    if errors is None:
        errors = {}
    non_block_errors = ErrorList()

    # An explicit min_num takes precedence over the generic "required"
    # check, so the more specific message wins.
    if self.meta.min_num is not None and self.meta.min_num > len(value):
        non_block_errors.append(
            ValidationError(
                _('The minimum number of items is %d') % self.meta.min_num))
    elif self.required and len(value) == 0:
        non_block_errors.append(
            ValidationError(_('This field is required.')))

    if self.meta.max_num is not None and self.meta.max_num < len(value):
        non_block_errors.append(
            ValidationError(
                _('The maximum number of items is %d') % self.meta.max_num))

    # Per-block-type count limits: meta.block_counts maps a block name to
    # a dict with optional 'min_num' / 'max_num' entries.
    if self.meta.block_counts:
        block_counts = collections.defaultdict(int)
        for item in value:
            block_counts[item.block_type] += 1
        for block_name, min_max in self.meta.block_counts.items():
            block = self.child_blocks[block_name]
            max_num = min_max.get('max_num', None)
            min_num = min_max.get('min_num', None)
            block_count = block_counts[block_name]
            if min_num is not None and min_num > block_count:
                non_block_errors.append(
                    ValidationError('{}: {}'.format(
                        block.label,
                        _('The minimum number of items is %d') % min_num)))
            if max_num is not None and max_num < block_count:
                non_block_errors.append(
                    ValidationError('{}: {}'.format(
                        block.label,
                        _('The maximum number of items is %d') % max_num)))

    if errors or non_block_errors:
        # The message here is arbitrary - outputting error messages is delegated to the child blocks,
        # which only involves the 'params' list
        raise StreamBlockValidationError(block_errors=errors,
                                         non_block_errors=non_block_errors)
def perform_action(self):
    """Write each uploaded file into the working directory (no overwrite)."""
    error_list = ErrorList()
    for upload in self.files.getlist('file_field'):
        destination = os.path.join(self.file_dir, upload.name)
        try:
            utility.write_uploaded_file(file=upload,
                                        overwrite=False,
                                        write_file_path=destination)
        except FileExistsError:
            error_list.append(
                format_html('Item named <i>{}</i> already exists',
                            upload.name))
        except OSError:
            error_list.append(
                format_html('Unable to upload <i>{}</i>', upload.name))
    return 'Your files have been uploaded', error_list
def perform_action(self):
    """Delete each selected item, collecting per-item failures."""
    error_list = ErrorList()
    for item_name in self.cleaned_data['items']:
        item_path = os.path.join(self.file_dir, item_name)
        try:
            utility.remove_items([item_path], ignore_missing=False)
        except OSError as err:
            if os.path.exists(item_path):
                # The item exists but could not be removed; report the
                # path that actually failed (which may be a child of
                # item_path when removing a directory tree).
                failed = os.path.relpath(err.filename or item_path,
                                         self.file_dir)
                error_list.append(
                    format_html('Unable to delete <i>{}</i>', failed))
            else:
                error_list.append(
                    format_html('Item named <i>{}</i> did not exist',
                                item_name))
    return 'Your items have been deleted', error_list
def perform_action(self):
    """Move each selected item into the chosen destination folder."""
    error_list = ErrorList()
    destination = self.cleaned_data['destination_folder']
    for item_name in self.cleaned_data['items']:
        source_path = os.path.join(self.file_dir, item_name)
        try:
            utility.move_items([source_path], self.dest_dir)
        except FileExistsError:
            error_list.append(
                format_html(
                    'Item named <i>{}</i> already exists in <i>{}</i>',
                    item_name, destination))
        except OSError:
            if os.path.exists(source_path):
                error_list.append(
                    format_html('Unable to move <i>{}</i> into <i>{}</i>',
                                item_name, destination))
            else:
                error_list.append(
                    format_html('Item named <i>{}</i> does not exist',
                                item_name))
    return 'Your items have been moved', error_list
def read_and_save_data(self, file_contents, site):
    """
    Import redirects from the first sheet of an uploaded spreadsheet.

    Each row holds (old_path, redirect_link); both must be present and
    start with '/'.

    Returns:
        ErrorList describing every invalid row (empty on full success).
    """
    errors = ErrorList()
    try:
        book = xlrd.open_workbook(file_contents=file_contents.read())
    except (IOError, xlrd.XLRDError):
        # BUG FIX: xlrd reports corrupt/unsupported files with XLRDError,
        # not IOError; previously that escaped as an unhandled exception.
        errors.append(_("Something went wrong while reading the file."))
        return errors
    sheet = book.sheets()[0]
    with transaction.atomic():
        for row_id in range(0, sheet.nrows):
            data = sheet.row_values(row_id)
            old_path = data[0]
            redirect_link = data[1]
            if old_path and redirect_link:
                if old_path.startswith('/') and redirect_link.startswith('/'):
                    # get_or_create makes re-importing the same sheet
                    # idempotent for identical rows.
                    Redirect.objects.get_or_create(
                        old_path=old_path,
                        redirect_link=redirect_link,
                        site=site)
                else:
                    errors.append(
                        _("Row: {} - The old path and new path, must both start with /"
                          .format(row_id + 1)))
            else:
                errors.append(
                    _("Row: {} - The old path and new path, must both be filled in."
                      .format(row_id + 1)))
    return errors
def clean(self, value):
    """
    Clean every child of the list and enforce min_num/max_num limits.

    Raises ListBlockValidationError on any failure; otherwise returns a
    fresh ListValue built from the cleaned children.
    """
    # value is expected to be a ListValue, but if it's been assigned through external code it might
    # be a plain list; normalise it to a ListValue
    if not isinstance(value, ListValue):
        value = ListValue(self, values=value)

    result = []
    errors = []
    non_block_errors = ErrorList()
    # 'errors' keeps one slot per child so failure positions line up with
    # the list items (None = cleaned successfully).
    for bound_block in value.bound_blocks:
        try:
            result.append(
                ListValue.ListChild(
                    self.child_block,
                    self.child_block.clean(bound_block.value),
                    id=bound_block.id,
                ))
        except ValidationError as e:
            errors.append(ErrorList([e]))
        else:
            errors.append(None)

    # List-level size constraints from meta.
    if self.meta.min_num is not None and self.meta.min_num > len(value):
        non_block_errors.append(
            ValidationError(
                _("The minimum number of items is %d") % self.meta.min_num))

    if self.meta.max_num is not None and self.meta.max_num < len(value):
        non_block_errors.append(
            ValidationError(
                _("The maximum number of items is %d") % self.meta.max_num))

    if any(errors) or non_block_errors:
        raise ListBlockValidationError(block_errors=errors,
                                       non_block_errors=non_block_errors)

    return ListValue(self, bound_blocks=result)
def clean_sub_fields(self, value):
    """'value' being the list of the values of the subfields, validate
    each subfield."""
    cleaned_values = []
    error_list = ErrorList()

    # Drop the trailing SKIP_CHECK_NAME boolean field when present.
    sub_fields = self.fields if not self.skip_check else self.fields[:-1]

    for idx, sub_field in enumerate(sub_fields):
        try:
            sub_value = value[idx]
        except IndexError:
            sub_value = None
        # Fall back to the field's default when no value was supplied.
        if sub_value is None and sub_field.initial not in (None, ""):
            sub_value = sub_field.initial
        # Honour each subfield's own 'required' flag rather than the
        # global one, so subfields can be individually optional.
        if sub_field.required and sub_value in validators.EMPTY_VALUES:
            error_list.append("%s: %s" % (sub_field.label,
                                          self.error_messages["required"]))
            continue
        try:
            cleaned_values.append(sub_field.clean(sub_value))
        except ValidationError as err:
            # Accumulate every failure so all problems are reported at
            # once instead of only the first one encountered.
            error_list.extend("%s: %s" % (sub_field.label, message)
                              for message in err.messages)

    if error_list:
        raise ValidationError(error_list)

    out = self.compress(cleaned_values)
    self.validate(out)
    return out
def perform_action(self):
    """Rename the single selected item to the requested new name."""
    error_list = ErrorList()
    current_name = self.cleaned_data['items'][0]
    target_name = self.cleaned_data['new_name']
    source_path = os.path.join(self.file_dir, current_name)
    target_path = os.path.join(self.file_dir, target_name)
    try:
        utility.rename_file(source_path, target_path)
    except FileExistsError:
        error_list.append(
            format_html('Item named <i>{}</i> already exists', target_name))
    except FileNotFoundError:
        error_list.append(
            format_html('Item named <i>{}</i> does not exist', current_name))
    except OSError:
        error_list.append(
            format_html('Unable to rename <i>{}</i> to <i>{}</i>',
                        current_name, target_name))
    return 'Your item has been renamed', error_list
class ActiveProject(Metadata, UnpublishedProject, SubmissionInfo): """ The project used for submitting The submission_status field: - 0 : Not submitted - 10 : Submitting author submits. Awaiting editor assignment. - 20 : Editor assigned. Awaiting editor decision. - 30 : Revisions requested. Waiting for resubmission. Loops back to 20 when author resubmits. - 40 : Accepted. In copyedit stage. Awaiting editor to copyedit. - 50 : Editor completes copyedit. Awaiting authors to approve. - 60 : Authors approve copyedit. Ready for editor to publish """ submission_status = models.PositiveSmallIntegerField(default=0) # Max number of active submitting projects a user is allowed to have MAX_SUBMITTING_PROJECTS = 10 INDIVIDUAL_FILE_SIZE_LIMIT = 10 * 1024**3 # Where all the active project files are kept FILE_ROOT = os.path.join(ProjectFiles().file_root, 'active-projects') REQUIRED_FIELDS = ( # 0: Database ('title', 'abstract', 'background', 'methods', 'content_description', 'usage_notes', 'conflicts_of_interest', 'version', 'license', 'short_description'), # 1: Software ('title', 'abstract', 'background', 'content_description', 'usage_notes', 'installation', 'conflicts_of_interest', 'version', 'license', 'short_description'), # 2: Challenge ('title', 'abstract', 'background', 'methods', 'content_description', 'usage_notes', 'conflicts_of_interest', 'version', 'license', 'short_description'), # 3: Model ('title', 'abstract', 'background', 'methods', 'content_description', 'usage_notes', 'installation', 'conflicts_of_interest', 'version', 'license', 'short_description'), ) # Custom labels that don't match model field names LABELS = ( # 0: Database { 'content_description': 'Data Description' }, # 1: Software { 'content_description': 'Software Description', 'methods': 'Technical Implementation', 'installation': 'Installation and Requirements' }, # 2: Challenge { 'background': 'Objective', 'methods': 'Participation', 'content_description': 'Data Description', 'usage_notes': 
'Evaluation' }, # 3: Model { 'content_description': 'Model Description', 'methods': 'Technical Implementation', 'installation': 'Installation and Requirements' }, ) SUBMISSION_STATUS_LABELS = { 0: 'Not submitted.', 10: 'Awaiting editor assignment.', 20: 'Awaiting editor decision.', 30: 'Revisions requested.', 40: 'Submission accepted; awaiting editor copyedits.', 50: 'Awaiting authors to approve publication.', 60: 'Awaiting editor to publish.', } class Meta: default_permissions = ('change', ) permissions = [('can_assign_editor', 'Can assign editor')] def storage_used(self): """ Total storage used in bytes. This includes the total size of new files uploaded to this project, as well as the total size of files published in past versions of this CoreProject. (The QuotaManager should ensure that the same file is not counted twice in this total.) """ current = ProjectFiles().active_project_storage_used(self) published = self.core_project.total_published_size return current + published def storage_allowance(self): """ Storage allowed in bytes """ return self.core_project.storage_allowance def get_inspect_dir(self, subdir): """ Return the folder to inspect if valid. subdir joined onto the file root of this project. 
""" # Sanitize subdir for illegal characters validate_subdir(subdir) # Folder must be a subfolder of the file root # (but not necessarily exist or be a directory) inspect_dir = os.path.join(self.file_root(), subdir) if inspect_dir.startswith(self.file_root()): return inspect_dir else: raise Exception('Invalid directory request') def file_url(self, subdir, file): """ Url of a file to download in this project """ return reverse('serve_active_project_file', args=(self.slug, os.path.join(subdir, file))) def file_display_url(self, subdir, file): """ URL of a file to display in this project """ return reverse('display_active_project_file', args=(self.slug, os.path.join(subdir, file))) def under_submission(self): """ Whether the project is under submission """ return bool(self.submission_status) def submission_deadline(self): return self.creation_datetime + timedelta(days=180) def submission_days_remaining(self): return (self.submission_deadline() - timezone.now()).days def submission_status_label(self): return ActiveProject.SUBMISSION_STATUS_LABELS[self.submission_status] def author_editable(self): """ Whether the project can be edited by its authors """ if self.submission_status in [0, 30]: return True def copyeditable(self): """ Whether the project can be copyedited """ if self.submission_status == 40: return True def archive(self, archive_reason): """ Archive the project. 
Create an ArchivedProject object, copy over the fields, and delete this object """ archived_project = ArchivedProject(archive_reason=archive_reason, slug=self.slug) modified_datetime = self.modified_datetime # Direct copy over fields for attr in [f.name for f in Metadata._meta.fields ] + [f.name for f in SubmissionInfo._meta.fields]: setattr(archived_project, attr, getattr(self, attr)) archived_project.save() # Redirect the related objects for reference in self.references.all(): reference.project = archived_project reference.save() for publication in self.publications.all(): publication.project = archived_project publication.save() for topic in self.topics.all(): topic.project = archived_project topic.save() for author in self.authors.all(): author.project = archived_project author.save() for edit_log in self.edit_logs.all(): edit_log.project = archived_project edit_log.save() for copyedit_log in self.copyedit_logs.all(): copyedit_log.project = archived_project copyedit_log.save() for parent_project in self.parent_projects.all(): archived_project.parent_projects.add(parent_project) UploadedDocument.objects.filter( object_id=self.pk, content_type=ContentType.objects.get_for_model(ActiveProject) ).update( object_id=archived_project.pk, content_type=ContentType.objects.get_for_model(ArchivedProject)) if self.resource_type.id == 1: languages = self.programming_languages.all() if languages: archived_project.programming_languages.add(*list(languages)) # Voluntary delete if archive_reason == 1: self.clear_files() else: # Move over files os.rename(self.file_root(), archived_project.file_root()) # Copy the ActiveProject timestamp to the ArchivedProject. # Since this is an auto_now field, save() doesn't allow # setting an arbitrary value. queryset = ArchivedProject.objects.filter(id=archived_project.id) queryset.update(modified_datetime=modified_datetime) return self.delete() def fake_delete(self): """ Appear to delete this project. Actually archive it. 
""" self.archive(archive_reason=1) def check_integrity(self): """ Run integrity tests on metadata fields and return whether the project passes the checks """ self.integrity_errors = ErrorList() # Invitations for invitation in self.authorinvitations.filter(is_active=True): self.integrity_errors.append( 'Outstanding author invitation to {0}'.format( invitation.email)) # Storage requests for storage_request in self.storagerequests.filter(is_active=True): self.integrity_errors.append('Outstanding storage request') # Authors for author in self.authors.all().order_by('display_order'): if not author.get_full_name(): self.integrity_errors.append( 'Author {0} has not fill in name'.format( author.user.username)) if not author.affiliations.all(): self.integrity_errors.append( 'Author {0} has not filled in affiliations'.format( author.user.username)) # Metadata for attr in ActiveProject.REQUIRED_FIELDS[self.resource_type.id]: value = getattr(self, attr) text = unescape(strip_tags(str(value))) if value is None or not text or text.isspace(): l = self.LABELS[ self.resource_type.id][attr] if attr in self.LABELS[ self.resource_type.id] else attr.title().replace( '_', ' ') self.integrity_errors.append( 'Missing required field: {0}'.format(l)) # Ethics if not self.ethics_statement: self.integrity_errors.append( 'Missing required field: Ethics Statement') published_projects = self.core_project.publishedprojects.all() if published_projects: published_versions = [p.version for p in published_projects] if self.version in published_versions: self.integrity_errors.append( 'The version matches a previously published version.') self.version_clash = True else: self.version_clash = False if self.access_policy != AccessPolicy.OPEN and self.dua is None: self.integrity_errors.append( 'You have to choose one of the data use agreements.') if self.integrity_errors: return False else: return True def is_submittable(self): """ Whether the project can be submitted """ return (not 
self.under_submission() and self.check_integrity()) def submit(self, author_comments): """ Submit the project for review. """ if not self.is_submittable(): raise Exception('ActiveProject is not submittable') self.submission_status = 10 self.submission_datetime = timezone.now() self.author_comments = author_comments self.save() # Create the first edit log EditLog.objects.create(project=self, author_comments=author_comments) def set_submitting_author(self): """ Used to save query time in templates """ self.submitting_author = self.submitting_author() def assign_editor(self, editor): """ Assign an editor to the project and set the submission status to the edit stage. """ self.editor = editor self.submission_status = 20 self.editor_assignment_datetime = timezone.now() self.save() def reassign_editor(self, editor): """ Reassign the current project editor with new editor """ self.editor = editor self.save() def reject(self): """ Reject a project under submission """ self.archive(archive_reason=3) def is_resubmittable(self): """ Submit the project for review. """ return (self.submission_status == 30 and self.check_integrity()) def resubmit(self, author_comments): """ """ if not self.is_resubmittable(): raise Exception('ActiveProject is not resubmittable') with transaction.atomic(): self.submission_status = 20 self.resubmission_datetime = timezone.now() self.save() # Create a new edit log EditLog.objects.create(project=self, is_resubmission=True, author_comments=author_comments) def reopen_copyedit(self): """ Reopen the project for copyediting """ if self.submission_status == 50: self.submission_status = 40 self.copyedit_completion_datetime = None self.save() CopyeditLog.objects.create(project=self, is_reedit=True) self.authors.all().update(approval_datetime=None) def approve_author(self, author): """" Approve an author. Move the project into the next state if the author is the final outstanding one. Return whether the process was successful. 
""" if self.submission_status == 50 and not author.approval_datetime: now = timezone.now() author.approval_datetime = now author.save() if self.all_authors_approved(): self.author_approval_datetime = now self.submission_status = 60 self.save() return True def all_authors_approved(self): """ Whether all authors have approved the publication """ authors = self.authors.all() return len(authors) == len( authors.filter(approval_datetime__isnull=False)) def is_publishable(self): """ Check whether a project may be published """ if self.submission_status == 60 and self.check_integrity( ) and self.all_authors_approved(): return True return False def clear_files(self): """ Delete the project file directory """ ProjectFiles().rmtree(self.file_root()) def publish(self, slug=None, make_zip=True, title=None): """ Create a published version of this project and update the submission status. Parameters ---------- slug : the desired custom slug of the published project. make_zip : whether to make a zip of all the files. """ if not self.is_publishable(): raise Exception('The project is not publishable') published_project = PublishedProject(has_wfdb=self.has_wfdb()) # Direct copy over fields for field in [f.name for f in Metadata._meta.fields ] + [f.name for f in SubmissionInfo._meta.fields]: setattr(published_project, field, getattr(self, field)) published_project.slug = slug or self.slug # Create project file root if this is first version or the first # version with a different access policy ProjectFiles().publish_initial(self, published_project) try: with transaction.atomic(): # If this is a new version, previous fields need to be updated # and slug needs to be carried over if self.version_order: previous_published_projects = self.core_project.publishedprojects.all( ) slug = previous_published_projects.first().slug title = previous_published_projects.first().title if slug != published_project.slug: raise ValueError({ "message": "The published project has different slugs." 
}) # Set the slug if specified published_project.slug = slug or self.slug published_project.title = title or self.title published_project.doi = self.doi # Change internal links (that point to files within # the active project) to point to their new locations # in the published project published_project.update_internal_links(old_project=self) published_project.save() # If this is a new version, all version fields have to be updated if self.version_order > 0: published_project.set_version_order() # Same content, different objects. for reference in self.references.all().order_by('id'): published_reference = PublishedReference.objects.create( description=reference.description, project=published_project) for publication in self.publications.all(): published_publication = PublishedPublication.objects.create( citation=publication.citation, url=publication.url, project=published_project) published_project.set_topics( [t.description for t in self.topics.all()]) for parent_project in self.parent_projects.all(): published_project.parent_projects.add(parent_project) if self.resource_type.id == 1: languages = self.programming_languages.all() if languages: published_project.programming_languages.add( *list(languages)) for author in self.authors.all(): author_profile = author.user.profile published_author = PublishedAuthor.objects.create( project=published_project, user=author.user, is_submitting=author.is_submitting, is_corresponding=author.is_corresponding, approval_datetime=author.approval_datetime, display_order=author.display_order, first_names=author_profile.first_names, last_name=author_profile.last_name, ) affiliations = author.affiliations.all() for affiliation in affiliations: published_affiliation = PublishedAffiliation.objects.create( name=affiliation.name, author=published_author) UploadedDocument.objects.filter( object_id=self.pk, content_type=ContentType.objects.get_for_model( ActiveProject)).update( object_id=published_project.pk, 
content_type=ContentType.objects.get_for_model( PublishedProject), ) if author.is_corresponding: published_author.corresponding_email = author.corresponding_email.email published_author.save() Contact.objects.create( name=author.get_full_name(), affiliations='; '.join(a.name for a in affiliations), email=author.corresponding_email, project=published_project) # Move the edit and copyedit logs for edit_log in self.edit_logs.all(): edit_log.project = published_project edit_log.save() for copyedit_log in self.copyedit_logs.all(): copyedit_log.project = published_project copyedit_log.save() published_project.required_trainings.set( self.required_trainings.all()) # Set files read only and make zip file if requested move_files_as_readonly( published_project.id, self.file_root(), published_project.file_root(), make_zip, verbose_name='Read Only Files - {}'.format( published_project), ) # Remove the ActiveProject self.delete() except BaseException: ProjectFiles().publish_rollback(self, published_project) raise ProjectFiles().publish_complete(self, published_project) return published_project
def non_field_errors(self): errors = ErrorList() for form in self.forms.values(): for error in form.non_field_errors(): errors.append(error) return errors
def append_validation_error(form, field_name, message):
    """
    Append ``message`` to the form's error list for ``field_name``,
    creating the ErrorList first if the field has no errors yet.
    """
    if form.errors.get(field_name) is None:
        form.errors[field_name] = ErrorList()
    form.errors[field_name].append(message)