def create(self, validated_data):
    validated_data_copy = dict(validated_data)
    # Many-to-many fields cannot be passed to the model constructor,
    # so remove them and set them after the instance has been saved.
    del validated_data_copy['creators']
    del validated_data_copy['subjects']
    del validated_data_copy['keywords']
    content = Content(**validated_data_copy)
    content = self.__create_update(content, None)
    content.creators.set(validated_data['creators'])
    content.subjects.set(validated_data['subjects'])
    content.keywords.set(validated_data['keywords'])
    content.active = validated_data['active']
    return content
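# Why the many-to-many keys are stripped above: Django model constructors do
# not accept M2M values (direct assignment to a many-to-many set is
# prohibited), so the row must exist before the relations can be written.
# A minimal sketch of the pattern, with illustrative values only:
#
#     content = Content(name="Example", description="...")
#     content.save()                       # the row must exist first
#     content.creators.set(creator_list)   # then the through-table rows can be written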
def upload_content_file(full_path, content: Content):
    # "with" ensures the file handle is closed even if a validator raises.
    with open(full_path, "rb") as content_file:
        base_name = get_valid_filename(os.path.basename(content_file.name))
        validate_unique_filename(File(content_file, base_name))
        validate_unique_file(File(content_file, base_name))
        content.file_name = base_name
        content.content_file.save(base_name, File(content_file))
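# Example usage, as a sketch: the validators above are expected to raise on a
# filename or content collision (e.g. DuplicateContentFileException elsewhere
# in this codebase), so a caller would typically clean up on failure. The path
# and field values below are illustrative only:
#
#     content = Content(name="Example")
#     content.save()
#     try:
#         upload_content_file("/tmp/example.pdf", content)
#     except Exception:
#         content.delete()
#         raise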
def test_duplicate_file_on_create(self):
    """
    Tests that a DuplicateContentFileException is raised when a duplicate
    file is uploaded while creating a new model instance.
    """
    first_value = {
        "name": "Content 1",
        "description": "Content's Description",
        "content_file": SimpleUploadedFile(
            "uploaded_file_name",
            "This will be the contents of the uploaded file.".encode()),
        "updated_time": timezone.now()
    }
    content1 = Content(**first_value)
    content1.content_file_uploaded = True
    content1.save()
    content1.content_file.close()

    second_value = {
        "name": "Content 2",
        "description": "Content's Description",
        "content_file": SimpleUploadedFile(
            "uploaded_file_name_2",
            "This will be the contents of the uploaded file.".encode()),
        "updated_time": timezone.now()
    }
    content2 = Content(**second_value)
    content2.content_file_uploaded = True
    # Saving a second instance whose file has identical contents should
    # raise, and the exception should point back at the original content.
    with self.assertRaises(DuplicateContentFileException) as cm:
        content2.save()
    self.assertEqual(cm.exception.content.pk, content1.pk)
    content2.content_file.close()
def load_metadata(metadata_sheet):
    # Single-valued metadata columns map to foreign-key fields;
    # multi-valued columns map to many-to-many fields.
    singletons = [
        ["Cataloger", "cataloger", Cataloger],
        ["Language", "language", Language],
        ["Audience", "audience", Audience],
        ["Resource Type", "resourcetype", ResourceType],
        ["Collection Type", "collection", Collection],
    ]
    multiples = [
        ["Creator", "creators", Creator],
        ["Keyword", "keywords", Keyword],
        ["Subject", "subjects", Subject],
    ]
    with open(metadata_sheet.metadata_file.path) as sheet_file:
        contents = csv.DictReader(sheet_file)
        for row in contents:
            year_str = row["Date"]
            year = int(year_str) if year_str != '' else 2020
            content_dict = {
                "name": row["Title"],
                "description": row["Description"],
                "updated_time": timezone.now(),
                "last_uploaded_time": timezone.now(),
                "published_date": datetime.date(year, 1, 1),
                "original_file_name": row["File Name"],
                "copyright": row["Copyright"],
                "rights_statement": row["Rights Statement"],
                "active": 1,
            }
            try:
                content_object = Content(**content_dict)
                content_object.save()
                # Set the foreign key for each non-empty single-valued cell.
                for dict_key, member_key, model in singletons:
                    try:
                        raw_metadata = row[dict_key]
                        if raw_metadata is not None:
                            trimmed = raw_metadata.strip()
                            if trimmed != "":
                                obj, _ = model.objects.get_or_create(
                                    name=trimmed)
                                setattr(content_object, member_key, obj)
                    except ObjectDoesNotExist:
                        continue
                # Pipe-separated cells become many-to-many entries.
                for dict_key, member_key, model in multiples:
                    metadata_names = row[dict_key].split("|")
                    field = getattr(content_object, member_key)
                    for metadata_name in metadata_names:
                        trimmed = metadata_name.strip()
                        if trimmed != "":
                            obj, _ = model.objects.get_or_create(name=trimmed)
                            field.add(obj)
                content_object.save()
            except IntegrityError as e:
                print(e)
                continue
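# For reference, load_metadata expects a CSV whose header includes at least
# the columns read above. A sketch of one row (sample values illustrative
# only; an empty Date falls back to 2020, and Creator/Keyword/Subject cells
# are pipe-separated, matching the split("|") above):
#
#     Title,Description,Date,File Name,Copyright,Rights Statement,Cataloger,Language,Audience,Resource Type,Collection Type,Creator,Keyword,Subject
#     Solar Basics,An intro guide,2019,solar.pdf,CC-BY,Open,Jane Doe,English,General,Document,Core,A. Author|B. Author,solar|energy,Science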
def test_last_uploaded_time_on_update_without_file(self):
    """
    Tests that the last uploaded time is not updated when no file is
    uploaded during an update.
    """
    currtime = timezone.now()
    first_upload_time = currtime - timedelta(days=4)
    updated_time = currtime - timedelta(days=5)
    values = {
        "name": "Content 1",
        "description": "Content's Description",
        "content_file": SimpleUploadedFile(
            "uploaded_file_name",
            "This will be the contents of the uploaded file.".encode()),
        "updated_time": updated_time,
    }
    content = Content(**values)
    # Mark that the file was uploaded during the creation process.
    content.content_file_uploaded = True
    mock_timezone_now = MagicMock(return_value=first_upload_time)
    with patch('django.utils.timezone.now', mock_timezone_now):
        content.save()
    content.content_file.close()

    # Mark that no file is uploaded during the update process.
    content.content_file_uploaded = False
    new_upload_time = currtime - timedelta(days=2)
    mock_timezone_now.return_value = new_upload_time
    with patch('django.utils.timezone.now', mock_timezone_now):
        content.save()
    content.content_file.close()

    self.assertEqual(content.last_uploaded_time, first_upload_time)
def test_delete_content(self):
    """
    Tests that the file is deleted after the model instance is deleted.
    """
    values = {
        "name": "Content 1",
        "description": "Content's Description",
        "content_file": SimpleUploadedFile(
            "uploaded_file_name",
            "This will be the contents of the uploaded file.".encode()),
        "updated_time": timezone.now()
    }
    content1 = Content(**values)
    content1.content_file_uploaded = True
    content1.save()
    self.assertTrue(os.path.exists(content1.content_file.path))
    content1.delete()
    self.assertFalse(os.path.exists(content1.content_file.path))
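# Note: Django itself does not remove files from storage when a model
# instance is deleted, so this test implies Content cleans up its own file.
# A minimal sketch of one common way to do that (a post_delete receiver;
# the actual mechanism in this codebase may differ):
#
#     from django.db.models.signals import post_delete
#     from django.dispatch import receiver
#
#     @receiver(post_delete, sender=Content)
#     def delete_content_file(sender, instance, **kwargs):
#         if instance.content_file:
#             instance.content_file.delete(save=False)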
def setUp(self):
    self.dir_layout = DirectoryLayout.objects.create(
        name="foo", description="bar",
        banner_file=SimpleUploadedFile("sample_file",
                                       "File Contents".encode()))
    self.coverages = Coverage.objects.bulk_create([
        Coverage(name="cov1", description="desc cov1"),
        Coverage(name="cov2", description="desc cov2"),
        Coverage(name="cov3", description="desc cov3"),
    ])
    self.creators = Creator.objects.bulk_create([
        Creator(name="Creator 1", description="desc Creator 1"),
        Creator(name="Creator 2", description="desc Creator 2"),
        Creator(name="Creator 3", description="desc Creator 3"),
    ])
    self.subjects = Subject.objects.bulk_create([
        Subject(name="subject 1", description="desc subject 1"),
        Subject(name="subject 2", description="desc subject 2"),
        Subject(name="subject 3", description="desc subject 3"),
    ])
    self.keywords = Keyword.objects.bulk_create([
        Keyword(name="keyword 1", description="desc keyword 1"),
        Keyword(name="keyword 2", description="desc keyword 2"),
        Keyword(name="keyword 3", description="desc keyword 3"),
    ])
    self.workareas = Workarea.objects.bulk_create([
        Workarea(name="workarea 1", description="desc workarea 1"),
        Workarea(name="workarea 2", description="desc workarea 2"),
        Workarea(name="workarea 3", description="desc workarea 3"),
    ])
    self.languages = Language.objects.bulk_create([
        Language(name="language 1", description="desc language 1"),
        Language(name="language 2", description="desc language 2"),
        Language(name="language 3", description="desc language 3"),
    ])
    self.catalogers = Cataloger.objects.bulk_create([
        Cataloger(name="cataloger 1", description="desc cataloger 1"),
        Cataloger(name="cataloger 2", description="desc cataloger 2"),
        Cataloger(name="cataloger 3", description="desc cataloger 3"),
    ])

    content_1 = Content(name="content file 1",
                        description="content file 1 desc",
                        content_file=SimpleUploadedFile(
                            "file_1", "Contents of file 1".encode()),
                        updated_time=date.today())
    content_1.content_file_uploaded = True
    content_1.save()
    content_2 = Content(name="content file 2",
                        description="content file 2 desc",
                        content_file=SimpleUploadedFile(
                            "file_2", "Contents of file 2".encode()),
                        updated_time=date.today())
    content_2.content_file_uploaded = True
    content_2.save()
    self.contents = [content_1, content_2]

    dir_1 = Directory(
        name="parent dir 1",
        dir_layout=self.dir_layout,
        banner_file=SimpleUploadedFile(
            "top_dir1_banner_file", "dir 1 banner file content".encode()),
        creators_need_all=True,
        coverages_need_all=True,
        subjects_need_all=True,
        keywords_need_all=True,
        workareas_need_all=True,
        languages_need_all=True,
        catalogers_need_all=True,
    )
    dir_1.save()
    dir_1.individual_files.set([content_1])
    dir_1.coverages.set([self.coverages[0]])
    dir_1.creators.set([self.creators[0]])
    dir_1.subjects.set([self.subjects[0]])
    dir_1.keywords.set([self.keywords[0]])
    dir_1.workareas.set([self.workareas[0]])
    dir_1.languages.set([self.languages[0]])
    dir_1.catalogers.set([self.catalogers[0]])

    dir_2 = Directory(
        name="parent dir 2",
        dir_layout=self.dir_layout,
        banner_file=SimpleUploadedFile(
            "top_dir2_banner_file", "dir 2 banner file content".encode()),
        creators_need_all=True,
        coverages_need_all=True,
        subjects_need_all=False,
        keywords_need_all=True,
        workareas_need_all=False,
        languages_need_all=True,
        catalogers_need_all=True,
    )
    dir_2.save()
    dir_2.individual_files.set([content_1, content_2])
    dir_2.coverages.set([self.coverages[1]])
    dir_2.creators.set([self.creators[1]])
    dir_2.subjects.set([self.subjects[1]])
    dir_2.keywords.set([self.keywords[1]])
    dir_2.workareas.set([self.workareas[1]])
    dir_2.languages.set([self.languages[1]])
    dir_2.catalogers.set([self.catalogers[0]])

    dir_3 = Directory(
        name="child dir",
        dir_layout=self.dir_layout,
        banner_file=SimpleUploadedFile(
            "child_banner_file", "dir 3 banner file content".encode()),
        creators_need_all=True,
        coverages_need_all=False,
        subjects_need_all=True,
        keywords_need_all=True,
        workareas_need_all=True,
        languages_need_all=False,
        catalogers_need_all=False,
    )
    dir_3.save()
    dir_3.individual_files.set([content_2])
    dir_3.coverages.set([self.coverages[0], self.coverages[1]])
    dir_3.creators.set([self.creators[0], self.creators[1]])
    dir_3.subjects.set([self.subjects[0], self.subjects[1]])
    dir_3.keywords.set([self.keywords[0], self.keywords[1]])
    dir_3.workareas.set([self.workareas[0], self.workareas[1]])
    dir_3.languages.set([self.languages[0], self.languages[1]])
    dir_3.catalogers.set([self.catalogers[0], self.catalogers[1]])

    dir_4 = Directory(
        name="inner child dir",
        dir_layout=self.dir_layout,
        banner_file=SimpleUploadedFile(
            "inner_child_banner_file", "dir 4 banner file content".encode()),
        creators_need_all=False,
        coverages_need_all=True,
        subjects_need_all=True,
        keywords_need_all=False,
        workareas_need_all=False,
        languages_need_all=False,
        catalogers_need_all=True,
    )
    dir_4.save()
    self.directories = [dir_1, dir_2, dir_3, dir_4]
def upload_sheet_contents(self, sheet_contents):
    """
    Adds bulk content data from the uploaded Excel sheet.

    :param sheet_contents: dict holding the sheet data and the content path
    :return: upload summary (success count and unsuccessful uploads)
    """
    unsuccessful_uploads = []
    successful_uploads_count = 0
    try:
        content_data = json.loads(sheet_contents.get("sheet_data"))
        main_path = sheet_contents.get("content_path")
        for each_content in content_data:
            # If the actual file was not uploaded, skip its metadata.
            file_path = os.path.join(main_path,
                                     each_content.get("File Name"))
            if not os.path.exists(file_path):
                unsuccessful_uploads.append({
                    'file_name': each_content.get("File Name"),
                    'error': 'file does not exist'
                })
                continue
            try:
                content = Content()
                content.title = each_content.get("Title")
                content.description = each_content.get("Description")
                content.copyright_notes = each_content.get("Copyright Notes")
                content.reviewed_on = datetime.datetime.now()
                content.rights_statement = each_content.get(
                    "Rights Statement")
                if each_content.get("Year Published"):
                    try:
                        content.published_date = datetime.date(
                            each_content.get("Year Published"), 1, 1)
                    except ValueError:
                        content.published_date = None
                content.modified_on = timezone.now()
                content.additional_notes = each_content.get(
                    "Additional Notes")
                content.active = True
                content.filesize = os.stat(file_path).st_size
                content.save()
                try:
                    self.upload_content_file(file_path, content)
                except (Exception, ValidationError) as e:
                    # Roll back the metadata row if the file upload fails.
                    content.delete()
                    raise e
                try:
                    metadata = self.get_associated_meta(each_content)
                    for metadata_item in metadata:
                        obj, created = Metadata.objects.get_or_create(
                            defaults={'name': metadata_item.name},
                            name__iexact=metadata_item.name,
                            type_id=metadata_item.type.id)
                        content.metadata.add(obj)
                    content.save()
                    successful_uploads_count += 1
                except Exception as e:
                    content.delete()
                    raise e
            except (Exception, ValidationError) as e:
                unsuccessful_uploads.append({
                    'file_name': each_content.get("File Name"),
                    'error': str(e)
                })
                continue
        return {
            'success_count': successful_uploads_count,
            'unsuccessful_uploads': unsuccessful_uploads,
        }
    except Exception as e:
        return {
            'success': False,
            'error': str(e),
            'status': status.HTTP_500_INTERNAL_SERVER_ERROR
        }
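# For reference, sheet_contents is expected to look roughly like the sketch
# below (keys inferred from the reads above; the path and values are
# illustrative only). "sheet_data" is a JSON string of row dicts:
#
#     sheet_contents = {
#         "content_path": "/srv/uploads/batch-1",
#         "sheet_data": json.dumps([{
#             "File Name": "solar.pdf",
#             "Title": "Solar Basics",
#             "Description": "An intro guide",
#             "Copyright Notes": "CC-BY",
#             "Rights Statement": "Open",
#             "Year Published": 2019,
#             "Additional Notes": "",
#         }]),
#     }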