def setUp(self): super().setUp() self.active_dataset = DatasetFactory.create(name="active test", is_active=True) created_time = make_aware(datetime.now()) for version_number in range(0, 29, 7): created_time -= timedelta(days=version_number) include_current = version_number == 0 create_dataset_version( self.active_dataset, f"0.0.{28 - version_number}", created_time, include_current=include_current ) self.inactive_dataset = DatasetFactory.create(name="inactive test", is_active=False) created_time = make_aware(datetime.now()) for version_number in range(21, 43, 7): created_time -= timedelta(days=version_number) create_dataset_version( self.inactive_dataset, f"0.0.{42 - version_number}", created_time, include_current=False ) self.new_extension = Extension.objects.create(id="new", is_addition=True, properties={}) self.old_extension = Extension.objects.create( id="old", is_addition=True, properties={}, deleted_at=make_aware(datetime(year=1970, month=1, day=1)) )
def setUp(self):
    super().setUp()
    self.client.force_login(self.user)
    created_time = make_aware(datetime.now())
    self.active_dataset = DatasetFactory.create(name="active test", is_active=True, is_latest=True)
    create_dataset_version(self.active_dataset, "0.0.1", created_time, include_current=True)
    self.inactive_dataset = DatasetFactory.create(name="inactive test", is_active=False, is_latest=False)
    create_dataset_version(self.inactive_dataset, "0.0.1", created_time, docs=1, include_current=False)
def setUp(self):
    super().setUp()
    datasets = {
        "inactive": DatasetFactory.create(name="inactive", is_active=False),
        "secondary": DatasetFactory.create(name="secondary"),
        "primary": DatasetFactory.create(name="primary"),
    }
    self.pushed_ats = {}
    for dataset_type, dataset in datasets.items():
        dataset_versions = create_dataset_data(dataset)
        for dataset_version in dataset_versions:
            pushed_at = create_dataset_version_indices(dataset_version)
            self.pushed_ats[dataset_version.id] = pushed_at
            sleep(3)  # makes sure pushed_at differs between successive dataset versions
def setUp(self):
    super().setUp()
    self.latest_update_at = make_aware(
        datetime(year=2020, month=2, day=10, hour=13, minute=8, second=39, microsecond=315000)
    )
    sources = {
        "sharekit": HarvestSourceFactory(spec="edusources", repository=Repositories.SHAREKIT),
        "sharekit_private": HarvestSourceFactory(spec="edusourcesprivate", repository=Repositories.SHAREKIT),
        "wikiwijs": HarvestSourceFactory(spec="wikiwijsmaken", repository=Repositories.EDUREP)
    }
    datasets = {
        "primary": DatasetFactory.create(name="primary"),
        "inactive": DatasetFactory.create(name="inactive", is_active=False)
    }
    for dataset_type, dataset in datasets.items():
        create_dataset_data(dataset, include_current=dataset_type == "primary")
        create_dataset_harvests(dataset_type, dataset, sources, self.latest_update_at)
    SharekitMetadataHarvestFactory.create(is_initial=False, number=0, is_restricted=False, is_extracted=True)
    SharekitMetadataHarvestFactory.create(is_initial=False, number=1, is_restricted=False)
    sleep(1)  # makes sure created_at and modified_at will differ at least 1 second when asserting
def setUp(self): super().setUp() self.dataset = DatasetFactory.create(name="test") dataset_version = DatasetVersionFactory.create(dataset=self.dataset) self.harvest = HarvestFactory.create(dataset=self.dataset, stage=HarvestStages.PREVIEW) # Documents that will actually get processed DocumentFactory.create(dataset_version=dataset_version, mime_type="text/html", from_youtube=True) DocumentFactory.create(dataset_version=dataset_version, mime_type="application/pdf") # Other Documents that get ignored due to various reasons DocumentFactory.create(dataset_version=dataset_version, mime_type="text/html", analysis_allowed=False, from_youtube=True) DocumentFactory.create(dataset_version=dataset_version, mime_type="application/pdf", analysis_allowed=False) DocumentFactory.create(dataset_version=dataset_version, mime_type="foo/bar")