def stat_addon(addon_short_name, job_pk):
    """Collect metadata about the file tree of a given addon.

    :param addon_short_name: AddonConfig.short_name of the addon to be examined
    :param job_pk: primary key of archive_job
    :return: AggregateStatResult containing file tree metadata
    """
    # Dataverse requires special handling: draft vs. published content is
    # selected by an explicit version string, and the short name may carry
    # a '-draft' / '-published' suffix that must be stripped for get_addon.
    provider_name = addon_short_name
    version = None
    if 'dataverse' in addon_short_name:
        provider_name = 'dataverse'
        is_draft = addon_short_name.split('-')[-1] == 'draft'
        version = 'latest' if is_draft else 'latest-published'
    create_app_context()
    job = ArchiveJob.load(job_pk)
    src, dst, user = job.info()
    src_addon = src.get_addon(provider_name)
    try:
        file_tree = src_addon._get_file_tree(user=user, version=version)
    except HTTPError as e:
        # Record the network failure on the destination's archive job
        # before propagating, so the archive target reflects the error.
        dst.archive_job.update_target(
            addon_short_name,
            ARCHIVER_NETWORK_ERROR,
            errors=[e.data['error']],
        )
        raise
    return AggregateStatResult(
        src_addon._id,
        addon_short_name,
        targets=[utils.aggregate_file_tree_metadata(addon_short_name, file_tree, user)],
    )
def test_archive_addon(self, mock_make_copy_request):
    """archive_addon marks the target INITIATED and issues the expected
    WaterButler copy request for the addon's file tree.

    Bug fix: the original used ``assert (mock.called_with(...))``.
    ``called_with`` is not a real Mock method — accessing it auto-creates
    a child mock, which is always truthy, so the assertion could never
    fail. Replaced with ``assert_called_with``, which actually verifies
    the call arguments.
    """
    result = archiver_utils.aggregate_file_tree_metadata(
        'dropbox', FILE_TREE, self.user)
    archive_addon('dropbox', self.archive_job._id, result)
    assert_equal(
        self.archive_job.get_target('dropbox').status,
        ARCHIVER_INITIATED)
    cookie = self.user.get_or_create_cookie()
    mock_make_copy_request.assert_called_with(
        self.archive_job._id,
        settings.WATERBUTLER_URL + '/ops/copy',
        data=dict(
            source=dict(
                cookie=cookie,
                nid=self.src._id,
                provider='dropbox',
                path='/',
            ),
            destination=dict(
                cookie=cookie,
                nid=self.dst._id,
                provider=settings.ARCHIVE_PROVIDER,
                path='/',
            ),
            rename='Archive of DropBox',
        ))
def test_archive_addon(self, mock_make_copy_request):
    """archive_addon marks the target INITIATED and issues the expected
    WaterButler copy request for the addon's file tree.

    Bug fix: the original used ``assert(mock.called_with(...))``.
    ``called_with`` is not a real Mock method — accessing it auto-creates
    a child mock, which is always truthy, so the assertion could never
    fail. Replaced with ``assert_called_with``, which actually verifies
    the call arguments.
    """
    result = archiver_utils.aggregate_file_tree_metadata('dropbox', FILE_TREE, self.user)
    archive_addon('dropbox', self.archive_job._id, result)
    assert_equal(self.archive_job.get_target('dropbox').status, ARCHIVER_INITIATED)
    cookie = self.user.get_or_create_cookie()
    mock_make_copy_request.assert_called_with(
        self.archive_job._id,
        settings.WATERBUTLER_URL + '/ops/copy',
        data=dict(
            source=dict(
                cookie=cookie,
                nid=self.src._id,
                provider='dropbox',
                path='/',
            ),
            destination=dict(
                cookie=cookie,
                nid=self.dst._id,
                provider=settings.ARCHIVE_PROVIDER,
                path='/',
            ),
            rename='Archive of DropBox',
        )
    )
def test_aggregate_file_tree_metadata(self):
    """aggregate_file_tree_metadata sums disk usage and counts files
    and targets across the FILE_TREE fixture."""
    stat_result = archiver_utils.aggregate_file_tree_metadata(
        'dropbox',
        FILE_TREE,
        self.user,
    )
    # Fixture tree holds two files of 128 and 256 bytes.
    assert_equal(stat_result.disk_usage, 128 + 256)
    assert_equal(stat_result.num_files, 2)
    assert_equal(len(stat_result.targets), 2)