def test_is_virtual_repository_path(self):
    """Paths under target/virtual/ are virtual; target/static/ are not."""
    self.assertTrue(RepoConfigService().is_virtual_repository_path(
        "target/virtual/foobarrepo"))
    self.assertFalse(RepoConfigService().is_virtual_repository_path(
        "target/static/foobarrepo"))
    def filter(self, request, repos):
        """Apply the name/tag/notag/older/newer GET filters to repo names."""
        if 'name' in request.GET:
            pattern = re.compile(request.GET['name'])
            repos = [repo for repo in repos if pattern.match(repo)]

        if 'tag' in request.GET:
            wanted = set(request.GET['tag'].split(','))
            taggingService = RepoTaggingService()
            # Keep repos carrying at least one of the wanted tags.
            repos = [repo for repo in repos
                     if wanted.intersection(taggingService.getTags(repo))]

        if 'notag' in request.GET:
            forbidden = set(request.GET['notag'].split(','))
            taggingService = RepoTaggingService()
            # Drop repos carrying any forbidden tag.
            repos = [repo for repo in repos
                     if not forbidden.intersection(taggingService.getTags(repo))]

        if 'older' in request.GET:
            threshold = self.get_past_time(int(request.GET['older']))
            configService = RepoConfigService()
            repos = [repo for repo in repos
                     if os.stat(configService.getStaticRepoDir(repo)).st_mtime < threshold]

        if 'newer' in request.GET:
            threshold = self.get_past_time(int(request.GET['newer']))
            configService = RepoConfigService()
            repos = [repo for repo in repos
                     if os.stat(configService.getStaticRepoDir(repo)).st_mtime > threshold]

        return repos
    def test_get_no_rpm_group_if_less_than_max_rpm(self):
        # A single RPM can never exceed the keep-limit of 3, so no group
        # with obsolete files may be reported.
        groups = RepoConfigService()._get_rpm_group_with_obsolete_files_by_file_name(
            ['rss-4-1.1.noarch.rpm'], 3)
        self.assertEqual([], groups.keys())
        self.assertEqual([], groups.values())
# Example #4
def rpm_info_static(request, rpm):
    """Render detail info for an RPM inside the static repository tree.

    Raises Http404 for path-traversal attempts.
    """
    # Validate BEFORE building any filesystem path so a traversal attempt
    # never reaches path construction.
    if '../' in rpm:
        raise Http404('../ not allowed')

    config = RepoConfigService()
    rpm_path = os.path.join(config.getStaticRepoDir(), rpm)
    return rpm_info(rpm_path)
# Example #5
def rpm_info_static(request, rpm):
    """Render detail info for an RPM inside the static repository tree.

    Raises Http404 for path-traversal attempts.
    """
    # Reject path traversal before any path is assembled.
    if '../' in rpm:
        raise Http404('../ not allowed')

    rpm_path = os.path.join(RepoConfigService().getStaticRepoDir(), rpm)
    return rpm_info(rpm_path)
# Example #6
def rpm_info_virtual(request, reponame, rpm):
    """Render detail info for an RPM reached through a virtual repository.

    Raises Http404 for path-traversal attempts in the rpm path.
    """
    # Validate BEFORE building any filesystem path.
    if '../' in rpm:
        raise Http404('../ not allowed')

    # NOTE(review): 'reponame' is not checked for traversal here — confirm
    # that getConfig() sanitizes it.
    config = RepoConfigService()
    repoConfig = config.getConfig(reponame)
    rpm_path = os.path.join(config.getRepoDir(), repoConfig.destination[1:], rpm)
    return rpm_info(rpm_path)
    def test_filter_by_older_then(self):
        repo1 = self.createNewRepoAndAssertValid()
        repo2 = self.createNewRepoAndAssertValid()
        # Age repo2 so that only it passes the 'older' filter.
        self.set_mtime(RepoConfigService().getStaticRepoDir(repo2))
        listing = self.helper.do_http_get(Constants.HTTP_PATH_STATIC + ".txt?older=5")
        self.assertEqual(repo2 + "\n", listing.read())
    def test_get_no_rpm_group_if_rpms_have_different_names(self):
        # 'feed-rss' is a different package name, so no single group reaches
        # the keep-limit of 3.
        file_names = ['rss-4-1.1.noarch.rpm',
                      'rss-4-1.2.noarch.rpm',
                      'rss-5-1.1.noarch.rpm',
                      'feed-rss-7-1.4.noarch.rpm']
        groups = RepoConfigService()._get_rpm_group_with_obsolete_files_by_file_name(
            file_names, 3)
        self.assertEqual([], groups.keys())
        self.assertEqual([], groups.values())
# Example #9
def rpm_info_virtual(request, reponame, rpm):
    """Render detail info for an RPM reached through a virtual repository.

    Raises Http404 for path-traversal attempts in the rpm path.
    """
    # Reject path traversal before any filesystem path is assembled.
    if '../' in rpm:
        raise Http404('../ not allowed')

    config = RepoConfigService()
    repoConfig = config.getConfig(reponame)
    rpm_path = os.path.join(config.getRepoDir(), repoConfig.destination[1:],
                            rpm)
    return rpm_info(rpm_path)
# Example #10
    def test_filter_by_newer_then(self):
        repo1 = self.createNewRepoAndAssertValid()
        repo2 = self.createNewRepoAndAssertValid()
        # Age repo2 so that only repo1 passes the 'newer' filter.
        self.set_mtime(RepoConfigService().getStaticRepoDir(repo2))
        listing = self.helper.do_http_get(Constants.HTTP_PATH_STATIC +
                                          ".txt?newer=5")
        self.assertEqual(repo1 + "\n", listing.read())
# Example #11
def virtual_repo_info(request, reponame):
    """Render the HTML info page for a virtual repository."""
    service = RepoConfigService()
    template = loader.select_template(['static/virtual_repo_info.html'])
    context = Context({
        'reponame': reponame,
        'config': service.getConfig(reponame),
        'staticRepos': service.staticRepos,
    })
    return HttpResponse(template.render(context))
# Example #12
def virtual_repo_info(request, reponame):
    """Render the HTML info page for a virtual repository."""
    config = RepoConfigService()
    context = Context(dict(reponame=reponame,
                           config=config.getConfig(reponame),
                           staticRepos=config.staticRepos))
    template = loader.select_template(['static/virtual_repo_info.html'])
    return HttpResponse(template.render(context))
    def test_should_determine_repository_path(self):
        service = RepoConfigService()

        # Stub out the directory lookup and the filesystem existence check.
        when(service).getStaticRepoDir(any_value()).thenReturn("path/to/repository")
        when(yum_repo_server.api.services.repoConfigService.os.path).exists(any_value()).thenReturn(True)

        self.assertEqual("path/to/repository",
                         service.determine_static_repository_path("repository-name"))

        # The stubbed collaborators must have been called with the real args.
        verify(service).getStaticRepoDir("repository-name")
        verify(yum_repo_server.api.services.repoConfigService.os.path).exists("path/to/repository")

        unstub()
class YumMetaDataHandler(BaseHandler):
    """Handler that (re)generates yum metadata for a static repository."""

    repoConfigService = RepoConfigService()

    def create(self, request, reponame):
        """POST: run createrepo for the given repository.

        Returns 404 if the repo is missing, 400 if createrepo fails and
        201 with the repo root on success.
        """
        full_path_to_repo = self.repoConfigService.getStaticRepoDir(reponame)

        if not os.path.exists(full_path_to_repo):
            resp = rc.NOT_FOUND
            resp.content = 'Repository %s not found' % full_path_to_repo
            return resp

        try:
            self.repoConfigService.doCreateRepo(full_path_to_repo, reponame)
        except Exception:
            # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
            # are no longer swallowed.
            resp = rc.BAD_REQUEST
            resp.content = 'createrepo has finished with Error'
            return resp

        resp = rc.CREATED
        # NOTE(review): 'config' is presumably a module-level import of the
        # server configuration — confirm it is in scope in this module.
        resp.content = config.get_repo_dir()
        return resp

    def read(self, request, reponame):
        """GET: serve the repodata directory of the repository."""
        return serve(request, '/' + reponame + '/repodata/', self.repoConfigService.getStaticRepoDir(), True)
        
# Example #15
class YumRepoAliasHandler(BaseHandler):
    """Creates virtual repositories (aliases) that point at a destination
    repository."""

    POST_PARAM_DESTINATION_NAME = 'destination'
    POST_PARAM_VIRTUAL_REPO_NAME = 'name'

    repoConfigService = RepoConfigService()
    audit = RepoAuditService()

    def create(self, request, text):
        """POST: create a virtual repo link after validating the request.

        Returns a piston rc response in both the success and error cases.
        """
        try:
            virtual_repo_name, destination_repo = self.check_request_sanity(
                request)
        except RequestFailException, rfe:
            # check_request_sanity carries the ready-made error response in
            # the exception's first argument.
            return rfe.args[0]

        try:
            result = self.repoConfigService.createVirtualRepo(
                virtual_repo_name, destination_repo)
        except RepoNotFoundException:
            resp = rc.NOT_HERE  #Do NOT disclose the actual path to the client -> return relative destination
            resp.content = 'The destination repository at %s does not exist.' % destination_repo
            return resp

        self.audit.log_action(
            "created a virtual link from %s to %s" %
            (virtual_repo_name, destination_repo), request)
        response = rc.CREATED
        response.content = result
        return response
# Example #16
class TestCleanup(BaseIntegrationTestCase):
    """Integration tests for the repository cache-dir cleanup job."""

    config = RepoConfigService()

    def test_cleanup_cache_dir(self):
        # Cache dirs of deleted and unlocked repos are removed; a locked
        # repo's cache dir must survive.
        deleted_repo = self.createStaticRepoWithContent()
        locked_repo = self.createStaticRepoWithContent()
        plain_repo = self.createStaticRepoWithContent()

        # simulate a removed repository
        shutil.rmtree(self.config.getStaticRepoDir(deleted_repo))
        # simulate a repository that is currently locked
        open(self.config.getRepoLockFile(locked_repo), 'w').close()

        MetaDataGenerationScheduler().cleanupCacheDir()

        self.assertFalse(os.path.exists(self.config.getRepoCacheDir(deleted_repo)))
        self.assertTrue(os.path.exists(self.config.getRepoCacheDir(locked_repo)))
        self.assertFalse(os.path.exists(self.config.getRepoCacheDir(plain_repo)))

    def createStaticRepoWithContent(self):
        """Create a static repo with one uploaded RPM and generated metadata."""
        reponame = self.createNewRepoAndAssertValid()
        self.upload_testfile(reponame,
                             Constants.TEST_RPM_FILE_LOC + Constants.TEST_RPM_FILE_NAME)
        self.generate_metadata(reponame)
        self.assertTrue(os.path.exists(self.config.getRepoCacheDir(reponame)))
        return reponame
# Example #17
    def test_do_not_remove_rpms_when_rpm_max_keep_is_zero(self):
        os.makedirs(self.testRepo)
        self.touchRpms(self.testRepo)
        kept_rpm = self.testRepo + '/rss-4-1.1.noarch.rpm'
        try:
            # rpm_max_keep == 0 means "keep everything".
            RepoConfigService().doCleanup(self.targetDir, 0)
            self.assertTrue(os.path.exists(kept_rpm), 'rpm-file was deleted.')
        finally:
            shutil.rmtree(self.targetDir)
# Example #18
 def test_find_rpms_to_delete_returns_empty_list_when_single_group_is_not_big_enough(self):
     # Three 'rss' rpms plus one 'rss-feed' rpm: no group exceeds the
     # keep-limit of 3, so nothing is up for deletion.
     candidates = RepoConfigService()._find_rpms_to_delete(
         ['rss-4-1.1.noarch.rpm',
          'rss-4-1.2.noarch.rpm',
          'rss-5-1.1.noarch.rpm',
          'rss-feed-7-1.4.noarch.rpm'], 3)
     self.assertEquals(0, len(candidates))
# Example #19
    def test_doCleanup(self):
        os.makedirs(self.testRepo)
        self.touchRpms(self.testRepo)
        obsolete_rpm = self.testRepo + '/rss-4-1.1.noarch.rpm'
        try:
            # Keeping at most 3 versions must delete the oldest rss rpm.
            RepoConfigService().doCleanup(self.targetDir, 3)
            self.assertFalse(os.path.exists(obsolete_rpm))
        finally:
            shutil.rmtree(self.targetDir)
# Example #20
 def test_find_rpms_to_delete(self):
     # Four 'rss' rpms with keep-limit 3: exactly the oldest one
     # (rss-4-1.1) must be scheduled for deletion.
     candidates = RepoConfigService()._find_rpms_to_delete(
         ['rss-4-1.1.noarch.rpm',
          'rss-4-1.2.noarch.rpm',
          'rss-5-1.1.noarch.rpm',
          'rss-7-1.4.noarch.rpm'], 3)
     self.assertEquals(1, len(candidates))
     self.assertEquals('rss-4-1.1.noarch.rpm', candidates[0].file_name)
    def filter(self, request, repos):
        """Apply the name/tag/notag/older/newer GET filters to repo names."""
        if 'name' in request.GET:
            name_pattern = re.compile(request.GET['name'])
            repos = [repo for repo in repos if name_pattern.match(repo)]

        if 'tag' in request.GET:
            wanted_tags = set(request.GET['tag'].split(','))
            taggingService = RepoTaggingService()
            # Keep repos carrying at least one wanted tag.
            repos = [repo for repo in repos
                     if wanted_tags.intersection(taggingService.getTags(repo))]

        if 'notag' in request.GET:
            forbidden_tags = set(request.GET['notag'].split(','))
            taggingService = RepoTaggingService()
            # Drop repos carrying any forbidden tag.
            repos = [repo for repo in repos
                     if not forbidden_tags.intersection(taggingService.getTags(repo))]

        if 'older' in request.GET:
            cutoff = self.get_past_time(int(request.GET['older']))
            configService = RepoConfigService()
            repos = [repo for repo in repos
                     if os.stat(configService.getStaticRepoDir(repo)).st_mtime < cutoff]

        if 'newer' in request.GET:
            cutoff = self.get_past_time(int(request.GET['newer']))
            configService = RepoConfigService()
            repos = [repo for repo in repos
                     if os.stat(configService.getStaticRepoDir(repo)).st_mtime > cutoff]

        return repos
 def assertVirtualRepoConfig(self, virtual_reponame, static_reponame):
     """Assert that the virtual repo's alias metadata points at the static repo."""
     metadatapath = RepoConfigService().getVirtualRepoDir(
         virtual_reponame) + "/" + RepoConfigService.ALIAS_METADATA_FILENAME
     self.assertTrue(os.path.exists(metadatapath),
                     "No metadata generated -> alias failed")
     # safe_load: the alias metadata is plain key/value YAML; avoid the
     # arbitrary object construction yaml.load would permit. 'with' closes
     # the handle even if parsing fails.
     with open(metadatapath) as alias_file:
         alias_config = yaml.safe_load(alias_file)
     alias_metadata = dict(alias_config.items())
     expected_destination = "/static/" + static_reponame
     self.assertEquals(alias_metadata['destination'], expected_destination,
                       "Alias points to wrong repo")
# Example #23
class YumRepoHandler(BaseHandler):
    """Handler for creating and browsing static repositories."""

    # Raw string so '\-' and '\.' are not treated as (invalid) escape
    # sequences; the pattern value is unchanged.
    NAME_REGEX = r'^[a-zA-Z0-9][a-zA-Z0-9_\-\.]*$'

    repoConfigService = RepoConfigService()
    audit = RepoAuditService()

    def create(self, request, text):
        """POST: create a static repository named by the 'name' parameter.

        Returns 400 for missing/invalid names, DUPLICATE_ENTRY if the repo
        already exists and 201 with name/dir on success.
        """
        data = request.POST

        if not data:
            resp = rc.BAD_REQUEST
            resp.content = 'POST data missing'
            return resp

        name = data.get('name', None)  # None as default if the key is missing
        if not name:
            resp = rc.BAD_REQUEST
            resp.content = 'The name attribute is missing'
            return resp

        # Use the already-extracted 'name' consistently from here on.
        if not re.match(self.NAME_REGEX, name):
            resp = rc.BAD_REQUEST
            resp.content = 'The provided name is invalid. It must match this regular expression : ' + self.NAME_REGEX
            return resp

        path = self.repoConfigService.getStaticRepoDir(name)
        if os.path.exists(path):
            resp = rc.DUPLICATE_ENTRY
            resp.content = 'The repository at ' + path + ' already exists.'
            return resp

        os.makedirs(path)

        resp = rc.CREATED
        resp.content = dict(name=name, dir=path)

        self.audit.log_action("created static repository %s" % name, request)

        return resp

    def read(self, request, text):
        """GET: serve a browsable index of the static repository root."""
        static_path = self.repoConfigService.getStaticRepoDir()
        return serve(request=request,
                     path='/',
                     document_root=static_path,
                     add_virtual=True,
                     show_indexes=True,
                     parent_dir_type=ParentDirType.STATIC)
# Example #24
class MetaDataGenerationScheduler():
    """Schedules periodic createrepo / RPM-cleanup jobs for all repositories."""

    def __init__(self, updateIntervalSeconds=30):
        self.interval = updateIntervalSeconds
        config = {'apscheduler.daemonic': False}
        self.sched = Scheduler(config)
        # initialize these per instance.
        self.repo_timestamps = {}
        self.jobs = {}


    # Class-level fallbacks; instances created through __init__ shadow
    # these with per-instance attributes.
    repo_timestamps = {}  #dictionary with jobName (=reponame) : last scheduler modification timestamp (float)
    jobs = {} #dictionary with jobName (=reponame) : jobHandle

    configService = RepoConfigService()
    static_root_dir = configService.getStaticRepoDir()
    sched = None
    interval = None

    def start(self):
        """Read repo configs, schedule all jobs and start the scheduler."""
        self.update_program_config() #read configs, schedule jobs

        # schedule an update as a job
        self.sched.add_interval_job(self.update_program_config, seconds=self.interval)

        # schedule cleanup cache
        self.sched.add_cron_job(self.cleanupCacheDir, hour = 23, minute = 17, second = 20)

        self.sched.start()

    def createrepo_with_optional_cleanup_job(self, *argList):
        """Run createrepo for one repository, optionally cleaning up old RPMs first.

        argList is (repoDir, reponame, rpm_max_keep); an rpm_max_keep of
        None skips the cleanup step.
        """
        monitor = JobMonitorer()
        monitor.job_starts()
        repoDir = argList[0]
        reponame = argList[1]
        rpm_max_keep = argList[2]
        didCleanUp=False
        try:
            if rpm_max_keep != None:
                didCleanUp=True
                self.configService.doCleanup(repoDir, rpm_max_keep)
                logging.info("job RpmCleanup on "+reponame+" took "+str(monitor.get_execution_time_until_now_seconds())+" seconds")
            self.configService.doCreateRepo(repoDir, reponame)
            monitor.job_finishes()
            logging.info(monitor.get_pretty_job_summary("createrepo on "+reponame+" (cleanup included : "+str(didCleanUp)+")"))
        except Exception, ex:
            # A failing job must never kill the scheduler; log and move on.
            logging.error(traceback.format_exc())
class RepoTaggingHandler(BaseHandler):
    """Handler that adds tags to a static repository."""

    repoConfigService = RepoConfigService()
    repoTaggingService = RepoTaggingService()
    repoAuditService = RepoAuditService()

    def create(self, request, repodir,tag):
        """POST: tag the repository at repodir.

        NOTE(review): this excerpt ends after the error branch; the success
        response appears to be missing — confirm against the full file.
        """
        try:
            # The sanitized tag from the request body supersedes the URL arg.
            tag = self.check_request_sanity(request)
        except RequestFailException, rfe:
            return rfe.args[0]

        try:
            result = self.repoTaggingService.tagRepo(repodir,tag)
        except CouldNotLockTagsException, lockErr:
            response = rc.BAD_REQUEST
            response.content = "Could not lock tags file"
            return response
class RepoContentService(object):
    """
        Service to retrieve information about the content of a repository.
    """
    METADATA_DIRECTORY = "repodata"

    repoConfigService = RepoConfigService()

    def list_architectures(self, repository_name):
        """
            @return: list of architecture paths
        """
        repository_path = self.repoConfigService.getStaticRepoDir(
            repository_name)

        def is_architecture_dir(entry):
            # An architecture dir is any non-empty directory except the
            # repodata metadata directory.
            full_path = os.path.join(repository_path, entry)
            return (entry != self.METADATA_DIRECTORY
                    and os.path.isdir(full_path)
                    and len(os.listdir(full_path)) > 0)

        return [os.path.join(repository_path, entry)
                for entry in os.listdir(repository_path)
                if is_architecture_dir(entry)]

    def list_packages(self, repository_name):
        """
            @return: list of full paths of all package files in the repository
        """
        return [os.path.join(architecture_path, package)
                for architecture_path in self.list_architectures(repository_name)
                for package in os.listdir(architecture_path)]
class VirtualRepoConfigHandler(BaseHandler):
    """Handler that returns a virtual repository's config as JSON."""

    repoConfigService = RepoConfigService()

    def read(self, request, reponame):
        """GET: dump the virtual repo's config; 404 if it does not exist."""
        if '../' in reponame:
            # Reject path traversal. Fixed typo: was rc.BAD_REQUESST, which
            # would have raised AttributeError instead of returning 400.
            return rc.BAD_REQUEST
        repo_dir = self.repoConfigService.getVirtualRepoDir(reponame)
        if not os.path.exists(repo_dir):
            return rc.NOT_FOUND

        repo_config = self.repoConfigService.getConfig(reponame)

        resp = rc.ALL_OK
        resp.content = ''
        # Piston responses are file-like, so serialize straight into them.
        json.dump(repo_config.data, resp)
        return resp
        
        
class VirtualRepoHandler(BaseHandler):
    """Handler that serves files from, and deletes, virtual repositories."""

    repoConfigService = RepoConfigService()
    audit = RepoAuditService()

    def read(self, request, reponame, rpm='/'):
        """GET: serve a file from the virtual repo's destination.

        External (http/https) destinations are answered with a redirect;
        local destinations are served from disk.
        """
        virtual_reponame = reponame
        path_relative_to_repository = rpm

        virtual_repo_dir = self.repoConfigService.getVirtualRepoDir(
            virtual_reponame)
        if not os.path.exists(virtual_repo_dir):
            resp = rc.NOT_FOUND
            resp.content = 'Virtual Repository does not exists!'
            return resp

        repoConfig = self.repoConfigService.getConfig(virtual_reponame)

        if re.match('^https?://.*', repoConfig.destination):
            return HttpResponseRedirect(repoConfig.destination + rpm)
        else:
            # NOTE(review): 'config' is presumably a module-level import of
            # the server configuration — confirm it is in scope here.
            absoluteDestinationRepoPath = config.get_repo_dir(
            ) + repoConfig.destination
            return serve(request, path_relative_to_repository,
                         absoluteDestinationRepoPath, True)

    def delete(self, request, reponame, rpm='/'):
        """DELETE: remove an entire virtual repository (never single RPMs)."""
        # Idiom fix: 'rpm is not None' instead of 'not rpm is None'.
        if rpm is not None and len(rpm) > 1:
            resp = rc.NOT_IMPLEMENTED
            resp.content = "You are just allowed to remove virtual repositories"
            return resp

        repo_path = self.repoConfigService.getVirtualRepoDir(reponame)
        if not os.path.exists(repo_path):
            resp = rc.NOT_FOUND
            resp.content = 'Virtual Repository does not exists!'
            return resp

        self.audit.log_action("deleted virtual repository %s" % (reponame),
                              request)
        shutil.rmtree(repo_path)
        return rc.DELETED
# Example #29
def directory_index(path, fullpath, add_virtual=False, parent_dir_type=ParentDirType.NONE):
    """Render an HTML directory listing for fullpath.

    With add_virtual, the virtual-repo root is appended as an extra entry
    if it exists on disk.
    """
    template = loader.select_template(['static/directory_index.html',
                                       'static/directory_index'])

    # Hidden (dot-prefixed) entries are excluded from the listing.
    entries = [FileInfo(name, fullpath, parent_dir_type)
               for name in os.listdir(fullpath)
               if not name.startswith('.')]

    if add_virtual:
        virtual_repodir = RepoConfigService().getVirtualRepoDir()
        parent_of_virtual = os.path.join(virtual_repodir, os.path.pardir)
        virtual_name = os.path.basename(os.path.normpath(virtual_repodir))
        virtual_entry = FileInfo(virtual_name, parent_of_virtual)
        if os.path.exists(virtual_entry.fullpath):
            entries.append(virtual_entry)

    files = sorted(entries, key=attrgetter('isFile', 'filename'))

    context = Context({
        'directory': path,
        'file_list': files,
        'file_count': len(files),
    })
    return HttpResponse(template.render(context))
# Example #30
 def read(self, request, path):
     # Serve the requested path out of the static repo root; virtual
     # entries and index listings are disabled.
     static_root = RepoConfigService().getStaticRepoDir()
     return serve(request, path, static_root, True, False, False)
# Example #31
 def _repository_contains(self, architecture, file_name, repository):
     """Return True if the static repo has file_name under the architecture."""
     repo_dir = RepoConfigService().getStaticRepoDir(repository)
     return os.path.exists(os.path.join(repo_dir, architecture, file_name))
# Example #32
class RepoPropagationService(object):
    """Moves packages (or whole repository contents) between static repos."""

    repoConfigService = RepoConfigService()
    repoContentService = RepoContentService()
    rpmService = RpmService()

    def propagatePackage(self, package_name, source_repository, destination_repository, architecture):
        """Move one package from source to destination repository.

        Returns the moved file's name. Raises PropagationException when the
        package or a repository cannot be found.
        """
        source_repo_path = self.determine_repository_path(source_repository)
        destination_repo_path = self.determine_repository_path(destination_repository)

        source_architecture_path = os.path.join(source_repo_path, architecture)
        file_name = self._determine_rpm_file_name(source_architecture_path, package_name)

        source_rpm_path = os.path.join(source_repo_path, architecture, file_name)
        destination_rpm_parent_dir = os.path.join(destination_repo_path, architecture)
        destination_rpm_path = os.path.join(destination_rpm_parent_dir, file_name)

        if not os.path.exists(source_rpm_path):
            raise PropagationException("Package '{0}' could not be found.".format(source_rpm_path))

        if not os.path.exists(destination_rpm_parent_dir):
            # makedirs (was mkdir) for consistency with propagate_repository
            # and to tolerate missing intermediate directories.
            os.makedirs(destination_rpm_parent_dir)

        shutil.move(source_rpm_path, destination_rpm_path)

        return file_name

    def _determine_rpm_file_name(self, directory, rpm):
        """Resolve a package name to a concrete RPM file name.

        If 'rpm' already parses as an rpm file name it is returned as-is;
        otherwise the latest matching rpm in 'directory' is looked up.
        """
        if create_rpm_file_object(rpm) is None:
            latest_rpm = self.rpmService.get_latest_rpm(rpm, directory)

            if latest_rpm is None:
                raise PropagationException("Package for {0} could not be found in {1}".format(rpm, directory))

            return latest_rpm

        return rpm

    def propagate_repository(self, source_repository, destination_repository):
        """Move every package of the source repo into the destination repo.

        Returns the list of source paths that were propagated.
        """
        destination_repository_path = self.determine_repository_path(destination_repository)

        packages_to_propagate = self.repoContentService.list_packages(source_repository)
        propagated_packages = []

        for package_path in packages_to_propagate:
            # The package's parent directory name is its architecture.
            architecture = os.path.basename(os.path.dirname(package_path))
            destination_path = os.path.join(destination_repository_path, architecture)

            if not os.path.exists(destination_path):
                os.makedirs(destination_path)

            shutil.move(package_path, destination_path)
            propagated_packages.append(package_path)

        return propagated_packages

    def determine_repository_path(self, repository_name):
        """Return the static repo's path; raise PropagationException if absent."""
        repository_path = self.repoConfigService.getStaticRepoDir(repository_name)

        if not os.path.exists(repository_path):
            raise PropagationException("Static repository '{0}' does not exist.".format(repository_name))

        return repository_path
# Example #33
 def test_do_cleanup_should_raise_valueError_when_rpm_max_keep_is_not_an_integer(self):
     # Passing the string '3' must be rejected, not silently coerced.
     try:
         RepoConfigService().doCleanup(self.targetDir, '3')
     except ValueError:
         return
     self.fail('do cleanup should check, that rpm_max_keep is an integer.')
class CsvListingHandler(BaseHandler):
    """Handler that lists repositories as newline-separated plain text."""

    config = RepoConfigService()

    def read(self, request, repodir=''):
        """GET: list static ('') or virtual repositories, optionally filtered
        by the name/tag/notag/older/newer query parameters."""
        if not self._valid(repodir):
            response = rc.BAD_REQUEST
            response.write("You are not allowed to look into %s" % repodir)
            return response

        is_virtual = repodir == 'virtual'
        # Destinations can only be shown for virtual repositories.
        show_destination = is_virtual and 'showDestination' in request.GET and request.GET[
            'showDestination'].lower() == 'true'

        root_dir = self.config.getStaticRepoDir()
        if is_virtual:
            root_dir = self.config.getVirtualRepoDir()

        repos = self.filter(request, os.listdir(root_dir))

        response = rc.ALL_OK
        response.content = ""

        for repo in repos:
            response.write(self._write_repo(repo, show_destination))
            response.write("\n")

        return response

    def _valid(self, repodir):
        """Only the static root ('') and the virtual root may be listed."""
        return repodir in ("virtual", "")

    def _write_repo(self, repo, show_destination=False):
        """Format one listing line: 'name' or 'name:destination'."""
        if show_destination:
            destination = self.config.getConfig(repo).destination
            return '%s:%s' % (repo, destination)

        return repo

    def filter(self, request, repos):
        """Apply the name/tag/notag/older/newer GET filters to repo names."""
        if 'name' in request.GET:
            pattern = re.compile(request.GET['name'])
            repos = [repo for repo in repos if pattern.match(repo)]

        if 'tag' in request.GET:
            wanted = set(request.GET['tag'].split(','))
            taggingService = RepoTaggingService()
            # Keep repos carrying at least one wanted tag.
            repos = [repo for repo in repos
                     if wanted.intersection(taggingService.getTags(repo))]

        if 'notag' in request.GET:
            forbidden = set(request.GET['notag'].split(','))
            taggingService = RepoTaggingService()
            # Drop repos carrying any forbidden tag.
            repos = [repo for repo in repos
                     if not forbidden.intersection(taggingService.getTags(repo))]

        if 'older' in request.GET:
            threshold = self.get_past_time(int(request.GET['older']))
            configService = RepoConfigService()
            repos = [repo for repo in repos
                     if os.stat(configService.getStaticRepoDir(repo)).st_mtime < threshold]

        if 'newer' in request.GET:
            threshold = self.get_past_time(int(request.GET['newer']))
            configService = RepoConfigService()
            repos = [repo for repo in repos
                     if os.stat(configService.getStaticRepoDir(repo)).st_mtime > threshold]

        return repos

    def get_past_time(self, days):
        """Return the unix timestamp of 'days' days before now."""
        pastDay = datetime.datetime.now() - datetime.timedelta(days=days)
        return int(time.mktime(pastDay.timetuple()))
# Example #35
class TestCsvListing(BaseIntegrationTestCase):
    """Integration tests for the plain-text (.txt) repository listings."""

    config = RepoConfigService()

    def test_static_listing_is_200_ok(self):
        resp = self.helper.do_http_get(Constants.HTTP_PATH_STATIC + ".txt")
        self.assertEquals(httplib.OK, resp.status)

    def test_virtual_listing_is_200_ok(self):
        resp = self.helper.do_http_get(Constants.HTTP_PATH_VIRTUAL + ".txt")
        self.assertEquals(httplib.OK, resp.status)

    def test_does_not_list_invalid_repos(self):
        resp = self.helper.do_http_get(Constants.HTTP_PATH_VIRTUAL +
                                       "sldknlkdnlnsd.txt")
        self.assertEquals(httplib.BAD_REQUEST, resp.status)

    def test_static_listing_lists_repos(self):
        reponame = self.createNewRepoAndAssertValid()
        resp = self.helper.do_http_get(Constants.HTTP_PATH_STATIC + ".txt")
        self.assertTrue(reponame in resp.read())
        shutil.rmtree(self.config.getStaticRepoDir(reponame))  # clean up

    def test_static_listing_lists_repos_without_destination_url_when_parameter_is_given(
            self):
        # showDestination must be ignored for static repos.
        reponame = self.createNewRepoAndAssertValid()
        resp = self.helper.do_http_get(Constants.HTTP_PATH_STATIC +
                                       ".txt?showDestination=true")
        self.assertEquals(reponame, resp.read().strip())

    def test_virtual_listing_lists_repos(self):
        reponame = unique_repo_name()
        virtualRepoPath = self.config.getVirtualRepoDir() + reponame
        os.makedirs(virtualRepoPath)
        resp = self.helper.do_http_get(Constants.HTTP_PATH_VIRTUAL + ".txt")
        self.assertTrue(reponame in resp.read())
        shutil.rmtree(virtualRepoPath)

    def test_virtual_listing_has_destination_url(self):
        static_repo, virtual_repo = self.assert_create_virtual_repo()
        resp = self.helper.do_http_get(Constants.HTTP_PATH_VIRTUAL +
                                       ".txt?showDestination=true")
        self.assertEquals('%s:/static/%s' % (virtual_repo, static_repo),
                          resp.read().strip())

    def test_filter_by_name(self):
        self.createNewRepoAndAssertValid()
        reponame = 'some-app-321.1.1-' + unique_repo_name()
        self.repoclient().createStaticRepo(reponame)
        resp = self.helper.do_http_get(Constants.HTTP_PATH_STATIC +
                                       ".txt?name=some-app-.*")
        self.assertEqual(reponame + "\n", resp.read())

    def test_filter_by_name_with_regex(self):
        self.createNewRepoAndAssertValid()
        reponame = 'some-app-321.1.12'
        self.repoclient().createStaticRepo(reponame)
        encoded_url = urllib.urlencode({'name': 'some-app-[\d\.]+'})
        resp = self.helper.do_http_get(Constants.HTTP_PATH_STATIC +
                                       '.txt?%s' % encoded_url)
        self.assertEqual(reponame + "\n", resp.read())

    def test_filter_by_tags_inclusive(self):
        # Only repo2 carries 'atag2', so the tag filter must return just it.
        repo1 = self.createNewRepoAndAssertValid()
        self.repoclient().tagRepo(repo1, "atag")
        repo2 = self.createNewRepoAndAssertValid()
        self.repoclient().tagRepo(repo2, "atag")
        self.repoclient().tagRepo(repo2, "atag2")
        resp = self.helper.do_http_get(Constants.HTTP_PATH_STATIC +
                                       ".txt?tag=atag2,btag")
        self.assertEqual(repo2 + "\n", resp.read())

    def test_filter_by_tags_exclusive(self):
        # repo1 carries the forbidden 'atag2' and must be filtered out.
        repo1 = self.createNewRepoAndAssertValid()
        self.repoclient().tagRepo(repo1, "atag")
        self.repoclient().tagRepo(repo1, "atag2")
        repo2 = self.createNewRepoAndAssertValid()
        self.repoclient().tagRepo(repo2, "atag")

        resp = self.helper.do_http_get(Constants.HTTP_PATH_STATIC +
                                       ".txt?notag=atag2")
        self.assertEqual(repo2 + "\n", resp.read())

    def test_filter_by_older_then(self):
        repo1 = self.createNewRepoAndAssertValid()
        repo2 = self.createNewRepoAndAssertValid()
        # Age repo2 so that only it passes the 'older' filter.
        self.set_mtime(RepoConfigService().getStaticRepoDir(repo2))

        resp = self.helper.do_http_get(Constants.HTTP_PATH_STATIC +
                                       ".txt?older=5")
        self.assertEqual(repo2 + "\n", resp.read())

    def test_filter_by_newer_then(self):
        repo1 = self.createNewRepoAndAssertValid()
        repo2 = self.createNewRepoAndAssertValid()
        # Age repo2 so that only repo1 passes the 'newer' filter.
        self.set_mtime(RepoConfigService().getStaticRepoDir(repo2))

        resp = self.helper.do_http_get(Constants.HTTP_PATH_STATIC +
                                       ".txt?newer=5")
        self.assertEqual(repo1 + "\n", resp.read())

    def set_mtime(self, path):
        # Rewind atime/mtime 11 days so the older/newer filters trigger.
        pastday = datetime.datetime.now() - datetime.timedelta(days=11)
        stamp = int(time.mktime(pastday.timetuple()))
        os.utime(path, (stamp, stamp))
class RepoTaggingService(object):
    """Reads and writes the per-repository tag file, guarded by a file lock."""

    config = RepoConfigService()

    def tagRepo(self, static_reponame, tag):
        """Append *tag* to the repo's tag file.

        Raises IsNotAStaticRepoException if the repo directory is missing and
        CouldNotLogTagsException if the tag-file lock cannot be acquired.
        """
        repo = self.config.getStaticRepoDir(static_reponame)
        tag_encoded = self.utf8Encode(tag)
        if not os.path.exists(repo):
            raise IsNotAStaticRepoException()
        tagpath = self.config.getTagsFileForStaticRepo(static_reponame)

        lock = self._acquire_lock(tagpath)
        try:
            # 'with' closes the handle even if a write fails (the original
            # leaked the handle on error)
            with open(tagpath, 'a') as fileHandle:
                fileHandle.write(tag_encoded)
                fileHandle.write('\n')
        finally:
            lock.release()
        return "Tagged OK"

    def utf8Encode(self, string):
        """Return *string* encoded as UTF-8."""
        return string.encode('utf-8')

    def unTagRepo(self, static_reponame, tag):
        """Remove *tag* from the repo's tag file by rewriting it in place.

        Raises NoSuchTagException when the tag is not currently set,
        IsNotAStaticRepoException when the repo directory is missing, and
        CouldNotLogTagsException when the lock cannot be acquired.
        """
        initialTags = self.getTags(static_reponame)
        tag_encoded = self.utf8Encode(tag)

        if not tag_encoded in initialTags:
            raise NoSuchTagException()

        initialTags.remove(tag_encoded)

        repo = self.config.getStaticRepoDir(static_reponame)
        if not os.path.exists(repo):
            raise IsNotAStaticRepoException()
        tagpath = self.config.getTagsFileForStaticRepo(static_reponame)

        lock = self._acquire_lock(tagpath)
        try:
            with open(tagpath, 'w') as fileHandle:  #replace instead of appending
                # loop variable renamed: the original shadowed the 'tag' parameter
                for remaining_tag in initialTags:
                    fileHandle.write(self.utf8Encode(remaining_tag))
                    fileHandle.write('\n')
        finally:
            lock.release()
        return "Untagged OK"

    def getTags(self, static_reponame):
        """Return the set of tags for *static_reponame* (empty if no tag file).

        Raises NotFoundException when the repo directory itself is missing.
        """
        filepath = self.config.getTagsFileForStaticRepo(static_reponame)
        if not os.path.exists(self.config.getStaticRepoDir(static_reponame)):
            raise NotFoundException()
        if not os.path.exists(filepath):
            return set()
        with open(filepath, "r") as f:
            tags = set(f.read().split('\n'))
        tags.discard('')
        return tags

    def getDefaultTags(self):
        """Return the tags listed in <repodir>/defaultTags.txt, or an empty set."""
        tagFilePath = os.path.join(self.config.getRepoDir(), 'defaultTags.txt')
        if os.path.exists(tagFilePath):
            with open(tagFilePath) as f:
                content = f.read()
            tags = [t.strip() for t in content.split('\n')]
            tagsSet = set(tags)
            tagsSet.discard('')
            return tagsSet

        return set()

    def _acquire_lock(self, tagpath):
        """Acquire and return the file lock for *tagpath*.

        Retries in 15-second windows; raises CouldNotLogTagsException on
        timeout. Extracted from the duplicated loops in tagRepo/unTagRepo.
        """
        lock = lockfile.FileLock(tagpath)
        while not lock.i_am_locking():
            try:
                lock.acquire(timeout=15)  #wait 15sec max
            except LockTimeout:
                raise CouldNotLogTagsException()
        return lock
class TestScheduler(BaseIntegrationTestCase):
    """Integration tests for MetaDataGenerationScheduler.

    The backing scheduler is replaced with MockedScheduler, so jobs are only
    recorded in memory and no timers actually fire.
    """

    configService = RepoConfigService()
    # metadata-file content that enables automatic (scheduled) generation
    METADATA_SCHEDULED = """
generation_type: scheduled
generation_interval: 1
"""
    # metadata-file content that disables automatic generation
    METADATA_MANUAL = """
generation_type: manual
"""
    
    def setUp(self):
        """Create a unique repo name/dir and a freshly mocked scheduler."""
        self.newRepoName = unique_repo_name()
        self.createdRepoDir = self.configService.getStaticRepoDir(self.newRepoName)
        self.mockedscheduler = self.get_mocked_scheduler()
    
    def tearDown(self):
        """Delete the repo directory created by the test, if any."""
        if os.path.exists(self.createdRepoDir):
            shutil.rmtree(self.createdRepoDir)
        
    def test_mockscheduler_schedules_update(self):
        """start() registers the recurring update_program_config job."""
        self.create_dummy_repo_with_metadata_generation(None) #creates only repo, no metadata
        self.mockedscheduler.start() #should NOT queue a job for the repo
        jobs = self.mockedscheduler.sched.get_active_jobs()
        # presumably "NOPATH" is the key the mocked scheduler uses for the
        # repo-independent config-update job — verify against MockedScheduler
        self.assertTrue(jobs["NOPATH"]=="update_program_config")
        
    def test_mockscheduler_does_not_care_about_manual_jobs(self):
        """A repo marked 'manual' must not get a generation job."""
        self.create_dummy_repo_with_metadata_generation(self.METADATA_MANUAL)
        self.mockedscheduler.start() #should NOT queue a job for the repo (since is manual)
        jobs = self.mockedscheduler.sched.get_active_jobs()
        self.assertFalse(self.createdRepoDir in jobs)
        
    def test_mockscheduler_works_on_scheduled_jobs(self):
        """A repo marked 'scheduled' gets a generation job on start()."""
        self.create_dummy_repo_with_metadata_generation(self.METADATA_SCHEDULED)
        self.mockedscheduler.start() #should queue a job for the repo (since is scheduled)
        jobs = self.mockedscheduler.sched.get_active_jobs()
        self.assertTrue(self.createdRepoDir in jobs)
        
    def test_mockscheduler_schedules_update_only_once_after_alternation(self):
        """Touching the metadata file must reschedule exactly once, even when
        update_program_config is invoked repeatedly without further changes."""
        self.create_dummy_repo_with_metadata_generation(self.METADATA_SCHEDULED)
        self.mockedscheduler.start()
        
        self.unscheduled_counter=0
        self.add_job_counter=0
        # counting wrappers that delegate to the real (unbound) implementations
        def count_unschedule(reponame):
            self.unscheduled_counter += 1
            MetaDataGenerationScheduler.unschedule_by_reponame(self.mockedscheduler, reponame)
            
        def count_add_job(reponame):
            self.add_job_counter += 1
            MetaDataGenerationScheduler.add_job_for_repo(self.mockedscheduler, reponame)
        
        self.mockedscheduler.unschedule_by_reponame=count_unschedule
        self.mockedscheduler.add_job_for_repo=count_add_job
        
        # update timestamp of metadataFile
        metadata_file = self._get_metadata_generation_file_path(self.createdRepoDir)
        # ensure, that the timestamp will be changed.
        time.sleep(1)
        self._touch_file(metadata_file)
        
        # only the first call sees a changed mtime; the others must be no-ops
        self.mockedscheduler.update_program_config()
        self.mockedscheduler.update_program_config()
        self.mockedscheduler.update_program_config()
        
        self.assertEqual(1, self.unscheduled_counter, 'too many jobs were unscheduled.')
        self.assertEqual(1, self.add_job_counter, 'too many jobs were added.')
        
    def test_mockscheduler_works_on_scheduled_jobs_and_stops_them_when_repo_deleted(self):
        """Deleting the repo directory removes its queued job on the next update."""
        self.create_dummy_repo_with_metadata_generation(self.METADATA_SCHEDULED)
        self.mockedscheduler.start() #queues a job
        self.assertTrue(self.createdRepoDir in self.mockedscheduler.sched.get_active_jobs())
        shutil.rmtree(self.createdRepoDir)
        self.mockedscheduler.update_program_config() #should remove the queued job since repo was deleted
        self.assertFalse(self.createdRepoDir in self.mockedscheduler.sched.get_active_jobs())
        
    def test_mockscheduler_works_on_scheduled_jobs_and_stops_them_when_metadata_file_deleted(self):
        """Deleting the metadata file removes the queued job on the next update."""
        self.create_dummy_repo_with_metadata_generation(self.METADATA_SCHEDULED)
        self.mockedscheduler.start() #queues a job
        self.assertTrue(self.createdRepoDir in self.mockedscheduler.sched.get_active_jobs())
        os.remove(self.configService.getMetaDataGenerationFilePathRelativeToRepoDirByRepoName(self.newRepoName))
        self.mockedscheduler.update_program_config() #should remove the queued job since file was deleted
        self.assertFalse(self.createdRepoDir in self.mockedscheduler.sched.get_active_jobs())
        
        
    def test_mockscheduler_works_on_scheduled_jobs_and_stops_them_when_switching_to_manual(self):
        """Rewriting the metadata as 'manual' unschedules the existing job."""
        self.create_dummy_repo_with_metadata_generation(self.METADATA_SCHEDULED)
        self.mockedscheduler.start() #queues a job
        self.assertTrue(self.createdRepoDir in self.mockedscheduler.sched.get_active_jobs())
        time.sleep(1) #THIS IS NECESSARY!! if not present, in some cases the timestamp does not change because the test is so fast, causing it to fail (no change = no update..)
        self.create_dummy_repo_with_metadata_generation(self.METADATA_MANUAL) #overwrite with manual        
        self.mockedscheduler.update_program_config() #should remove the queued job since was set to manual
        self.assertFalse(self.createdRepoDir in self.mockedscheduler.sched.get_active_jobs())
        
    def test_mockscheduler_works_on_scheduled_jobs_after_switching_from_manual_to_scheduled(self):
        """Rewriting the metadata as 'scheduled' adds a job for the repo."""
        self.create_dummy_repo_with_metadata_generation(self.METADATA_MANUAL)
        self.mockedscheduler.start() #queues no job (manual)
        self.assertFalse(self.createdRepoDir in self.mockedscheduler.sched.get_active_jobs())
        time.sleep(2) #THIS IS NECESSARY!! if not present, in some cases the timestamp does not change because the test is so fast, causing it to fail (no change = no update..)
        self.create_dummy_repo_with_metadata_generation(self.METADATA_SCHEDULED) #overwrite with scheduled        
        self.mockedscheduler.update_program_config() #should add a job since was set to scheduled
        self.assertTrue(self.createdRepoDir in self.mockedscheduler.sched.get_active_jobs())
        
    def get_mocked_scheduler(self):
        """Return a MetaDataGenerationScheduler whose backend is MockedScheduler."""
        mockedscheduler = MetaDataGenerationScheduler(5)
        mockedscheduler.sched=MockedScheduler()
        return mockedscheduler
    
    def create_dummy_repo_with_metadata_generation(self,metadataContents):
        """Create the repo dir and, when given, write *metadataContents* into
        its metadata-generation file (None creates only the bare repo)."""
        if not os.path.exists(self.createdRepoDir):
            os.makedirs(self.createdRepoDir)
        if not metadataContents:
            return
        filepath = self._get_metadata_generation_file_path(self.createdRepoDir)
        schedulingFile = open(filepath,'w')
        schedulingFile.write(metadataContents)
        schedulingFile.close()
    
    def _get_metadata_generation_file_path(self, repoDir):
        """Return the path of the metadata-generation file inside *repoDir*."""
        return repoDir+"/"+self.configService.METADATA_GENERATION_FILENAME
    
    def _touch_file(self, filename):
        """Bump *filename*'s mtime to now (creating it if missing)."""
        # NOTE(review): 'file' is the Python 2 builtin; under Python 3 this
        # would need open(filename, 'a') instead
        handle = file(filename, 'a')
        try:
            os.utime(filename, None)
        finally:
            handle.close()
# Example #38
# 0
class TestTagRepo(BaseIntegrationTestCase):
    """Integration tests for tagging/untagging static repositories via HTTP."""

    config = RepoConfigService()

    def test_tag_repo_fails_with_no_tag_in_postdata(self):
        """POST data without a tag field must be rejected with 400."""
        reponame = self.createNewRepoAndAssertValid()
        response = self.doHttpPost(Constants.HTTP_PATH_STATIC + "/" + reponame + "/tags/", "hello world how are you today")
        # BUG FIX: the original used assertTrue(status, BAD_REQUEST), which
        # treats BAD_REQUEST as the failure *message* and passes for any
        # truthy status; compare the status code explicitly instead.
        self.assertEquals(response.status, httplib.BAD_REQUEST)

    def test_tag_repo_post_ok(self):
        """Tagging an existing repo returns 201 Created."""
        reponame = self.createNewRepoAndAssertValid()
        tag = "tag_" + reponame
        response = self.repoclient().tagRepo(reponame, tag)
        self.assertEquals(response.status, httplib.CREATED)

    def test_tag_repo_read_ok(self):
        """A freshly written tag shows up in the tag listing."""
        reponame = self.createNewRepoAndAssertValid()
        tag = "tag_" + reponame
        self.repoclient().tagRepo(reponame, tag)
        response = self.repoclient().tagList(reponame)
        read_tags = response.read()
        self.assertTrue(tag in read_tags)

    def test_tag_repo_writes_tag(self):
        """Tagging persists the tag into the repo's tag file on disk."""
        reponame = self.createNewRepoAndAssertValid()
        tag = "tag_" + reponame
        self.repoclient().tagRepo(reponame, tag)
        targetFile = self.config.getTagsFileForStaticRepo(reponame)
        self.assertTrue(self.fileContains(targetFile, tag))

    def test_simple_tag_untag_repo(self):
        """Untagging returns 204 and removes the tag from the tag file."""
        reponame = self.createNewRepoAndAssertValid()
        tag = "tag_" + reponame
        self.repoclient().tagRepo(reponame, tag)
        targetFile = self.config.getTagsFileForStaticRepo(reponame)
        self.assertTrue(self.fileContains(targetFile, tag))
        response = self.repoclient().untagRepo(reponame, tag)
        self.assertEquals(response.status, httplib.NO_CONTENT)
        self.assertFalse(self.fileContains(targetFile, tag))

    def test_many_tags_untag(self):
        """Untagging one of many tags leaves all the others intact."""
        reponame = self.createNewRepoAndAssertValid()
        for i in range(10):
            self.repoclient().tagRepo(reponame, str(i))
            self.assertTrue(str(i) in self.repoclient().tagList(reponame).read())
        targetFile = self.config.getTagsFileForStaticRepo(reponame)
        self.assertTrue(self.fileContains(targetFile, str(5)))
        response = self.repoclient().untagRepo(reponame, str(5))
        self.assertEquals(response.status, httplib.NO_CONTENT)
        self.assertFalse(self.fileContains(targetFile, str(5)))
        for i in range(10):
            if not i == 5:
                self.assertTrue(self.fileContains(targetFile, str(i)))

    def test_untag_tag_fails_if_tag_not_present(self):
        """Deleting a tag that was never set yields 404 Not Found."""
        reponame = self.createNewRepoAndAssertValid()
        tag = "tag_" + reponame
        self.repoclient().tagRepo(reponame, tag)
        msg = self.repoclient().doHttpDelete(Constants.HTTP_PATH_STATIC + "/" + reponame + "/tags/asladalkdsla")
        self.assertEquals(msg.status, httplib.NOT_FOUND)

    def test_tag_twice_writes_tags(self):
        """Two successive tags both end up in the tag file."""
        reponame = self.createNewRepoAndAssertValid()
        tag1 = "tag1_" + reponame
        tag2 = "tag2_" + reponame
        self.repoclient().tagRepo(reponame, tag1)
        self.repoclient().tagRepo(reponame, tag2)
        targetFile = self.config.getTagsFileForStaticRepo(reponame)
        self.assertTrue(self.fileContains(targetFile, tag1))
        self.assertTrue(self.fileContains(targetFile, tag2))

    def fileContains(self, targetFile, contents):
        """Return True if *targetFile*'s text contains *contents*."""
        # 'with' closes the handle even if read() raises (resource-leak fix)
        with open(targetFile, "r") as f:
            text = f.read()
        return contents in text
    def test_get_rpm_group_with_obsolete_files_by_file_name(self):
        """Grouping of obsolete RPMs must match the expected fixture dict."""
        service = RepoConfigService()
        actual = service._get_rpm_group_with_obsolete_files_by_file_name(
            self.rpm_file_names, 3)

        self.assertEqual(self.dict_rpm_tuples.keys(), actual.keys())
        self.assertEqual(self.dict_rpm_tuples.values(), actual.values())