Example #1
    def _build_search_conn(self, conn, config):
        """
        Given an existing bound connection and ldap search config,
        assess if the existing connection is suitable for search,
        and if not, attempt to bind such a connection. Return
        None if no suitable connection can be bound.

        side-effect: always mutates config to mask a password.
        """
        search_userdn = config.get('userdn')
        search_password = config.get('password')
        if search_userdn is None:
            search_password = None
        if 'password' in config:
            # obscure password in logs
            config['password'] = '******'
        needs_conn = (conn is None or (search_userdn is not None
                                       and conn.user != search_userdn))
        if needs_conn:
            conn = self.connection(self.server(),
                                   userdn=search_userdn,
                                   password=search_password)
            if not self._open_and_bind(conn):
                threadlog.error("Search failed, couldn't bind user %s %s: %s" %
                                (search_userdn, config, conn.result))
                return
        return conn
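The "side-effect" called out in the docstring is what makes it safe to interpolate config into the error message above: the dict handed in has already had its password obscured. A minimal, self-contained sketch of that masking pattern with hypothetical values:

def mask_password(config):
    # same trick as above: obscure the password so the dict can be logged
    # without leaking credentials
    if 'password' in config:
        config['password'] = '******'
    return config

cfg = {'userdn': 'cn=search,dc=example,dc=org', 'password': 'secret'}  # hypothetical values
mask_password(cfg)
assert cfg['password'] == '******'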
Example #2
def devpiserver_pyramid_configure(config, pyramid_config):
    # make the theme path absolute if it exists and make it available via the
    # pyramid registry
    theme_path = config.args.theme
    if theme_path:
        theme_path = os.path.abspath(theme_path)
        if not os.path.exists(theme_path):
            threadlog.error("The theme path '%s' does not exist." % theme_path)
            sys.exit(1)
        if not os.path.isdir(theme_path):
            threadlog.error("The theme path '%s' is not a directory." %
                            theme_path)
            sys.exit(1)
    pyramid_config.registry['theme_path'] = theme_path
    # by using include, the package name doesn't need to be set explicitly
    # for registrations of static views etc
    pyramid_config.include('devpi_web.main')
    pyramid_config.registry['devpiweb-pluginmanager'] = get_pluginmanager(
        config)
    pyramid_config.registry['search_index'] = get_indexer(config)

    # monkeypatch mimetypes.guess_type because pyramid-1.5.1/webob
    # choke on mimetypes.guess_type on windows with python2.7
    if sys.platform == "win32" and sys.version_info[:2] == (2, 7):
        import mimetypes
        old = mimetypes.guess_type

        def guess_type_str(url, strict=True):
            res = old(url, strict)
            return str(res[0]), res[1]

        mimetypes.guess_type = guess_type_str
        threadlog.debug("monkeypatched mimetypes.guess_type to return bytes")
Example #3
    def _build_search_conn(self, conn, config):
        """
        Given an existing bound connection and ldap search config,
        assess if the existing connection is suitable for search,
        and if not, attempt to bind such a connection. Return
        None if no suitable connection can be bound.

        side-effect: always mutates config to mask a password.
        """
        search_userdn = config.get('userdn')
        search_password = config.get('password')
        if search_userdn is None:
            search_password = None
        if 'password' in config:
            # obscure password in logs
            config['password'] = '******'
        needs_conn = (
            conn is None
            or (search_userdn is not None and conn.user != search_userdn)
        )
        if needs_conn:
            conn = self.connection(
                self.server(),
                userdn=search_userdn, password=search_password)
            if not self._open_and_bind(conn):
                threadlog.error("Search failed, couldn't bind user %s %s: %s" % (search_userdn, config, conn.result))
                return
        return conn
Example #4
 def _update_projects(self, writer, projects, clear=False):
     add_document = partial(self._add_document, writer)
     counter = itertools.count()
     count = next(counter)
     main_keys = self.project_ix.schema.names()
     text_keys = (('author', 0.5), ('author_email', 0.5),
                  ('description', 1.5), ('summary', 1.75),
                  ('keywords', 1.75))
     for project in projects:
         data = dict((u(x), get_mutable_deepcopy(project[x]))
                     for x in main_keys if x in project)
         data['path'] = u"/{user}/{index}/{name}".format(**data)
         if not clear:
             # because we use hierarchical documents, we have to delete
             # everything we got for this path and index it again
             writer.delete_by_term('path', data['path'])
         data['type'] = "project"
         data['text'] = "%s %s" % (data['name'], project_name(data['name']))
         with writer.group():
             add_document(**data)
             count = next(counter)
             for key, boost in text_keys:
                 if key not in project:
                     continue
                 add_document(
                     **{
                         "path": data['path'],
                         "type": key,
                         "text": project[key],
                         "_text_boost": boost
                     })
                 count = next(counter)
             if '+doczip' not in project:
                 continue
             if not project['+doczip'].exists():
                 log.error("documentation zip file is missing %s",
                           data['path'])
                 continue
             for page in project['+doczip'].values():
                 if page is None:
                     continue
                 add_document(
                     **{
                         "path": data['path'],
                         "type": "title",
                         "text": page['title'],
                         "text_path": page['path'],
                         "text_title": page['title']
                     })
                 count = next(counter)
                 add_document(
                     **{
                         "path": data['path'],
                         "type": "page",
                         "text": page['text'],
                         "text_path": page['path'],
                         "text_title": page['title']
                     })
                 count = next(counter)
     return count
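For one project, the loop above emits a parent document plus one sub-document per boosted text key and per documentation page, all tied together by the shared "path". A rough, hypothetical illustration of the resulting shapes (field names from the code, values made up):

example_docs = [
    {'path': '/user/dev/pkg', 'type': 'project', 'text': 'pkg pkg'},
    {'path': '/user/dev/pkg', 'type': 'summary', 'text': 'An example package', '_text_boost': 1.75},
    {'path': '/user/dev/pkg', 'type': 'title', 'text': 'Usage', 'text_path': 'usage', 'text_title': 'Usage'},
    {'path': '/user/dev/pkg', 'type': 'page', 'text': 'How to install and use pkg ...', 'text_path': 'usage', 'text_title': 'Usage'},
]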
Example #5
    def _search(self, conn, config, **kw):
        config = dict(config)
        conn = self._build_search_conn(conn, config)
        if not conn:
            return []
        search_filter = config['filter'].format(**kw)
        search_scope = self._search_scope(config)
        attribute_name = config['attribute_name']
        found = conn.search(
            config['base'], search_filter,
            search_scope=search_scope, attributes=[attribute_name])
        if found:
            if any(attribute_name in x.get('attributes', {}) for x in conn.response):
                def extract_search(s):
                    if 'attributes' in s:
                        attributes = s['attributes'][attribute_name]
                        if not isinstance(attributes, list):
                            attributes = [attributes]
                        return attributes
                    else:
                        return []
            elif attribute_name in ('dn', 'distinguishedName'):
                def extract_search(s):
                    return [s[attribute_name]]
            else:
                threadlog.error('configured attribute_name {} not found in any search results'.format(attribute_name))
                return []

            return sum((extract_search(x) for x in conn.response), [])
        else:
            threadlog.error("Search failed %s %s: %s" % (search_filter, config, conn.result))
            return []
Example #6
def get_toxresults_info(linkstore, for_link, newest=True):
    result = []
    toxlinks = linkstore.get_links(rel="toxresult", for_entrypath=for_link)
    for toxlink, toxenvs in iter_toxresults(toxlinks,
                                            load_toxresult,
                                            newest=newest):
        if toxenvs is None:
            log.error("Couldn't parse test results %s." % toxlink)
            continue
        for toxenv in toxenvs:
            status = 'unknown'
            if (not toxenv.setup['failed'] and not toxenv.test['failed']
                    and toxenv.test['commands']):
                status = 'passed'
            elif toxenv.setup['failed'] or toxenv.test['failed']:
                status = 'failed'
            info = dict(basename=toxlink.basename,
                        _key="-".join(toxenv.key),
                        host=toxenv.host,
                        platform=toxenv.platform,
                        envname=toxenv.envname,
                        setup=toxenv.setup,
                        test=toxenv.test,
                        status=status)
            if toxenv.pyversion:
                info["pyversion"] = toxenv.pyversion
            result.append(info)
    return result
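The status classification in the inner loop can be exercised on its own; a small sketch with made-up setup/test dicts shaped like a toxresult entry:

setup = {'failed': False}
test = {'failed': False, 'commands': [{'retcode': '0'}]}  # hypothetical command results
status = 'unknown'
if not setup['failed'] and not test['failed'] and test['commands']:
    status = 'passed'
elif setup['failed'] or test['failed']:
    status = 'failed'
print(status)  # -> passed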
Example #7
    def _search(self, conn, config, **kw):
        config = dict(config)
        conn = self._build_search_conn(conn, config)
        if not conn:
            return []
        search_filter = config['filter'].format(**kw)
        search_scope = self._search_scope(config)
        attribute_name = config['attribute_name']
        found = conn.search(
            config['base'], search_filter.decode("utf8"),
            search_scope=search_scope, attributes=[attribute_name])
        if found:
            if any(attribute_name in x.get('attributes', {}) for x in conn.response):
                def extract_search(s):
                    if 'attributes' in s:
                        attributes = s['attributes'][attribute_name]
                        if not isinstance(attributes, list):
                            attributes = [attributes]
                        return attributes
                    else:
                        return []
            elif attribute_name in ('dn', 'distinguishedName'):
                def extract_search(s):
                    return [s[attribute_name]]
            else:
                threadlog.error('configured attribute_name {} not found in any search results'.format(attribute_name))
                return []

            return sum((extract_search(x) for x in conn.response), [])
        else:
            threadlog.error("Search failed %s %s: %s" % (search_filter, config, conn.result))
            return []
Example #8
 def process_sub_hits(self, stage, sub_hits, data):
     search_index = self.request.registry['search_index']
     result = []
     for sub_hit in sub_hits:
         sub_data = sub_hit['data']
         text_type = sub_data['type']
         title = text_type.title()
         highlight = None
         if text_type == 'project':
             continue
         elif text_type in ('title', 'page'):
             docs = self.get_docs(stage, data)
             try:
                 entry = docs[sub_data['text_path']]
             except KeyError:
                 highlight = (
                     "Couldn't access documentation files for %s "
                     "version %s on %s. This is a bug. If you find a way "
                     "to reproduce this, please file an issue at: "
                     "https://github.com/devpi/devpi/issues" %
                     (data['name'], data['doc_version'], stage.name))
             else:
                 text = entry['text']
                 highlight = search_index.highlight(text,
                                                    sub_hit.get('words'))
             title = sub_data.get('text_title', title)
             text_path = sub_data.get('text_path')
             if text_path:
                 sub_hit['url'] = self.request.route_url(
                     "docviewroot",
                     user=data['user'],
                     index=data['index'],
                     project=normalize_name(data['name']),
                     version=data['doc_version'],
                     relpath="%s.html" % text_path)
         elif text_type in ('keywords', 'description', 'summary'):
             metadata = self.get_versiondata(stage, data)
             if metadata is None:
                 continue
             text = metadata.get(text_type)
             if text is None:
                 continue
             highlight = search_index.highlight(text, sub_hit.get('words'))
             if 'version' in data:
                 sub_hit['url'] = self.request.route_url(
                     "/{user}/{index}/{project}/{version}",
                     user=data['user'],
                     index=data['index'],
                     project=normalize_name(data['name']),
                     version=data['version'],
                     _anchor=text_type)
         else:
             log.error("Unknown type %s" % text_type)
             continue
         sub_hit['title'] = title
         sub_hit['highlight'] = highlight
         result.append(sub_hit)
     return result
Example #9
def index_get(context, request):
    context = ContextWrapper(context)
    user, index = context.username, context.index
    stage = context.stage
    bases = []
    packages = []
    result = dict(
        title="%s index" % stage.name,
        simple_index_url=request.route_url(
            "/{user}/{index}/+simple/", user=user, index=index),
        bases=bases,
        packages=packages)
    if stage.name == "root/pypi":
        return result

    if hasattr(stage, "ixconfig"):
        for base in stage.ixconfig["bases"]:
            base_user, base_index = base.split('/')
            bases.append(dict(
                title=base,
                url=request.route_url(
                    "/{user}/{index}",
                    user=base_user, index=base_index),
                simple_url=request.route_url(
                    "/{user}/{index}/+simple/",
                    user=base_user, index=base_index)))

    for projectname in stage.list_projectnames_perstage():
        version = stage.get_latest_version_perstage(projectname)
        verdata = stage.get_versiondata_perstage(projectname, version)
        try:
            name, ver = verdata["name"], verdata["version"]
        except KeyError:
            log.error("metadata for project %r empty: %s, skipping",
                      projectname, verdata)
            continue
        show_toxresults = not (stage.user.name == 'root' and stage.index == 'pypi')
        linkstore = stage.get_linkstore_perstage(name, ver)
        packages.append(dict(
            info=dict(
                title="%s-%s" % (name, ver),
                url=request.route_url(
                    "/{user}/{index}/{name}/{version}",
                    user=stage.user.name, index=stage.index,
                    name=name, version=ver)),
            make_toxresults_url=functools.partial(
                request.route_url, "toxresults",
                user=stage.user.name, index=stage.index,
                name=name, version=ver),
            files=get_files_info(request, linkstore, show_toxresults),
            docs=get_docs_info(request, stage, verdata)))
    packages.sort(key=lambda x: x["info"]["title"])

    return result
Example #10
 def _userdn(self, username):
     if 'user_template' in self:
         return self['user_template'].format(username=username)
     else:
         result = self._search(None, self['user_search'], username=username)
         if len(result) == 1:
             return result[0]
         elif not result:
             threadlog.info("No user '%s' found." % username)
         else:
             threadlog.error("Multiple results for user '%s' found." % username)
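The 'user_search' mapping looked up here (and passed on to _search above) might look roughly like this; the key names are taken from the code, the values are made up:

user_search = {
    'base': 'ou=users,dc=example,dc=org',
    'filter': '(&(objectClass=person)(uid={username}))',
    'attribute_name': 'distinguishedName',
    # optional bind credentials used when building the search connection:
    'userdn': 'cn=search,dc=example,dc=org',
    'password': 'secret',
}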
Example #11
 def _userdn(self, username):
     if 'user_template' in self:
         return self['user_template'].format(username=username)
     else:
         result = self._search(None, self['user_search'], username=username)
         if len(result) == 1:
             return result[0]
         elif not result:
             threadlog.info("No user '%s' found." % username)
         else:
             threadlog.error("Multiple results for user '%s' found." % username)
Example #12
 def filtered_list_all(self):
     stage = self.context.stage
     try:
         stage_results = list(stage.op_sro("list_projects_perstage"))
     except stage.UpstreamError as e:
         threadlog.error(e.msg)
         abort(self.request, 502, e.msg)
     releasefilter = set(get_release_filter(stage))
     results = []
     for stage, names in stage_results:
         filtered_names = releasefilter.intersection(names)
         results.append((stage, filtered_names))
     # at this point we are sure we can produce the data without
     # depending on remote networks
     return Response(body=b"".join(self._simple_list_all(stage, results)))
Example #13
 def process_sub_hits(self, stage, sub_hits, data):
     search_index = self.request.registry['search_index']
     result = []
     for sub_hit in sub_hits:
         sub_data = sub_hit['data']
         text_type = sub_data['type']
         title = text_type.title()
         highlight = None
         if text_type == 'project':
             continue
         elif text_type in ('title', 'page'):
             docs = self.get_docs(stage, data)
             entry = docs[sub_data['text_path']]
             text = entry['text']
             highlight = search_index.highlight(text, sub_hit.get('words'))
             title = sub_data.get('text_title', title)
             text_path = sub_data.get('text_path')
             if text_path:
                 sub_hit['url'] = self.request.route_url(
                     "docviewroot",
                     user=data['user'],
                     index=data['index'],
                     project=normalize_name(data['name']),
                     version=data['doc_version'],
                     relpath="%s.html" % text_path)
         elif text_type in ('keywords', 'description', 'summary'):
             metadata = self.get_versiondata(stage, data)
             if metadata is None:
                 continue
             text = metadata.get(text_type)
             if text is None:
                 continue
             highlight = search_index.highlight(text, sub_hit.get('words'))
             if 'version' in data:
                 sub_hit['url'] = self.request.route_url(
                     "/{user}/{index}/{project}/{version}",
                     user=data['user'],
                     index=data['index'],
                     project=normalize_name(data['name']),
                     version=data['version'],
                     _anchor=text_type)
         else:
             log.error("Unknown type %s" % text_type)
             continue
         sub_hit['title'] = title
         sub_hit['highlight'] = highlight
         result.append(sub_hit)
     return result
Example #14
 def process_sub_hits(self, stage, sub_hits, data):
     search_index = self.request.registry['search_index']
     result = []
     for sub_hit in sub_hits:
         sub_data = sub_hit['data']
         text_type = sub_data['type']
         title = text_type.title()
         highlight = None
         if text_type == 'project':
             continue
         elif text_type in ('title', 'page'):
             docs = self.get_docs(stage, data)
             entry = docs[sub_data['text_path']]
             text = entry['text']
             highlight = search_index.highlight(text, sub_hit.get('words'))
             title = sub_data.get('text_title', title)
             text_path = sub_data.get('text_path')
             if text_path:
                 sub_hit['url'] = self.request.route_url(
                     "docviewroot", user=data['user'], index=data['index'],
                     name=data['name'], version=data['doc_version'],
                     relpath="%s.html" % text_path)
         elif text_type in ('keywords', 'description', 'summary'):
             metadata = self.get_versiondata(stage, data)
             if metadata is None:
                 continue
             text = metadata.get(text_type)
             if text is None:
                 continue
             highlight = search_index.highlight(text, sub_hit.get('words'))
             if 'version' in data:
                 sub_hit['url'] = self.request.route_url(
                     "/{user}/{index}/{name}/{version}",
                     user=data['user'], index=data['index'],
                     name=data['name'], version=data['version'],
                     _anchor=text_type)
         else:
             log.error("Unknown type %s" % text_type)
             continue
         sub_hit['title'] = title
         sub_hit['highlight'] = highlight
         result.append(sub_hit)
     return result
Example #15
    def filtered_list_project(self):
        request = self.request
        abort_if_invalid_project(request, request.matchdict["project"])
        project = self.context.project
        # we only serve absolute links so we don't care about the route's slash
        stage = self.context.stage
        releasefilter = get_release_filter(stage).get(project)
        if releasefilter is None:
            abort(self.request, 404, "The project %s does not exist." % project)

        try:
            links = stage.get_simplelinks(project, sorted_links=False)
        except stage.UpstreamError as e:
            threadlog.error(e.msg)
            abort(request, 502, e.msg)

        result = []
        for key, url in links:
            parts = splitext_archive(key)[0].split('-')
            for index in range(1, len(parts)):
                name = normalize_name('-'.join(parts[:index]))
                if name == project:
                    version = '-'.join(parts[index:])
                    break
            else:
                continue
            if version in releasefilter:
                result.append((key, url))

        if not result:
            self.request.context.verified_project  # access will trigger 404 if not found

        # we don't need the extra stuff on the simple page for pip
        embed_form = False
        blocked_index = None
        response = Response(body=b"".join(self._simple_list_project(
            stage, project, result, embed_form, blocked_index)))
        if stage.ixconfig['type'] == 'mirror':
            serial = stage.key_projsimplelinks(project).get().get("serial")
            if serial > 0:
                response.headers[str("X-PYPI-LAST-SERIAL")] = str(serial)
        return response
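The name/version split in the loop tries successively longer dash-joined prefixes of the file stem until one matches the project name; a simplified standalone sketch (the real code uses devpi's splitext_archive and normalize_name helpers):

key = 'my-pkg-1.2.3.tar.gz'                 # hypothetical release filename
parts = key[:-len('.tar.gz')].split('-')    # stands in for splitext_archive(key)[0].split('-')
project, version = 'my-pkg', None
for index in range(1, len(parts)):
    if '-'.join(parts[:index]) == project:  # normalize_name(...) == project in the real code
        version = '-'.join(parts[index:])
        break
print(version)  # -> 1.2.3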
Example #16
def get_toxresults_info(linkstore, for_link, newest=True):
    result = []
    toxlinks = linkstore.get_links(rel="toxresult", for_entrypath=for_link)
    for toxlink, toxenvs in iter_toxresults(toxlinks, load_toxresult, newest=newest):
        if toxenvs is None:
            log.error("Couldn't parse test results %s." % toxlink)
            continue
        for toxenv in toxenvs:
            info = dict(
                basename=toxlink.basename,
                _key="-".join(toxenv.key),
                host=toxenv.host,
                platform=toxenv.platform,
                envname=toxenv.envname,
                setup=toxenv.setup,
                test=toxenv.test,
                failed=toxenv.failed)
            if toxenv.pyversion:
                info["pyversion"] = toxenv.pyversion
            result.append(info)
    return result
Example #17
    def _search(self, conn, config, **kw):
        config = dict(config)
        search_userdn = config.get('userdn')
        search_password = config.get('password')
        if 'password' in config:
            # obscure password in logs
            config['password'] = '******'
        if conn is None:
            if search_userdn is None:
                conn = self.connection(self.server())
            else:
                conn = self.connection(
                    self.server(),
                    userdn=search_userdn, password=search_password)
            if not self._open_and_bind(conn):
                threadlog.error("Search failed, couldn't bind user %s %s: %s" % (search_userdn, config, conn.result))
                return []
        else:
            if search_userdn is not None and conn.user != search_userdn:
                conn = self.connection(
                    self.server(),
                    userdn=search_userdn, password=search_password)
                if not self._open_and_bind(conn):
                    threadlog.error("Search failed, couldn't bind user %s %s: %s" % (search_userdn, config, conn.result))
                    return []
        search_filter = config['filter'].format(**kw)
        search_scope = self._search_scope(config)
        attribute_name = config['attribute_name']
        found = conn.search(
            config['base'], search_filter,
            search_scope=search_scope, attributes=[attribute_name])
        if found:
            if any(attribute_name in x['attributes'] for x in conn.response):
                def extract_search(s):
                    return s['attributes'][attribute_name]
            elif attribute_name in ('dn', 'distinguishedName'):
                def extract_search(s):
                    return [s[attribute_name]]
            else:
                threadlog.error('configured attribute_name {} not found in any search results'.format(attribute_name))
                return []

            return sum((extract_search(x) for x in conn.response), [])
        else:
            threadlog.error("Search failed %s %s: %s" % (search_filter, config, conn.result))
            return []
Example #18
 def _search(self, conn, config, **kw):
     config = dict(config)
     search_userdn = config.get('userdn')
     search_password = config.get('password')
     if 'password' in config:
         # obscure password in logs
         config['password'] = '******'
     if conn is None:
         if search_userdn is None:
             conn = self.connection(self.server())
         else:
             conn = self.connection(self.server(),
                                    userdn=search_userdn,
                                    password=search_password)
         if not self._open_and_bind(conn):
             threadlog.error("Search failed, couldn't bind user %s %s: %s" %
                             (search_userdn, config, conn.result))
             return []
     else:
         if search_userdn is not None and conn.user != search_userdn:
             conn = self.connection(self.server(),
                                    userdn=search_userdn,
                                    password=search_password)
             if not self._open_and_bind(conn):
                 threadlog.error(
                     "Search failed, couldn't bind user %s %s: %s" %
                     (search_userdn, config, conn.result))
                 return []
     search_filter = config['filter'].format(**kw)
     search_scope = self._search_scope(config)
     attribute_name = config['attribute_name']
     found = conn.search(config['base'],
                         search_filter,
                         search_scope=search_scope,
                         attributes=[attribute_name])
     if found:
         return sum(
             (x['attributes'][attribute_name] for x in conn.response), [])
     else:
         threadlog.error("Search failed %s %s: %s" %
                         (search_filter, config, conn.result))
         return []
Example #19
 def _search(self, conn, config, **kw):
     config = dict(config)
     search_userdn = config.get('userdn')
     search_password = config.get('password')
     if 'password' in config:
         # obscure password in logs
         config['password'] = '******'
     if conn is None:
         if search_userdn is None:
             conn = self.connection(self.server())
         else:
             conn = self.connection(
                 self.server(),
                 userdn=search_userdn, password=search_password)
         if not self._open_and_bind(conn):
             threadlog.error("Search failed, couldn't bind user %s %s: %s" % (search_userdn, config, conn.result))
             return []
     else:
         if search_userdn is not None and conn.user != search_userdn:
             conn = self.connection(
                 self.server(),
                 userdn=search_userdn, password=search_password)
             if not self._open_and_bind(conn):
                 threadlog.error("Search failed, couldn't bind user %s %s: %s" % (search_userdn, config, conn.result))
                 return []
     search_filter = config['filter'].format(**kw)
     search_scope = self._search_scope(config)
     attribute_name = config['attribute_name']
     found = conn.search(
         config['base'], search_filter,
         search_scope=search_scope, attributes=[attribute_name])
     if found:
         if 'regex' not in config:
             return sum((x['attributes'][attribute_name] for x in conn.response), [])
         # otherwise filter out groups by the regex provided
         groupresult = sum((x['attributes'][attribute_name] for x in conn.response), [])
         findwhat = re.compile(config['regex'])
         return [findwhat.search(g).group('group') for g in groupresult]
     else:
         threadlog.error("Search failed %s %s: %s" % (search_filter, config, conn.result))
         return []
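The optional 'regex' branch above expects a pattern with a named group called "group"; a self-contained sketch with a hypothetical pattern that strips an Active Directory style CN= prefix from each result:

import re

config = {'regex': r'^CN=(?P<group>[^,]+),'}                   # hypothetical configuration
groupresult = ['CN=devpi-admins,OU=Groups,DC=example,DC=org']  # made-up search output
findwhat = re.compile(config['regex'])
print([findwhat.search(g).group('group') for g in groupresult])  # -> ['devpi-admins']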
Example #20
 def _search(self, conn, config, **kw):
     config = dict(config)
     search_userdn = config.get('userdn')
     search_password = config.get('password')
     if 'password' in config:
         # obscure password in logs
         config['password'] = '******'
     if conn is None:
         if search_userdn is None:
             conn = self.connection(self.server())
         else:
             conn = self.connection(
                 self.server(),
                 userdn=search_userdn, password=search_password)
         if not self._open_and_bind(conn):
             threadlog.error("Search failed, couldn't bind user %s %s: %s" % (search_userdn, config, conn.result))
             return []
     else:
         if search_userdn is not None and conn.user != search_userdn:
             conn = self.connection(
                 self.server(),
                 userdn=search_userdn, password=search_password)
             if not self._open_and_bind(conn):
                 threadlog.error("Search failed, couldn't bind user %s %s: %s" % (search_userdn, config, conn.result))
                 return []
     search_filter = config['filter'].format(**kw)
     search_scope = self._search_scope(config)
     attribute_name = config['attribute_name']
     found = conn.search(
         config['base'], search_filter,
         search_scope=search_scope, attributes=[attribute_name])
     if found:
         return sum((x['attributes'][attribute_name] for x in conn.response), [])
     else:
         threadlog.error("Search failed %s %s: %s" % (search_filter, config, conn.result))
         return []
Example #21
def fatal(msg):
    threadlog.error(msg)
    sys.exit(1)
Example #22
    def _update_project(self, project, serial, counter, main_keys, writer,
                        searcher):
        def add_document(**kw):
            try:
                writer.add_document(**kw)
            except Exception:
                log.exception(
                    "Exception while trying to add the following data to the search index:\n%r"
                    % kw)
                raise

        text_keys = (('author', 0.5), ('author_email', 0.5),
                     ('description', 1.5), ('summary', 1.75),
                     ('keywords', 1.75))
        data = dict((u(x), get_mutable_deepcopy(project[x])) for x in main_keys
                    if x in project)
        data['path'] = u"/{user}/{index}/{name}".format(
            user=data['user'], index=data['index'],
            name=normalize_name(data['name']))
        existing = None
        doc_num = searcher.document_number(path=data['path'])
        if doc_num is not None:
            existing = searcher.stored_fields(doc_num)
        if existing is not None:
            needs_reindex = False
            if ('+doczip' in project) != ('doc_version' in existing):
                needs_reindex = True
            existing_serial = existing.get('serial', -1)
            if existing_serial < serial:
                needs_reindex = True
            if not needs_reindex:
                return
        # because we use hierarchical documents, we have to delete
        # everything we got for this path and index it again
        writer.delete_by_term('path', data['path'], searcher=searcher)
        data['serial'] = serial
        data['type'] = "project"
        data['text'] = "%s %s" % (data['name'], project_name(data['name']))
        with writer.group():
            add_document(**data)
            next(counter)
            for key, boost in text_keys:
                if key not in project:
                    continue
                add_document(
                    **{
                        "path": data['path'],
                        "type": key,
                        "text": project[key],
                        "_text_boost": boost
                    })
                next(counter)
            if '+doczip' not in project:
                return
            if not project['+doczip'].exists():
                log.error("documentation zip file is missing %s", data['path'])
                return
            for page in project['+doczip'].values():
                if page is None:
                    continue
                add_document(
                    **{
                        "path": data['path'],
                        "type": "title",
                        "text": page['title'],
                        "text_path": page['path'],
                        "text_title": page['title']
                    })
                next(counter)
                add_document(
                    **{
                        "path": data['path'],
                        "type": "page",
                        "text": page['text'],
                        "text_path": page['path'],
                        "text_title": page['title']
                    })
                next(counter)
Example #23
def fatal(msg):
    threadlog.error(msg)
    sys.exit(1)
Example #24
def index_get(context, request):
    context = ContextWrapper(context)
    stage = context.stage
    permissions = []
    bases = []
    packages = []
    whitelist = []
    result = dict(
        title="%s index" % stage.name,
        simple_index_url=request.simpleindex_url(stage),
        permissions=permissions,
        bases=bases,
        packages=packages,
        whitelist=whitelist,
        index_name=stage.name,
        index_title=stage.ixconfig.get('title', None),
        index_description=stage.ixconfig.get('description', None))
    if stage.ixconfig['type'] == 'mirror':
        return result

    if hasattr(stage, "ixconfig"):
        whitelist.extend(sorted(stage.ixconfig['mirror_whitelist']))
        for base in stage.ixconfig["bases"]:
            bases.append(dict(
                title=base,
                url=request.stage_url(base),
                simple_url=request.simpleindex_url(base)))
        acls = [
            (key[4:], stage.ixconfig[key])
            for key in stage.ixconfig
            if key.startswith('acl_')]
        for permission, principals in sorted(acls):
            groups = []
            special = []
            users = []
            for principal in principals:
                if principal.startswith(':'):
                    if principal.endswith(':'):
                        special.append(dict(title=principal[1:-1]))
                    else:
                        groups.append(dict(title=principal[1:]))
                else:
                    users.append(dict(title=principal))
            permissions.append(dict(
                title=permission,
                groups=groups,
                special=special,
                users=users))

    for project in stage.list_projects_perstage():
        version = stage.get_latest_version_perstage(project)
        verdata = stage.get_versiondata_perstage(project, version)
        try:
            name, ver = normalize_name(verdata["name"]), verdata["version"]
        except KeyError:
            log.error("metadata for project %r empty: %s, skipping",
                      project, verdata)
            continue
        show_toxresults = (stage.ixconfig['type'] != 'mirror')
        linkstore = stage.get_linkstore_perstage(name, ver)
        packages.append(dict(
            info=dict(
                title="%s-%s" % (name, ver),
                url=request.route_url(
                    "/{user}/{index}/{project}/{version}",
                    user=stage.user.name, index=stage.index,
                    project=name, version=ver)),
            make_toxresults_url=functools.partial(
                request.route_url, "toxresults",
                user=stage.user.name, index=stage.index,
                project=name, version=ver),
            files=get_files_info(request, linkstore, show_toxresults),
            docs=get_docs_info(request, stage, linkstore),
            _version_data=verdata))
    packages.sort(key=lambda x: x["info"]["title"])

    return result
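The acl handling above sorts principals into three buckets based on leading and trailing colons; the same classification in isolation, with hypothetical acl entries:

principals = [':ANONYMOUS:', ':developers', 'alice']  # hypothetical acl_upload value
special = [p[1:-1] for p in principals if p.startswith(':') and p.endswith(':')]
groups = [p[1:] for p in principals if p.startswith(':') and not p.endswith(':')]
users = [p for p in principals if not p.startswith(':')]
print(special, groups, users)  # -> ['ANONYMOUS'] ['developers'] ['alice']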
Example #25
def project_get(context, request):
    context = ContextWrapper(context)
    try:
        releaselinks = context.stage.get_releaselinks(context.verified_project)
        stage_versions = context.stage.list_versions_perstage(context.verified_project)
    except context.stage.UpstreamError as e:
        log.error(e.msg)
        raise HTTPBadGateway(e.msg)
    version_info = {}
    seen = set()
    for release in releaselinks:
        user, index = release.entrypath.split("/", 2)[:2]
        name, version = release.project, release.version
        if not version or version == 'XXX':
            continue
        seen_key = (user, index, name, version)
        if seen_key in seen:
            continue
        version_info[version] = dict(
            index_title="%s/%s" % (user, index),
            index_url=request.stage_url(user, index),
            title=version,
            url=request.route_url(
                "/{user}/{index}/{project}/{version}",
                user=user, index=index, project=name, version=version),
            docs=None,
            _release=release)
        seen.add(seen_key)
    user = context.username
    index = context.stage.index
    index_title = "%s/%s" % (user, index)
    name = context.verified_project
    index_url = request.stage_url(user, index)
    for version in stage_versions:
        linkstore = context.stage.get_linkstore_perstage(name, version)
        docs = get_docs_info(request, context.stage, linkstore)
        if not docs:
            continue
        if version not in version_info:
            version_info[version] = dict(
                index_title=index_title,
                index_url=index_url,
                title=version,
                url=request.route_url(
                    "/{user}/{index}/{project}/{version}",
                    user=user, index=index, project=name, version=version),
                docs=docs,
                _release=None)
        else:
            version_info[version]['docs'] = docs
    versions = []
    for version in get_sorted_versions(version_info):
        versions.append(version_info[version])
    if hasattr(context.stage, 'get_mirror_whitelist_info'):
        whitelist_info = context.stage.get_mirror_whitelist_info(context.project)
    else:
        whitelist_info = dict(
            has_mirror_base=context.stage.has_mirror_base(context.project),
            blocked_by_mirror_whitelist=None)
    return dict(
        title="%s/: %s versions" % (context.stage.name, context.project),
        blocked_by_mirror_whitelist=whitelist_info['blocked_by_mirror_whitelist'],
        versions=versions)
Example #26
def version_get(context, request):
    """ Show version for the precise stage, ignores inheritance. """
    context = ContextWrapper(context)
    name, version = context.verified_project, context.version
    stage = context.stage
    try:
        verdata = context.get_versiondata(perstage=True)
    except stage.UpstreamError as e:
        log.error(e.msg)
        raise HTTPBadGateway(e.msg)
    infos = []
    skipped_keys = frozenset(
        ("description", "home_page", "name", "summary", "version"))
    for key, value in sorted(verdata.items()):
        if key in skipped_keys or key.startswith('+'):
            continue
        if isinstance(value, seq_types):
            if not len(value):
                continue
            value = html.ul([html.li(x) for x in value]).unicode()
        else:
            if not value:
                continue
            value = py.xml.escape(value)
        infos.append((py.xml.escape(key), value))
    show_toxresults = (stage.ixconfig['type'] != 'mirror')
    linkstore = stage.get_linkstore_perstage(name, version)
    files = get_files_info(request, linkstore, show_toxresults)
    docs = get_docs_info(request, stage, linkstore)
    home_page = verdata.get("home_page")
    nav_links = []
    if docs:
        nav_links.append(dict(
            title="Documentation",
            url=docs['url']))
    if home_page:
        nav_links.append(dict(
            title="Homepage",
            url=home_page))
    nav_links.append(dict(
        title="Simple index",
        url=request.route_url(
            "/{user}/{index}/+simple/{project}",
            user=context.username, index=context.index, project=context.project)))
    if hasattr(stage, 'get_mirror_whitelist_info'):
        whitelist_info = stage.get_mirror_whitelist_info(name)
    else:
        whitelist_info = dict(
            has_mirror_base=stage.has_mirror_base(name),
            blocked_by_mirror_whitelist=False)
    if whitelist_info['has_mirror_base']:
        for base in reversed(list(stage.sro())):
            if base.ixconfig["type"] != "mirror":
                continue
            mirror_web_url_fmt = base.ixconfig.get("mirror_web_url_fmt")
            if not mirror_web_url_fmt:
                continue
            nav_links.append(dict(
                title="%s page" % base.ixconfig.get("title", "Mirror"),
                url=mirror_web_url_fmt.format(name=name)))
    return dict(
        title="%s/: %s-%s metadata and description" % (stage.name, name, version),
        content=get_description(stage, name, version),
        summary=verdata.get("summary"),
        nav_links=nav_links,
        infos=infos,
        files=files,
        blocked_by_mirror_whitelist=whitelist_info['blocked_by_mirror_whitelist'],
        show_toxresults=show_toxresults,
        make_toxresults_url=functools.partial(
            request.route_url, "toxresults",
            user=context.username, index=context.index,
            project=context.project, version=context.version),
        make_toxresult_url=functools.partial(
            request.route_url, "toxresult",
            user=context.username, index=context.index,
            project=context.project, version=context.version))