Exemplo n.º 1
0
    def load_project_config(self):
        """Choose the config file.

        Try to guess whether this is a development or installed project.

        Side effects: may print the chosen locale dir/domain, creates the
        locale directory when missing, and raises ProgramError when the
        configured locale path exists but is not a directory.
        """

        # check whether user specified custom settings
        if self.load_config:
            load_project_config(self.config)

        if config.get("i18n.locale_dir"):
            self.locale_dir = config.get("i18n.locale_dir")
            # print() call form works on both Python 2 and 3 for a single arg.
            print('Use %s as a locale directory' % self.locale_dir)
        if config.get('i18n.domain'):
            self.domain = config.get("i18n.domain")
            print('Use %s as a message domain' % self.domain)

        if os.path.exists(self.locale_dir) and \
                not os.path.isdir(self.locale_dir):
            # Call-style raise: the old "raise E, arg" form is a syntax
            # error on Python 3; this form is valid on both.
            raise ProgramError(
                '%s is not a directory' % self.locale_dir)

        if not os.path.exists(self.locale_dir):
            os.makedirs(self.locale_dir)
Exemplo n.º 2
0
 def wait_for_sync(self):
     """
     Block until our repomd.xml hits the master mirror
     """
     # Nothing was mashed, so there is nothing to wait for.
     if not len(self.updates):
         log.debug("No updates in masher; skipping wait_for_sync")
         return
     log.info("Waiting for updates to hit mirror...")
     # Peek at an arbitrary update to learn which release was pushed;
     # pop-then-add leaves the set unchanged.
     update = self.updates.pop()
     release = update.release
     self.updates.add(update)
     mashdir = config.get("mashed_dir")
     repo = release.stable_repo
     # Config key derived from the release prefix, e.g. "fedora_epel"
     # -> "fedora_epel_master_repomd" (a URL template with one %s slot).
     master_repomd = config.get("%s_master_repomd" % release.id_prefix.lower().replace("-", "_"))
     # NOTE(review): arch is hard-coded to i386 — presumably any single
     # arch is a good-enough sync indicator; confirm against the mirrors.
     repomd = join(mashdir, repo, "i386", "repodata", "repomd.xml")
     if not exists(repomd):
         log.error("Cannot find local repomd: %s" % repomd)
         return
     # Digest of the local repomd, to compare against the master copy.
     checksum = sha.new(file(repomd).read()).hexdigest()
     # NOTE(review): this snippet appears truncated — the fetched
     # masterrepomd is never compared against `checksum` in the visible
     # code, so the loop as shown never terminates.
     while True:
         # Poll the master mirror every 10 minutes.
         sleep(600)
         try:
             masterrepomd = urllib2.urlopen(master_repomd % release.get_version())
         except urllib2.URLError, e:
             log.error("Error fetching repomd.xml: %s" % str(e))
             continue
         except urllib2.HTTPError, e:
             # NOTE(review): unreachable — HTTPError subclasses URLError,
             # so the handler above already catches it.
             log.error("Error fetching repomd.xml: %s" % str(e))
             continue
Exemplo n.º 3
0
 def __init__(self, *args, **kw):
     """Pull error-notification SMTP settings out of the app config."""
     super(ErrorCatcher, self).__init__(*args, **kw)
     cfg = config.get
     # Addressing for outgoing error reports.
     self.sender_email = cfg("error_catcher.sender_email")
     self.admin_email = cfg("error_catcher.admin_email")
     # SMTP transport settings; only the host has a default.
     self.smtp_host = cfg("error_catcher.smtp_host", "localhost")
     self.smtp_user = cfg("error_catcher.smtp_user")
     self.smtp_passwd = cfg("error_catcher.smtp_passwd")
Exemplo n.º 4
0
 def do_krb_auth(self):
     """Build self.auth_mgr, defaulting principal/keytab from TG config."""
     from bkr.common.krb_auth import AuthManager
     # Fill in whichever credentials were not supplied explicitly.
     if not self.keytab:
         self.keytab = tg_config.get('identity.krb_auth_qpid_keytab')
     if not self.principal:
         self.principal = tg_config.get('identity.krb_auth_qpid_principal')
     self.auth_mgr = AuthManager(primary_principal=self.principal,
                                 keytab=self.keytab)
Exemplo n.º 5
0
    def get_package_updates(self, package, release):
        """Build a feed dict of the latest updates for *package*.

        Updates older than feeds.num_days_to_show are only included until
        feeds.max_entries entries have been collected; when *release* is
        given, updates for other releases are skipped.
        """
        pkg = Package.byName(package)
        base = config.get('base_address')
        entries = []
        for update in pkg.updates():
            age = datetime.utcnow() - update.date_submitted
            # Stop once we have enough entries and the rest are stale.
            if age and age.days > config.get('feeds.num_days_to_show'):
                if len(entries) >= config.get('feeds.max_entries'):
                    break

            if release and not update.release.name == release:
                continue

            permalink = base + url(update.get_url())
            entries.append({
                'id': permalink,
                'summary': update.notes,
                'link': permalink,
                'published': update.date_submitted,
                'updated': update.date_submitted,
                'title': update.title,
            })
        return dict(
                title='Latest Updates for %s' % package,
                subtitle="",
                link=config.get('base_address') + url('/'),
                entries=entries,
        )
Exemplo n.º 6
0
    def get_latest_comments(self, user=None):
        """Build a feed dict of recent comments, optionally by *user*.

        The global feed excludes comments by the 'bodhi' system user.
        Comments on stale updates are only included until
        feeds.max_entries entries have been collected.
        """
        if user:
            comments = Comment.select(Comment.q.author == user,
                    orderBy=Comment.q.timestamp).reversed()
        else:
            comments = Comment.select(Comment.q.author != 'bodhi',
                    orderBy=Comment.q.timestamp).reversed()
        entries = []
        for comment in comments:
            age = datetime.utcnow() - comment.update.date_submitted
            # Stop once we have enough entries and the rest are stale.
            if age and age.days > config.get('feeds.num_days_to_show'):
                if len(entries) >= config.get('feeds.max_entries'):
                    break

            permalink = config.get('base_address') + \
                        url(comment.update.get_url())
            entries.append({
                'id': permalink,
                'summary': comment.text,
                'published': comment.timestamp,
                'link': permalink,
                'title': "[%s] [%s] [%d]" % (comment.update.title,
                                             comment.author,
                                             comment.karma),
            })
        return dict(
                title='Latest Comments',
                subtitle="",
                link=config.get('base_address') + url('/'),
                entries=entries,
        )
Exemplo n.º 7
0
def render(info, template=None, format=None, content_type=None, mapping=None, fragment=False):
    """Renders data in the desired format.

    @param info: the data itself
    @type info: dict
    @param format: "html", "xml" or "json"
    @type format: string
    @param fragment: passed through to tell the template if only a
                     fragment of a page is desired
    @type fragment: bool
    @param template: name of the template to use
    @type template: string
    """
    template = info.pop("tg_template", template)
    # "key not in d" instead of dict.has_key (deprecated, gone in Python 3).
    if "tg_flash" not in info:
        if config.get("tg.empty_flash", True):
            info["tg_flash"] = None
    engine, template, enginename = _choose_engine(template)
    if not content_type and getattr(engine, 'get_content_type', None):
        # BUG FIX: request.headers is a dict-like mapping, so the header
        # must be fetched with .get() — getattr() on it always returned
        # None, which defeated the engine's content-type negotiation.
        ua = cherrypy.request.headers.get("User-Agent", None)
        ua = UserAgent(ua)
        content_type = engine.get_content_type(ua)
    elif not content_type:
        content_type = "text/html"
    if content_type == 'text/html' and enginename in ('genshi', 'kid'):
        # These engines need an explicit charset in the Content-Type.
        charset = get_template_encoding_default(enginename)
        content_type = content_type + '; charset=' + charset
    cherrypy.response.headers["Content-Type"] = content_type
    if not format:
        format = config.get("%s.outputformat" % enginename, "html")
    # Drop any keyword arguments engine.render() does not accept.
    args, kw = adapt_call(engine.render, args=[],
                kw=dict(info=info, format=format, fragment=fragment,
                        template=template, mapping=mapping), start=1)
    return engine.render(**kw)
Exemplo n.º 8
0
    def update_comps(self):
        """
        Update our comps module, so we can pass it to mash to stuff into
        our repositories.

        Checks comps out of git or CVS (depending on comps_url) when it
        is missing, updates it otherwise, then runs `make` to merge
        translations.
        """
        log.debug("Updating comps...")
        comps_dir = config.get("comps_dir")
        comps_url = config.get("comps_url")

        def _run_and_log(cmd, cwd):
            # Run *cmd*, logging stdout at debug level and stderr as errors.
            p = subprocess.Popen(cmd, shell=True, cwd=cwd,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            out, err = p.communicate()
            log.debug(out)
            if err:
                log.error(err)

        if not exists(comps_dir):
            import os.path
            if comps_url.startswith("git://"):
                cmd = "git clone %s" % (comps_url,)
            else:
                cmd = "cvs -d %s co comps" % (comps_url,)
            log.debug("running command: %s" % cmd)
            # BUG FIX: the checkout must run in the *parent* directory —
            # comps_dir does not exist yet (the clone/checkout creates it),
            # so using it as cwd made subprocess fail with ENOENT.
            subprocess.call(cmd, shell=True, cwd=os.path.dirname(comps_dir))
        if comps_url.startswith("git://"):
            log.debug("Running git pull")
            _run_and_log("git pull", comps_dir)
        else:
            subprocess.call("cvs update", shell=True, cwd=comps_dir)

        log.info("Merging translations")
        _run_and_log("make", comps_dir)
Exemplo n.º 9
0
 def by_user_name(cls, user_name):
     """
     A class method that permits to search users
     based on their user_name attribute.

     Falls back to LDAP — creating a local row on success — when the user
     is not in the local DB and identity.ldap.enabled plus
     identity.soldapprovider.autocreate are configured.  Returns the User
     or None.
     """
     # Try to look up the user via local DB first.
     user = cls.query.filter_by(user_name=user_name).first()
     # If user doesn't exist in DB check ldap if enabled.
     ldapenabled = get('identity.ldap.enabled', False)
     autocreate = get('identity.soldapprovider.autocreate', False)
     # Presence of '/' indicates a Kerberos service principal.
     if not user and ldapenabled and autocreate and '/' not in user_name:
         # Renamed from "filter" so the builtin is not shadowed.
         ldap_filter = ldap.filter.filter_format('(uid=%s)', [user_name.encode('utf8')])
         ldapcon = ldap.initialize(get('identity.soldapprovider.uri'))
         objects = ldapcon.search_st(get('identity.soldapprovider.basedn', ''),
                 ldap.SCOPE_SUBTREE, ldap_filter,
                 timeout=get('identity.soldapprovider.timeout', 20))
         # We need exactly one match: no match and ambiguous match both fail.
         if len(objects) != 1:
             return None
         attrs = objects[0][1]
         # LDAP normalization rules means that we might have found a user
         # who doesn't actually match the username we were given.
         if attrs['uid'][0].decode('utf8') != user_name:
             return None
         user = User()
         user.user_name = attrs['uid'][0].decode('utf8')
         user.display_name = attrs['cn'][0].decode('utf8')
         user.email_address = attrs['mail'][0].decode('utf8')
         session.add(user)
         session.flush()
     return user
Exemplo n.º 10
0
  def login(self, SAMLRequest, RelayState='', *args, **kw):
    """Handle a SAML login request.

    With header auth enabled, the user name is taken straight from a
    proxy-asserted request header.  Otherwise a still-valid remembered
    session is reused; failing both, the login form is rendered.
    """
    if config.get('apps.use_header_auth'):
      # Header auth: the frontend asserts identity via a request header.
      header_name = config.get('apps.auth_header_key')
      user_name = cherrypy.request.headers.get(header_name, None)
      if user_name is None:
        raise errors.GheimdallException('Can not retrieve user name.')
      ret = utils.createLoginDict(SAMLRequest, RelayState, user_name)
      ret['tg_template'] = 'gheimdall.templates.gheimdall-login-success'
      return ret

    remembered = cherrypy.session.get('remember_me', False)
    authenticated = cherrypy.session.get('authenticated', False)
    if remembered and authenticated:
      auth_time = cherrypy.session.get('auth_time', 0)
      valid_time = cherrypy.session.get('valid_time', 0)
      now = time.time()
      # Only reuse the session while inside its validity window.
      if auth_time < now < valid_time:
        ret = utils.createLoginDict(SAMLRequest, RelayState,
                                    cherrypy.session.get('user_name'),
                                    set_time=False)
        ret['tg_template'] = 'gheimdall.templates.gheimdall-login-success'
        return ret

    tg_exception = kw.get('tg_exceptions', None)
    if tg_exception is not None:
      log.error(tg_exception)
    return dict(form=login_form_widget,
                values=dict(SAMLRequest=SAMLRequest, RelayState=RelayState))
Exemplo n.º 11
0
def send_email(template, recipients=None, cc=None, bcc=None, params=None):
    """Render the named mail template and enqueue it via turbomail.

    The template file's first line becomes the subject, the remainder the
    body; ${key} placeholders are substituted from *params*.

    :param template: template name (resolved under mail.templates + '.txt')
    :param recipients: address or list of addresses
    :param cc: list of CC addresses
    :param bcc: list of BCC addresses
    :param params: placeholder substitutions
    :raises ValueError: when the template file does not exist
    """
    import turbomail

    # Avoid mutable default arguments (a single list/dict would be shared
    # across calls); None sentinels keep the old call signature working.
    recipients = [] if recipients is None else recipients
    cc = [] if cc is None else cc
    bcc = [] if bcc is None else bcc
    params = {} if params is None else params

    def replace(field, params):
        # Substitute every ${key} placeholder with its (unicode) value.
        for key, value in params.iteritems():
            field = field.replace(u'${%s}' % key, unicode(value))
        return field

    template = os.path.join(config.get('mail.templates', ''), template + '.txt')
    if not os.path.exists(template):
        raise ValueError('Template %s does not exist' % template)

    # First line is the subject; the rest is the message body.  Renamed
    # from "file" (shadowed the builtin) and closed even on error.
    fh = open(template, 'r')
    try:
        subject = replace(unicode(fh.readline().strip(), 'UTF-8'), params)
        content = replace(unicode(fh.read(), 'UTF-8'), params)
    finally:
        fh.close()

    sender = list(config.get('mail.sender'))
    # Normalize every recipient field to a list.
    if isinstance(recipients, (str, unicode)):
        recipients = [recipients]
    elif not isinstance(recipients, list):
        recipients = list(recipients)
    if not isinstance(cc, list):
        cc = list(cc)
    if not isinstance(bcc, list):
        bcc = list(bcc)

    message = turbomail.Message(subject=subject, sender=sender,
        recipient=recipients, cc=cc, bcc=bcc)
    message.plain = content

    turbomail.enqueue(message)
Exemplo n.º 12
0
def _create_runtime_env():
    """Build the assets environment from application config settings."""
    return _create_env(
            source_dir=config.get('basepath.assets'),
            output_dir=config.get('basepath.assets_cache'),
            debug=config.get('assets.debug'),
            auto_build=config.get('assets.auto_build'))
Exemplo n.º 13
0
 def mail_changed_email_validation(self, new_email):
     '''
         Send a validation email for a changed email address.

         The old (verified) email stays in the User table while the new
         address is parked in RegistrationUserEmailChange.  Once the user
         validates the new address, it replaces the old one in User — so
         a "good" email address is always on file.

         @param new_email: The new email
     '''
     # Key derived from both addresses ties the request to this user.
     validation_key = self.validation_hash(
             new_email + identity.current.user.email_address)
     register_model.RegistrationUserEmailChange.new(
                                         user=identity.current.user,
                                         new_email_address=new_email,
                                         validation_key=validation_key)
     query = urllib.urlencode(dict(email=new_email,
                                   key=validation_key))
     validation_url = '%s/validate_email_change?%s' % (
             self.registration_base_url(), query)

     body = pkg_resources.resource_string(__name__,
                                         'templates/register_changed_email.txt')
     self.send_email(new_email,
                 config.get('registration.mail.admin_email'),
                 config.get('registration.mail.changed_email.subject',
                             'Please verify your new email address'),
                 body % {'validation_url': validation_url})
Exemplo n.º 14
0
    def __init__(self, url, method='post', auth=None, *args, **kargs):
        """Constructs a proxy object for a remote service.

    url - A URL of the service
    method - HTTP method: get or post. Default post
    auth - dict(realm=<realm>, user=<username>, passwd=<password>). Default None
        """
        super(Service, self).__init__(*args, **kargs)

        from urllib2 import HTTPError

        # Normalise the address: host-relative paths get the local
        # server's host and port, bare hosts get an http:// scheme.
        url = url.lower()
        if url.startswith('/'):
            url = 'http://%s:%s%s' % (config.get('server.hostname'),
                config.get('server.socket_port'), url)
        elif not url.startswith('http://'):
            url = 'http://%s' % url

        # Probe the service once up front so a bad URL fails fast.
        try:
            res = _unjsonify(callservice(url, method=method, basicauth=auth))
        except HTTPError:
            raise ValueError('Service not found at %s' % url)

        self.url = url
        self.method = method
        self.auth = auth

        # Expose each advertised remote method as a proxy attribute
        # (loop variable renamed so the `method` parameter isn't shadowed).
        for remote_name in res[u'methods']:
            setattr(self, remote_name, self.__hide(remote_name))
Exemplo n.º 15
0
def get_pkg_pushers(pkg, branch):
    """Return ((committers, watchers), (committergroups, watchergroups)).

    Queries pkgdb2 for the approved 'commit' and 'watchcommits' ACLs on
    *pkg* for *branch*; 'group::'-prefixed FAS names land in the group
    lists.  With the dummy ACL system, static guest/admin lists are
    returned instead.
    """
    if config.get('acl_system') == 'dummy':
        return ((['guest', 'admin'], ['guest', 'admin']),
                (['guest', 'admin'], ['guest', 'admin']))

    from pkgdb2client import PkgDB

    watchers, watchergroups = [], []
    committers, committergroups = [], []
    # Map each interesting ACL name to its (people, groups) buckets.
    buckets = {'watchcommits': (watchers, watchergroups),
               'commit': (committers, committergroups)}

    pkgdb = PkgDB(config.get('pkgdb_url'))
    acls = pkgdb.get_package(pkg, branches=branch)

    for package in acls['packages']:
        for acl in package.get('acls', []):
            if acl['status'] != 'Approved' or acl['acl'] not in buckets:
                continue
            people, groups = buckets[acl['acl']]
            name = acl['fas_name']
            if name.startswith('group::'):
                groups.append(name.split('::')[1])
            else:
                people.append(name)

    return (committers, watchers), (committergroups, watchergroups)
Exemplo n.º 16
0
    def validate_password(self, user, user_name, password):
        '''
        Check the supplied user_name and password against existing credentials.
        Note: user_name is not used here, but is required by external
        password validation schemes that might override this method.
        If you use SaFasIdentityProvider, but want to check the passwords
        against an external source (i.e. PAM, LDAP, Windows domain, etc),
        subclass SaFasIdentityProvider, and override this method.

        :user: User information.  Not used.
        :user_name: Given username.
        :password: Given, plaintext password.
        :returns: True if the password matches the username.  Otherwise False.
            Can return False for problems within the Account System as well.
        '''
        # crypt.crypt(stuff, '') == ''
        # Just kill any possibility of blanks.
        if not user.password:
            return False
        if not password:
            return False

        # Check if yubi-authentication is being used
        if len(password) == 44 and password.startswith('ccccc') and config.get('yubi_server_prefix', False):
            if config.get('yubi_enabled', False):
                return otp_validate(user_name, password)
            flash(_("Yubikey single-factor authentication has been disabled."))
            return False

        # TG identity providers take user_name in case an external provider
        # needs it so we can't get rid of it. (W0613)
        # pylint: disable-msg=W0613
        # SECURITY: constant-time comparison so an attacker cannot use
        # response timing to learn matching prefixes of the stored hash.
        import hmac
        return hmac.compare_digest(
            user.password, crypt.crypt(password.encode('utf-8'), user.password))
Exemplo n.º 17
0
def start_bonjour(package=None):
    """Register the TurboGears server with the Bonjour framework.

    Currently only Unix-like systems are supported where either the 'avahi'
    daemon (Linux etc.) is available or the 'dns-sd' program (Mac OS X).

    """
    global DNS_SD_PID
    # Already registered, or no CherryPy root to advertise.
    if DNS_SD_PID:
        return
    if not getattr(cherrypy, 'root', None):
        return
    if not package:
        # Derive the announcement name from the root controller's package.
        package = cherrypy.root.__module__
        package = package[:package.find(".")]

    host = config.get('server.socket_host', '')
    port = str(config.get('server.socket_port'))
    env = config.get('server.environment')
    name = package + ": " + env
    type = "_http._tcp"

    candidates = [
        ('/usr/bin/avahi-publish-service',
         ["-H", host, name, type, port]),
        ('/usr/bin/dns-sd',
         ['-R', name, type, "." + host, port, "path=/"]),
    ]
    for cmd, args in candidates:
        # TODO:. This check is flawed.  If one has both services installed and
        # avahi isn't the one running, then this won't work.  We should either
        # try registering with both or checking what service is running and use
        # that.  Program availability on the filesystem was never enough...
        if exists(cmd):
            DNS_SD_PID = os.spawnv(os.P_NOWAIT, cmd, [cmd] + args)
            atexit.register(stop_bonjour)
            break
Exemplo n.º 18
0
Arquivo: auth.py Projeto: Affix/fas
def undeprecated_cla_done(person):
    '''Checks if the user has completed the cla.

    As opposed to :func:`cla_done`, this method returns information about both
    whether the cla has been satisfied and whether the cla has been satisfied
    by a deprecated method.  This is useful if you have switched to a new CLA
    and want to have a transition period where either CLA is okay but you want
    to warn people that they need to sign the new version.

    :arg person: People object or username to check for FPCA status
    :rtype: tuple
    :returns: The first element of the tuple is True if the cla_done_group is
        approved otherwise False.  The second element of the tuple is True if
        a non-deprecated cla group is approved, otherwise False.
    '''
    cla_done_group = config.get('cla_done_group')
    cla_deprecated = frozenset(config.get('cla_deprecated_groups', []))

    name = person if isinstance(person, basestring) else person.username

    # Names of every approved cla-type group this person belongs to.
    cla_roles = set(role.group.name for role in
            PersonRoles.query.filter_by(role_status='approved').join('group'
            ).filter(GroupsTable.c.group_type=='cla').join('member'
                    ).filter_by(username=name).all())

    # If the cla is considered signed only because of deprecated groups,
    # report the second element as False.
    cla_roles.difference_update(cla_deprecated)
    return (cla_done_group in cla_roles, len(cla_roles) >= 2)
Exemplo n.º 19
0
def absolute_url(tgpath, tgparams=None, scheme=None,
                 labdomain=False, webpath=True, **kw):
    """
    Like turbogears.url, but makes the URL absolute (with scheme, hostname,
    and port from the tg.url_scheme and tg.url_domain configuration
    directives).
    If labdomain is True we serve an alternate tg.proxy_domain if defined
    in server.cfg.  This is to support multi-home systems which have
    different external vs internal names.
    """
    # Candidate hostnames in priority order; the first truthy one wins.
    order = []
    if labdomain:
        order.append(config.get('tg.lab_domain'))
    order.extend([config.get('tg.url_domain'),
                  config.get('servername'),
                  socket.getfqdn()])

    # TODO support relative paths
    if webpath:
        theurl = url(tgpath, tgparams, **kw)
    else:
        theurl = url_no_webpath(tgpath, tgparams, **kw)
    assert theurl.startswith('/')
    scheme = scheme or config.get('tg.url_scheme', 'http')
    # List comprehension instead of filter(): on Python 3, filter()
    # returns an iterator, so filter(None, order)[0] raises TypeError.
    host_port = [h for h in order if h][0]
    return '%s://%s%s' % (scheme, host_port, theurl)
Exemplo n.º 20
0
def _create_runtime_env():
    """Build the assets environment from application config settings."""
    # Default asset location: <this file>/../../assets at the source root.
    fallback_dir = os.path.join(os.path.dirname(__file__), '..', '..', 'assets')
    return _create_env(
            directory=config.get('basepath.assets', fallback_dir),
            debug=config.get('assets.debug'),
            auto_build=config.get('assets.auto_build'))
Exemplo n.º 21
0
def get_carbon():
    """Return the process-wide CarbonSender, creating it on first use."""
    global _carbon
    if _carbon is None:
        _carbon = CarbonSender(config.get('carbon.address'),
                config.get('carbon.prefix', 'beaker.'))
    return _carbon
Exemplo n.º 22
0
def start_extension():
    """Start visit tracking and install its filter on the CherryPy root."""
    global _manager

    # Bail out if the application hasn't enabled this extension
    if not config.get("visit.on", False):
        return

    # Bail out if this extension is already running
    if _manager:
        log.warning("Visit manager already running.")
        return

    # How long may the visit be idle before a new visit ID is assigned?
    # The default is 20 minutes.
    timeout = timedelta(minutes=config.get("visit.timeout", 20))
    log.info("Visit Tracking starting (timeout = %i sec).", timeout.seconds)
    # Create the thread that manages updating the visits
    _manager = _create_visit_manager(timeout)

    # Install the filter into the root filter chain exactly once.
    visit_filter = VisitFilter()
    if not hasattr(cherrypy.root, "_cp_filters"):
        cherrypy.root._cp_filters = list()
    if visit_filter not in cherrypy.root._cp_filters:
        cherrypy.root._cp_filters.append(visit_filter)
Exemplo n.º 23
0
def _execute_func(func, template, format, content_type, mapping, fragment, args, kw):
    """Call controller method and process it's output.

    Strips/adapts request parameters before the call, validates the
    output type, and hands the result to _process_output for rendering.
    """
    if config.get("tg.strict_parameters", False):
        # Strict mode: only remove TG-internal parameters; anything else
        # unexpected will surface as an error in the controller call.
        tg_util.remove_keys(kw, ["tg_random", "tg_format"]
            + config.get("tg.ignore_parameters", []))
    else:
        # get special parameters used by upstream decorators like paginate
        try:
            tg_kw = dict([(k, v) for k, v in kw.items() if k in func._tg_args])
        except AttributeError:
            # Controller was not decorated; it has no _tg_args attribute.
            tg_kw = {}
        # remove excessive parameters
        args, kw = tg_util.adapt_call(func, args, kw)
        # add special parameters again
        kw.update(tg_kw)
    if config.get('server.environment', 'development') == 'development':
        # Only output this in development mode: If it's a field storage object,
        # this means big memory usage, and we don't want that in production
        log.debug("Calling %s with *(%s), **(%s)", func, args, kw)
    output = errorhandling.try_call(func, *args, **kw)
    # args[0] is the controller instance (bound-method call convention).
    assert isinstance(output, (basestring, dict, list, types.GeneratorType)), \
           "Method %s.%s() returned unexpected output. Output should " \
           "be of type basestring, dict, list or generator." % (
            args[0].__class__.__name__, func.__name__)
    if isinstance(output, dict):
        # A dict result may override the template/format per-request.
        template = output.pop("tg_template", template)
        format = output.pop("tg_format", format)
    if template and template.startswith("."):
        # Relative template name: resolve against the controller's package.
        template = func.__module__[:func.__module__.rfind('.')]+template
    return _process_output(output, template, format, content_type, mapping, fragment)
Exemplo n.º 24
0
 def __init__(self, *args, **kw):
   """Load error-catcher notification settings from application config."""
   super(ErrorCatcher, self).__init__(*args, **kw)
   # Addressing for outgoing error reports.
   self.sender_email = config.get('error_catcher.sender_email')
   self.admin_email = config.get('error_catcher.admin_email')
   # SMTP transport; only the host has a default, user/passwd may be None.
   self.smtp_host = config.get('error_catcher.smtp_host', 'localhost')
   self.smtp_user = config.get('error_catcher.smtp_user')
   self.smtp_passwd = config.get('error_catcher.smtp_passwd')
Exemplo n.º 25
0
def start_bonjour(package=None):
    """Advertise the running CherryPy server over Bonjour/zeroconf.

    Spawns avahi-publish-service or dns-sd (whichever binary exists) at
    most once, remembering the child PID and arranging for it to be
    stopped at interpreter exit.
    """
    global DNS_SD_PID
    if DNS_SD_PID:
        return
    if (not hasattr(cherrypy, "root")) or (not cherrypy.root):
        return
    if not package:
        # Derive the announcement name from the root controller's package.
        root_module = cherrypy.root.__module__
        package = root_module[:root_module.find(".")]

    host = config.get('server.socket_host', '')
    port = str(config.get('server.socket_port'))
    env = config.get('server.environment')
    name = package + ": " + env
    type = "_http._tcp"

    # (binary, argv) candidates, avahi first.
    for cmd, args in (
            ('/usr/bin/avahi-publish-service',
             ["-H", host, name, type, port]),
            ('/usr/bin/dns-sd',
             ['-R', name, type, "." + host, port, "path=/"])):
        # TODO: this availability check is flawed — if both tools are
        # installed but avahi isn't the running daemon, registration fails.
        # Checking for the binary on disk was never really enough.
        if os.path.exists(cmd):
            DNS_SD_PID = os.spawnv(os.P_NOWAIT, cmd, [cmd] + args)
            atexit.register(stop_bonjour)
            break
Exemplo n.º 26
0
    def insert_pkgtags(self):
        """ Download and inject the pkgtags sqlite from the pkgdb

        For every arch subdirectory of self.repo (except SRPMS), derives
        the pkgdb tags filename from the repo name, downloads it to /tmp,
        and injects it into that arch's repodata.  Failures are logged,
        never raised.
        """
        if config.get('pkgtags_url') not in [None, ""]:
            try:
                for arch in os.listdir(self.repo):
                    if arch == 'SRPMS':
                        continue
                    filename = ''
                    reponame = os.path.basename(self.repo)
                    # Fedora repos: e.g. "f20-updates-testing" ->
                    # "F-20-<arch>-tu"; stable updates get the "u" suffix.
                    if reponame.startswith('f'):
                        release = reponame[1:].split('-')[0]
                        filename = 'F-%s-%s-' % (release, arch)
                        if 'testing' in reponame:
                            filename += 'tu'
                        else:
                            filename += 'u'
                    # EPEL repos: e.g. "el6-testing" -> "E-6-<arch>-t".
                    elif reponame.startswith('el'):
                        release = reponame[2:].split('-')[0]
                        filename = 'E-%s-%s' % (release, arch)
                        if 'testing' in reponame:
                            filename += '-t'
                    else:
                        log.error('Unknown repo %s' % reponame)
                        return

                    tags_url = config.get('pkgtags_url') + filename
                    log.info('Downloading %s' % tags_url)
                    f = urllib.urlretrieve(tags_url, filename='/tmp/pkgtags.sqlite')

                    # Register the downloaded sqlite in this arch's repodata.
                    repomd = RepoMetadata(join(self.repo, arch, 'repodata'))
                    repomd.add('/tmp/pkgtags.sqlite')

            except Exception, e:
                # Best effort: pkgtags injection failing must not abort a push.
                log.exception(e)
                log.error("There was a problem injecting pkgtags")
Exemplo n.º 27
0
 def sync_user_to_ipa(self, user, user_name, password):
     """Create the user in IPA (at most once) and record the outcome.

     Runs only while user.ipa_sync_status is unset; afterwards it holds
     either 'success' or 'error:<message>'.
     """
     if user.ipa_sync_status is not None:
         return
     # Obtain a Kerberos TGT for the configured sync principal.
     # NOTE(review): shell command built from config values — fine as
     # long as the keytab/principal settings are trusted.
     os.system('kinit -k -t %s %s' % (config.get('ipa_sync_keytab'),
                                      config.get('ipa_sync_principal')))
     payload = {'method': 'user_add',
                'params': [
                    [user_name],
                    {'givenname': 'FAS',
                     'sn': 'Synced',
                     'cn': user_name,
                     'userpassword': password
                    }],
                'id': 0}
     r = requests.post('https://%s/ipa/json'
                       % config.get('ipa_sync_server'),
         json=payload,
         verify=config.get('ipa_sync_certfile'),
         auth=HTTPKerberosAuth(),
         headers={'referer':
                  'https://%s/ipa'
                  % config.get('ipa_sync_server')}).json()
     if r['error'] is None:
         log.info('User %s synced to IPA' % user_name)
         user.ipa_sync_status = 'success'
     else:
         user.ipa_sync_status = 'error:%s' % r['error']['message']
         log.error('Error syncing %s: %s' % (user_name,
                                             r['error']['message']))
Exemplo n.º 28
0
    def _lock(self):
        """ Write out what updates we are pushing and any successfully mashed
        repositories to our MASHING lock

        On resume, the existing lock file is unpickled to restore the
        update set (and, in the newer dict format, the already-composed
        repos).  Raises MashTaskException on any lock/resume mismatch.
        """
        mashed_dir = config.get("mashed_dir")
        mash_stage = config.get("mashed_stage_dir")
        mash_lock = join(mashed_dir, "MASHING-%s" % self.mash_lock_id)
        if not os.path.isdir(mashed_dir):
            log.info("Creating mashed_dir %s" % mashed_dir)
            os.makedirs(mashed_dir)
        if not os.path.isdir(mash_stage):
            log.info("Creating mashed_stage_dir %s" % mash_stage)
            os.makedirs(mash_stage)
        if os.path.exists(mash_lock):
            if self.resume:
                log.debug("Resuming previous push!")
                lock = file(mash_lock, "r")
                masher_state = pickle.load(lock)
                lock.close()

                # For backwards compatability, we need to make sure we handle
                # masher state that is just a list of updates, as well as a
                # dictionary of updates and successfully mashed repos
                if isinstance(masher_state, list):
                    for up in masher_state:
                        try:
                            up = PackageUpdate.byTitle(up)
                            self.updates.add(up)
                        except SQLObjectNotFound:
                            # Update was deleted since the lock was written;
                            # log and keep going.
                            log.warning("Cannot find %s" % up)

                # { 'updates' : [PackageUpdate.title,],
                #   'repos'   : ['/path_to_completed_repo',] }
                elif isinstance(masher_state, dict):
                    for up in masher_state["updates"]:
                        try:
                            up = PackageUpdate.byTitle(up)
                            self.updates.add(up)
                        except SQLObjectNotFound:
                            log.warning("Cannot find %s" % up)
                    for repo in masher_state["composed_repos"]:
                        self.composed_repos.append(repo)
                else:
                    log.error("Unknown masher lock format: %s" % masher_state)
                    raise MashTaskException
            else:
                # A lock exists but we were not asked to resume: refuse to
                # clobber another (possibly aborted) push.
                log.error("Previous mash not complete!  Either resume the last " "push, or remove %s" % mash_lock)
                raise MashTaskException
        else:
            if self.resume:
                msg = "Trying to resume a push, yet %s doesn't exist!" % mash_lock
                log.error(msg)
                raise MashTaskException(msg)

            # Fresh push: record our update titles and (empty) repo list.
            log.debug("Creating lock for updates push: %s" % mash_lock)
            lock = file(mash_lock, "w")
            pickle.dump(
                {"updates": [update.title for update in self.updates], "composed_repos": self.composed_repos}, lock
            )
            lock.close()
Exemplo n.º 29
0
 def __init__(self, *args, **kw):
     """Borg-style init: every instance shares the same qpid state."""
     if not self._shared_state:
         # First instance populates the shared state from TG config.
         self._shared_state.update(
             topic_exchange=tg_config.get('beaker.qpid_topic_exchange'),
             _broker=tg_config.get('beaker.qpid_broker'),
             stopped=True)
     self.__dict__.update(self._shared_state)
     super(ServerBeakerBus, self).__init__(*args, **kw)
Exemplo n.º 30
0
 def __init__(self, timeout):
     """Set up the shared FAS proxy client, then start the visit manager."""
     self.debug = config.get('jsonfas.debug', False)
     if not self.fas:
         # Lazily create the class-shared proxy client on first use.
         self.fas = FasProxyClient(
                 self.fas_url, debug=self.debug,
                 session_name=config.get('visit.cookie.name', 'tg-visit'),
                 useragent='JsonFasVisitManager/%s' % __version__)
     BaseVisitManager.__init__(self, timeout)
     log.debug('JsonFasVisitManager.__init__: exit')
Exemplo n.º 31
0
def _acl_approved(entry, acl_name, status_map):
    """Return True if *entry* (a person/group acl record from the pkgdb)
    holds an 'Approved' status for the acl named *acl_name*."""
    acl = entry['aclOrder'][acl_name]
    return bool(acl) and status_map[str(acl['statuscode'])] == 'Approved'


def get_pkg_pushers(pkgName,
                    collectionName='Fedora',
                    collectionVersion='devel'):
    """ Pull users who can commit and are watching a package

    Return two two-tuples of lists:
    * The first tuple is for usernames.  The second tuple is for groups.
    * The first list of the tuple is for committers.  The second is for
      watchers.

    An example::
      >>> people, groups = get_pkg_pushers('foo', 'Fedora', 'devel')
      >>> print people
      (['toshio', 'lmacken'], ['wtogami', 'toshio', 'lmacken'])
      >>> print groups
      (['cvsextras'], [])

    Note: The interface to the pkgdb could undergo the following changes:
      FAS2 related:
      * pkg['packageListings'][0]['owneruser'] =>
        pkg['packageListings'][0]['owner']
      * pkg['packageListings'][0]['people'][0..n]['user'] =>
        pkg['packageListings'][0]['people'][0..n]['userid']

    * We may want to create a 'push' acl specifically for bodhi instead of
      reusing 'commit'.
    * ['status']['translations'] may one day contain more than the 'C'
      translation.  The pkgdb will have to figure out how to deal with that
      if so.

    This may raise: fedora.client.AppError if there's an error talking to the
    PackageDB (for instance, no such package)
    """
    # Short-circuit for test/dev deployments with no real ACL system.
    if config.get('acl_system') == 'dummy':
        return (['guest'], ['guest']), (['guest'], ['guest'])

    pkgdb = PackageDB(config.get('pkgdb_url'))
    # Note if AppError is raised (for no pkgName or other server errors) we
    # do not catch the exception here.
    pkg = pkgdb.get_owners(pkgName, collectionName, collectionVersion)

    # This will always be the 0th element as we'll retrieve at most one
    # value for any given Package-Collection-Version
    listing = pkg['packageListings'][0]
    status_map = pkg['statusMap']

    # Owner is allowed to commit and gets notified of pushes
    pNotify = [listing['owner']]
    pAllowed = [pNotify[0]]

    # Find other people in the acl
    for person in listing['people']:
        if _acl_approved(person, 'watchcommits', status_map):
            pNotify.append(person['username'])
        if _acl_approved(person, 'commit', status_map):
            pAllowed.append(person['username'])

    # Find groups that can push
    gNotify = []
    gAllowed = []
    for group in listing['groups']:
        if _acl_approved(group, 'watchcommits', status_map):
            gNotify.append(group['groupname'])
        if _acl_approved(group, 'commit', status_map):
            gAllowed.append(group['groupname'])

    return ((pAllowed, pNotify), (gAllowed, gNotify))
Exemplo n.º 32
0
def _image_name():
    """Build a descriptive (not necessarily unique) OpenStack image name.

    Beaker doesn't actually care about the image name at all, and
    OpenStack doesn't require uniqueness — the name only helps an admin
    spot the image in Horizon.
    """
    domain = config.get('tg.url_domain', socket.getfqdn())
    stamp = datetime.date.today().strftime('%Y%m%d')
    return 'ipxe-beaker-%s-%s' % (domain, stamp)
Exemplo n.º 33
0
def global_install_options():
    """Return the site-wide default InstallOptions taken from app config."""
    ks_meta = config.get('beaker.ks_meta', '')
    kernel_options = config.get('beaker.kernel_options', 'ksdevice=bootif')
    kernel_options_post = config.get('beaker.kernel_options_post', '')
    return InstallOptions.from_strings(ks_meta, kernel_options,
                                       kernel_options_post)
Exemplo n.º 34
0
from turbogears.identity import set_login_attempted
import cherrypy
from kitchen.pycompat24 import sets
from kitchen.text.converters import to_bytes

sets.add_builtin_set()

from fedora.client import AccountSystem, AuthError, BaseClient, \
        FedoraServiceError
from fedora import b_, __version__

import logging

log = logging.getLogger('turbogears.identity.jsonfasprovider')

# When certificate auth is enabled, bodhi needs its own FAS credentials to
# perform authorization lookups on the user's behalf; fail fast at import
# time if they are missing.
if config.get('identity.ssl', False):
    fas_user = config.get('fas.username', None)
    fas_password = config.get('fas.password', None)
    if not (fas_user and fas_password):
        # BUG FIX: error message said "fas.usernamme" (typo) — the config
        # key the admin actually has to set is fas.username.
        raise identity.IdentityConfigurationException(
            b_('Cannot enable ssl certificate auth via identity.ssl'
               ' without setting fas.username and fas.password for'
               ' authorization'))
    __url = config.get('fas.url', None)
    if __url:
        # Shared AccountSystem client used for the authorization lookups.
        fas = AccountSystem(__url,
                            username=config.get('fas.username'),
                            password=config.get('fas.password'),
                            retries=3)

Exemplo n.º 35
0
import captcha
from turbogears import config
import random
import os.path
from pkg_resources import resource_filename

# Config key prefix for the vanasco_dowty captcha plugin settings.
plugin_name = 'fas.tgcaptcha2.plugin.vanasco_dowty.'

# Image geometry and colours, all overridable via the TurboGears config.
width = int(config.get(plugin_name + 'width', 300))
height = int(config.get(plugin_name + 'height', 100))
bg_color = config.get(plugin_name + 'bg_color', '#DDDDDD')
# One foreground colour is chosen at random per captcha (see generate_jpeg).
fg_color = config.get(plugin_name + 'fg_color',
            ["#330000","#660000","#003300","#006600","#000033","#000066"])
# Font size is picked at random within [font_size_min, font_size_max].
font_size_min = int(config.get(plugin_name + 'font_size_min', 30))
font_size_max = int(config.get(plugin_name + 'font_size_max', 45))
font_paths = config.get(plugin_name + 'font_paths',
                [os.path.abspath(resource_filename('fas.tgcaptcha2',
                                'static/fonts/tuffy/Tuffy.ttf'))])

# Push the settings into the captcha library's module-level knobs.
captcha.font__paths = font_paths
captcha.captcha__text__render_mode = config.get(plugin_name +
        'text_render_mode', 'by_letter')
captcha.captcha__font_range = (font_size_min, font_size_max)


def generate_jpeg(text, file_):
    """Render *text* as a captcha image and write it to *file_* as a JPEG.

    Font size and foreground colour are randomised within the ranges
    configured at module level.
    """
    size = random.randint(font_size_min, font_size_max)
    foreground = random.choice(fg_color)
    rendered = captcha._Captcha__Img(
        text, width, height, size, foreground, bg_color).render()
    rendered.save(file_, format='JPEG')
Exemplo n.º 36
0
def strip_webpath(url):
    """Drop the configured server.webpath prefix from *url*, if present."""
    webpath = (config.get('server.webpath') or '').rstrip('/')
    if not webpath:
        return url
    return url[len(webpath):] if url.startswith(webpath) else url
Exemplo n.º 37
0
class Root(controllers.RootController):
    """Top-level TurboGears controller for the IDE web service."""

    # Sub-controllers mounted beneath the site root.
    user = srusers.User()
    fw = fw.FwServe()
    autosave = srautosave.Autosave()
    #feed = Feed()
    switchboard = switchboard.Switchboard()
    admin = admin.Admin()
    version = get_version()

    if config.get(
            "simulator.enabled"
    ):  # if simulation is enabled import the simulator controller
        import sim
        sim = sim.Sim()

    @expose()
    def index(self):
        """Serve the static index page directly.

        Fallback for when the apache rewrite rule isn't paying attention.
        """
        here = os.path.dirname(__file__)
        return serveFile(os.path.join(here, "static/index.html"))

    @expose("json")
    def info(self):
        """Report the IDE version and the current user as JSON."""
        details = dict(Version=self.version, User=str(srusers.get_curuser()))
        return dict(info=details)

    def get_project_path(self, path):
        """Split a repository path into its project and file components.

        inputs:
            path - str of the form "root/project/rest/of/path"
        returns:
            (project, file_path) tuple; the leading root component is
            discarded.
        """
        _root, project, file_path = path.split(os.path.sep, 2)
        return project, file_path

    def get_rev_id(self, team, project, revno=-1):
        """
        Get revision ID string from revision number.
        inputs:
            revno - revision number convertable with int().
                    if revno is -1 or not supplied, get latest revision id.
        returns:
            revision id string
        """

        b = open_branch(int(team), project)

        try:
            if revno == -1 or revno == "-1" or revno == "HEAD":  #TODO BZRPORT: stop anything calling "HEAD" string
                rev_id = b.last_revision()
            else:
                rev_id = b.get_rev_id(int(revno))

        except (TypeError):  # TODO BZRPORT: add bzr exception
            print "Getting ID for revno: %s failed, returning latest revision id." % revno
            rev_id = b.last_revision()

        return rev_id

    def get_file_revision(self, tree, fileid):
        """
        Return the id of the revision in which the file was last modified.

        inputs: tree - a bzrlib tree of some kind
                fileid - file id of file in tree
        outputs: revid - revision id
        """
        # _file_revision is a static method for some reason, so call it
        # through the class with the tree passed explicitly.
        return bzrlib.tree.Tree._file_revision(tree, fileid)

    @expose()
    @srusers.require(srusers.in_team())
    def checkout(self, team, project, simulator=False):
        """
        This function grabs a set of files and makes a zip available. Should be
        linked to directly.
        inputs:
            team & project - code to retrieve
            simulator - true if code is being delivered to a simulator.
        returns:
            A zip file as a downloadable file with appropriate HTTP headers
            sent.
        """
        b = open_branch(int(team), project)
        rev_tree = b.basis_tree()  # get latest revision tree for branch

        #Avoid using /tmp by writing into a memory based file
        zipData = StringIO.StringIO()
        zip = zipfile.ZipFile(zipData, "w", zipfile.ZIP_DEFLATED)
        #Need to lock_read before reading any file contents
        rev_tree.lock_read()
        try:
            #Get a list of files in the tree
            files = [
                f for f in rev_tree.iter_entries_by_dir()
                if f[1].kind == "file"
            ]
            for filename, file in files:
                #Set external_attr on a ZipInfo to make sure the files are
                #created with the right permissions
                info = zipfile.ZipInfo(filename.encode("ascii"))
                info.external_attr = 0666 << 16L
                #Read the file contents and add to zip
                zip.writestr(info, rev_tree.get_file(file.file_id).read())

            #Need a __init__ in the root of all code exports
            if not "__init__.py" in [f[0].encode("ascii") for f in files]:
                info = zipfile.ZipInfo("__init__.py")
                info.external_attr = 0666 << 16L
                zip.writestr(info, "")

        # NOTE(review): bare except turns any failure (including non-ascii
        # filenames hitting .encode("ascii")) into this generic message.
        except:
            return "Error exporting project"
        finally:
            #Always unlock or get GC related errors
            rev_tree.unlock()
        zip.close()
        #Seek back to start of file so read() works later on
        zipData.seek(0)

        if not simulator:
            """
            The zipfile delivered to the robot is the contents of the
            repository as a zip inside another zip that contains firmware.
            """
            #Get a copy of the firmware zip, drop the code zip (in zipData)
            #in it and then put the resulting zip back into zipData
            # NOTE(review): the firmware zip's file handle is never closed
            # explicitly — relies on CPython GC.
            sysZipData = open(config.get("robot.packagezip")).read()
            sysZipBuffer = StringIO.StringIO(sysZipData)

            sysZip = zipfile.ZipFile(sysZipBuffer, "a")
            info = zipfile.ZipInfo(ZIPNAME)
            info.external_attr = 0666 << 16L
            sysZip.writestr(info, zipData.read())
            sysZip.close()

            sysZipBuffer.seek(0)
            zipData = StringIO.StringIO(sysZipBuffer.read())

        #Set up headers for correctly serving a zipfile
        cherrypy.response.headers['Content-Type'] = \
                "application/x-download"
        cherrypy.response.headers['Content-Disposition'] = \
                'attachment; filename="' + ZIPNAME + '"'

        #Return the data
        return zipData.read()

    @expose("json")
    @srusers.require(srusers.in_team())
    def filesrc(self, team, file=None, revision=None):
        """
        Return the contents of *file* at *revision* as JSON.

        inputs:
            team - team number
            file - full path ("root/project/..."); None/"" yields an error
                   payload instead of file contents
            revision - revision number, or None/"HEAD" for the branch tip
        returns (json): curtime, code, autosaved_code, file_rev, revision,
            path and name entries.
        """

        file_path = file  #save for later
        project, file = self.get_project_path(file_path)
        curtime = time.time()
        b = open_branch(int(team), project)

        #TODO: Need to security check here! No ../../ or /etc/passwd nautiness trac#208

        autosaved_code = self.autosave.getfilesrc(team, file_path, 1)

        if revision == None or revision == "HEAD":
            revno, revid = b.last_revision_info()
        else:
            revno = int(revision)
            revid = b.get_rev_id(revno)

        if file != None and file != "":  #TODO BZRPORT: URL checking
            #Load file from bzr
            # TODO BZRPORT: mime checking. Bzr doesn't have a mime property so the file will need to be checked with python
            try:
                branch_tree = b.repository.revision_tree(revid)
                file_id = branch_tree.path2id(file)
                # NOTE(review): if revision_tree() raises, the finally block
                # below calls b.unlock() without a matching lock_read().
                b.lock_read()
                code = branch_tree.get_file_text(file_id)
                file_revid = self.get_file_revision(
                    branch_tree,
                    file_id)  # get revision the file was last modified
                file_revno = b.revision_id_to_revno(file_revid)
            except:
                code = "Error loading file '%s' at revision %s." % (file,
                                                                    revision)
                file_revno = 0
            # always unlock:
            finally:
                b.unlock()

        else:
            code = "Error loading file: No filename was supplied by the IDE.  Contact an SR admin!"
            revision = 0
            # BUG FIX: file_revno was unbound on this path, so building the
            # response below raised NameError instead of returning the error.
            file_revno = 0

        return dict(curtime=curtime,
                    code=code,
                    autosaved_code=autosaved_code,
                    file_rev=str(file_revno),
                    revision=revno,
                    path=file_path,
                    name=os.path.basename(file))

    @expose("json")
    @srusers.require(srusers.in_team())
    def gethistory(self, team, file, user=None, offset=0):
        """
        Retrieve the bzr commit history touching *file*.

        inputs:
            file - full path ("root/project/..."); paths starting
                   'New File ' short-circuit with an empty history
            user - if given, only list revisions committed by that user
            offset - page number (10 results per page); 0 is the most
                     recent page, valid values are 0 <= offset < overflow
        returns (json): path, overflow (total page count), offset, authors
            (all committers on the branch) and history (newest first).
        """
        if file[:9] == 'New File ':
            return dict(path=file, history=[])

        file_path = file  #save for later
        project, file = self.get_project_path(file_path)
        b = open_branch(int(team), project)
        revisions = [
            b.repository.get_revision(r) for r in b.revision_history()
        ]

        #Get a list of authors
        authors = list(set([r.committer for r in revisions]))

        #If a user is passed, only show revisions committed by that user
        if user != None:
            revisions = [r for r in revisions if r.committer == user]

        #Only show revisions where the delta touches file
        fileid = b.basis_tree().path2id(file)
        if fileid == None:
            #File not found
            return dict()

        def revisionTouchesFile(revision):
            """
            Return true if the revision changed the file referred to in
            fileid.
            """
            delta = b.get_revision_delta(
                b.revision_id_to_revno(revision.revision_id))
            return delta.touches_file_id(fileid)

        revisions = filter(revisionTouchesFile, revisions)

        #Calculate offsets for paging
        try:
            offset = int(offset)
        except ValueError:
            #Someone passed a string
            return dict()
        start = offset * 10
        end = start + 10
        maxval = len(revisions)
        # Page count: ceil(maxval / 10) via Python 2 integer division.
        if maxval % 10 > 0:
            overflow = maxval / 10 + 1
        else:
            overflow = maxval / 10

        # Slice out this page, then reverse so newest commits come first.
        revisions = revisions[start:end]
        revisions.reverse()

        return dict(path=file_path,
                    overflow=overflow,
                    offset=offset,
                    authors=authors,
                    history=[{
                        "author":
                        r.committer,
                        "date":
                        time.strftime("%H:%M:%S %d/%m/%Y",
                                      time.localtime(r.timestamp)),
                        "message":
                        r.message,
                        "rev":
                        b.revision_id_to_revno(r.revision_id)
                    } for r in revisions])

    @expose("json")
    @srusers.require(srusers.in_team())
    def polldata(self, team, files="", logrev=None):
        """Returns poll data:
            inputs: files - comma seperated list of files the client needs info
            on
            returns (json): A dictionary with an entry for each file (path is
            the key). Each value is a dictionary with information. The only key
            is revision, with a value of an integer of the current revision
            number in the repo"""
        # NOTE(review): this `pass` is a no-op, not a stub return — the
        # pre-bzr-port pysvn code below still executes at runtime.
        pass  #TODO BZRPORT: Implement!

        #Default data
        r = {}
        l = {}
        # NOTE(review): Client here is the old pysvn-based client, not bzr.
        client = Client(int(team))

        if files != "":
            files = files.split(",")
            rev = 0  # NOTE(review): assigned but never used below
            for file in files:
                r[file] = {}
                try:
                    info = client.info2(client.REPO + file)[0][1]
                    r[file]["rev"] = info["last_changed_rev"].number
                except pysvn.ClientError:
                    pass

        if logrev != None:
            try:
                newlogs = client.log(client.REPO,
                                     discover_changed_paths=True,
                                     revision_end=pysvn.Revision(
                                         pysvn.opt_revision_kind.number,
                                         int(logrev) + 1))

                l =[{"author":x["author"], \
                        "date":time.strftime("%H:%M:%S %d/%m/%Y", \
                        time.localtime(x["date"])), \
                        "message":x["message"], "rev":x["revision"].number,
                        "changed_paths":[(c.action, c.path) for c in \
                            x.changed_paths]} for x in newlogs]
            except pysvn.ClientError:
                #No commits recently, no data to return
                pass

        return dict(files=r, log=l)

    @expose("json")
    @srusers.require(srusers.in_team())
    def pollchanges(self, team, project, rev, date=0):
        """
        Report whether client-side facets need refreshing.

        Currently only covers the file list: it needs updating exactly
        when *rev* no longer matches the branch head, saving the client
        from re-fetching the whole file list just to compare.
        """
        # Opened for validation/side effects; the branch object is unused.
        open_branch(int(team), project)
        head_id = self.get_rev_id(team, project, 'HEAD')
        target_id = self.get_rev_id(team, project, rev)

        return dict(filelist=head_id != target_id)

    @expose("json")
    @srusers.require(srusers.in_team())
    def delete(self, team, project, files, kind='SVN'):
        """
        Delete files from the repository, and prune empty directories.
        inputs: files - comma seperated list of paths
                kind - one of 'SVN' or 'AUTOSAVES'
        returns (json): Message - a message to show the user
        """
        # NOTE(review): when files == "" this falls through and returns
        # None (empty JSON body) rather than an error message.

        if files != "":
            files = files.split(",")
            wt = WorkingTree(int(team), project)

            message = "Files deleted successfully: " + project + " >\n" + "\n".join(
                files)

            # Autosaves are removed for every kind, even plain 'SVN' deletes.
            for f in files:
                self.autosave.delete(team, '/' + project + '/' + f)

            if kind == 'AUTOSAVES':
                return dict(Message="AutoSaves deleted successfully: \n" +
                            "\n".join(files))

            wt.remove(files)

            # find out current user
            ide_user = str(srusers.get_curuser())

            revproperties = {"authors": ide_user}
            wt.commit('Remove files: ' + ', '.join(files),
                      revprops=revproperties)
            wt.destroy()

            return dict(Message=message)

    @expose("json")
    @srusers.require(srusers.in_team())
    def savefile(self, team, filepath, rev, message, code):
        """
        Create/update contents of a file and attempt to commit.
        If file has been updated since submitted text was checked out,
            call update_merge to attempt to merge the changes.
        If file has not been updated since client checked it out,
            call commit_file_simple to commit the new version.

        inputs: path - path of file relative to project root.
                rev - revision of file when it was checked out by client.
        """

        project, filepath = self.get_project_path(filepath)

        # Stage the edit against the revision the client checked out.
        projWrite = ProjectWrite(team, project, revno=rev)

        projWrite.update_file_contents(filepath, code)

        reloadfiles = "True"  # TODO: determine whether or not file list needs refreshing

        try:
            newrevno, newrevid = projWrite.commit(message)
            success = "True"
        except bzrlib.errors.OutOfDateTree:
            # a commit has occurred since code was opened.
            # A merge will need to take place
            code, newrevno, newrevid = projWrite.merge(filepath)
            if len(projWrite.conflicts) == 0:
                # TODO: when committing a merged transform preview affecting more than one file,
                #       the text changes do not commit despite the merge succeeding and returning correct text.
                #       solution for now is to open a new transform preview and pump the new code into it.
                pw2 = ProjectWrite(team, project)
                pw2.update_file_contents(filepath, code)
                newrevno, newrevid = pw2.commit(message)
                success = "AutoMerge"
                pw2.destroy()
            else:
                # Unresolved conflicts: hand the merged text back for the
                # user to fix up manually.
                # NOTE(review): new_revision is not str()-wrapped here,
                # unlike the success return below — confirm clients cope.
                return dict(new_revision=newrevno,
                            code=code,
                            success="Merge",
                            file=filepath,
                            reloadfiles=reloadfiles)
        finally:
            projWrite.destroy()

        return dict(new_revision=str(newrevno),
                    code=code,
                    success=success,
                    file=filepath,
                    reloadfiles=reloadfiles)

    @expose("json")
    @srusers.require(srusers.in_team())
    def filelist(self, team, project, rootpath="/", rev=-1, date=0):
        """
        Returns a directory tree of the current repository.
        inputs: project - the bzr branch
                rootpath - to return file from a particular directory within the branch (recursive)
        returns: A tree as a list of files/directory objects:
            { tree : [{path : filepath
                       kind : FOLDER or FILE
                       children : [list as above]
                       name : name of file}, ...]}
        """

        b = open_branch(int(team), project)

        target_rev_id = self.get_rev_id(team, project, rev)
        self.user.set_setting('project.last', project)

        try:
            rev_tree = b.repository.revision_tree(target_rev_id)
        except:
            return {"error": "Error getting revision tree"}

        # Get id of root folder from which to list files. if it is not found it will return None
        rootid = rev_tree.path2id(rootpath)

        try:
            rev_tree.lock_read()
            # Get generator object containing file information from base rootid. If rootid=None, will return from root.
            files = rev_tree.inventory.iter_entries(rootid)
        except:  # TODO BZRPORT: Proper error handling
            return {"error": "Error getting file list"}
        # Always unlock tree:
        # NOTE(review): the tree is unlocked here but the `files` generator
        # is still consumed below — presumably safe for in-memory
        # inventories; confirm against bzrlib locking semantics.
        finally:
            rev_tree.unlock()

        #grab the autosave listings
        autosave_data = self.autosave.getfilesrc(team,
                                                 '/' + project + rootpath)

        def branch_recurse(project, path, entry, files, given_parent_id):
            """
            Travels recursively through a generator object provided by revision_tree.inventory.iter_items.
            Iter_items returns child items immediately after their parents, so by checking the parent_id field of the item with the actual id of the directory item that called it, we can check if we are still within that directory and therefore need to add the item as a child.
            This function will return a list of all children of a particular branch, along with the next items for analysis.
            Whenever it encounters a directory it will call itself to find the children.
            inputs: path - path of item to be analysed first
                    entry - InventoryEntry-derived object of item to be analysed first
                    files - generator object created by iter_items
                    given_parent_id - id (string) of calling directory
            returns: entry_list - list of children. if given_parent_id does not match entry.parent_id, this will be an empty list.
                     path - path of item that has not yet been added to the tree
                     entry - the entry object that has not yet been added to the tree.
                             if given_parent_id did not match entry.parent_id, then path and entry returned will be the same as path and entry called.
            """

            entry_list = []

            while entry.parent_id == given_parent_id:  # is a child of parent

                if entry.kind == "directory":
                    try:
                        next_path, next_entry = files.next()
                        children_list, next_path, next_entry = branch_recurse(
                            project, next_path, next_entry, files,
                            entry.file_id)
                    except StopIteration:  # No more files to iterate through after this one
                        next_entry = None  # break after adding this entry
                        children_list = [
                        ]  # no more items, so there can't be any children

                    entry_list.append({
                        "name": entry.name,
                        "path": project + path,
                        "kind": "FOLDER",
                        "autosave": 0,  # No autosave data for directories
                        "rev":
                        "-1",  #TODO BZRPORT: what's this show/for? yes, i know revision, i mean, current, or when it was created?
                        "children": children_list
                    })

                    if next_entry is None:
                        break  # there are no more iterations so break
                    else:
                        path = next_path
                        entry = next_entry  # now we'll use the returned entry

                else:
                    # Attach any autosave info recorded for this file.
                    if project + path in autosave_data:
                        autosave_info = autosave_data[project + path]
                    else:
                        autosave_info = 0
                    entry_list.append({
                        "name": entry.name,
                        "path": project + path,
                        "kind": "FILE",
                        "autosave": autosave_info,
                        "rev":
                        "-1",  #TODO BZRPORT: what's this show/for? yes, i know revision, i mean, current, or when it was created?
                        "children": []
                    })

                    try:
                        path, entry = files.next()  # grab next entry
                    except StopIteration:  # No more files to iterate through
                        break

            return entry_list, path, entry

        # Determine tree_root string to pass to recursing function as a parent id
        if rootid == None:
            tree_root = "TREE_ROOT"
        else:
            tree_root = rootid

        try:
            first_path, first_entry = files.next()  # grab next entry
        except StopIteration:  # StopIteration caught on first pass: project tree must be empty
            return dict(tree=[])

        tree, last_path, last_entry = branch_recurse('/' + project + '/',
                                                     first_path, first_entry,
                                                     files, tree_root)

        return dict(tree=tree)

    #create a new directory
    @expose("json")
    @srusers.require(srusers.in_team())
    def newdir(self, team, path, msg):
        """
        Create a directory inside a project and commit it.

        inputs: path - full path ("root/project/newdir")
                msg - commit message
        returns (json): success flag, newdir (echoed path) and a feedback
            string for the user.
        """
        project, dirpath = self.get_project_path(path)
        projWrite = ProjectWrite(team, project)

        try:
            projWrite.new_directory(dirpath)
        except:  # TODO BZRPORT: replace with bzr error
            return dict( success=0, newdir = path,\
                        feedback="Error creating directory: " + path)

#TODO: try:
        revno, revid = projWrite.commit(msg)

        return dict( success=1, newdir = path,\
                feedback="Directory successfully created")

#        else: # directory wasn't created because it already existed
#            return dict( success=0, newdir = path,\
#                    feedback="Directory " + path + " already exists")

    @expose("json")
    @srusers.require(srusers.in_team())
    def projlist(self, team):
        """Returns a list of projects"""

        try:
            r = open_repo(int(team))
        except:
            #No repository present
            return dict(projects=[])

        self.user.set_setting('team.last', team)

        projects = []

        branches = r.find_branches()

        for branch in branches:
            projects.append(branch.nick)

        return dict(projects=projects)

    @expose("json")
    @srusers.require(srusers.in_team())
    def createproj(self, name, team):
        """Creates new project directory"""

        r = open_repo(int(team))

        if name.find(".") != -1:
            """No ../../ nastyness"""
            return nil

        url = srusers.get_svnrepo(team) + "/" + name

        r.bzrdir.create_branch_convenience(base=url, force_new_tree=False)

        return dict()

    @expose("json")
    @srusers.require(srusers.in_team())
    def revert(self, team, files, torev, message):

        file_list = files.split(',')
        if len(file_list) == 0:
            return dict(Message='Revert failed - no files specified', status=1)

        project, file = self.get_project_path(file_list[0])
        rev_spec = bzrlib.revisionspec.RevisionSpec.from_string(torev)
        file_list = [self.get_project_path(f)[1] for f in file_list]

        wt = WorkingTree(team, project)
        rev_tree = rev_spec.as_tree(wt.branch)

        wt.revert(file_list, rev_tree)

        # find out current user
        ide_user = str(srusers.get_curuser())

        revproperties = {"authors": ide_user}

        wt.commit(message, revprops=revproperties)
        newrev, id = wt.branch.last_revision_info()
        wt.destroy()

        return dict(new_revision=newrev,
                    code="",
                    success="Success !!!",
                    status=0)

        #from undelete
        return dict(fail=fail, success=','.join(success), status=status)

    @expose("json")
    @srusers.require(srusers.in_team())
    def calendar(self, mnth, yr, file, team):
        #returns data for calendar function

        if file == '/':  #no project selected
            return dict(path=file, history=[])

        month = int(mnth) + 1
        year = int(yr)
        b = open_branch(team, file)

        try:
            log = b.repository.get_revisions(b.revision_history())
        except:
            logging.debug("Log failed for %s" % file)
            print "failed to retrieve log"
            return dict(path=file, history=[])

        if len(log) == 0:  #if there's nothing there
            return dict(path=file, history=[])

        #get a list of users based on log authors
        start = datetime.datetime(year, month, 1, 0, 0, 0)

        if (month >= 12):
            end = datetime.datetime(year + 1, 1, 1, 0, 0,
                                    0)  #watchout for rollover
        else:
            end = datetime.datetime(year, month + 1, 1, 0, 0, 0)

        result = []

        for y in log:
            now = datetime.datetime(2000, 1, 1)
            #create a dummy datetime
            now = now.fromtimestamp(y.timestamp)
            if (start <= now < end):
                result.append(y)

        result.reverse()

        return dict(  path=file,\
                      history=[{"author":x.get_apparent_author(), \
                      "date":time.strftime("%Y/%m/%d/%H/%M/%S", \
                      time.localtime(x.timestamp)), \
                      "message":x.message, "rev":b.revision_id_to_revno(x.revision_id)} \
                      for x in result])

    @expose("json")
    @srusers.require(srusers.in_team())
    def move(self, team, src, dest, msg=""):
        #   the source and destination arguments may be directories or files
        #   directories rendered empty as a result of the move are automatically 'pruned'
        #   returns status = 0 on success

        src_proj, src_path = self.get_project_path(src)
        dest_proj, dest_path = self.get_project_path(dest)
        if src_proj != dest_proj:
            return dict(new_revision="0",
                        status="1",
                        message="Source and destination projects must match")

        wt = WorkingTree(int(team), src_proj)

        if not wt.has_filename(src_path):
            return dict(new_revision="0",
                        status="1",
                        message="Source file/folder doesn't exist: " + src)

        if not wt.has_filename(os.path.dirname(dest_path)):
            return dict(new_revision="0",
                        status="1",
                        message="Destination folder doesn't exist: " +
                        os.path.dirname(dest))

        if wt.has_filename(dest_path):
            return dict(new_revision="0",
                        status="1",
                        message="Destination already exists: " + dest)

        wt.rename_one(src_path, dest_path)

        # find out current user
        ide_user = str(srusers.get_curuser())

        revproperties = {"authors": ide_user}

        wt.commit('Move ' + src_path + ' to ' + dest_path,
                  revprops=revproperties)
        wt.destroy()

        self.autosave.move(team, src, dest)

        return dict(new_revision="0",
                    status="0",
                    message='Sucessfully moved file ' + src + ' to ' + dest)

    @expose("json")
    @srusers.require(srusers.in_team())
    def copyproj(self, team, src, dest):
        # Create a temporary directory
        tmpdir = tempfile.mkdtemp()
        #open the branch and sprout a new copy, in the temp dir
        b = open_branch(team, src)
        self.createproj(dest, team)
        nb = open_branch(team, dest)
        b.push(nb)
        return dict(status=0)

    @expose("json")
    @srusers.require(srusers.in_team())
    def copy(self, team, src="", dest="", msg="SVN Copy", rev="0"):
        return self.cp(team, src, dest, msg, rev)

    @srusers.require(srusers.in_team())
    def cp(self, team, src="", dest="", msg="Copy", rev="0"):
        """Copy *src* to *dest* within a single project and commit.

        rev "0" means copy from the branch head; any other value selects
        the revision to copy from.  Returns a dict with the new revision
        number on success, or status "1" and an explanation on failure.
        """
        project, src = self.get_project_path(src)
        dest_project, dest = self.get_project_path(dest)
        if dest_project != project:
            return dict(
                new_revision="0",
                status="1",
                message=
                "Copy Failed: Source and destination projects must match")

        # Validate arguments before constructing the ProjectWrite (the
        # original built it first, doing work that was then discarded).
        # NOTE(review): these two error dicts use the key `msg` while the
        # others use `message` -- kept as-is for client compatibility, but
        # the inconsistency looks accidental; confirm against the client.
        if src == "":
            return dict(new_revision="0",
                        status="1",
                        msg="No Source file/folder specified")
        if dest == "":
            return dict(new_revision="0",
                        status="1",
                        msg="No Destination file/folder specified")

        if rev == "0":
            rev = None

        projWrite = ProjectWrite(team, project, revno=rev)

        try:
            projWrite.copy(src, dest)
            new_revno, new_rev_id = projWrite.commit(msg)

        # py3-compatible except syntax (was `except Exception, e`, which
        # only parses under Python 2).
        except Exception as e:
            return dict(new_revision="0",
                        status="1",
                        message="Copy Failed: " + str(e))

        return dict(new_revision=str(new_revno),
                    status="0",
                    message="copy successful")
Exemplo n.º 38
0
    def checkout(self, team, project, simulator=False):
        """
        This function grabs a set of files and makes a zip available. Should be
        linked to directly.
        inputs:
            team & project - code to retrieve
            simulator - true if code is being delivered to a simulator.
        returns:
            A zip file as a downloadable file with appropriate HTTP headers
            sent.
        """
        b = open_branch(int(team), project)
        rev_tree = b.basis_tree()  # get latest revision tree for branch

        #Avoid using /tmp by writing into a memory based file
        zipData = StringIO.StringIO()
        zip = zipfile.ZipFile(zipData, "w", zipfile.ZIP_DEFLATED)
        #Need to lock_read before reading any file contents
        rev_tree.lock_read()
        try:
            #Get a list of files in the tree
            files = [
                f for f in rev_tree.iter_entries_by_dir()
                if f[1].kind == "file"
            ]
            for filename, file in files:
                #Set external_attr on a ZipInfo to make sure the files are
                #created with the right permissions
                info = zipfile.ZipInfo(filename.encode("ascii"))
                info.external_attr = 0666 << 16L
                #Read the file contents and add to zip
                zip.writestr(info, rev_tree.get_file(file.file_id).read())

            #Need a __init__ in the root of all code exports
            if not "__init__.py" in [f[0].encode("ascii") for f in files]:
                info = zipfile.ZipInfo("__init__.py")
                info.external_attr = 0666 << 16L
                zip.writestr(info, "")

        except:
            # NOTE(review): bare except returns a plain string instead of
            # zip data; presumably clients detect that -- confirm.
            return "Error exporting project"
        finally:
            #Always unlock or get GC related errors
            rev_tree.unlock()
        zip.close()
        #Seek back to start of file so read() works later on
        zipData.seek(0)

        if not simulator:
            """
            The zipfile delivered to the robot is the contents of the
            repository as a zip inside another zip that contains firmware.
            """
            #Get a copy of the firmware zip, drop the code zip (in zipData)
            #in it and then put the resulting zip back into zipData
            sysZipData = open(config.get("robot.packagezip")).read()
            sysZipBuffer = StringIO.StringIO(sysZipData)

            #ZIPNAME is defined outside this chunk (module constant)
            sysZip = zipfile.ZipFile(sysZipBuffer, "a")
            info = zipfile.ZipInfo(ZIPNAME)
            info.external_attr = 0666 << 16L
            sysZip.writestr(info, zipData.read())
            sysZip.close()

            sysZipBuffer.seek(0)
            zipData = StringIO.StringIO(sysZipBuffer.read())

        #Set up headers for correctly serving a zipfile
        cherrypy.response.headers['Content-Type'] = \
                "application/x-download"
        cherrypy.response.headers['Content-Disposition'] = \
                'attachment; filename="' + ZIPNAME + '"'

        #Return the data
        return zipData.read()
Exemplo n.º 39
0
 def test_security_bug(self):
     """Fetch Bugzilla #237533; when credentials are configured, check its title."""
     bug = Bugzilla(bz_id=237533)
     assert bug
     # Title check needs an authenticated session; skip when unconfigured.
     if not config.get('bodhi_password'):
         return
     assert bug.title == 'CVE-2007-2165: proftpd auth bypass vulnerability'
Exemplo n.º 40
0
 def test_latest(self):
     """Check get_latest() for a known build when koji is the buildsystem."""
     update = self.get_update(name='yum-3.2.1-1.fc7')
     # The expected path only exists on a koji-backed install.
     if config.get('buildsystem') != 'koji':
         return
     latest = update.builds[0].get_latest()
     assert latest
     assert latest == '/mnt/koji/packages/yum/3.2.0/1.fc7/src/yum-3.2.0-1.fc7.src.rpm'
Exemplo n.º 41
0
# Standard-library imports.
import urllib2
import subprocess

from getpass import getpass, getuser
from optparse import OptionParser
from datetime import datetime, timedelta

# Third-party helpers: byte conversion and Fedora account/bodhi clients.
from kitchen.text.converters import to_bytes
from fedora.client import AuthError, ServerError
from fedora.client.bodhi import BodhiClient

# Prefer the server-side configured bodhi URL; fall back to the public
# instance when server config is unavailable (e.g. client-only installs).
try:
    from turbogears import config
    from bodhi.util import load_config
    load_config()
    BODHI_URL = config.get('bodhi_url',
                           'https://admin.fedoraproject.org/updates/')
except:
    BODHI_URL = 'https://admin.fedoraproject.org/updates/'

# Valid values for an update's type and its requested repository state.
update_types = ['bugfix', 'security', 'enhancement', 'newpackage']
update_requests = ['stable', 'testing', 'obsolete', 'unpush']

# NOTE(review): `logging` is used here but no `import logging` is visible
# in this chunk -- confirm it is imported earlier in the file.
log = logging.getLogger(__name__)


def get_parser():
    usage = "usage: %prog [options] [build...|package]"
    parser = OptionParser(usage,
                          description=__description__,
                          version=__version__)
Exemplo n.º 42
0
 def setUp(self):
     """Open an HTTP connection to the configured test server."""
     server_port = config.get('server.socket_port')
     server_host = config.get('server.socket_host')
     self.connection = httplib.HTTPConnection(server_host, server_port)
Exemplo n.º 43
0
    def run(self):
        """Run an interactive Python shell for the project.

        Loads the project config, exposes the model module's names (when
        available) in the shell namespace, and starts an IPython shell,
        falling back to the stdlib ``code`` console when IPython is not
        installed.  On EOF (Ctrl-D) the user is offered the chance to
        commit pending database changes before exiting.
        """
        self.find_config()

        # Expose the model module's names in the shell, or a bare
        # namespace when no model module can be found.
        mod = get_model()
        if mod:
            locals = mod.__dict__
        else:
            locals = dict(__name__="tg-admin")

        # SQLAlchemy projects get a live session/metadata in the shell;
        # otherwise changes are committed through the SQLObject `hub`.
        if config.get("sqlalchemy.dburi"):
            using_sqlalchemy = True
            database.get_engine()
            locals.update(dict(session=database.session,
                metadata=database.metadata))
        else:
            using_sqlalchemy = False

        try:
            # try to use IPython if possible
            from IPython.terminal.interactiveshell import TerminalInteractiveShell

            class CustomIPShell(TerminalInteractiveShell):
                def raw_input(self, *args, **kw):
                    try:
                        return TerminalInteractiveShell.raw_input(self,
                            *args, **kw) # needs decoding (see below)?
                    except EOFError:
                        # Shell exiting: offer to flush/commit DB changes.
                        r = raw_input("Do you wish to commit your "
                                    "database changes? [yes]")
                        if not r.lower().startswith("n"):
                            if using_sqlalchemy:
                                self.push("session.flush()")
                            else:
                                self.push("hub.commit()")
                        raise EOFError

            shell = CustomIPShell(user_ns=locals)
            shell.mainloop()
        except ImportError:
            import code

            class CustomShell(code.InteractiveConsole):
                def raw_input(self, *args, **kw):
                    try:
                        import readline  # enables line editing when present
                    except ImportError:
                        pass
                    try:
                        r = code.InteractiveConsole.raw_input(self,
                            *args, **kw)
                        # Decode the input with the first encoding that
                        # works; fall back to returning the raw string.
                        for encoding in (getattr(sys.stdin, 'encoding', None),
                                sys.getdefaultencoding(), 'utf-8', 'latin-1'):
                            if encoding:
                                try:
                                    return r.decode(encoding)
                                except UnicodeError:
                                    pass
                        return r
                    except EOFError:
                        # Shell exiting: offer to flush/commit DB changes.
                        r = raw_input("Do you wish to commit your "
                                      "database changes? [yes]")
                        if not r.lower().startswith("n"):
                            if using_sqlalchemy:
                                self.push("session.flush()")
                            else:
                                self.push("hub.commit()")
                        raise EOFError

            shell = CustomShell(locals=locals)
            shell.interact()
Exemplo n.º 44
0
from flask import Flask
from bkr.common import __version__
from bkr.server import identity, assets
from bkr.server.app import app

# NOTE(review): `logging`, `sys` and `config` are used below but not
# imported in this chunk -- confirm they are imported earlier in the file.
log = logging.getLogger(__name__)

# Load config.
from bkr.log import log_to_stream
from bkr.server.util import load_config
load_config()
log_to_stream(sys.stderr, level=logging.DEBUG)

# Keep the code before the imports, otherwise we'll end up with function names
# not marked as executed (see: Coverage.py FAQ)
if config.get('coverage', False):
    import coverage
    import atexit

    log.debug('Starting coverage analysis')
    cov = coverage.coverage(data_suffix=True,
                            cover_pylib=False,
                            timid=True,
                            omit=['*.kid'])
    cov.start()

    def save_coverage():
        # Flush collected coverage data to disk at interpreter exit.
        # Presumably registered via atexit.register just below this chunk
        # -- TODO confirm.
        log.debug('Saving coverage')
        cov.stop()
        cov.save()
Exemplo n.º 45
0
def log_delete(print_logs=False, dry=False, limit=None):
    """Delete log directories for expired Beaker jobs.

    print_logs: when True, print each log directory handled.
    dry:        when True, only report what would be deleted.
    limit:      maximum number of expired jobs to process (None = all).

    Logs may live on a remote log server (deleted via HTTP DELETE) or on
    the local filesystem (deleted with shutil.rmtree).
    """
    if dry:
        logger.info('Dry run only')
    logger.info('Getting expired jobs')

    failed = False
    if not dry:
        # Build an HTTP session that can authenticate to the remote log
        # server with Kerberos and/or digest auth, whichever is configured.
        requests_session = requests.Session()
        log_delete_user = config.get('beaker.log_delete_user')
        log_delete_password = config.get('beaker.log_delete_password')

        available_auths = []
        available_auth_names = []

        if _kerberos_available:
            available_auths.append(
                requests_kerberos.HTTPKerberosAuth(
                    mutual_authentication=requests_kerberos.DISABLED))
            available_auth_names.append('Kerberos')

        if log_delete_user and log_delete_password:
            available_auths.append(
                requests.auth.HTTPDigestAuth(log_delete_user,
                                             log_delete_password))
            available_auth_names.append('HTTPDigestAuth')
        requests_session.auth = MultipleAuth(available_auths)
        logger.debug('Available authentication methods: %s' %
                     ', '.join(available_auth_names))

    for jobid, in Job.query.filter(Job.is_expired).limit(limit).values(Job.id):
        logger.info('Deleting logs for job %s', jobid)
        try:
            session.begin()
            job = Job.by_id(jobid)
            all_logs = job.all_logs(load_parent=False)
            # We always delete entire directories, not individual log files,
            # because that's faster, and because we never mix unrelated log
            # files together in the same directory so it's safe to do that.
            # We keep a trailing slash on the directories otherwise when we try
            # to DELETE them, Apache will first redirect us to the trailing
            # slash.
            log_dirs = (os.path.dirname(log.full_path) + '/'
                        for log in all_logs)
            for path in remove_descendants(log_dirs):
                if not dry:
                    if urlparse.urlparse(path).scheme:
                        # We need to handle redirects ourselves, since requests
                        # turns DELETE into GET on 302 which we do not want.
                        response = requests_session.delete(
                            path, allow_redirects=False)
                        redirect_limit = 10
                        while redirect_limit > 0 and response.status_code in (
                                301, 302, 303, 307):
                            response = requests_session.delete(
                                response.headers['Location'],
                                allow_redirects=False)
                            redirect_limit -= 1
                        if response.status_code not in (200, 204, 404):
                            response.raise_for_status()
                    else:
                        # Local path: a missing directory (ENOENT) is fine,
                        # it was already deleted.
                        try:
                            shutil.rmtree(path)
                        except OSError, e:
                            if e.errno == errno.ENOENT:
                                pass
                if print_logs:
                    print path
            if not dry:
                job.delete()
                session.commit()
                session.close()
            else:
                session.close()
        except Exception, e:
            # Keep going on failure; record it so the caller can report.
            logger.exception('Exception while deleting logs for job %s', jobid)
            failed = True
            session.close()
            continue
Exemplo n.º 46
0
    def robolog(self, team, last_received_ping=0):
        """
        Return the log being appended direct from the robot.

        last_received_ping is the id of the last log entry the client has
        seen; only newer entries are returned.  The response contains the
        HTML-escaped log text, the robot-presence flag and the newest
        entry id ("ping") for the client's next poll.
        """
        # First check to see if live robot logging is enabled on the server
        # If it isn't enabled then return "disabled" to tell client not to poll
        if not config.get("robolog.enabled"):
            return {"ping": 0, "data": "", "present": 0, "disabled": True}

        try:
            team = int(team)
        except ValueError:
            log.error("Invalid team value")
            return {"ping": 0, "data": "", "present": 0}

        try:
            last_received_ping = int(last_received_ping)
        except ValueError:
            log.error("Invalid last_received_ping")
            return {"ping": 0, "data": "", "present": 0}

        log.debug("RoboIde team = %d", team)
        log.debug("RoboIde last_received_ping = %d", last_received_ping)

        # Only members of the team may read its robot log.
        if not (team in srusers.getteams()):
            log.error("Team not in the users teams")
            log.error(srusers.getteams())
            return {"ping": 0, "data": "", "present": 0}

        try:
            team = model.TeamNames.get(id=team)
        except:
            #Fake team!
            log.debug("Team not found")
            return {"ping": 0, "data": "", "present": 0}

        try:
            present = model.RoboPresent.selectBy(team=team)[0].present
        except:
            present = False

        # Newest log entry id doubles as the "ping" value the client sends
        # back on its next poll.  (Removed an unused most_recent_ping_date
        # variable and a dead most_recent_ping = 0 initializer.)
        robologs = model.RoboLogs.selectBy(team=team)
        robologs = robologs.orderBy(sqlbuilder.DESC(model.RoboLogs.q.id))
        most_recent_ping = robologs[0].id

        log.debug("Robot presence is: %d" % present)

        # last_received_ping is already an int (converted above; a second
        # redundant int() conversion has been removed).
        logs = model.RoboLogs.select(
            sqlbuilder.AND(model.RoboLogs.q.team == team,
                           model.RoboLogs.q.id > last_received_ping))

        data = "\n".join([l.value for l in logs])

        # Escape HTML special characters; '&' must be replaced first so
        # the later entity substitutions are not themselves re-escaped.
        data = data.replace('&', '&amp;')
        data = data.replace('"', '&quot;')
        data = data.replace("'", '&#39;')
        data = data.replace(">", '&gt;')
        data = data.replace("<", '&lt;')

        return {
            "data": data,
            "present": int(present),
            "ping": most_recent_ping
        }
Exemplo n.º 47
0
def build_url(newpath):
    """Resolve *newpath* against the configured samadhi base URL."""
    return urljoin(config.get('samadhi.baseurl'), tg_url(newpath))
Exemplo n.º 48
0
import captcha
from turbogears import config
import random
import os.path
from pkg_resources import resource_filename

# Config-key prefix for this captcha plugin's settings.
plugin_name = 'fas.tgcaptcha2.plugin.vanasco_dowty.'

# Image geometry and colours, overridable via TurboGears config.
width = int(config.get(plugin_name + 'width', 300))
height = int(config.get(plugin_name + 'height', 100))
bg_color = config.get(plugin_name + 'bg_color', '#DDDDDD')
fg_color = config.get(
    plugin_name + 'fg_color',
    ["#330000", "#660000", "#003300", "#006600", "#000033", "#000066"])
# Font-size range (pixels) each captcha picks from at random.
font_size_min = int(config.get(plugin_name + 'font_size_min', 30))
font_size_max = int(config.get(plugin_name + 'font_size_max', 45))
font_paths = config.get(
    plugin_name + 'font_paths',
    [os.path.normpath('/usr/share/fonts/tulrich-tuffy/Tuffy.ttf')])

# Push our settings into the captcha library's module-level knobs.
captcha.font__paths = font_paths
captcha.captcha__text__render_mode = config.get(
    plugin_name + 'text_render_mode', 'by_letter')
captcha.captcha__font_range = (font_size_min, font_size_max)


def generate_jpeg(text, file_):
    font_size = random.randint(font_size_min, font_size_max)
    fg = random.choice(fg_color)
    ci = captcha._Captcha__Img(text, width, height, font_size, fg, bg_color)
    image = ci.render()
Exemplo n.º 49
0
class Help(controllers.Controller):
    '''Serve contextual help texts for the FAS web interface.

    ``help`` maps a help-item id to a two-element list of
    ``[title, HTML body]``; both elements are lazily-translated strings.
    '''
    # Mapping of help id -> [title, HTML body].  The bodies are raw HTML
    # fragments rendered by the fas.templates.help template.
    help = {
        'none': [_('Error'),
                 _('<p>We could not find that help item</p>')],
        'cla_accuracy': [
            _('Accuracy of FPCA Information'),
            _('<p>The FPCA is a legal document.  We need to have accurate information attached to it just in case we ever need to contact you about a contribution that you make to the project.  Imagine if we were to get a call from a lawyer at some other company claiming that they own the copyright to your work and we have to tell them we have a right to use it because "Mickey Moose" contributed it to us and we have no telephone number to contact them!  Potentially a very sticky situation.</p>'
              )
        ],
        'user_ircnick': [
            _('IRC Nick (Optional)'),
            _('<p>IRC Nick is used to identify yourself on irc.freenode.net.  Please register your nick on irc.freenode.net first, then fill this in so people can find you online when they need to</p>'
              )
        ],
        'user_email': [
            _('Email (Required)'),
            _('<p>This email address should be your preferred email contact and will be used to send various official emails to.  This is also where your @fedoraproject.org email will get forwarded</p>'
              )
        ],
        'user_security_question': [
            _('Security question (Required)'),
            _('<p>The security question is used when you have lost access to your email address. You can email the admins, and they will ask you this question, and confirm you give the correct answer, before resetting your password/email address</p>'
              )
        ],
        'user_human_name': [
            _('Full Name (Required)'),
            _('<p>Your Human Name or "real life" name</p>')
        ],
        'user_gpg_keyid': [
            _('GPG Key ID'),
            _('<p>A GPG key is generally used to prove that a message or email came from you or to encrypt information so that only the recipients can read it.  This can be used when a password reset is sent to your email.<br />Enter the keyid in hex format. <b>Without</b> leading "0x...".</p>'
              )
        ],
        'user_telephone': [
            _('Telephone'),
            _('<p>Required in order to complete the <a href="http://fedoraproject.org/wiki/Legal:Fedora_Project_Contributor_Agreement">FPCA</a>.  Sometimes during a time of emergency someone from the Fedora Project may need to contact you.  For more information see our <a href="http://fedoraproject.org/wiki/Legal/PrivacyPolicy">Privacy Policy</a></p>'
              )
        ],
        'user_postal_address': [
            _('Postal Address'),
            _('<p>This should be a mailing address where you can be contacted.  See our <a href="http://fedoraproject.org/wiki/Legal/PrivacyPolicy">Privacy Policy</a> about any concerns.</p>'
              )
        ],
        'user_timezone': [
            _('Timezone (Optional)'),
            _('<p>Please specify the time zone you are in.</p>')
        ],
        'user_comments':
        [_('Comments (Optional)'),
         _('<p>Misc comments about yourself.</p>')],
        'user_account_status': [
            _('Account Status'),
            _('<p>Shows account status, possible values include<ul><li>active</li><li>inactive</li></ul></p>'
              )
        ],
        'user_cla': [
            _('FPCA'),
            _('<p>In order to become a full Fedora contributor you must complete the <a href="http://fedoraproject.org/wiki/Legal:Fedora_Project_Contributor_Agreement">Fedora Project Contributor Agreement</a>.  This license is a legal agreement between you and Red Hat.  Full status allows people to contribute content and code and is recommended for anyone interested in getting involved in the Fedora Project.</p>'
              )
        ],
        'user_ssh_key': [
            _('Public RSA, ECDSA SSH Key'),
            _('<p>Many resources require public key authentication to work.  By uploading your public key to us, you can then log in to our servers.  Type "man ssh-keygen" for more information on creating your key (it must be an RSA key).  Once created you will want to upload ~/.ssh/id_rsa.pub. <br /><br />If you wish to login through several hosts, each with their own public key, you can create a concatenated file of public ssh keys and upload it in lieu of the individual ssh public key. <br /><br />"Warning: In case of having ECDSA key please upload the two types of keys because some of our servers may not accept ECDSA keys."</p>'
              )
        ],
        'user_locale': [
            _('Locale'),
            _('<p>For non-english speaking peoples this allows individuals to select which locale they are in.</p>'
              )
        ],
        'user_country_code': [
            _('Country Code'),
            _('<p>Required in order to complete the <a href="http://fedoraproject.org/wiki/Legal:Fedora_Project_Contributor_Agreement">FPCA</a>.  Sometimes during a time of emergency someone from the Fedora Project may need to contact you.  For more information see our <a href="http://fedoraproject.org/wiki/Legal/PrivacyPolicy">Privacy Policy</a></p>'
              )
        ],
        'user_age_check': [
            _('Must be over 13 Years'),
            _("<p>Out of special concern for children's privacy, we do not knowingly accept online personal information from children under the age of 13. We do not knowingly allow children under the age of 13 to become registered members of our sites or buy products and services on our sites. We do not knowingly collect or solicit personal information about children under 13.</p>"
              )
        ],
        'user_privacy': [
            _('Hide Information'),
            _('<p>In accordance with our privacy policy, you can choose to hide some of the information given on this page from other people.  Please see the <a href="https://fedoraproject.org/wiki/Legal:PrivacyPolicy">privacy policy</a> for complete details.</p>'
              )
        ],
        'group_apply': [
            _('Apply'),
            _('<p>Applying for a group is like applying for a job and it can certainly take a while to get in.  Many groups have their own rules about how to actually get approved or sponsored.  For more information on how the account system works see the <a href="%s">about page</a>.</p>'
              ) % turbogears.url('/about')
        ],
        'group_remove': [
            _('Remove'),
            _('''<p>Removing a person from a group will cause that user to no longer be in the group.  They will need to re-apply to get in.  Admins can remove anyone, Sponsors can remove users, users can't remove anyone.</p>'''
              )
        ],
        'group_upgrade': [
            _('Upgrade'),
            _('''<p>Upgrade a persons status in this group.<ul><li>from user -> to sponsor</li><li>From sponsor -> administrator</li><li>administrators cannot be upgraded beyond administrator</li></ul></p>'''
              )
        ],
        'group_downgrade': [
            _('Downgrade'),
            _('''<p>Downgrade a persons status in the group.<ul><li>from administrator -> to sponsor</li><li>From sponsor -> user</li><li>users cannot be downgraded below user, you may want to remove them</li></ul></p>'''
              )
        ],
        'group_approve': [
            _('Approve'),
            _('''<p>A sponsor or administrator can approve users to be in a group.  Once the user has applied for the group, go to the group's page and click approve to approve the user.</p>'''
              )
        ],
        'group_sponsor': [
            _('Sponsor'),
            _('''<p>A sponsor or administrator can sponsor users to be in a group.  Once the user has applied for the group, go to the group's page and click approve to sponsor the user.  Sponsorship of a user implies that you are approving a user and may mentor and answer their questions as they come up.</p>'''
              )
        ],
        'group_user_add': [
            _('Add User'),
            _('''<p>Manually add a user to a group.  Place their username in this field and click 'Add'</p>'''
              )
        ],
        'group_name': [
            _('Group Name'),
            _('''<p>The name of the group you'd like to create.  It should be lowercase alphanumeric though '-' and '_' are allowed</p>'''
              )
        ],
        'group_display_name': [
            _('Display Name'),
            _('''<p>More human readable name of the group</p>''')
        ],
        'group_owner': [
            _('Group Owner'),
            _('''<p>The name of the owner who will run this group</p>''')
        ],
        'group_type': [
            _('Group Type'),
            _('''<p>Mandatory field. Available values are 'tracking', 'shell', 'cvs', 'git', 'hg', 'svn', 'mtn', and 'pkgdb'. This value only really matters if the group is to end up getting shell access or commit access somewhere like fedorahosted.</p>'''
              )
        ],
        'group_url': [
            _('Group URL (Optional)'),
            _('''<p>A URL or wiki page for the group (for example, <a href="https://fedoraproject.org/wiki/Infrastructure">https://fedoraproject.org/wiki/Infrastructure</a>).</p>'''
              )
        ],
        'group_mailing_list': [
            _('Group Mailing List (Optional)'),
            _('''<p>A mailing list for the group (for example, [email protected]).</p>'''
              )
        ],
        'group_mailing_list_url': [
            _('Group Mailing List URL (Optional)'),
            _('''<p>A URL for the group's mailing list (for example, <a href="http://www.redhat.com/mailman/listinfo/fedora-infrastructure-list">http://www.redhat.com/mailman/listinfo/fedora-infrastructure-list</a>).</p>'''
              )
        ],
        'group_invite_only': [
            _('Invite Only'),
            _('''<p>If users should not normally be able to apply to the group, setting this will hide the usual "Apply!" links and buttons.  Users can still be added to a group directly by an admin or sponsor.</p>'''
              )
        ],
        'group_irc_channel': [
            _('Group IRC Channel (Optional)'),
            _('''<p>An IRC channel for the group (for example, #fedora-admin).</p>'''
              )
        ],
        'group_irc_network': [
            _('Group IRC Network (Optional)'),
            _('''<p>The IRC Network for the group's IRC channel (for example, Freenode).</p>'''
              )
        ],
        'group_needs_sponsor': [
            _('Needs Sponsor'),
            _('''<p>If your group requires sponsorship (recommended), this means that when a user is approved by a sponsor.  That relationship is recorded in the account system.  If user A sponsors user N, then in viewing the members of this group, people will know to contact user A about user N if something goes wrong.  If this box is unchecked, this means that only approval is needed and no relationship is recorded about who did the approving</p>'''
              )
        ],
        'group_self_removal': [
            _('Self Removal'),
            _('''<p>Should users be able to remove themselves from this group without sponsor / admin intervention?  (recommended yes)</p>'''
              )
        ],
        'group_prerequisite': [
            _('Must Belong To'),
            _('''<p>Before a user can join this group, they must belong to the group listed in this box.  Recommended values are for the '%s' group.</p>'''
              ) % config.get('cla_done_group', 'cla_done')
        ],
        'group_join_message': [
            _('Join Message'),
            _('''<p>This message will go out to users when they apply to the group.  It should be informative and offer tips about what to do next.  A description of the group would also be valuable here</p>'''
              )
        ],
        'gencert': [
            _('Client Side Cert'),
            _('''<p>The client side cert is generally used to grant access to upload packages to Fedora or for other authentication purposes like with koji.  You should save this certificate to ~/.fedora.cert.  If you are not a package maintainer there is no need to worry about the client side cert.  Please note that whenever a new cert is generated, all old ones are revoked.</p>'''
              )
        ],
        'latitude_and_longitude': [
            _('Longitude and Latitude'),
            _('''<p>Your longitude and latitude.  This optional field should be entered as a floating point number.  For instance, 312.333 or -21.2.  This may be used for mapping purposes, but will not be used if you have privacy enabled for your account.</p>'''
              )
        ],
        'apply_rules_message': [
            _('Rules for Application'),
            _('''<p>Rules or steps that applicants should follow before applying to your group.  This will be shown to users before they apply to your group.</p>'''
              )
        ],
    }

    def __init__(self):
        '''Create the Help controller.'''

    @expose(template="fas.templates.help")
    def get_help(self, helpid='none'):
        '''Look up the help item for *helpid* and render it.

        Unknown ids fall back to a generic error item.
        '''
        try:
            helpItem = self.help[helpid]
        except KeyError:
            # NOTE(review): this branch returns the item under the key
            # 'helpItem' while the success path below uses 'help' — verify
            # the fas.templates.help template accepts both.
            return dict(title=_('Error'),
                        helpItem=[
                            _('Error'),
                            _('<p>We could not find that help item</p>')
                        ])
        # Transform to unicode as that's what genshi expects, not lazystring
        helpItem = [unicode(s) for s in helpItem]
        return dict(help=helpItem)
Exemplo n.º 50
0
    def validate_identity(self, user_name, password, visit_key, otp=None):
        '''
        Look up the identity represented by user_name and determine whether the
        password is correct.

        Must return either None if the credentials weren't valid or an object
        with the following properties:
            user_name: original user name
            user: a provider dependant object (TG_User or similar)
            groups: a set of group IDs
            permissions: a set of permission IDs

        Side Effects:
        :cherrypy.request.fas_provided_username: set to user_name
        :cherrypy.request.fas_identity_failure_reason: if we fail to validate
            the user, set to the reason validation failed.  Values can be:
            :no_user: The username was not present in the db.
            :status_inactive: User is disabled but can reset their password
                to restore service.
            :status_expired: User is expired, account is no more.
            :status_admin_disabled: User is disabled and has to talk to an
                admin before they are re-enabled.
            :bad_password: The username and password do not match.

        Arguments:
        :arg user_name: user_name we're authenticating.  If None, we'll try
            to lookup a username from SSL variables
        :arg password: password to authenticate user_name with
        :arg visit_key: visit_key from the user's session
        :arg otp: One Time Password key to authenticate within the password
                 This is an extras argument we add to request parameters
                 in order to add 2nd factor authentication to TG1.
        '''
        # Save the user provided username so we can do other checks on it in
        # outside of this method.
        cherrypy.request.fas_provided_username = user_name
        cherrypy.request.fas_identity_failure_reason = None
        using_ssl = False

        # No username supplied: fall back to the client-certificate CN when
        # the front-end proxy verified an SSL client cert for us.
        # NOTE(review): if the header is not 'SUCCESS', user_name stays
        # None and the .endswith() call below would raise AttributeError —
        # confirm callers never pass an empty username without SSL.
        if not user_name:
            if cherrypy.request.headers['X-Client-Verify'] == 'SUCCESS':
                user_name = cherrypy.request.headers['X-Client-CN']
                cherrypy.request.fas_provided_username = user_name
                using_ssl = True

        # Allow logging in with 'user@<email_host>' by stripping the
        # configured email domain down to the bare username.
        email_domain = '@' + config.get('email_host', '')
        if email_domain != '@' and user_name.endswith(email_domain):
            user_name = user_name[:-len(email_domain)]

        # Any remaining '@' means the user typed a full email address, so
        # look the account up by email instead of username.
        if '@' in user_name:
            user = user_class.query.filter_by(email=user_name).first()
        else:
            user = user_class.query.filter_by(username=user_name).first()

        if not user:
            log.warning("No such user: %s", user_name)
            cherrypy.request.fas_identity_failure_reason = 'no_user'
            return None

        # Disabled/expired accounts fail with a status-specific reason so
        # the UI can explain how (or whether) service can be restored.
        if user.status in ('inactive', 'expired', 'admin_disabled'):
            log.warning("User %(username)s has status %(status)s" % {
                'username': user_name,
                'status': user.status
            })
            cherrypy.request.fas_identity_failure_reason = 'status_%s' % user.status
            return None

        # A verified SSL client cert already proves identity; only check the
        # password (and optional second factor) for non-SSL logins.
        if not using_ssl:
            # Get extras args from request params to increase auth check
            # then pop it out if found to don't mess with other object's method
            if 'otp' in cherrypy.request.params:
                otp = cherrypy.request.params.pop('otp')

            if not self.validate_password(user, user_name, password, otp):
                log.info("Passwords don't match for user: %s", user_name)
                cherrypy.request.fas_identity_failure_reason = 'bad_password'
                return None
            # user + password is sufficient to prove the user is in
            # control
            cherrypy.request.params['_csrf_token'] = hash_constructor(
                visit_key).hexdigest()

        log.info("Associating user (%s) with visit (%s)", user_name, visit_key)
        user.last_seen = datetime.now(pytz.utc)
        return SaFasIdentity(visit_key, user, using_ssl)
Exemplo n.º 51
0
class JsonFasIdentity(BaseClient):
    '''Associate an identity with a person in the auth system.
    '''
    # Name of the session cookie shared with the browser.
    cookie_name = config.get('visit.cookie.name', 'tg-visit')
    # Base URL of the FAS server this client authenticates against.
    fas_url = config.get('fas.url',
                         'https://admin.fedoraproject.org/accounts/')
    useragent = 'JsonFasIdentity/%s' % __version__
    # Tell BaseClient not to persist the session to disk.
    cache_session = False

    def __init__(self,
                 visit_key=None,
                 user=None,
                 username=None,
                 password=None,
                 using_ssl=False):
        '''Set up the identity and open a session against the FAS server.

        :kwarg visit_key: session/visit key tying this identity to the
            browser session; also sent back as the session cookie.
        :kwarg user: already-retrieved user record; when given it seeds the
            cache and the group set so no server round trip is needed.
        :kwarg username: username to authenticate with, passed to BaseClient.
        :kwarg password: password to authenticate with, passed to BaseClient.
        :kwarg using_ssl: True when the user was authenticated via an SSL
            client certificate rather than username/password.
        '''
        # The reason we have both _retrieved_user and _user is this:
        # _user is set if both the user is authenticated and a csrf_token is
        # present.
        # _retrieved_user actually caches the user info from the server.
        # Sometimes we have to determine if a user is only lacking a token,
        # then retrieved_user comes in handy.
        self._retrieved_user = None
        self.log = log
        self.visit_key = visit_key
        session_id = visit_key
        self._group_ids = frozenset()
        self.using_ssl = using_ssl
        if user:
            self._user = user
            # Bug fix: this previously assigned to "_user_retrieved", a
            # misspelling nothing reads; the cache attribute consulted by
            # __retrieve_user() is "_retrieved_user".
            self._retrieved_user = user
            self._groups = frozenset(
                [g['name'] for g in user['approved_memberships']])

        debug = config.get('jsonfas.debug', False)
        super(JsonFasIdentity, self).__init__(self.fas_url,
                                              useragent=self.useragent,
                                              debug=debug,
                                              username=username,
                                              password=password,
                                              session_id=session_id,
                                              cache_session=self.cache_session,
                                              retries=3)

        if self.debug:
            import inspect
            caller = inspect.getouterframes(inspect.currentframe())[1][3]
            self.log.debug('JsonFasIdentity.__init__ caller: %s' % caller)

        # Keep the browser's session cookie in sync with our visit key.
        cherrypy.response.simple_cookie[self.cookie_name] = visit_key

        self.login(using_ssl)
        self.log.debug('Leaving JsonFasIdentity.__init__')

    def send_request(self, method, req_params=None, auth=False):
        '''Issue an HTTP request to a server method.

        Overrides ``BaseClient.send_request`` so that whenever the server
        hands us a new session id (because the old one expired or was
        removed from the db) the visit_key and the cookie returned to the
        browser are refreshed to match before we return.
        '''
        self.log.debug('entering jsonfas send_request')
        server_session = self.session_id
        if server_session != self.visit_key:
            # Session id changed server-side: adopt it locally and push the
            # new value into the client's session cookie.
            self.visit_key = server_session
            cherrypy.response.simple_cookie[self.cookie_name] = server_session
        self.log.debug('leaving jsonfas send_request')
        return super(JsonFasIdentity, self).send_request(method,
                                                         req_params=req_params,
                                                         auth=auth,
                                                         retries=3)

    def __retrieve_user(self):
        '''Attempt to load the user from the visit_key.

        :returns: a user or None
        '''
        if self.debug:
            import inspect
            caller = inspect.getouterframes(inspect.currentframe())[2][3]
            self.log.debug('JSONFASPROVIDER.send_request caller: %s' % caller)

        # The cached value can be in four states:
        # Holds a user: we successfully retrieved it last time, return it
        # Holds None: we haven't yet tried to retrieve a user, do so now
        # Holds a session_id that is the same as our session_id, we unsuccessfully
        # tried to retrieve a session with this id already, return None
        # Holds a session_id that is different than the current session_id:
        # we tried with a previous session_id; try again with the new one.
        if self._retrieved_user:
            if isinstance(self._retrieved_user, basestring):
                if self._retrieved_user == self.session_id:
                    return None
                else:
                    self._retrieved_user = None
            else:
                return self._retrieved_user
        # I hope this is a safe place to double-check the SSL variables.
        # TODO: Double check my logic with this - is it unnecessary to
        # check that the username matches up?
        if self.using_ssl:
            if cherrypy.request.headers['X-Client-Verify'] != 'SUCCESS':
                self.logout()
                return None
            # Retrieve the user information differently when using ssl
            try:
                person = fas.person_by_username(self.username, auth=True)
            except Exception, e:  # pylint: disable-msg=W0703
                # :W0703: Any errors have to result in no user being set.  The
                # rest of the framework doesn't know what to do otherwise.
                self.log.warning(
                    b_('jsonfasprovider, ssl, returned errors'
                       ' from send_request: %s') % to_bytes(e))
                person = None
            self._retrieved_user = person or None
            return self._retrieved_user
        # NOTE(review): on the success path below, `data` is never used and
        # the function falls off the end (implicitly returning None) — the
        # code that builds the user from `data` appears to be missing or
        # truncated in this excerpt.  Confirm against the original source.
        # pylint: disable-msg=W0702
        try:
            data = self.send_request('user/view', auth=True)
        except AuthError, e:
            # Failed to login with present credentials.  Cache the failing
            # session_id so we don't retry it (see the state notes above).
            self._retrieved_user = self.session_id
            return None
Exemplo n.º 52
0
    def id(self, collection_id):  #pylint:disable-msg=C0103
        '''Return a page with information on a particular Collection

        :arg collection_id: Numeric id of the collection
        '''
        # This endpoint is kept only for old links; steer users to the
        # name-based URL.
        flash(
            _('This page is deprecated.  Use %(url)s instead.') % {
                'url':
                config.get('base_url_filter.base_url', 'http://localhost') +
                tg_url('/collection/name')
            })
        try:
            collection_id = int(collection_id)
        except ValueError:
            # Non-numeric id: render the error template for HTML requests,
            # or return the raw error dict for JSON.
            error = dict(status = False,
                    title = _('%(app)s -- Invalid Collection Id') %
                        {'app': self.app_title},
                    message =_('The collection_id you were linked to is not a' \
                            ' valid id.  If you received this error from a' \
                            ' link on the fedoraproject.org website, please' \
                            ' report it.'))
            if request.params.get('tg_format', 'html') != 'json':
                error['tg_template'] = 'pkgdb.templates.errors'
            return error

        ### FIXME: Want to return additional info:
        # date it was created (join log table: creation date)
        # The initial import doesn't have this information, though.
        try:
            #pylint:disable-msg=E1101
            collection_entry = Collection.query.options(
                    lazyload('listings2'), eagerload('status.locale'))\
                    .filter_by(id=collection_id).one()
        except InvalidRequestError:
            # Either the id doesn't exist or somehow it references more than
            # one value
            error = dict(
                status=False,
                title=_('%(app)s -- Invalid Collection Id') %
                {'app': self.app_title},
                message=_('The collection_id you were linked to, %(id)s,'
                          ' does not exist.  If you received this error from'
                          ' a link on the fedoraproject.org website, please'
                          ' report it.') % {'id': collection_id})
            if request.params.get('tg_format', 'html') != 'json':
                error['tg_template'] = 'pkgdb.templates.errors'
            return error

        # Why do we reformat the data returned from the database?
        # 1) We don't need all the information in the collection object
        # 2) We need statusname which is not in the specific table.
        collection = {
            'name': collection_entry.name,
            'version': collection_entry.version,
            'owner': collection_entry.owner,
            'summary': collection_entry.summary,
            'description': collection_entry.description,
            'statusname': collection_entry.status.locale['C'].statusname
        }

        # Retrieve the packagelist for this collection, excluding packages
        # that have been removed from it.
        # pylint:disable-msg=E1101
        packages = Package.query.options(lazyload('listings2.people2'),
                lazyload('listings2.groups2')).join('listings2')\
                        .filter_by(collectionid=collection_id)\
                        .filter(Package.statuscode!=STATUS['Removed'])
        # pylint:enable-msg=E1101

        return dict(
            title='%s -- %s %s' %
            (self.app_title, collection['name'], collection['version']),
            collection=collection,
            packages=packages)
Exemplo n.º 53
0
def schedule():
    """Register all of our periodic background jobs with the scheduler.

    Which jobs run is controlled by the ``jobs`` configuration list: each
    known job name below is scheduled only when it appears there.  All
    intervals are in seconds.
    """
    # Guard against an unset config value (None would make the `in` tests
    # below raise TypeError).
    jobs = config.get('jobs') or []

    # Weekly repository cleanup
    if 'clean_repo' in jobs:
        log.debug("Scheduling clean_repo job")
        scheduler.add_interval_task(action=clean_repo,
                                    taskname="Clean update repositories",
                                    initialdelay=604800,
                                    interval=604800)

    # Daily nagmail
    if 'nagmail' in jobs:
        log.debug("Scheduling nagmail job")
        scheduler.add_weekday_task(action=nagmail,
                                   weekdays=range(1, 8),
                                   timeonday=(0, 0))

    # Fix invalid bug titles
    if 'fix_bug_titles' in jobs:
        log.debug("Scheduling fix_bug_titles job")
        scheduler.add_interval_task(action=fix_bug_titles,
                                    taskname='Fix bug titles',
                                    initialdelay=1200,
                                    interval=604800)

    # Warm up some data caches
    if 'cache_release_data' in jobs:
        log.debug("Scheduling cache_release_data job")
        scheduler.add_interval_task(action=cache_release_data,
                                    taskname='Cache release data',
                                    initialdelay=0,
                                    interval=43200)

    # Costly metric regeneration: only scheduled when the 'masher' config
    # is unset (presumably meaning this instance does its own mashing
    # rather than delegating to a remote masher — confirm intent).
    if not config.get('masher') and 'refresh_metrics' in jobs:
        log.debug("Scheduling refresh_metrics job")
        scheduler.add_interval_task(action=refresh_metrics,
                                    taskname='Refresh our metrics',
                                    initialdelay=7200,
                                    interval=86400)

    # Approve updates that have been in testing for a certain amount of time
    if 'approve_testing_updates' in jobs:
        log.debug("Scheduling approve_testing_updates job")
        scheduler.add_interval_task(
            action=approve_testing_updates,
            # Run every 6 hours
            initialdelay=21600,
            interval=21600)

    # Automatically expire buildroot overrides.
    # Bug fix: this guard previously re-tested 'approve_testing_updates'
    # (copy-paste error), so the expiry job could never be enabled on its
    # own; it now keys off its own job name.
    if 'expire_buildroot_overrides' in jobs:
        log.debug("Scheduling expire_buildroot_overrides job")
        scheduler.add_interval_task(
            action=expire_buildroot_overrides,
            # Run every hour (the old comment said 6 hours; the interval
            # has always been 3600s)
            initialdelay=3600,
            interval=3600)
Exemplo n.º 54
0
    def get_queued_command_details(self):
        '''Return queued power/netboot commands for the caller's lab controller.

        Respects the ``beaker.max_running_commands`` config limit: when the
        number of currently running commands meets or exceeds it, nothing
        is returned; otherwise only enough queued commands are fetched to
        fill the remaining capacity.

        :returns: a list of dicts, one per queued command, with keys
            ``id``, ``action``, ``fqdn``, ``delay``, ``quiescent_period``
            plus a ``power`` dict for power actions or a ``netboot`` dict
            for configure_netboot actions.  Commands that cannot be
            serviced (no power config, no usable distro tree images) are
            aborted as a side effect and skipped.
        '''
        lab_controller = identity.current.user.lab_controller
        max_running_commands = config.get('beaker.max_running_commands')
        if max_running_commands:
            # Count what is already running so we can enforce the cap.
            running_commands = CommandActivity.query\
                    .join(CommandActivity.system)\
                    .filter(System.lab_controller == lab_controller)\
                    .filter(CommandActivity.status == CommandStatus.running)\
                    .count()
            if running_commands >= max_running_commands:
                return []
        query = CommandActivity.query\
                .join(CommandActivity.system)\
                .options(contains_eager(CommandActivity.system))\
                .filter(System.lab_controller == lab_controller)\
                .filter(CommandActivity.status == CommandStatus.queued)\
                .order_by(CommandActivity.id)
        if max_running_commands:
            # Only hand out as many commands as the remaining capacity.
            query = query.limit(max_running_commands - running_commands)
        result = []
        for cmd in query:
            d = {
                'id': cmd.id,
                'action': cmd.action,
                'fqdn': cmd.system.fqdn,
                'delay': 0,
                'quiescent_period': cmd.quiescent_period
            }
            if cmd.delay_until:
                # Seconds until the command may run; clamped at 0 if the
                # delay deadline has already passed.
                d['delay'] = max(
                    0, total_seconds(cmd.delay_until - datetime.utcnow()))
            # Fill in details specific to the type of command
            if cmd.action in (u'on', u'off', u'reboot', u'interrupt'):
                if not cmd.system.power:
                    # Can't run a power command without power config; abort
                    # it so it doesn't sit in the queue forever.
                    cmd.abort(u'Power control unavailable for %s' % cmd.system)
                    continue
                d['power'] = {
                    'type': cmd.system.power.power_type.name,
                    'address': cmd.system.power.power_address,
                    'id': cmd.system.power.power_id,
                    'user': cmd.system.power.power_user,
                    'passwd': cmd.system.power.power_passwd,
                }
            elif cmd.action == u'configure_netboot':
                distro_tree_url = cmd.distro_tree.url_in_lab(
                    lab_controller, scheme=['http', 'ftp'])
                if not distro_tree_url:
                    cmd.abort(
                        u'No usable URL found for distro tree %s in lab %s' %
                        (cmd.distro_tree.id, lab_controller.fqdn))
                    continue

                # U-Boot systems need the u-boot-wrapped image variants.
                if cmd.system.kernel_type.uboot:
                    by_kernel = ImageType.uimage
                    by_initrd = ImageType.uinitrd
                else:
                    by_kernel = ImageType.kernel
                    by_initrd = ImageType.initrd

                kernel = cmd.distro_tree.image_by_type(by_kernel,
                                                       cmd.system.kernel_type)
                if not kernel:
                    cmd.abort(u'Kernel image not found for distro tree %s' %
                              cmd.distro_tree.id)
                    continue
                initrd = cmd.distro_tree.image_by_type(by_initrd,
                                                       cmd.system.kernel_type)
                if not initrd:
                    cmd.abort(u'Initrd image not found for distro tree %s' %
                              cmd.distro_tree.id)
                    continue
                d['netboot'] = {
                    'arch': cmd.distro_tree.arch.arch,
                    'distro_tree_id': cmd.distro_tree.id,
                    'kernel_url': urlparse.urljoin(distro_tree_url,
                                                   kernel.path),
                    'initrd_url': urlparse.urljoin(distro_tree_url,
                                                   initrd.path),
                    'kernel_options': cmd.kernel_options or '',
                }
            result.append(d)
        return result
Exemplo n.º 55
0
def main():
    parser = OptionParser(description=__description__, version=__version__)
    parser.add_option('-c', '--config-file')
    parser.add_option('--debug', action='store_true',
                      help='Show detailed information about image creation')
    parser.add_option('--no-upload', dest='upload', action='store_false',
                      help='Skip uploading to Glance, leave image temp file on disk')
    parser.add_option('--os-username', help='OpenStack username')
    parser.add_option('--os-password', help='OpenStack password')
    parser.add_option('--os-tenant-name', help=SUPPRESS_HELP)
    parser.add_option('--os-project-name', help='OpenStack project name')
    parser.add_option('--os-project-domain-name', help='OpenStack project domain name')
    parser.add_option('--os-user-domain-name', help='OpenStack user domain name')
    parser.add_option('--image-visibility', help='OpenStack Image visibility',
                      type='choice',
                      choices=['public', 'private', 'shared', 'community'],
                      default='public',
                      )
    parser.set_defaults(debug=False, upload=True)
    options, args = parser.parse_args()
    load_config_or_exit(options.config_file)
    log_to_stream(sys.stderr, level=logging.DEBUG if options.debug else logging.WARNING)

    if options.upload:
        if not has_keystoneclient:
            raise RuntimeError('python-keystoneclient is not installed')
        if not has_glanceclient:
            raise RuntimeError('python-glanceclient is not installed')
        # Get a Glance client. This seems more difficult than it should be...
        username = options.os_username or os.environ.get('OS_USERNAME')
        if not username:
            parser.error('Specify username with --os-username or env[OS_USERNAME]')
        password = options.os_password or os.environ.get('OS_PASSWORD')
        if not password:
            parser.error('Specify password with --os-password or env[OS_PASSWORD]')
        project_name = options.os_project_name or os.environ.get('OS_PROJECT_NAME')
        # for backwards compat
        if not project_name:
            project_name = options.os_tenant_name or os.environ.get('OS_TENANT_NAME')
        if not project_name:
            parser.error('Specify project with --os-project-name or env[OS_PROJECT_NAME]')

        auth_url = config.get('openstack.identity_api_url')
        if not auth_url:
            parser.error('OpenStack Identity API URL is not set in the configuration')

        user_domain_name = options.os_user_domain_name or \
                           os.environ.get('OS_USER_DOMAIN_NAME')
        project_domain_name = options.os_project_domain_name or \
                              os.environ.get('OS_PROJECT_DOMAIN_NAME')

        log.debug('Authenticating to Keystone')
        keystone = keystoneclient.v3.client.Client(
            username=username,
            password=password,
            project_name=project_name,
            user_domain_name=user_domain_name,
            project_domain_name=project_domain_name,
            auth_url=auth_url)

        log.debug('Looking up Glance URL in service catalog')
        glance_url = keystone.service_catalog.url_for(service_type='image',
                                                      endpoint_type='publicURL')
        log.debug('Using Glance URL %s', glance_url)
        glance = glanceclient.v2.client.Client(glance_url, token=keystone.auth_token)
        # Generate and upload the image.
        with session.begin():
            upload_image(glance, visibility=options.image_visibility)
    else:
        print generate_image(delete=False).name
Exemplo n.º 56
0
 def can_edit_keystone_trust(self, user):
     """
     Is the given user permitted to change this user's OpenStack Keystone trust?

     Trust editing is only meaningful when an OpenStack Identity API URL
     is configured; beyond that the ordinary edit permission applies.
     """
     openstack_configured = bool(get('openstack.identity_api_url'))
     return openstack_configured and self.can_edit(user)
Exemplo n.º 57
0
 def can_edit_ldap(self, user):
     """
     Is the given user permitted to edit LDAP settings?

     Requires admin rights, and the LDAP identity backend must be enabled
     in configuration ('identity.ldap.enabled', default False).
     """
     is_admin = user.is_admin()
     return is_admin and get('identity.ldap.enabled', False)
Exemplo n.º 58
0
suite.addTests(unittest.TestLoader().loadTestsFromModule(file_exists_state))

if __name__ == "__main__":
    #grab the config file
    if len(sys.argv) > 1:
        update_config(configfile=sys.argv[1], modulename="roboide.config")
    else:
        sys.exit('No config file specified')

    ide_run_cmd = './start-roboide.py'
    run_proc = subprocess.Popen([ide_run_cmd, sys.argv[1]],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)

    #check that the IDE is running
    port = config.get('server.socket_port')
    host = config.get('server.socket_host')
    conn = httplib.HTTPConnection(host, port)
    done = False
    while not done:
        try:
            conn.connect()
            done = True
        except socket.error:
            print 'Connection refused on %s:%s. Waiting for the IDE to start.' % (
                host, port)
            print 'Retrying connection.'
            time.sleep(2)
    conn.close()

    #Run the tests
Exemplo n.º 59
0
def url(*args, **kw):
    """Build a URL, delegating to the CSRF-aware helper when one of the
    CSRF-protected identity providers is configured, and to the plain
    TurboGears url() otherwise.
    """
    provider = config.get('identity.provider')
    builder = csrf_url if provider in ('sqlobjectcsrf', 'jsonfas2') else tg_url
    return builder(*args, **kw)
Exemplo n.º 60
0
)

# Join table linking a visit (session) to the authenticated person.
# NOTE(review): the 'ssl' flag presumably records whether the visit was
# authenticated over SSL -- inferred from the column name; confirm.
visit_identity_table = Table('visit_identity', metadata,
    Column('visit_key', String(40), ForeignKey('visit.visit_key'),
        primary_key=True),
    Column('user_id', Integer, ForeignKey('people.id'), index=True),
    Column('ssl', Boolean)
)

# Database sequence; presumably backs People.certificate_serial -- TODO confirm.
serial_seq = Sequence('serial_seq')

#
# Mapped Classes
#

# Well-known group names, read from configuration with FAS-style defaults.
admin_group = config.get('admingroup', 'accounts')
system_group = config.get('systemgroup', 'fas-system')
thirdparty_group = config.get('thirdpartygroup', 'thirdparty')

class People(SABase):
    '''Records for all the contributors to Fedora.'''

    # Map the people fields that various classes of users are allowed to retrieve
    allow_fields = {
        # This is the complete list of fields
        'complete': ('id', 'username', 'human_name', 'gpg_keyid', 'ssh_key',
            'password', 'passwordtoken', 'password_changed', 'email',
            'emailtoken', 'unverified_email', 'comments', 'postal_address',
            'telephone', 'facsimile', 'affiliation', 'certificate_serial',
            'creation', 'internal_comments', 'ircnick', 'last_seen', 'status',
            'status_change', 'locale', 'timezone', 'latitude', 'longitude',