    def _setup_firewall(self, ri, fw):
        client = self._get_vyatta_client(ri.router)

        fw_cmd_list = []

        # Create firewall
        fw_name = vyatta_utils.get_firewall_name(ri, fw)
        fw_cmd_list.append(
            vyatta_client.SetCmd(
                FW_NAME.format(parse.quote_plus(fw_name))))

        if fw.get('description'):
            fw_cmd_list.append(vyatta_client.SetCmd(
                FW_DESCRIPTION.format(
                    parse.quote_plus(fw_name),
                    parse.quote_plus(fw['description']))))

        # Set firewall state policy
        fw_cmd_list.append(vyatta_client.SetCmd(FW_ESTABLISHED_ACCEPT))
        fw_cmd_list.append(vyatta_client.SetCmd(FW_RELATED_ACCEPT))

        # Create firewall rules
        rule_num = 0
        for rule in fw['firewall_rule_list']:
            if not rule['enabled']:
                continue
            if rule['ip_version'] == 4:
                rule_num += 1
                fw_cmd_list += self._set_firewall_rule(fw_name, rule_num, rule)
            else:
                LOG.warn(_LW("IPv6 rules are not supported."))

        # Configure router zones
        zone_cmd_list = vyatta_utils.get_zone_cmds(client, ri, fw_name)
        client.exec_cmd_batch(fw_cmd_list + zone_cmd_list)
Example 2
    def _setup_optimizer(self, ri, opt):
        client = self._get_vyatta_client(ri.router)

        opt_cmd_list = []

        # Create optimizer
        opt_name = vyatta_utils.get_optimizer_name(ri, opt)
        opt_cmd_list.append(
            vyatta_client.SetCmd(
                FW_NAME.format(parse.quote_plus(opt_name))))

        if opt.get('description'):
            opt_cmd_list.append(vyatta_client.SetCmd(
                FW_DESCRIPTION.format(
                    parse.quote_plus(opt_name),
                    parse.quote_plus(opt['description']))))

        # Set optimizer state policy
        opt_cmd_list.append(vyatta_client.SetCmd(FW_ESTABLISHED_ACCEPT))
        opt_cmd_list.append(vyatta_client.SetCmd(FW_RELATED_ACCEPT))

        # Create optimizer rules
        rule_num = 0
        for rule in opt['optimizer_rule_list']:
            if not rule['enabled']:
                continue
            if rule['ip_version'] == 4:
                rule_num += 1
                opt_cmd_list += self._set_optimizer_rule(opt_name, rule_num, rule)
            else:
                LOG.warn(_LW("IPv6 rules are not supported."))

        # Configure router zones
        zone_cmd_list = vyatta_utils.get_zone_cmds(client, ri, opt_name)
        client.exec_cmd_batch(opt_cmd_list + zone_cmd_list)
Example 3
def get_raw_yaml_from_repo(repo, full_name, commit_sha):
    # type: (Repo_ish, Text, bytes) -> Any
    """Return decoded YAML data structure from
    the given file in *repo* at *commit_sha*.

    :arg commit_sha: A byte string containing the commit hash
    """

    from six.moves.urllib.parse import quote_plus
    cache_key = "%RAW%%2".join((
        CACHE_KEY_ROOT,
        quote_plus(repo.controldir()), quote_plus(full_name), commit_sha.decode(),
        ))

    import django.core.cache as cache
    def_cache = cache.caches["default"]

    result = None  # type: Optional[Any]
    # Memcache is apparently limited to 250 characters.
    if len(cache_key) < 240:
        result = def_cache.get(cache_key)
    if result is not None:
        return result

    yaml_str = expand_yaml_macros(
                repo, commit_sha,
                get_repo_blob(repo, full_name, commit_sha,
                    allow_tree=False).data)

    result = load_yaml(yaml_str)  # type: ignore

    def_cache.add(cache_key, result, None)

    return result
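Example 4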
    def test_get_console_url(self, mock_get):
        token = "abcdefg123"
        callbackurl = ""
        mock_get.return_value = Mock(text=u'{"SigninToken": "%s"}' % token,
                                     status_code=200,
                                     reason="Ok")
        url_template = ("https://signin.aws.amazon.com/federation?"
                        "Action=login&"
                        "Issuer={callbackurl}&"
                        "Destination=https%3A%2F%2Fconsole.aws.amazon.com%2F&"
                        "SigninToken={token}")
        expected_url = url_template.format(callbackurl=quote_plus(callbackurl),
                                           token=token).encode()

        result = self.app.get('/account/testaccount/testrole/consoleurl')
        self.assertEqual(result.status_int, 200)
        self.assertEqual(result.body, expected_url)

        # Check if the Callback URL is set
        callbackurl = "https://www.foobar.invalid"
        expected_url = url_template.format(callbackurl=quote_plus(callbackurl),
                                           token=token).encode()
        result = self.app.get('/account/testaccount/testrole/consoleurl',
                              {'callbackurl': callbackurl})
        self.assertEqual(result.status_int, 200)
        self.assertEqual(result.body, expected_url)
Example 5
def get_raw_yaml_from_repo(repo, full_name, commit_sha):
    """Return decoded YAML data structure from
    the given file in *repo* at *commit_sha*.

    :arg commit_sha: A byte string containing the commit hash
    """

    from six.moves.urllib.parse import quote_plus
    cache_key = "%RAW%%2".join((
        quote_plus(repo.controldir()), quote_plus(full_name), commit_sha.decode()))

    import django.core.cache as cache
    def_cache = cache.caches["default"]
    result = None
    # Memcache is apparently limited to 250 characters.
    if len(cache_key) < 240:
        result = def_cache.get(cache_key)
    if result is not None:
        return result

    result = load_yaml(
            expand_yaml_macros(
                repo, commit_sha,
                get_repo_blob(repo, full_name, commit_sha).data))

    def_cache.add(cache_key, result, None)

    return result
Example 6
def tribler_urlencode_single(key, value):
    utf8_key = quote_plus(text_type(key).encode('utf-8'))
    # Convert bool values to ints
    if isinstance(value, bool):
        value = int(value)
    utf8_value = quote_plus(text_type(value).encode('utf-8'))
    return "%s=%s" % (utf8_key, utf8_value)
Example 7
def get_yaml_from_repo(repo, full_name, commit_sha, cached=True):
    """Return decoded, struct-ified YAML data structure from
    the given file in *repo* at *commit_sha*.

    See :class:`relate.utils.Struct` for more on
    struct-ification.
    """

    if cached:
        from six.moves.urllib.parse import quote_plus
        cache_key = "%%%2".join(
                (quote_plus(repo.controldir()), quote_plus(full_name),
                    commit_sha.decode()))

        import django.core.cache as cache
        def_cache = cache.caches["default"]
        result = None
        # Memcache is apparently limited to 250 characters.
        if len(cache_key) < 240:
            result = def_cache.get(cache_key)
        if result is not None:
            return result

    expanded = expand_yaml_macros(
            repo, commit_sha,
            get_repo_blob(repo, full_name, commit_sha).data)

    result = dict_to_struct(load_yaml(expanded))

    if cached:
        def_cache.add(cache_key, result, None)

    return result
Example 8
def _process_repo(repo, runtime_storage_inst, record_processor_inst, rcs_inst):
    uri = repo["uri"]
    LOG.info("Processing repo uri: %s", uri)

    LOG.debug("Processing blueprints for repo uri: %s", uri)
    bp_iterator = lp.log(repo)
    bp_iterator_typed = _record_typer(bp_iterator, "bp")
    processed_bp_iterator = record_processor_inst.process(bp_iterator_typed)
    runtime_storage_inst.set_records(processed_bp_iterator, utils.merge_records)

    LOG.debug("Processing bugs for repo uri: %s", uri)
    current_date = utils.date_to_timestamp("now")
    bug_modified_since = runtime_storage_inst.get_by_key("bug_modified_since-%s" % repo["module"])

    bug_iterator = bps.log(repo, bug_modified_since)
    bug_iterator_typed = _record_typer(bug_iterator, "bug")
    processed_bug_iterator = record_processor_inst.process(bug_iterator_typed)
    runtime_storage_inst.set_records(processed_bug_iterator, utils.merge_records)

    runtime_storage_inst.set_by_key("bug_modified_since-%s" % repo["module"], current_date)

    vcs_inst = vcs.get_vcs(repo, cfg.CONF.sources_root)
    vcs_inst.fetch()

    branches = {repo.get("default_branch", "master")}
    for release in repo.get("releases"):
        if "branch" in release:
            branches.add(release["branch"])

    for branch in branches:
        LOG.debug("Processing commits in repo: %s, branch: %s", uri, branch)

        vcs_key = "vcs:" + str(parse.quote_plus(uri) + ":" + branch)
        last_id = runtime_storage_inst.get_by_key(vcs_key)

        commit_iterator = vcs_inst.log(branch, last_id)
        commit_iterator_typed = _record_typer(commit_iterator, "commit")
        processed_commit_iterator = record_processor_inst.process(commit_iterator_typed)
        runtime_storage_inst.set_records(processed_commit_iterator, _merge_commits)

        last_id = vcs_inst.get_last_id(branch)
        runtime_storage_inst.set_by_key(vcs_key, last_id)

        LOG.debug("Processing reviews for repo: %s, branch: %s", uri, branch)

        rcs_key = "rcs:" + str(parse.quote_plus(uri) + ":" + branch)
        last_id = runtime_storage_inst.get_by_key(rcs_key)

        review_iterator = rcs_inst.log(repo, branch, last_id, grab_comments=("ci" in repo))
        review_iterator_typed = _record_typer(review_iterator, "review")

        if "ci" in repo:  # add external CI data
            review_iterator_typed = _process_reviews(review_iterator_typed, repo["ci"], repo["module"], branch)

        processed_review_iterator = record_processor_inst.process(review_iterator_typed)
        runtime_storage_inst.set_records(processed_review_iterator, utils.merge_records)

        last_id = rcs_inst.get_last_id(repo, branch)
        runtime_storage_inst.set_by_key(rcs_key, last_id)
Example 9
def url_quote_plus(v, name='(Unknown name)', md={}):
    if six.PY2 and isinstance(v, unicode):
        # quote_plus does not handle unicode. Encoding to a "safe"
        # intermediate encoding before quoting, then unencoding the result.
        return quote_plus(v.encode('utf-8')).decode('utf-8')
    elif six.PY3 and isinstance(v, bytes):
        return quote_plus(v.decode('utf-8')).encode('utf-8')
    return quote_plus(str(v))
Example 10
def encode_uri(uri):
    split = list(urlsplit(uri))
    split[1] = split[1].encode('idna').decode('ascii')
    split[2] = quote_plus(split[2].encode('utf-8'), '/').decode('ascii')
    query = list((q, quote_plus(v.encode('utf-8')))
                 for (q, v) in parse_qsl(split[3]))
    split[3] = urlencode(query).decode('ascii')
    return urlunsplit(split)
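The snippet above is Python 2 code (there, quote_plus returns a byte string, hence the explicit .decode('ascii') calls). A minimal Python 3 sketch of the same idea, with encode_uri_py3 as an illustrative name:

from urllib.parse import urlsplit, urlunsplit, parse_qsl, urlencode, quote_plus

def encode_uri_py3(uri):
    parts = list(urlsplit(uri))
    parts[1] = parts[1].encode('idna').decode('ascii')  # IDNA-encode the host
    parts[2] = quote_plus(parts[2], safe='/')           # quote the path, keep '/'
    parts[3] = urlencode(parse_qsl(parts[3]))           # re-encode the query
    return urlunsplit(parts)

print(encode_uri_py3("http://bücher.example/a b?q=ä"))
# http://xn--bcher-kva.example/a+b?q=%C3%A4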
Example 11
    def _generate_connection_uri(self):
        if self.use_proxy:
            if self.sql_proxy_use_tcp:
                if not self.sql_proxy_tcp_port:
                    self.reserve_free_tcp_port()
            if not self.sql_proxy_unique_path:
                self.sql_proxy_unique_path = self._generate_unique_path()

        database_uris = CONNECTION_URIS[self.database_type]
        ssl_spec = None
        socket_path = None
        if self.use_proxy:
            proxy_uris = database_uris['proxy']
            if self.sql_proxy_use_tcp:
                format_string = proxy_uris['tcp']
            else:
                format_string = proxy_uris['socket']
                socket_path = \
                    "{sql_proxy_socket_path}/{instance_socket_name}".format(
                        sql_proxy_socket_path=self.sql_proxy_unique_path,
                        instance_socket_name=self._get_instance_socket_name()
                    )
        else:
            public_uris = database_uris['public']
            if self.use_ssl:
                format_string = public_uris['ssl']
                ssl_spec = {
                    'cert': self.sslcert,
                    'key': self.sslkey,
                    'ca': self.sslrootcert
                }
            else:
                format_string = public_uris['non-ssl']
        if not self.user:
            raise AirflowException("The login parameter needs to be set in connection")
        if not self.public_ip:
            raise AirflowException("The location parameter needs to be set in connection")
        if not self.password:
            raise AirflowException("The password parameter needs to be set in connection")
        if not self.database:
            raise AirflowException("The database parameter needs to be set in connection")

        connection_uri = format_string.format(
            user=quote_plus(self.user) if self.user else '',
            password=quote_plus(self.password) if self.password else '',
            database=quote_plus(self.database) if self.database else '',
            public_ip=self.public_ip,
            public_port=self.public_port,
            proxy_port=self.sql_proxy_tcp_port,
            socket_path=self._quote(socket_path),
            ssl_spec=self._quote(json.dumps(ssl_spec)) if ssl_spec else '',
            client_cert_file=self._quote(self.sslcert) if self.sslcert else '',
            client_key_file=self._quote(self.sslkey) if self.sslkey else '',
            server_ca_file=self._quote(self.sslrootcert) if self.sslrootcert else ''
        )
        self.log.info("DB connection URI %s", connection_uri.replace(
            quote_plus(self.password) if self.password else 'PASSWORD', 'XXXXXXXXXXXX'))
        return connection_uri
Example 12
def to_query(object, key=None):
    """ Dumps a dictionary into a nested query string."""
    object_type = type(object)
    if object_type is dict:
        return '&'.join([to_query(object[k], '%s[%s]' % (key, k) if key else k) for k in sorted(object)])
    elif object_type in (list, tuple):
        return '&'.join([to_query(o, '%s[]' % key) for o in object])
    else:
        return '%s=%s' % (quote_plus(str(key)), quote_plus(str(object)))
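For reference, a self-contained Python 3 restatement of the same logic showing what the nesting produces; note that the bracketed keys are themselves percent-encoded by quote_plus:

from urllib.parse import quote_plus

def to_query(obj, key=None):
    if isinstance(obj, dict):
        return '&'.join(to_query(obj[k], '%s[%s]' % (key, k) if key else k)
                        for k in sorted(obj))
    if isinstance(obj, (list, tuple)):
        return '&'.join(to_query(o, '%s[]' % key) for o in obj)
    return '%s=%s' % (quote_plus(str(key)), quote_plus(str(obj)))

print(to_query({'user': {'name': 'a b', 'tags': [1, 2]}}))
# user%5Bname%5D=a+b&user%5Btags%5D%5B%5D=1&user%5Btags%5D%5B%5D=2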
Example 13
    def get_display_url(self, data, trans):
        dataset_hash, user_hash = encode_dataset_user(trans, data, None)
        return url_for(controller='dataset',
                       action="display_application",
                       dataset_id=dataset_hash,
                       user_id=user_hash,
                       app_name=quote_plus(self.display_application.id),
                       link_name=quote_plus(self.id),
                       app_action=None)
Example 14
def get_repo_blob_data_cached(repo, full_name, commit_sha):
    # type: (Repo_ish, Text, bytes) -> bytes
    """
    :arg commit_sha: A byte string containing the commit hash
    """

    if isinstance(commit_sha, six.binary_type):
        from six.moves.urllib.parse import quote_plus
        cache_key = "%s%R%1".join((
            CACHE_KEY_ROOT,
            quote_plus(repo.controldir()),
            quote_plus(full_name),
            commit_sha.decode(),
            ".".join(str(s) for s in sys.version_info[:2]),
            ))  # type: Optional[Text]
    else:
        cache_key = None

    try:
        import django.core.cache as cache
    except ImproperlyConfigured:
        cache_key = None

    result = None  # type: Optional[bytes]
    if cache_key is None:
        result = get_repo_blob(repo, full_name, commit_sha,
                allow_tree=False).data
        assert isinstance(result, six.binary_type)
        return result

    # Byte string is wrapped in a tuple to force pickling because memcache's
    # python wrapper appears to auto-decode/encode string values, thus trying
    # to decode our byte strings. Grr.

    def_cache = cache.caches["default"]

    # Memcache is apparently limited to 250 characters.
    if len(cache_key) < 240:
        cached_result = def_cache.get(cache_key)

        if cached_result is not None:
            (result,) = cached_result
            assert isinstance(result, six.binary_type), cache_key
            return result

    result = get_repo_blob(repo, full_name, commit_sha,
            allow_tree=False).data
    assert result is not None

    if len(result) <= getattr(settings, "RELATE_CACHE_MAX_BYTES", 0):
        def_cache.add(cache_key, (result,), None)

    assert isinstance(result, six.binary_type)

    return result
Example 15
def get_repo_keys(memcached_inst):
    for repo in (memcached_inst.get('repos') or []):
        uri = repo['uri']
        branches = set(['master'])
        for release in repo.get('releases'):
            if 'branch' in release:
                branches.add(release['branch'])

        for branch in branches:
            yield 'vcs:' + str(parse.quote_plus(uri) + ':' + branch)
            yield 'rcs:' + str(parse.quote_plus(uri) + ':' + branch)
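Example 16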
def test_oauth2_get_authorize_url_native():
    """
    Starts an auth flow with a NativeAppFlowManager, gets the authorize url
    and validates expected results with both default and specified parameters.
    """
    ac = globus_sdk.AuthClient(client_id=CLIENT_ID)

    # default parameters for starting auth flow
    flow_manager = globus_sdk.auth.GlobusNativeAppFlowManager(ac)
    ac.current_oauth2_flow_manager = flow_manager

    # get url and validate results
    url_res = ac.oauth2_get_authorize_url()
    expected_vals = [
        ac.base_url + "v2/oauth2/authorize?",
        "client_id=" + ac.client_id,
        "redirect_uri=" + quote_plus(ac.base_url + "v2/web/auth-code"),
        "scope=" + quote_plus(" ".join(DEFAULT_REQUESTED_SCOPES)),
        "state=" + "_default",
        "response_type=" + "code",
        "code_challenge=" + quote_plus(flow_manager.challenge),
        "code_challenge_method=" + "S256",
        "access_type=" + "online",
    ]
    for val in expected_vals:
        assert val in url_res

    # starting flow with specified parameters
    flow_manager = globus_sdk.auth.GlobusNativeAppFlowManager(
        ac,
        requested_scopes="scopes",
        redirect_uri="uri",
        state="state",
        verifier=("a" * 43),
        refresh_tokens=True,
    )
    ac.current_oauth2_flow_manager = flow_manager

    # get url and validate results
    url_res = ac.oauth2_get_authorize_url()
    verifier, remade_challenge = make_native_app_challenge("a" * 43)
    expected_vals = [
        ac.base_url + "v2/oauth2/authorize?",
        "client_id=" + ac.client_id,
        "redirect_uri=" + "uri",
        "scope=" + "scopes",
        "state=" + "state",
        "response_type=" + "code",
        "code_challenge=" + quote_plus(remade_challenge),
        "code_challenge_method=" + "S256",
        "access_type=" + "offline",
    ]
    for val in expected_vals:
        assert val in url_res
Example 17
def process_repo(repo, runtime_storage_inst, record_processor_inst):
    uri = repo['uri']
    LOG.debug('Processing repo uri %s' % uri)

    bp_iterator = lp.log(repo)
    bp_iterator_typed = _record_typer(bp_iterator, 'bp')
    processed_bp_iterator = record_processor_inst.process(
        bp_iterator_typed)
    runtime_storage_inst.set_records(processed_bp_iterator,
                                     utils.merge_records)

    vcs_inst = vcs.get_vcs(repo, cfg.CONF.sources_root)
    vcs_inst.fetch()

    rcs_inst = rcs.get_rcs(repo, cfg.CONF.review_uri)
    rcs_inst.setup(key_filename=cfg.CONF.ssh_key_filename,
                   username=cfg.CONF.ssh_username)

    branches = set(['master'])
    for release in repo.get('releases'):
        if 'branch' in release:
            branches.add(release['branch'])

    for branch in branches:
        LOG.debug('Processing repo %s, branch %s', uri, branch)

        vcs_key = 'vcs:' + str(parse.quote_plus(uri) + ':' + branch)
        last_id = runtime_storage_inst.get_by_key(vcs_key)

        commit_iterator = vcs_inst.log(branch, last_id)
        commit_iterator_typed = _record_typer(commit_iterator, 'commit')
        processed_commit_iterator = record_processor_inst.process(
            commit_iterator_typed)
        runtime_storage_inst.set_records(
            processed_commit_iterator, _merge_commits)

        last_id = vcs_inst.get_last_id(branch)
        runtime_storage_inst.set_by_key(vcs_key, last_id)

        LOG.debug('Processing reviews for repo %s, branch %s', uri, branch)

        rcs_key = 'rcs:' + str(parse.quote_plus(uri) + ':' + branch)
        last_id = runtime_storage_inst.get_by_key(rcs_key)

        review_iterator = rcs_inst.log(branch, last_id)
        review_iterator_typed = _record_typer(review_iterator, 'review')
        processed_review_iterator = record_processor_inst.process(
            review_iterator_typed)
        runtime_storage_inst.set_records(processed_review_iterator,
                                         utils.merge_records)

        last_id = rcs_inst.get_last_id(branch)
        runtime_storage_inst.set_by_key(rcs_key, last_id)
Example 18
def _get_tomahawk_url(metadata):
    """Generates URL for iframe with Tomahawk embedded player.

    See http://toma.hk/tools/embeds.php for more info.
    """
    if not ('artist' in metadata and 'title' in metadata):
        return None
    else:
        return "http://toma.hk/embed.php?artist={artist}&title={title}".format(
            artist=quote_plus(metadata['artist'].encode("UTF-8")),
            title=quote_plus(metadata['title'].encode("UTF-8")),
        )
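Note that quote_plus accepts bytes as well as text: UTF-8 input is percent-encoded byte by byte.

from urllib.parse import quote_plus

print(quote_plus(u"Café del Mar".encode("UTF-8")))  # Caf%C3%A9+del+Mar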
Example 19
    def _alternatives(self, account, container, obj):
        # put S3 parts in dedicated container
        suffix = ("+segments" if container and container.endswith('+segments')
                  else "")
        if obj is None:
            yield account, container, obj
        elif self.stop_at_first_match:
            # TODO(FVE): when supported, also pass account to con_builder()
            yield account, quote_plus(self.con_builder(obj)) + suffix, obj
        else:
            for alt_container in self.con_builder.alternatives(obj):
                yield account, quote_plus(alt_container) + suffix, obj
        return  # PEP 479: raising StopIteration inside a generator breaks on Python 3.7+
Example 20
    def encode(self):
        delimiter = '&'
        pairs = []
        for key, value in self.params.iterallitems():
            utf8key = utf8(key, utf8(str(key)))
            utf8value = utf8(value, utf8(str(value)))
            quoted_key = quote_plus(utf8key, self.SAFE_KEY_CHARS)
            quoted_value = quote_plus(utf8value, self.SAFE_VALUE_CHARS)
            pair = '='.join([quoted_key, quoted_value])
            if value is None:  # Example: http://sprop.su/?param
                pair = quoted_key
            pairs.append(pair)
        return delimiter.join(pairs)
Example 21
    def url(self):
        base_url = self.trans.request.base
        if self.parameter.strip_https and base_url[:5].lower() == 'https':
            base_url = "http%s" % base_url[5:]
        return "%s%s" % (base_url,
                         url_for(controller='dataset',
                                 action="display_application",
                                 dataset_id=self._dataset_hash,
                                 user_id=self._user_hash,
                                 app_name=quote_plus(self.parameter.link.display_application.id),
                                 link_name=quote_plus(self.parameter.link.id),
                                 app_action=self.action_name,
                                 action_param=self._url))
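Example 22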
def _get_youtube_query(metadata):
    """Generates a query string to search youtube for this song

    See https://developers.google.com/youtube/player_parameters#Manual_IFrame_Embeds
    for more info.
    """
    if not ('artist' in metadata and 'title' in metadata):
        return None
    else:
        return "{artist}+{title}".format(
            artist=quote_plus(metadata['artist'].encode("UTF-8")),
            title=quote_plus(metadata['title'].encode("UTF-8")),
        )
Example 23
    def get_messaging_history_errors_url(self, messaging_history_url):
        url_param_tuples = [
            ('startdate', (self.domain_now.date() - timedelta(days=6)).strftime('%Y-%m-%d')),
            ('enddate', self.domain_now.date().strftime('%Y-%m-%d')),
            (EventStatusFilter.slug, MessagingEvent.STATUS_ERROR),
        ]

        for event_type, description in EventTypeFilter.options:
            url_param_tuples.append((EventTypeFilter.slug, event_type))

        url_param_list = ['%s=%s' % (quote_plus(name), quote_plus(value)) for name, value in url_param_tuples]
        url_param_str = '&'.join(url_param_list)

        return '%s?%s' % (messaging_history_url, url_param_str)
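Example 24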
    def _generate_connection_uri(self):
        if self.use_proxy:
            if self.sql_proxy_use_tcp:
                if not self.sql_proxy_tcp_port:
                    self.reserve_free_tcp_port()
            if not self.sql_proxy_unique_path:
                self.sql_proxy_unique_path = self._generate_unique_path()

        database_uris = CONNECTION_URIS[self.database_type]
        ssl_spec = None
        socket_path = None
        if self.use_proxy:
            proxy_uris = database_uris['proxy']
            if self.sql_proxy_use_tcp:
                format_string = proxy_uris['tcp']
            else:
                format_string = proxy_uris['socket']
                socket_path = \
                    "{sql_proxy_socket_path}/{instance_socket_name}".format(
                        sql_proxy_socket_path=self.sql_proxy_unique_path,
                        instance_socket_name=self._get_instance_socket_name()
                    )
        else:
            public_uris = database_uris['public']
            if self.use_ssl:
                format_string = public_uris['ssl']
                ssl_spec = {
                    'cert': self.sslcert,
                    'key': self.sslkey,
                    'ca': self.sslrootcert
                }
            else:
                format_string = public_uris['non-ssl']

        connection_uri = format_string.format(
            user=quote_plus(self.user),
            password=quote_plus(self.password),
            database=quote_plus(self.database),
            public_ip=self.public_ip,
            public_port=self.public_port,
            proxy_port=self.sql_proxy_tcp_port,
            socket_path=self._quote(socket_path),
            ssl_spec=self._quote(json.dumps(ssl_spec)) if ssl_spec else None,
            client_cert_file=self._quote(self.sslcert),
            client_key_file=self._quote(self.sslkey),
            server_ca_file=self._quote(self.sslrootcert)
        )
        self.log.info("DB connection URI {}".format(connection_uri.replace(
            quote_plus(self.password), 'XXXXXXXXXXXX')))
        return connection_uri
Example 25
    def instance_url(self):
        self.id = util.utf8(self.id)
        extn = quote_plus(self.id)
        if hasattr(self, "customer"):
            self.customer = util.utf8(self.customer)

            base = Customer.class_url()
            owner_extn = quote_plus(self.customer)

        else:
            raise error.InvalidRequestError(
                "Could not determine whether card_id %s is " "attached to a customer " "or a recipient." % self.id, "id"
            )

        return "%s/%s/cards/%s" % (base, owner_extn, extn)
Example 26
def selflink(query_dict, linkmode):
    """
    query_dict is a dict of all the cgi parameters
    linkmode is the name of the query field to include
    """
    l = ['query=' + quote_plus(linkmode)]
    for i in sorted(query_dict.keys()):
        v = query_dict[i]
        if v is None:
            continue
        if not isinstance(v, list):
            v = [v]
        for v in v:
            l.append(i + '=' + quote_plus(str(v)))
    return get_cgi_name() + "?" + ('&'.join(l))
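Example 27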
    def _construct_console_url(self, signin_token, callback_url):
        """Construct and return string with URL to aws console"""
        # Create URL that will let users sign in to the console using the
        # sign-in token. This URL must be used within 15 minutes of when the
        # sign-in token was issued.
        request_url_template = (
            "https://signin.aws.amazon.com/federation"
            "?Action=login"
            "&Issuer={callbackurl}"
            "&Destination={destination}"
            "&SigninToken={signin_token}")
        return request_url_template.format(
            callbackurl=quote_plus(callback_url),
            destination=quote_plus("https://console.aws.amazon.com/"),
            signin_token=signin_token)
Example 28
def get_yaml_from_repo(repo, full_name, commit_sha, cached=True):
    # type: (Repo_ish, Text, bytes, bool) -> Any

    """Return decoded, struct-ified YAML data structure from
    the given file in *repo* at *commit_sha*.

    See :class:`relate.utils.Struct` for more on
    struct-ification.
    """

    if cached:
        try:
            import django.core.cache as cache
        except ImproperlyConfigured:
            cached = False
        else:
            from six.moves.urllib.parse import quote_plus
            cache_key = "%%%2".join(
                    (CACHE_KEY_ROOT,
                        quote_plus(repo.controldir()), quote_plus(full_name),
                        commit_sha.decode()))

            def_cache = cache.caches["default"]
            result = None
            # Memcache is apparently limited to 250 characters.
            if len(cache_key) < 240:
                result = def_cache.get(cache_key)
            if result is not None:
                return result

    yaml_bytestream = get_repo_blob(
            repo, full_name, commit_sha, allow_tree=False).data
    yaml_text = yaml_bytestream.decode("utf-8")

    if LINE_HAS_INDENTING_TABS_RE.search(yaml_text):
        raise ValueError("File uses tabs in indentation. "
                "This is not allowed.")

    expanded = expand_yaml_macros(
            repo, commit_sha, yaml_bytestream)

    yaml_data = load_yaml(expanded)  # type:ignore
    result = dict_to_struct(yaml_data)

    if cached:
        def_cache.add(cache_key, result, None)

    return result
Example 29
    def sn(self, query_str):
        """
        Compute the sn signature for the query string.
        """
        encoded_str = quote(query_str, safe=self.safe_chars)
        raw_str = encoded_str + self.__sk
        return hashlib.md5(quote_plus(raw_str).encode('utf-8')).hexdigest()
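A standalone sketch of the same signing scheme; sign, sk, and safe_chars here stand in for the method and the instance attributes self.__sk and self.safe_chars, whose values are application-specific:

import hashlib
from urllib.parse import quote, quote_plus

def sign(query_str, sk, safe_chars=''):
    # percent-encode the query, append the secret key, then md5 the re-quoted string
    encoded = quote(query_str, safe=safe_chars)
    return hashlib.md5(quote_plus(encoded + sk).encode('utf-8')).hexdigest()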
Example 30
    def get(self, request):
        """Render the certificates support view. """
        context = {
            "user_filter": unquote(quote_plus(request.GET.get("user", ""))),
            "course_filter": request.GET.get("course_id", "")
        }
        return render_to_response("support/certificates.html", context)
Example 31
def test_get_correct_authorization_url(redirect_url):
    # pylint:disable=redefined-outer-name
    fake_client_id = 'fake_client_id'
    fake_client_secret = 'fake_client_secret'
    oauth2 = OAuth2(
        client_id=fake_client_id,
        client_secret=fake_client_secret,
    )
    auth_url, csrf_token = oauth2.get_authorization_url(
        redirect_url=redirect_url)
    expected_auth_url_format = '{0}?state={1}&response_type=code&client_id={2}'
    if redirect_url:
        expected_auth_url_format += '&redirect_uri={3}'
    assert auth_url == expected_auth_url_format.format(
        API.OAUTH2_AUTHORIZE_URL,
        csrf_token,
        fake_client_id,
        urlparse.quote_plus((redirect_url or '').encode('utf-8')),
    )
    assert re.match('^box_csrf_token_[A-Za-z0-9]{16}$', csrf_token)
Example 32
    def _create_dir_marker(self, env, account, container, obj):
        """
        Create an empty object to mark a subdirectory. This is required to
        quickly recurse on subdirectories, since with this middleware they
        are stored on separate containers.
        """
        path = quote_plus(
            self.DELIMITER.join(('', 'v1', account, container, obj)))
        req = make_subrequest(env,
                              method='PUT',
                              path=path,
                              body='',
                              swift_source=self.SWIFT_SOURCE)
        req.headers['If-None-Match'] = '*'
        req.headers['Content-Length'] = '0'
        resp = req.get_response(self.app)
        if not resp.is_success:
            LOG.warn('%s: Failed to create directory placeholder in %s: %s',
                     self.SWIFT_SOURCE, container, resp.status)
        close_if_possible(resp.app_iter)
Example 33
    def get_global_account_limit(self, account, rse_expression):
        """
        List the account limit for the specific RSE expression.

        :param account:        The account name.
        :param rse_expression: The rse expression.
        """

        path = '/'.join([
            self.ACCOUNTS_BASEURL, account, 'limits', 'global',
            quote_plus(rse_expression)
        ])
        url = build_url(choice(self.list_hosts), path=path)
        res = self._send_request(url, type_='GET')
        if res.status_code == codes.ok:
            return next(self._load_json_data(res))
        exc_cls, exc_msg = self._get_exception(headers=res.headers,
                                               status_code=res.status_code,
                                               data=res.content)
        raise exc_cls(exc_msg)
Example 34
    def add_value(self, key, value):
        """
        Sends the request to add a value to a key.

        :param key: the name for key.
        :param value: the value.

        :return: True if value was created successfully.
        :raises Duplicate: if value already exists.
        """

        path = '/'.join([self.META_BASEURL, quote_plus(key)]) + '/'
        data = dumps({'value': value})
        url = build_url(choice(self.list_hosts), path=path)
        r = self._send_request(url, type='POST', data=data)
        if r.status_code == codes.created:
            return True
        else:
            exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)
            raise exc_cls(exc_msg)
Example 35
def expand(text, dictlist=[], valid=None, format=''):
    result = StringIO()
    textlist = re.split(var_pattern, text)
    for x in textlist:
        if x.startswith('%'):
            x = x[1:].split('/')
            name = x[0]
            if valid:
                if name not in valid:
                    result.write("(%s-NOT-VALID)" % name)
                    continue
                else:
                    this_format = valid[name]
            if len(x) > 1:
                this_format = x[1]
            else:
                this_format = format
            if name == '':
                result.write('%')
            else:
                val = 'UNDEF'
                for y in dictlist:
                    if name in y:
                        val = y[name]
                        break
                if this_format == '' or this_format == 'text':
                    result.write(str(val))
                elif this_format == 'cgi':
                    if isinstance(val, basestring):
                        val = quote_plus(val)
                    else:
                        val = urlencode(val)
                    result.write(val)
                elif this_format == 'html':
                    val = cgi.escape(str(val), quote=True)
                    result.write(val)
                else:
                    result.write(str(val))
        else:
            result.write(x)
    return result.getvalue()
Example 36
    def _check_aliveness(self, base_url, vhosts, custom_tags):
        """
        Check the aliveness API against all or a subset of vhosts. The API
        will return {"status": "ok"} and a 200 response code in the case
        that the check passes.
        """

        for vhost in vhosts:
            tags = ['vhost:{}'.format(vhost)] + custom_tags
            # We need to urlencode the vhost because it can be '/'.
            path = u'aliveness-test/{}'.format(quote_plus(vhost))
            aliveness_url = urljoin(base_url, path)
            aliveness_response = self._get_data(aliveness_url)
            message = u"Response from aliveness API: {}".format(aliveness_response)

            if aliveness_response.get('status') == 'ok':
                status = AgentCheck.OK
            else:
                status = AgentCheck.CRITICAL

            self.service_check('rabbitmq.aliveness', status, tags, message=message)
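The quoting matters because RabbitMQ's default vhost is named '/'; a quick illustration (the localhost host and 15672 management port below are just examples):

from urllib.parse import quote_plus, urljoin

print(urljoin("http://localhost:15672/api/", "aliveness-test/" + quote_plus("/")))
# http://localhost:15672/api/aliveness-test/%2F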
Example 37
    def delete_global_account_limit(self, account, rse_expression):
        """
        Sends the request to remove a global account limit.

        :param account:        The name of the account.
        :param rse_expression: The rse expression.

        :return: True if quota was removed successfully. False otherwise.
        :raises AccountNotFound: if account doesn't exist.
        """

        path = '/'.join([self.ACCOUNTLIMIT_BASEURL, 'global', account, quote_plus(rse_expression)])
        url = build_url(choice(self.list_hosts), path=path)

        r = self._send_request(url, type='DEL')

        if r.status_code == codes.ok:
            return True
        else:
            exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)
            raise exc_cls(exc_msg)
Example 38
    def set_global_account_limit(self, account, rse_expression, bytes):
        """
        Sends the request to set a global account limit for an account.

        :param account:        The name of the account.
        :param rse_expression: The rse expression.
        :param bytes:          An integer with the limit in bytes.
        :return:               True if quota was created successfully else False.
        """

        data = dumps({'bytes': bytes})
        path = '/'.join([self.ACCOUNTLIMIT_BASEURL, 'global', account, quote_plus(rse_expression)])
        url = build_url(choice(self.list_hosts), path=path)

        r = self._send_request(url, type='POST', data=data)

        if r.status_code == codes.created:
            return True
        else:
            exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)
            raise exc_cls(exc_msg)
Example 39
def calculate_new_path(dirname, base, ext, args):
    stuff = [
        base,
    ]
    for key, val in args.items():
        if key == "fm":
            continue
        if val is not None:
            if isinstance(val, six.string_types):
                # escape special characters in URLs for overlay / mask arguments
                val = parse.quote_plus(val)
            stuff.append("{}{}".format(key, val))

    fmt = args.get('fm')
    if fmt:
        ext = FORMAT_MAP[fmt]['extension']

    filename_with_args = "_".join(str(x) for x in stuff) + "." + ext
    # if we enable compression we may want to modify the filename here to include *.gz
    param_name = os.path.join(CACHE_DIR, dirname, filename_with_args)
    return param_name
Example 40
def unzip(path, dest, folder=None):
    ''' Unzip file. zipfile module seems to fail on android with badziperror.
    '''
    path = quote_plus(path)
    root = "zip://" + path + '/'

    if folder:

        xbmcvfs.mkdir(os.path.join(dest, folder))
        dest = os.path.join(dest, folder)
        root = get_zip_directory(root, folder)

    dirs, files = xbmcvfs.listdir(root)

    if dirs:
        unzip_recursive(root, dirs, dest)

    for file in files:
        unzip_file(os.path.join(root, file), os.path.join(dest, file))

    LOG.info("Unzipped %s", path)
Example 41
    def set_peek(self, dataset, is_multi_byte=False):
        if not dataset.dataset.purged:
            if hasattr(dataset, 'history_id'):
                params = {
                    "bundle": "display?id=%s&tofile=yes&toext=.zip" % dataset.id,
                    "buttonlabel": "Launch GMAJ",
                    "nobutton": "false",
                    "urlpause": "100",
                    "debug": "false",
                    "posturl": "history_add_to?%s" % "&".join("%s=%s" % (x[0], quote_plus(str(x[1]))) for x in [('copy_access_from', dataset.id), ('history_id', dataset.history_id), ('ext', 'maf'), ('name', 'GMAJ Output on data %s' % dataset.hid), ('info', 'Added by GMAJ'), ('dbkey', dataset.dbkey)])
                }
                class_name = "edu.psu.bx.gmaj.MajApplet.class"
                archive = "/static/gmaj/gmaj.jar"
                dataset.peek = create_applet_tag_peek(class_name, archive, params)
                dataset.blurb = 'GMAJ Multiple Alignment Viewer'
            else:
                dataset.peek = "After you add this item to your history, you will be able to launch the GMAJ applet."
                dataset.blurb = 'GMAJ Multiple Alignment Viewer'
        else:
            dataset.peek = 'file does not exist'
            dataset.blurb = 'file purged from disk'
Example 42
    def list_dids_extended(self, scope, filters, did_type='collection', long=False, recursive=False):
        """
        List all data identifiers in a scope which match a given pattern.

        :param scope: The scope name.
        :param filters: A nested dictionary of key/value pairs like [{'key1': 'value1', 'key2.lte': 'value2'}, {'key3.gte': 'value3'}].
                        Keypairs in the same dictionary are AND'ed together, dictionaries are OR'ed together. Keys should be suffixed
                        like <key>.<operation>, e.g. key1 >= value1 is equivalent to {'key1.gte': value}, where <operation> belongs to one
                        of the set {'lte', 'gte', 'gt', 'lt', 'ne' or ''}. Equivalence doesn't require an operator.
        :param did_type: The type of the did: 'all'(container, dataset or file)|'collection'(dataset or container)|'dataset'|'container'|'file'
        :param long: Long format option to display more information for each DID.
        :param recursive: Recursively list DIDs content.
        """
        path = '/'.join([self.DIDS_BASEURL, quote_plus(scope), 'dids', 'search_extended'])

        # stringify dates.
        if isinstance(filters, dict):   # backwards compatibility for filters as single {}
            filters = [filters]
        for or_group in filters:
            for key, value in or_group.items():
                if isinstance(value, datetime):
                    or_group[key] = date_to_str(value)

        payload = {
            'type': did_type,
            'filters': filters,
            'long': long,
            'recursive': recursive
        }

        url = build_url(choice(self.list_hosts), path=path, params=payload)

        r = self._send_request(url, type_='GET')

        if r.status_code == codes.ok:
            dids = self._load_json_data(r)
            return dids
        else:
            exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)
            raise exc_cls(exc_msg)
Example 43
    def _create_dir_marker(self, env, account, container, obj):
        """
        Create an empty object to mark a subdirectory. This is required to
        quickly recurse on subdirectories, since with this middleware they
        are stored on separate containers.
        """

        items = container.split(self.ENCODED_DELIMITER)

        while items:
            path = quote_plus(
                self.DELIMITER.join(('', 'v1', account, container, obj)))
            req = make_subrequest(env,
                                  method='PUT',
                                  path=path,
                                  body='',
                                  swift_source=self.SWIFT_SOURCE)
            req.headers['If-None-Match'] = '*'
            req.headers['Content-Length'] = '0'
            LOG.debug("%s: Create placeholder %s in %s", self.SWIFT_SOURCE,
                      obj, container)
            resp = req.get_response(self.app)
            if resp.status_int == HTTP_PRECONDITION_FAILED:
                LOG.debug('%s: directory placeholder already present '
                          'in %s', self.SWIFT_SOURCE, container)
                close_if_possible(resp.app_iter)
                break

            if not resp.is_success:
                LOG.warn(
                    '%s: Failed to create directory placeholder '
                    'in %s: %s', self.SWIFT_SOURCE, container, resp.status)
            close_if_possible(resp.app_iter)

            if not self.recursive_placeholders:
                break

            if items:
                obj = items.pop() + self.DELIMITER
                container = self.ENCODED_DELIMITER.join(items)
Example 44
    def _recursive_listing(self, env, account, ct_parts, header_cb):
        """
        For each subdirectory marker encountered, make a listing subrequest,
        and yield object list.
        """
        sub_path = quote_plus(
            self.DELIMITER.join(
                ('', 'v1', account, self.ENCODED_DELIMITER.join(ct_parts))))
        LOG.debug("%s: Recursively listing '%s'", self.SWIFT_SOURCE, sub_path)
        sub_req = make_subrequest(env.copy(),
                                  method='GET',
                                  path=sub_path,
                                  body='',
                                  swift_source=self.SWIFT_SOURCE)
        params = sub_req.params
        params['delimiter'] = self.DELIMITER
        params['limit'] = '10000'
        params['prefix'] = ''
        sub_req.params = params
        resp = sub_req.get_response(self.app)
        obj_prefix = ''
        if len(ct_parts) > 1:
            obj_prefix = self.DELIMITER.join(ct_parts[1:] + ('', ))
        if not resp.is_success or resp.content_length == 0:
            LOG.warn("Failed to recursively list '%s': %s", obj_prefix,
                     resp.status)
            return
        with closing_if_possible(resp.app_iter):
            items = json.loads(resp.body)
        header_cb(resp.headers)
        subdirs = [x['subdir'][:-1] for x in items if 'subdir' in x]
        for obj in items:
            if 'name' in obj:
                obj['name'] = obj_prefix + obj['name']
                yield obj
        for subdir in subdirs:
            for obj in self._recursive_listing(env, account,
                                               ct_parts + (subdir, ),
                                               header_cb):
                yield obj
Example 45
def make_franken_uri(path, qs):
    """Given two bytestrings, return a bytestring.

    We want to pass ASCII to Request. However, our friendly neighborhood WSGI
    servers do friendly neighborhood things with the Request-URI to compute
    PATH_INFO and QUERY_STRING. In addition, our friendly neighborhood browser
    sends "raw, unescaped UTF-8 bytes in the query during an HTTP request"
    (http://web.lookout.net/2012/03/unicode-normalization-in-urls.html).

    Our strategy is to try decoding to ASCII, and if that fails (we don't have
    ASCII) then we'll quote the value before passing to Request. What encoding
    are those bytes? Good question. The above blog post claims that experiment
    reveals all browsers to send UTF-8, so let's go with that? BUT WHAT ABOUT
    MAXTHON?!?!?!.

    """
    if path:
        try:
            path.decode('ASCII')  # NB: We throw away this unicode!
        except UnicodeDecodeError:

            # XXX How would we get non-ASCII here? The lookout.net post
            # indicates that all browsers send ASCII for the path.

            # Some servers (gevent) clobber %2F inside of paths, such
            # that we see /foo%2Fbar/ as /foo/bar/. The %2F is lost to us.
            parts = [urlparse.quote(x) for x in quoted_slash_re.split(path)]
            path = b"%2F".join(parts)

    if qs:
        try:
            qs.decode('ASCII')  # NB: We throw away this unicode!
        except UnicodeDecodeError:
            # Cross our fingers and hope we have UTF-8 bytes from MSIE. Let's
            # perform the percent-encoding that we would expect MSIE to have
            # done for us.
            qs = urlparse.quote_plus(qs)
        qs = b'?' + qs

    return path + qs
Example 46
    def scrape(self):
        page = self.lxmlize(COUNCIL_PAGE)

        members = page.xpath('//table[@id="MLAs"]//tr')[1:]
        assert len(members), 'No members found'
        for member in members:
            if 'Vacant' not in member.xpath('./td')[0].text_content():
                name = member.xpath('./td')[0].text_content().split('. ', 1)[1]
                party = member.xpath('./td')[1].text
                district = member.xpath('./td')[2].text_content()
                url = member.xpath('./td[1]/a/@href')[0]
                page = self.lxmlize(url)

                p = Person(primary_org='legislature', name=name, district=district, role='MLA', party=party)
                p.add_source(COUNCIL_PAGE)
                p.add_source(url)
                p.image = 'http://dbnwgw85c64xt.cloudfront.net/{}/185/259'.format(quote_plus(page.xpath('//div[contains(@class, "mla-image-cell")]/img/@src')[0]))
                self.get(p.image)  # trigger cache

                contact = page.xpath('//div[@id="mla-contact"]/div[2]')[0]
                website = contact.xpath('./div[3]/div[3]/div[2]/a')
                if website:
                    p.add_link(website[0].text_content())

                p.add_contact('address', ' '.join(contact.xpath('.//div[@class="col-md-4"][2]/div//text()')[1:9]), 'constituency')

                phone_leg = contact.xpath('.//span[@id="MainContent_ContentBottom_Property6"]//text()')
                if phone_leg:
                    p.add_contact('voice', phone_leg[0], 'legislature', area_code=306)

                phone_const = contact.xpath('.//div[@class="col-md-4"]/div[4]/span/span/text()')
                if phone_const:
                    p.add_contact('voice', phone_const[0], 'constituency', area_code=306)

                email = self.get_email(contact, error=False)
                if email:
                    p.add_contact('email', email)

                yield p
Example 47
    def contains_content_items(self, request, pk, course_run_ids, program_uuids):
        """
        Return whether or not the specified content is available to the EnterpriseCustomer.

        Multiple course_run_ids and/or program_uuids query parameters can be sent to this view to check
        for their existence in the EnterpriseCustomerCatalogs associated with this EnterpriseCustomer.
        At least one course run key or program UUID value must be included in the request.
        """
        enterprise_customer = self.get_object()

        # Maintain plus characters in course key.
        course_run_ids = [unquote(quote_plus(course_run_id)) for course_run_id in course_run_ids]

        contains_content_items = False
        for catalog in enterprise_customer.enterprise_customer_catalogs.all():
            contains_course_runs = not course_run_ids or catalog.contains_courses(course_run_ids)
            contains_program_uuids = not program_uuids or catalog.contains_programs(program_uuids)
            if contains_course_runs and contains_program_uuids:
                contains_content_items = True
                break

        return Response({'contains_content_items': contains_content_items})
Example 48
    def list_dids_extended(self,
                           scope,
                           filters,
                           did_type='collection',
                           long=False,
                           recursive=False):
        """
        List all data identifiers in a scope which match a given pattern. Extended version that goes through plugin mechanism.

        :param scope: The scope name.
        :param filters: A dictionary of key/value pairs like {'type': 'dataset', 'scope': 'test'}.
        :param did_type: The type of the did: 'all'(container, dataset or file)|'collection'(dataset or container)|'dataset'|'container'|'file'
        :param long: Long format option to display more information for each DID.
        :param recursive: Recursively list DIDs content.
        """
        path = '/'.join(
            [self.DIDS_BASEURL,
             quote_plus(scope), 'dids', 'search_extended'])
        payload = {}

        for k, v in list(filters.items()):
            if k in ('created_before', 'created_after'):
                payload[k] = date_to_str(v)
            else:
                payload[k] = v
        payload['long'] = long
        payload['type'] = did_type
        payload['recursive'] = recursive

        url = build_url(choice(self.list_hosts), path=path, params=payload)
        r = self._send_request(url, type_='GET')
        if r.status_code == codes.ok:
            dids = self._load_json_data(r)
            return dids
        else:
            exc_cls, exc_msg = self._get_exception(headers=r.headers,
                                                   status_code=r.status_code,
                                                   data=r.content)
            raise exc_cls(exc_msg)
Example 49
    def call_post_bootstrap(self, config):
        """
        runs a script after initdb or custom bootstrap script is called and waits until completion.
        """
        cmd = config.get('post_bootstrap') or config.get('post_init')
        if cmd:
            r = self._postgresql.config.local_connect_kwargs

            if 'host' in r:
                # '/tmp' => '%2Ftmp' for unix socket path
                host = quote_plus(r['host']) if r['host'].startswith('/') else r['host']
            else:
                host = ''

                # https://www.postgresql.org/docs/current/static/libpq-pgpass.html
                # A host name of localhost matches both TCP (host name localhost) and Unix domain socket
                # (pghost empty or the default socket directory) connections coming from the local machine.
                r['host'] = 'localhost'  # set it to localhost to write into pgpass

            if 'user' in r:
                user = r['user'] + '@'
            else:
                user = ''
                if 'password' in r:
                    import getpass
                    r.setdefault('user', os.environ.get('PGUSER', getpass.getuser()))

            connstring = 'postgres://{0}{1}:{2}/{3}'.format(user, host, r['port'], r['database'])
            env = self._postgresql.write_pgpass(r) if 'password' in r else None

            try:
                ret = self._postgresql.cancellable.call(shlex.split(cmd) + [connstring], env=env)
            except OSError:
                logger.error('post_init script %s failed', cmd)
                return False
            if ret != 0:
                logger.error('post_init script %s returned non-zero code %d', cmd, ret)
                return False
        return True
Example 50
    def __init__(self, args):
        self.args = args

        self.args.ssh_public_key = os.path.expanduser(
            self.args.ssh_public_key
        )

        if not self.args.gitlab_repo:
            self.args.gitlab_repo = self.args.github_repo
        (self.args.gitlab_namespace,
         self.args.gitlab_name) = self.args.gitlab_repo.split('/')
        self.args.gitlab_repo = parse.quote_plus(self.args.gitlab_repo)

        self.github = {
            'url': "https://api.github.com",
            'git': "https://github.com",
            'repo': self.args.github_repo,
            'token': self.args.github_token,
        }
        if self.args.branches:
            self.github['branches'] = self.args.branches.split(',')
        self.gitlab = {
            'git': self.args.gitlab_url.replace('http://', 'git@'),
            'host': self.args.gitlab_url,
            'name': self.args.gitlab_name,
            'namespace': self.args.gitlab_namespace,
            'url': self.args.gitlab_url + "/api/v4",
            'repo': self.args.gitlab_repo,
            'token': self.args.gitlab_token,
        }

        if self.args.verbose:
            level = logging.DEBUG
        else:
            level = logging.INFO

        logging.getLogger('github2gitlab').setLevel(level)

        self.tmpdir = "/tmp"
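Example 51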
    def test_setup_firewall_internal(self):
        fake_rule = self._make_fake_fw_rule()
        fake_router_info = self._make_fake_router_info()
        fake_rule_cmd = 'fake-fw-rule0'
        fake_zone_configure_rules = ['fake-config-rule0']

        mock_api = mock.Mock()
        mock_api_gen = mock.Mock(return_value=mock_api)
        mock_get_firewall_rule = mock.Mock(return_value=[fake_rule_cmd])
        mock_get_zone_cmds = mock.Mock(return_value=fake_zone_configure_rules)
        with mock.patch.object(self.fwaas_driver, '_get_vyatta_client',
                               mock_api_gen), \
                mock.patch.object(vyatta_fwaas.vyatta_utils, 'get_zone_cmds',
                                  mock_get_zone_cmds), \
                mock.patch.object(self.fwaas_driver, '_set_firewall_rule',
                                  mock_get_firewall_rule):
            self.fwaas_driver._setup_firewall(fake_router_info,
                                              self.fake_firewall)

            mock_api_gen.assert_called_once_with(fake_router_info.router)
            mock_get_firewall_rule.assert_called_once_with(
                self.fake_firewall_name, 1, fake_rule)
            mock_get_zone_cmds.assert_called_once_with(mock_api,
                                                       fake_router_info,
                                                       self.fake_firewall_name)

            cmds = [
                vyatta_client.SetCmd(
                    vyatta_fwaas.FW_NAME.format(self.fake_firewall_name)),
                vyatta_client.SetCmd(
                    vyatta_fwaas.FW_DESCRIPTION.format(
                        self.fake_firewall_name,
                        parse.quote_plus(self.fake_firewall['description']))),
                vyatta_client.SetCmd(vyatta_fwaas.FW_ESTABLISHED_ACCEPT),
                vyatta_client.SetCmd(vyatta_fwaas.FW_RELATED_ACCEPT),
                fake_rule_cmd,
            ] + fake_zone_configure_rules
            mock_api.exec_cmd_batch.assert_called_once_with(cmds)
Example n. 52
    def get_connections_stat(self, instance, base_url, object_type, vhosts, limit_vhosts, custom_tags):
        """
        Collect metrics on currently open connections per vhost.
        """
        grab_all_data = True

        if self._limit_vhosts(instance):
            grab_all_data = False
            data = []
            for vhost in vhosts:
                url = "vhosts/{}/{}".format(quote_plus(vhost), object_type)
                try:
                    data += self._get_data(urljoin(base_url, url))
                except Exception as e:
                    # This will happen if there is no connection data to grab
                    self.log.debug("Couldn't grab connection data from vhost, %s: %s", vhost, e)

        # sometimes it seems to need to fall back to this
        if grab_all_data or not len(data):
            data = self._get_data(urljoin(base_url, object_type))

        stats = {vhost: 0 for vhost in vhosts}
        connection_states = defaultdict(int)
        for conn in data:
            if conn['vhost'] in vhosts:
                stats[conn['vhost']] += 1
                # 'state' does not exist for direct type connections.
                connection_states[conn.get('state', 'direct')] += 1

        for vhost, nb_conn in iteritems(stats):
            self.gauge('rabbitmq.connections', nb_conn, tags=['{}_vhost:{}'.format(TAG_PREFIX, vhost)] + custom_tags)

        for conn_state, nb_conn in iteritems(connection_states):
            self.gauge(
                'rabbitmq.connections.state',
                nb_conn,
                tags=['{}_conn_state:{}'.format(TAG_PREFIX, conn_state)] + custom_tags,
            )
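
The quote_plus on the vhost name is what makes RabbitMQ's default vhost '/' addressable over the management API, for example:

    from six.moves.urllib.parse import quote_plus

    "vhosts/{}/{}".format(quote_plus('/'), 'connections')  # 'vhosts/%2F/connections'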
Example n. 53
    def test_oauth2_start_flow_specified(self):
        """
        Starts a GlobusNativeAppFlowManager with specified parameters,
        Confirms flow is initialized as expected, and can be used.
        """
        # starting with no flow
        self.assertIsNone(self.nac.current_oauth2_flow_manager)

        # confirms flow initialized with specified values
        flow = self.nac.oauth2_start_flow(requested_scopes="scopes",
                                          redirect_uri="uri",
                                          state="state",
                                          verifier=("v" * 43),
                                          refresh_tokens=True)
        self.assertIsInstance(flow, GlobusNativeAppFlowManager)
        self.assertEqual(flow.redirect_uri, "uri")
        self.assertEqual(flow.requested_scopes, "scopes")
        self.assertEqual(flow.state, "state")
        self.assertTrue(flow.refresh_tokens)

        # confirm client can get url via flow
        url_res = self.nac.oauth2_get_authorize_url()
        verifier, remade_challenge = make_native_app_challenge("v" * 43)
        expected_vals = [
            self.nac.base_url + "v2/oauth2/authorize?",
            "client_id=" + self.nac.client_id, "redirect_uri=" + "uri",
            "scope=" + "scopes", "state=" + "state",
            "code_challenge=" + quote_plus(remade_challenge),
            "access_type=" + "offline"
        ]
        for val in expected_vals:
            self.assertIn(val, url_res)

        # confirm client can try exchanging code for tokens via flow
        with self.assertRaises(AuthAPIError) as apiErr:
            self.nac.oauth2_exchange_code_for_tokens("invalid_code")
        self.assertEqual(apiErr.exception.http_status, 401)
        self.assertEqual(apiErr.exception.code, "Error")
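
make_native_app_challenge is not shown here; assuming it follows the standard PKCE S256 transform, the challenge the test recomputes would be derived from the verifier roughly as below (a sketch, not the SDK's actual implementation):

    import base64
    import hashlib

    def make_challenge(verifier):
        # PKCE S256: BASE64URL(SHA256(verifier)) with the '=' padding stripped
        digest = hashlib.sha256(verifier.encode('utf-8')).digest()
        return base64.urlsafe_b64encode(digest).rstrip(b'=').decode('utf-8')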
Example n. 54
    def find(self, word):
        response = get(URL_WORD.format(quote_plus(word)))
        soup = BeautifulSoup(response.content, 'html.parser')

        # from ipdb import set_trace; set_trace()
        resultados = soup.find('div', id='resultados')

        #div = resultados.find().find_all('div')[5]
        #varpts = div.find_all('span', class_='varpt')
        #wordfrom = varpts[0].text
        #part_of_speech = varpts[2].text
        definitions = [p.text for p in resultados.find_all('p')]
        part_of_speech = u', '.join(
            [part.text for part in resultados.find_all('categoria_ext_aao')])
        #part_of_speech = u'{0}'.format(part_of_speech)

        script = resultados.find('script').text
        wordId = re.search(r'verificaRelacionadasDef\((\d+)', script).group(1)

        synonims = self._get_synonyms(word, wordId)
        result = PriberamWord(word.decode('utf8'), part_of_speech, definitions,
                              synonims)
        return result
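
URL_WORD is defined elsewhere; quote_plus makes the looked-up word safe to embed in it, turning spaces into '+' and percent-encoding accented bytes:

    from six.moves.urllib.parse import quote_plus

    quote_plus('guarda chuva')          # 'guarda+chuva'  (hypothetical query)
    quote_plus(u'pé'.encode('utf-8'))   # 'p%C3%A9'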
Example n. 55
    def contains_content_items(self, request, pk, course_run_ids, program_uuids):
        """
        Return whether or not the EnterpriseCustomerCatalog contains the specified content.

        Multiple course_run_ids and/or program_uuids query parameters can be sent to this view to check
        for their existence in the EnterpriseCustomerCatalog. At least one course run key
        or program UUID value must be included in the request.
        """
        enterprise_customer_catalog = self.get_object()

        # Maintain plus characters in course key.
        course_run_ids = [unquote(quote_plus(course_run_id)) for course_run_id in course_run_ids]

        contains_content_items = True
        if course_run_ids:
            contains_content_items = enterprise_customer_catalog.contains_courses(course_run_ids)
        if program_uuids:
            contains_content_items = (
                contains_content_items and
                enterprise_customer_catalog.contains_programs(program_uuids)
            )

        return Response({'contains_content_items': contains_content_items})
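
The unquote(quote_plus(...)) round trip works because quote_plus encodes a space as '+' while unquote leaves '+' untouched, so a '+' in a course key that an earlier query-string decode flattened into a space is restored (the key shown is illustrative):

    from six.moves.urllib.parse import quote_plus, unquote

    unquote(quote_plus('course-v1:edX DemoX Demo'))
    # 'course-v1:edX+DemoX+Demo'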
Example n. 56
    def add_scope(self, account, scope):
        """
        Sends the request to add a new scope.

        :param account: the name of the account to add the scope to.
        :param scope: the name of the new scope.
        :return: True if scope was created successfully.
        :raises Duplicate: if scope already exists.
        :raises AccountNotFound: if account doesn't exist.
        """

        path = '/'.join(
            [self.SCOPE_BASEURL, account, 'scopes',
             quote_plus(scope)])
        url = build_url(choice(self.list_hosts), path=path)
        r = self._send_request(url, type_='POST')
        if r.status_code == codes.created:
            return True
        else:
            exc_cls, exc_msg = self._get_exception(headers=r.headers,
                                                   status_code=r.status_code,
                                                   data=r.content)
            raise exc_cls(exc_msg)
Example n. 57
def dump_crypto_meta(crypto_meta):
    """
    Serialize crypto meta to a form suitable for including in a header value.

    The crypto-meta is serialized as a JSON object. The iv and key values are
    random bytes and as a result need to be base64 encoded before being sent
    over the wire. Base64 encoding returns a bytes object in py3; to
    future-proof the code, this data is decoded to produce a string, which is
    what the json.dumps function expects.

    :param crypto_meta: a dict containing crypto meta items
    :returns: a string serialization of a crypto meta dict
    """
    def b64_encode_meta(crypto_meta):
        return {
            name: (base64.b64encode(value).decode() if name in ('iv', 'key')
                   else b64_encode_meta(value) if isinstance(value, dict)
                   else value)
            for name, value in crypto_meta.items()}

    # use sort_keys=True to make serialized form predictable for testing
    return urlparse.quote_plus(
        json.dumps(b64_encode_meta(crypto_meta), sort_keys=True))
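
A quick usage sketch (the iv value is hypothetical): the raw bytes are base64-encoded, the dict JSON-serialized with sorted keys, and the whole string percent-encoded so it survives as a header value:

    meta = {'cipher': 'AES_CTR_256', 'iv': b'0123456789abcdef'}
    dump_crypto_meta(meta)
    # '%7B%22cipher%22%3A+%22AES_CTR_256%22%2C+%22iv%22%3A+%22MDEyMzQ1Njc4OWFiY2RlZg%3D%3D%22%7D'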
Example n. 58
    def scope_list(self, scope, name=None, recursive=False):
        """
        List data identifiers in a scope.

        :param scope: The scope name.
        :param name: The data identifier name.
        :param recursive: boolean, True or False.
        """

        payload = {}
        path = '/'.join([self.DIDS_BASEURL, quote_plus(scope), ''])
        if name:
            payload['name'] = name
        if recursive:
            payload['recursive'] = True
        url = build_url(choice(self.list_hosts), path=path, params=payload)

        r = self._send_request(url, type_='GET')
        if r.status_code == codes.ok:
            return self._load_json_data(r)
        else:
            exc_cls, exc_msg = self._get_exception(headers=r.headers, status_code=r.status_code, data=r.content)
            raise exc_cls(exc_msg)
Example n. 59
def build_candidates_list(election=conf['release']):
    election_path = os.path.join(CANDIDATE_PATH, election)
    if os.path.exists(election_path):
        project_list = os.listdir(election_path)
    else:
        project_list = []
    project_list.sort()
    candidates_lists = {}
    for project in project_list:
        project_prefix = os.path.join(CANDIDATE_PATH, election, project)
        file_list = filter(
            lambda x: x.endswith(".txt"),
            os.listdir(project_prefix),
        )
        candidates_list = []
        for candidate_file in file_list:
            filepath = os.path.join(project_prefix, candidate_file)
            candidates_list.append({
                'url': '%s/%s/plain/%s' % (
                    CGIT_URL, ELECTION_REPO, quote_plus(filepath, safe='/')),
                'ircname': candidate_file[:-4].replace('`', r'\`'),
                'email': get_email(filepath),
                'fullname': get_fullname(filepath),
            })

        candidates_list.sort(key=lambda x: x['fullname'])
        candidates_lists[project] = candidates_list

    return {
        'election': election,
        'projects': project_list,
        'candidates': candidates_lists
    }
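
Passing safe='/' to quote_plus keeps the path separators of the cgit URL readable while still escaping anything else in a candidate file name, e.g. spaces (the path is hypothetical):

    from urllib.parse import quote_plus

    quote_plus('candidates/stein/TC/Jane Doe.txt', safe='/')
    # 'candidates/stein/TC/Jane+Doe.txt'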
Example n. 60
def test_mdx_metadata_freshness_period_expired():
    """Ensure that metadata is not refreshed if not expired."""

    entity_id = "http://xenosmilus.umdc.umu.se/simplesaml/saml2/idp/metadata.php"
    url = "http://mdx.example.com/entities/{}".format(
        parse.quote_plus(MetaDataMDX.sha1_entity_transform(entity_id)))

    responses.add(
        responses.GET,
        url,
        body=TEST_METADATA_STRING,
        status=200,
        content_type=SAML_METADATA_CONTENT_TYPE,
    )

    mdx = MetaDataMDX("http://mdx.example.com",
                      freshness_period="P0Y0M0DT0H2M0S")
    mdx._is_metadata_fresh = Mock(return_value=False)

    mdx.single_sign_on_service(entity_id, BINDING_HTTP_REDIRECT)
    assert entity_id in mdx.entity

    mdx.single_sign_on_service(entity_id, BINDING_HTTP_REDIRECT)
    assert len(responses.calls) == 2
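
MetaDataMDX.sha1_entity_transform is not shown here; assuming it implements the well-known MDQ identifier transform, it maps the entityID to '{sha1}<hexdigest>' roughly as below, after which parse.quote_plus encodes the braces for the request URL (a sketch, not pysaml2's actual code):

    import hashlib

    def sha1_entity_transform(entity_id):
        # MDQ transform: the literal prefix '{sha1}' plus the hex SHA-1 digest
        return '{sha1}%s' % hashlib.sha1(entity_id.encode('utf-8')).hexdigest()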