Example #1
def from_docker_envvars(config):
    # linked postgres database (link name 'pg' or 'postgres')
    if 'PG_PORT' in os.environ:
        pg_url = urlparse(os.environ['PG_PORT'])

        if pg_url.scheme != 'tcp':
            raise ValueError('Only tcp scheme supported for postgres')

        host, port = pg_url.netloc.split(':')

        uri = 'postgres://{user}:{password}@{host}:{port}/{database}'.format(
            user=os.environ.get('PG_ENV_POSTGRES_USER', 'postgres'),
            password=os.environ.get('PG_ENV_POSTGRES_PASSWORD', ''),
            host=host,
            port=port,
            database=os.environ.get('PG_ENV_POSTGRES_DB', 'postgres'))

        config['SQLALCHEMY_DATABASE_URI'] = uri

    if 'REDIS_PORT' in os.environ:
        redis_url = urlparse(os.environ['REDIS_PORT'])

        if redis_url.scheme != 'tcp':
            raise ValueError('Only tcp scheme supported for redis')

        host, port = redis_url.netloc.split(':')

        uri = 'redis://{host}:{port}/0'.format(host=host, port=port)

        config['REDIS_URL'] = uri
        config['REDIS_HOST'] = host
        config['REDIS_PORT'] = int(port)
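
A minimal usage sketch (hedged: it assumes the os and urlparse imports the snippet relies on, and the env var values below are hypothetical examples of Docker's legacy container-link variables, not taken from the original source):

import os

os.environ['PG_PORT'] = 'tcp://172.17.0.2:5432'   # hypothetical link value
os.environ['PG_ENV_POSTGRES_USER'] = 'appuser'    # hypothetical
config = {}
from_docker_envvars(config)
print(config['SQLALCHEMY_DATABASE_URI'])
# postgres://appuser:@172.17.0.2:5432/postgres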
Example #2
def parse_dcs(dcs):
    """
    Break up the provided dcs string
    >>> parse_dcs('localhost') == {'scheme': 'etcd', 'hostname': 'localhost', 'port': 4001}
    True
    >>> parse_dcs('localhost:8500') == {'scheme': 'consul', 'hostname': 'localhost', 'port': 8500}
    True
    >>> parse_dcs('zookeeper://localhost') == {'scheme': 'zookeeper', 'hostname': 'localhost', 'port': 2181}
    True
    """

    if not dcs:
        return {}

    parsed = urlparse(dcs)
    scheme = parsed.scheme
    if scheme == '' and parsed.netloc == '':
        parsed = urlparse('//' + dcs)

    if scheme == '':
        default_schemes = {'2181': 'zookeeper', '8500': 'consul'}
        scheme = default_schemes.get(str(parsed.port), 'etcd')

    port = parsed.port
    if port is None:
        default_ports = {'consul': 8500, 'zookeeper': 2181}
        port = default_ports.get(str(scheme), 4001)

    return {'scheme': str(scheme), 'hostname': str(parsed.hostname), 'port': int(port)}
Example #3
def url_distance(preprocessor, url1, url2):
    url1 = urlparse(url1)
    url2 = urlparse(url2)

    process_fn = lambda s: preprocessor(unquote(s))
    path1 = map(process_fn, url1.path.strip('/').split('/'))
    path2 = map(process_fn, url2.path.strip('/').split('/'))
    path_distance = levenshtein_array(path1, path2)

    query_distance = dict_distance(preprocessor,
        parse_qs(url1.query, True),
        parse_qs(url2.query, True)
    )

    domain_distance = 4 * levenshtein_array(
        (url1.hostname or '').split('.'),
        (url2.hostname or '').split('.')
    )

    return (
        domain_distance +
        path_distance +
        query_distance +
        (url1.fragment != url2.fragment)
    )
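
The helpers levenshtein_array and dict_distance are defined elsewhere in the original module. Note that in Python 3 map() returns a lazy iterator, so levenshtein_array must either accept iterators or the map() calls should be wrapped in list(). A small self-contained sketch of just the path tokenisation step, assuming a simple lowercasing preprocessor:

from urllib.parse import urlparse, unquote

url = urlparse('https://example.com/Some%20Dir/Page')
tokens = [unquote(s).lower() for s in url.path.strip('/').split('/')]
print(tokens)  # ['some dir', 'page']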
Example #4
 def _connect(self, url=None, cdn=False):
     if not url:
         if cdn:
             if not self.cdn_url:
                 self.auth()
             url = self.cdn_url
         else:
             if not self.storage_url:
                 self.auth()
             url = self.storage_url
     parsed = urlparse.urlparse(url) if url else None
     http_proxy_parsed = \
         urlparse.urlparse(self.http_proxy) if self.http_proxy else None
     if not parsed and not http_proxy_parsed:
         return None, None
     netloc = (http_proxy_parsed if self.http_proxy else parsed).netloc
     if parsed.scheme == 'http':
         self.verbose('Establishing HTTP connection to %s', netloc)
         conn = self.HTTPConnection(netloc)
     elif parsed.scheme == 'https':
         self.verbose('Establishing HTTPS connection to %s', netloc)
         conn = self.HTTPSConnection(netloc)
     else:
         raise self.HTTPException(
             'Cannot handle protocol scheme %s for url %s' %
             (parsed.scheme, repr(url)))
     if self.http_proxy:
         self.verbose(
             'Setting tunnelling to %s:%s', parsed.hostname, parsed.port)
         conn._set_tunnel(parsed.hostname, parsed.port)
     return parsed, conn
Example #5
    def get_canonicalized_resource(self, req, service):
        # /bucket/keyname
        parsed_req_path = urlparse.urlparse(req.url).path
        assert service.endpoint is not None
        parsed_svc_path = urlparse.urlparse(service.endpoint).path
        # IMPORTANT:  this only supports path-style requests
        assert parsed_req_path.startswith(parsed_svc_path)
        resource = parsed_req_path[len(parsed_svc_path):]
        if parsed_svc_path.endswith('/'):
            # The leading / got stripped off
            resource = '/' + resource
        if not resource:
            # This resource does not address a bucket
            resource = '/'

        # Now append sub-resources, a.k.a. query string parameters
        if getattr(req, 'params', None):
            # A regular Request
            params = req.params
        else:
            # A PreparedRequest
            params = _get_params_from_url(req.url)
        if params:
            subresources = []
            for key, val in sorted(params.iteritems()):
                if key in self.HASHED_PARAMS:
                    if val is None:
                        subresources.append(key)
                    else:
                        subresources.append(key + '=' + val)
            if subresources:
                resource += '?' + '&'.join(subresources)
        self.log.debug('canonicalized resource: %s', repr(resource))
        return resource
Example #6
def assert_urls_match(url_a, url_b):
    url_a = urlparse(url_a)
    url_b = urlparse(url_b)

    assert url_a.scheme == url_b.scheme
    assert url_a.netloc == url_b.netloc
    assert url_a.path == url_b.path
    assert cgi.parse_qs(url_a.query) == cgi.parse_qs(url_b.query)
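
Comparing the query strings as parsed dicts rather than raw strings makes the assertion insensitive to parameter order; a hypothetical illustration:

assert_urls_match('http://example.com/p?a=1&b=2',
                  'http://example.com/p?b=2&a=1')  # passes: same params, different order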
Example #7
def assert_url_equal(url, expected, compare_host=False):
    """Compare url paths and query strings."""
    parsed = urlparse(six.text_type(url))
    parsed_expected = urlparse(six.text_type(expected))
    compare_url_part(parsed.path, parsed_expected.path)
    compare_url_part(parse_qs(parsed.query), parse_qs(parsed_expected.query))
    if compare_host:
        compare_url_part(parsed.netloc, parsed_expected.netloc)
Example #8
def is_safe_url(target):
    """ Checks that the target url is safe and sending to the current
    website not some other malicious one.
    """
    ref_url = urlparse.urlparse(flask.request.host_url)
    test_url = urlparse.urlparse(
        urlparse.urljoin(flask.request.host_url, target))
    return test_url.scheme in ('http', 'https') and \
        ref_url.netloc == test_url.netloc
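
A hedged sketch of how this might be exercised (the Flask app and host names are hypothetical):

app = flask.Flask(__name__)

with app.test_request_context(base_url='http://example.com/'):
    assert is_safe_url('/dashboard')                    # relative: safe
    assert is_safe_url('http://example.com/settings')   # same host: safe
    assert not is_safe_url('http://evil.example.net/')  # foreign host: unsafe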
Example #9
    def __init__(self, raw=None, _address=None):

        if raw:
            raw = _to_parser_input(raw)
            url = url_parser.parse(raw, lexer.clone())
            self._address = urlparse(url.address)
        elif _address:
            self._address = urlparse(_address)
        else:
            raise SyntaxError('failed to create UrlAddress: bad parameters')
Example #10
def _rebase_url(url, base):
    base = list(urlparse.urlparse(base))
    url = list(urlparse.urlparse(url))
    if not url[0]:  # fix up schema
        url[0] = base[0] or "http"
    if not url[1]:  # fix up hostname
        url[1] = base[1]
        if not url[2].startswith('/'):
            url[2] = re.sub(r'/[^/]+$', '/', base[2]) + url[2]
    return urlparse.urlunparse(url)
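
A few hypothetical rebasing cases (using the same module-style urlparse import as the snippet):

print(_rebase_url('style.css', 'http://example.com/a/page.html'))
# http://example.com/a/style.css
print(_rebase_url('//cdn.example.com/x.js', 'https://example.com/'))
# https://cdn.example.com/x.js
print(_rebase_url('/img/logo.png', 'http://example.com/a/b.html'))
# http://example.com/img/logo.png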
Example #11
 def do_start(self):
     start_url = self.backend.start().url
     target_url = self.auth_handlers(start_url)
     response = requests.get(start_url)
     self.assertEqual(response.url, target_url)
     self.assertEqual(response.text, 'foobar')
     self.strategy.set_request_data(parse_qs(urlparse(start_url).query),
                                    self.backend)
     self.strategy.set_request_data(parse_qs(urlparse(target_url).query),
                                    self.backend)
     return self.backend.complete()
Example #12
 def parse_url(cls, url):
     parsed = urlparse(url)
     return cls(proxy_type=parsed.scheme,
                proxy_address=parsed.hostname,
                proxy_port=parsed.port,
                proxy_login=parsed.username,
                proxy_password=parsed.password)
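
For reference, urlparse exposes the credential fields directly, so a hypothetical proxy URL decomposes like this:

from urllib.parse import urlparse

p = urlparse('socks5://user:secret@127.0.0.1:1080')  # hypothetical proxy URL
print(p.scheme, p.hostname, p.port, p.username, p.password)
# socks5 127.0.0.1 1080 user secret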
Example #13
 def __new__(cls, urlstring):
     if isinstance(urlstring, binary_type):
         # here we make a safe-ish assumption that it is a UTF-8 string
         urlstring = urlstring.decode('utf-8')
     url = super(URL, cls).__new__(cls, urlstring)
     url.parsed = urlparse(url)
     return url
Example #14
def get_agent(reactor, connect_timeout=None):
    """Return appropriate agent based on whether an http_proxy is used or not.

    :param connect_timeout: connection timeout in seconds
    :type connect_timeout: float
    :returns: :class:`twisted.web.client.ProxyAgent` when an http_proxy
        environment variable is present, :class:`twisted.web.client.Agent`
        otherwise.
    """

    # TODO: Would be nice to have https_proxy support too.
    http_proxy = os.environ.get('http_proxy')
    if http_proxy is None:
        return _twisted_web_client().Agent(
            reactor,
            connectTimeout=connect_timeout)

    parse_result = urlparse(http_proxy)
    http_proxy_endpoint = TCP4ClientEndpoint(
        reactor,
        parse_result.hostname,
        parse_result.port or 80,
        timeout=connect_timeout)

    return _twisted_web_client().ProxyAgent(http_proxy_endpoint)
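
A hedged usage sketch (the proxy host is hypothetical, and the Twisted reactor plus the snippet's own imports are assumed to be in scope):

import os
from twisted.internet import reactor

os.environ['http_proxy'] = 'http://proxy.internal:3128'  # hypothetical proxy
agent = get_agent(reactor, connect_timeout=30.0)  # returns a ProxyAgent here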
Example #15
def _get_facets(ctx, appid, facets):
    if facets:
        return list(facets)
    url = urlparse(appid)
    if appid == '%s://%s' % (url.scheme, url.netloc):
        return [appid]
    ctx.fail('At least one facet is required unless appId is an origin')
Example #16
def _discover_auth_versions(session, auth_url):
    # discover the API versions the server supports based on the
    # given URL
    v2_auth_url = None
    v3_auth_url = None
    try:
        ks_discover = discover.Discover(session=session, auth_url=auth_url)
        v2_auth_url = ks_discover.url_for('2.0')
        v3_auth_url = ks_discover.url_for('3.0')
    except ks_exc.ClientException:
        # The identity service may not support API version discovery.
        # Let's try to figure out the API version from the original URL.
        path = urlparse.urlparse(auth_url).path
        path = path.lower()
        if path.startswith('/v3'):
            v3_auth_url = auth_url
        elif path.startswith('/v2'):
            v2_auth_url = auth_url
        else:
            # not enough information to determine the auth version
            msg = ('Unable to determine the Keystone version '
                   'to authenticate with using the given '
                   'auth_url. Identity service may not support API '
                   'version discovery. Please provide a versioned '
                   'auth_url instead.')
            raise exc.CommandError(msg)

    return v2_auth_url, v3_auth_url
Example #17
def dereference_reference(reference, context):
    parts = urlparse.urlparse(reference)
    if any((parts.scheme, parts.netloc, parts.path, parts.params, parts.query)):
        raise ValueError(
            MESSAGES['reference']['unsupported'].format(reference),
        )
    return jsonpointer.resolve_pointer(context, parts.fragment)
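
Only fragment-only references are supported here; a sketch of the happy path, assuming the jsonpointer package used above:

from urllib.parse import urlparse
import jsonpointer

context = {'definitions': {'pet': {'type': 'object'}}}
parts = urlparse('#/definitions/pet')  # everything except the fragment is empty
print(jsonpointer.resolve_pointer(context, parts.fragment))
# {'type': 'object'}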
Example #18
    def request(self, url, method="GET", body=None, headers={}):
        fail_state = {
            'status':'400'
        }, "{}"

        parsed = urlparse(url)
        options = parse_qs(parsed.query)

        fn_name = str(active_call)
        if fn_name == 'get_authorize_login_url':
            return {
                'status': '200',
                'content-location':'http://example.com/redirect/login'
            }, None

        if 'access_token' not in options and 'client_id' not in options:
            fn_name += '_unauthorized'
        if 'self' in url and 'access_token' not in options:
            fn_name += '_no_auth_user'

        fl = open('fixtures/%s.json' % fn_name)
        content = fl.read()
        json_content = simplejson.loads(content)
        status = json_content['meta']['code']
        fl.close()
        return {
            'status': status
        }, content
Example #19
    def test_sidebar_extensions_links(self):
        response = self.client.get(reverse('browse.search-tools'))
        assert response.status_code == 200
        doc = pq(response.content)

        links = doc('#search-tools-sidebar a')

        assert sorted([a.text.strip() for a in links]) == (
            sorted(['Bookmarks', 'Dictionaries & Encyclopedias',
                    'Most Popular', 'Recently Added']))

        search_ext_url = urlparse(reverse('browse.extensions',
                                  kwargs=dict(category='search-tools')))

        assert urlparse(links[0].attrib['href']).path == search_ext_url.path
        assert urlparse(links[1].attrib['href']).path == search_ext_url.path
Example #20
    def get_etcd_client(config):
        if 'proxy' in config:
            config['use_proxies'] = True
            config['url'] = config['proxy']

        if 'url' in config:
            r = urlparse(config['url'])
            config.update({'protocol': r.scheme, 'host': r.hostname, 'port': r.port or 2379,
                           'username': r.username, 'password': r.password})
        elif 'host' in config:
            host, port = (config['host'] + ':2379').split(':')[:2]
            config['host'] = host
            if 'port' not in config:
                config['port'] = int(port)

        if config.get('cacert'):
            config['ca_cert'] = config.pop('cacert')

        if config.get('key') and config.get('cert'):
            config['cert'] = (config['cert'], config['key'])

        for p in ('discovery_srv', 'srv_domain'):
            if p in config:
                config['srv'] = config.pop(p)
        client = None
        while not client:
            try:
                client = Client(config)
            except etcd.EtcdException:
                logger.info('waiting on etcd')
                time.sleep(5)
        return client
Example #21
    def add_uri(self, uri, **kwargs):
        """

        .. WARNING::
            Deprecated, please use add_torrent.
        """
        if uri is None:
            raise ValueError('add_uri requires a URI.')
        # there have been some problems with Transmission's built-in torrent
        # fetcher, so use a Python one instead
        parsed_uri = urlparse(uri)
        torrent_data = None
        if parsed_uri.scheme in ['ftp', 'ftps', 'http', 'https']:
            torrent_file = urlopen(uri)
            torrent_data = torrent_file.read()
            torrent_data = base64.b64encode(torrent_data).decode('utf-8')
        if parsed_uri.scheme in ['file']:
            filepath = uri
            # the URI is decoded differently on Linux / Windows?
            if len(parsed_uri.path) > 0:
                filepath = parsed_uri.path
            elif len(parsed_uri.netloc) > 0:
                filepath = parsed_uri.netloc
            torrent_file = open(filepath, 'rb')
            torrent_data = torrent_file.read()
            torrent_data = base64.b64encode(torrent_data).decode('utf-8')
        warnings.warn('add_uri has been deprecated, please use add_torrent instead.', DeprecationWarning)
        if torrent_data:
            return self.add(torrent_data, **kwargs)
        else:
            return self.add(None, filename=uri, **kwargs)
Example #22
 def upload(self, filePath, description=None):
     """
     This operation uploads an item to the server. Each uploaded item is
     identified by a unique itemID. Since this request uploads a file,
     it must be a multi-part request as per IETF RFC1867.
     All uploaded items are subjected to the deletion rules set on the
     upload directory by the administrator of the server. Additionally,
     the administrator can explicitly delete an item as each uploaded
     item shows up in the list of all the uploaded items in Site
     Directory.
     Users can provide arguments to the upload operation as query
     parameters. The parameter details are provided in the parameters
     listed below.
     Inputs:
        filePath - The file to be uploaded.
        description	- An optional description for the uploaded item.
     """
     params = {
         "f" : "json"}
     if description is not None:
         params['description'] = str(description)
     url = self._url + "/upload"
     files = []
     files.append(('file', filePath, os.path.basename(filePath)))
     parsed = urlparse.urlparse(url)
     return self._post_multipart(host=parsed.hostname,
                                    selector=parsed.path,
                                    files = files,
                                    fields=params,
                                    port=parsed.port,
                                    securityHandler=self._securityHandler,
                                    ssl=parsed.scheme.lower() == 'https',
                                    proxy_port=self._proxy_port,
                                    proxy_url=self._proxy_url)
Example #23
    def assert_url(self, expected_url, url):
        """Check if two URL are same

        Assertions are called inside this this method. If anything is
        different, it will fail immediately.

        :param str expected_url: expected URL compare.
        :param str url: the URL to check if it is same as the expected URL.
        """
        url = urlparse(url)
        expected_url = urlparse(expected_url)

        self.assertEqual(expected_url.scheme, url.scheme)
        self.assertEqual(expected_url.netloc, url.netloc)
        self.assertEqual(expected_url.path, url.path)
        self.assertEqual(parse_qs(expected_url.query), parse_qs(url.query))
Example #24
    def __init__(self, value):
        if not isinstance(value, six.text_type):
            raise TypeError("value must be a unicode string")

        parsed = urllib_parse.urlparse(value)
        if not parsed.hostname:
            netloc = ""
        elif parsed.port:
            netloc = (
                idna.encode(parsed.hostname) +
                ":{0}".format(parsed.port).encode("ascii")
            ).decode("ascii")
        else:
            netloc = idna.encode(parsed.hostname).decode("ascii")

        # Note that building a URL in this fashion means it should be
        # semantically indistinguishable from the original but is not
        # guaranteed to be exactly the same.
        uri = urllib_parse.urlunparse((
            parsed.scheme,
            netloc,
            parsed.path,
            parsed.params,
            parsed.query,
            parsed.fragment
        )).encode("ascii")

        self._value = value
        self._encoded = uri
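
Only the hostname component is IDNA-encoded; a short sketch with a hypothetical internationalised name, assuming the idna package used above:

import idna

print(idna.encode(u'bücher.example').decode('ascii'))
# xn--bcher-kva.example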
Example #25
def validate_image_proxies(node):
    """Check that the provided proxy parameters are valid.

    :param node: an Ironic node.
    :raises: InvalidParameterValue if any of the provided proxy parameters are
        incorrect.
    """
    invalid_proxies = {}
    for scheme in ('http', 'https'):
        proxy_param = 'image_%s_proxy' % scheme
        proxy = node.driver_info.get(proxy_param)
        if proxy:
            chunks = urlparse.urlparse(proxy)
            # NOTE(vdrok) If no scheme specified, this is still a valid
            # proxy address. It is also possible for a proxy to have a
            # scheme different from the one specified in the image URL,
            # e.g. it is possible to use https:// proxy for downloading
            # http:// image.
            if chunks.scheme not in ('', 'http', 'https'):
                invalid_proxies[proxy_param] = proxy
    msg = ''
    if invalid_proxies:
        msg += _("Proxy URL should either have HTTP(S) scheme "
                 "or no scheme at all, the following URLs are "
                 "invalid: %s.") % invalid_proxies
    no_proxy = node.driver_info.get('image_no_proxy')
    if no_proxy is not None and not utils.is_valid_no_proxy(no_proxy):
        msg += _(
            "image_no_proxy should be a list of host names, IP addresses "
            "or domain names to exclude from proxying, the specified list "
            "%s is incorrect. To denote a domain name, prefix it with a dot "
            "(instead of e.g. '.*').") % no_proxy
    if msg:
        raise exception.InvalidParameterValue(msg)
Example #26
def depaginate(api, result):
    """Depaginate the first (or only) page of a paginated result"""
    meta = result['meta']
    if meta['next'] is None:
        # No pages means we're done
        return result

    # make a copy of meta that we'll mess with and eventually return
    # since we'll be chewing on the 'meta' object with every new GET
    # same thing for objects, since we'll just be appending to it
    # while we pull more records
    ret_meta = meta.copy()
    ret_objects = result['objects']
    while meta['next']:
        # parse out url bits for constructing the new api req
        next_url = urlparse(meta['next'])
        # ugh...need to find the word after 'api/' in the next URL to
        # get the resource endpoint name; not sure how to make this better
        next_endpoint = next_url.path.strip('/').split('/')[-1]
        next_params = {k: v[0] for k, v in parse_qs(next_url.query).items()}
        result = getattr(api, next_endpoint).get(**next_params)
        ret_objects.extend(result['objects'])
        meta = result['meta']

    # fix meta up to not tell lies
    ret_meta['total_count'] = len(ret_objects)
    ret_meta['next'] = None
    ret_meta['limit'] = ret_meta['total_count']
    return {
        'meta': ret_meta,
        'objects': ret_objects
    }
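
A sketch of how each 'next' URL gets decomposed on every iteration (the API URL below is hypothetical):

from urllib.parse import urlparse, parse_qs

next_url = urlparse('https://api.example.com/api/v1/widgets/?limit=20&offset=40')
print(next_url.path.strip('/').split('/')[-1])                 # widgets
print({k: v[0] for k, v in parse_qs(next_url.query).items()})  # {'limit': '20', 'offset': '40'}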
Example #27
 def post(self, queue_name, messages, client_uuid, project=None):
     """Send messages to the subscribers."""
     if self.subscription_controller:
         if not isinstance(self.subscription_controller,
                           pooling.SubscriptionController):
             marker = None
             while True:
                 subscribers = self.subscription_controller.list(
                     queue_name, project, marker=marker)
                 for sub in next(subscribers):
                     LOG.debug("Notifying subscriber %r" % (sub,))
                     s_type = urllib_parse.urlparse(
                         sub['subscriber']).scheme
                     # If the subscriber doesn't contain 'confirmed', it
                     # means that this kind of subscriber was created before
                     # the confirm feature was introduced into Zaqar. We
                     # should allow them to stay subscribed.
                     if (self.require_confirmation and
                             not sub.get('confirmed', True)):
                         LOG.info(_LI('The subscriber %s is not '
                                      'confirmed.'), sub['subscriber'])
                         continue
                     for msg in messages:
                         msg['Message_Type'] = MessageType.Notification.name
                     self._execute(s_type, sub, messages)
                 marker = next(subscribers)
                 if not marker:
                     break
     else:
         LOG.error(_LE('Failed to get subscription controller.'))
Example #28
def host_validator(value):
    parts = urlparse.urlparse(value)
    host = parts.netloc or parts.path
    if value != host:
        raise serializers.ValidationError(
            "Invalid host: {0}, expected {1}".format(value, host),
        )
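
Because a scheme-less value such as 'example.com:8000' parses with an empty netloc and the whole value in path, the validator accepts bare hosts but rejects full URLs; a quick check of both cases:

from urllib.parse import urlparse

for value in ('example.com:8000', 'http://example.com/x'):
    parts = urlparse(value)
    host = parts.netloc or parts.path
    print(value, '->', host, 'valid' if value == host else 'invalid')
# example.com:8000 -> example.com:8000 valid
# http://example.com/x -> example.com invalid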
Example #29
    def __call__(self, value):
        parsed = urlparse(value)
        if parsed.scheme not in ("http", "https"):
            raise ValidationError(message=self.message)

        if parsed.hostname in ("127.0.0.1", "localhost"):
            raise ValidationError(message=self.message)
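
The same two checks, inlined, against a few hypothetical values:

from urllib.parse import urlparse

for value in ('https://example.com/hook', 'ftp://example.com/f', 'http://localhost:8000/'):
    p = urlparse(value)
    ok = p.scheme in ('http', 'https') and p.hostname not in ('127.0.0.1', 'localhost')
    print(value, ok)
# https://example.com/hook True
# ftp://example.com/f False
# http://localhost:8000/ False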
Example #30
    def _add_fake_throttling_action(
            self, view_class, verb='post', url=None, user=None,
            remote_addr=None):
        """Trigger the throttling classes on the API view passed in argument
        just like an action happened.

        Tries to be somewhat generic, but does depend on the view not
        dynamically altering throttling classes and the throttling classes
        themselves not deviating from DRF's base implementation."""
        # Create the fake request, make sure to use an 'unsafe' method by
        # default otherwise we'd be allowed without any checks whatsoever in
        # some of our views.
        path = urlparse(url).path
        factory = APIRequestFactory()
        fake_request = getattr(factory, verb)(path)
        fake_request.user = user
        fake_request.META['REMOTE_ADDR'] = remote_addr
        for throttle_class in view_class.throttle_classes:
            throttle = throttle_class()
            # allow_request() fetches the history, triggers a success/failure
            # and if it's a success it will add the request to the history and
            # set that in the cache. If it failed, we force a success anyway
            # to make sure our target number of actions is reached artificially.
            if not throttle.allow_request(fake_request, view_class()):
                throttle.throttle_success()
Example #31
def url2host(url):
    (host, sep, port) = urlparse(url).netloc.partition(':')
    return host
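
str.partition never raises, so the one-liner also works when no port is present (assuming the urlparse import it relies on):

print(url2host('http://example.com:8080/path'))  # example.com
print(url2host('http://example.com/path'))       # example.com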
Example #32
def finalize(req, *opts):
    """
Prepares the working document for publication/rendering.

:param req: The request
:param opts: Options (not used)
:return: returns the working document with @Name, @cacheDuration and @validUntil set

Set Name, ID, cacheDuration and validUntil on the top-level EntitiesDescriptor element of the working document. Unless
explicitly provided, the @Name is set from the request URI if the pipeline is executed in the pyFF server. The @ID is set
to a string representing the current date/time and will be prefixed with the string provided, which defaults to '_'. The
@cacheDuration element must be a valid xsd duration (e.g. PT5H for 5 hours) and @validUntil can be either an absolute
ISO 8601 time string or (more commonly) a relative time of the form

.. code-block:: none

    \+?([0-9]+d)?\s*([0-9]+h)?\s*([0-9]+m)?\s*([0-9]+s)?


For instance +45d 2m results in a time delta of 45 days and 2 minutes. The '+' sign is optional.

If operating on a single EntityDescriptor then @Name is ignored (cf :py:mod:`pyff.pipes.builtins.first`).

**Examples**

.. code-block:: yaml

    - finalize:
        cacheDuration: PT8H
        validUntil: +10d
        ID: pyff
    """
    if req.t is None:
        raise PipeException("Your plumbing is missing a select statement.")

    e = root(req.t)
    if e.tag == "{%s}EntitiesDescriptor" % NS['md']:
        name = req.args.get('name', None)
        if name is None or 0 == len(name):
            name = req.args.get('Name', None)
        if name is None or 0 == len(name):
            name = req.state.get('url', None)
            if name and 'baseURL' in req.args:

                try:
                    name_url = urlparse(name)
                    base_url = urlparse(req.args.get('baseURL'))
                    name = "{}://{}{}".format(base_url.scheme, base_url.netloc,
                                              name_url.path)
                    log.debug("-------- using Name: %s" % name)
                except ValueError as ex:
                    log.debug(ex)
                    name = None
        if name is None or 0 == len(name):
            name = e.get('Name', None)

        if name:
            e.set('Name', name)

    now = datetime.utcnow()

    mdid = req.args.get('ID', 'prefix _')
    if re.match('(\s)*prefix(\s)*', mdid):
        prefix = re.sub('^(\s)*prefix(\s)*', '', mdid)
        _id = now.strftime(prefix + "%Y%m%dT%H%M%SZ")
    else:
        _id = mdid

    if not e.get('ID'):
        e.set('ID', _id)

    valid_until = str(req.args.get('validUntil', e.get('validUntil', None)))
    if valid_until is not None and len(valid_until) > 0:
        offset = duration2timedelta(valid_until)
        if offset is not None:
            dt = now + offset
            e.set('validUntil', dt.strftime("%Y-%m-%dT%H:%M:%SZ"))
        elif valid_until is not None:
            try:
                dt = iso8601.parse_date(valid_until)
                dt = dt.replace(tzinfo=None)  # make dt "naive" (tz-unaware)
                offset = dt - now
                e.set('validUntil', dt.strftime("%Y-%m-%dT%H:%M:%SZ"))
            except ValueError as ex:
                log.error("Unable to parse validUntil: %s (%s)" %
                          (valid_until, ex))

        # set a reasonable default: a fraction (1/50) of the validity;
        # we replace this below if we have cacheDuration set
        req.state['cache'] = int(total_seconds(offset) / 50)

    cache_duration = req.args.get('cacheDuration',
                                  e.get('cacheDuration', None))
    if cache_duration is not None and len(cache_duration) > 0:
        offset = duration2timedelta(cache_duration)
        if offset is None:
            raise PipeException("Unable to parse %s as xs:duration" %
                                cache_duration)

        e.set('cacheDuration', cache_duration)
        req.state['cache'] = int(total_seconds(offset))

    return req.t
Example #33
def test_state_token(app_rest, monkeypatch):
    """Test state token."""
    # Mock session id
    monkeypatch.setattr('invenio_oauthclient._compat._create_identifier',
                        lambda: '1234')
    monkeypatch.setattr('invenio_oauthclient.views.client._create_identifier',
                        lambda: '1234')

    with app_rest.test_client() as client:
        # Ensure remote apps have been loaded (due to before first
        # request)
        client.get(url_for('invenio_oauthclient.rest_login',
                           remote_app='test'))
        mock_response(app_rest.extensions['oauthlib.client'], 'test')

        # Good state token
        state = serializer.dumps({
            'app': 'test',
            'sid': '1234',
            'next': None,
        })
        resp = client.get(
            url_for('invenio_oauthclient.rest_authorized',
                    remote_app='test',
                    code='test',
                    state=state))
        assert resp.status_code == 200

        outdated_serializer = TimedJSONWebSignatureSerializer(
            app_rest.config['SECRET_KEY'],
            expires_in=0,
        )

        # Bad state - timeout
        state1 = outdated_serializer.dumps({
            'app': 'test',
            'sid': '1234',
            'next': None,
        })
        # Bad state - app
        state2 = serializer.dumps(
            # State for another existing app (test_invalid exists)
            {
                'app': 'test_invalid',
                'sid': '1234',
                'next': None,
            })
        # Bad state - sid
        state3 = serializer.dumps(
            # State for another existing app (test_invalid exists)
            {
                'app': 'test',
                'sid': 'bad',
                'next': None,
            })
        time.sleep(1)
        for s in [state1, state2, state3]:
            resp = client.get(
                url_for('invenio_oauthclient.rest_authorized',
                        remote_app='test',
                        code='test',
                        state=s))
            assert resp.status_code == 302
            assert parse_qs(urlparse(resp.location).query)['code'][0] == '403'
Example #34
def prepare_source_host(resource):
    if resource.service is not None and resource.service.method == INDEXED:
        return urlparse(resource.service.base_url).netloc
    else:
        return None
Example #35
 def test_cors_excludes_accounts_session_endpoint(self):
     assert re.match(
         settings.CORS_URLS_REGEX,
         urlparse(reverse_ns('accounts.session')).path,
     ) is None
Example #36
    def get_etcd_client(self, config, client_cls):
        if 'proxy' in config:
            config['use_proxies'] = True
            config['url'] = config['proxy']

        if 'url' in config:
            r = urlparse(config['url'])
            config.update({
                'protocol': r.scheme,
                'host': r.hostname,
                'port': r.port or 2379,
                'username': r.username,
                'password': r.password
            })
        elif 'hosts' in config:
            hosts = config.pop('hosts')
            default_port = config.pop('port', 2379)
            protocol = config.get('protocol', 'http')

            if isinstance(hosts, six.string_types):
                hosts = hosts.split(',')

            config['hosts'] = []
            for value in hosts:
                if isinstance(value, six.string_types):
                    config['hosts'].append(
                        uri(protocol,
                            split_host_port(value.strip(), default_port)))
        elif 'host' in config:
            host, port = split_host_port(config['host'], 2379)
            config['host'] = host
            if 'port' not in config:
                config['port'] = int(port)

        if config.get('cacert'):
            config['ca_cert'] = config.pop('cacert')

        if config.get('key') and config.get('cert'):
            config['cert'] = (config['cert'], config['key'])

        for p in ('discovery_srv', 'srv_domain'):
            if p in config:
                config['srv'] = config.pop(p)

        dns_resolver = DnsCachingResolver()

        def create_connection_patched(address,
                                      timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                                      source_address=None,
                                      socket_options=None):
            host, port = address
            if host.startswith('['):
                host = host.strip('[]')
            err = None
            for af, socktype, proto, _, sa in dns_resolver.resolve(host, port):
                sock = None
                try:
                    sock = socket.socket(af, socktype, proto)
                    self.set_socket_options(sock, socket_options)
                    if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
                        sock.settimeout(timeout)
                    if source_address:
                        sock.bind(source_address)
                    sock.connect(sa)
                    return sock

                except socket.error as e:
                    err = e
                    if sock is not None:
                        sock.close()
                        sock = None

            if err is not None:
                raise err

            raise socket.error("getaddrinfo returns an empty list")

        urllib3.util.connection.create_connection = create_connection_patched

        client = None
        while not client:
            try:
                client = client_cls(config, dns_resolver)
                if 'use_proxies' in config and not client.machines:
                    raise etcd.EtcdException
            except etcd.EtcdException:
                logger.info('waiting on etcd')
                time.sleep(5)
        return client
Example #37
    def send_confirm_notification(self,
                                  queue,
                                  subscription,
                                  conf,
                                  project=None,
                                  expires=None,
                                  api_version=None,
                                  is_unsubscribed=False):
        # NOTE(flwang): If the confirmation feature isn't enabled, just do
        # nothing. Here we're getting require_confirmation from the conf
        # object instead of using self.require_confirmation, because the
        # variable on self really depends on the kwargs passed when
        # initializing the NotifierDriver object. See bug 1655812 for more
        # information.
        if not conf.notification.require_confirmation:
            return

        key = conf.signed_url.secret_key
        if not key:
            LOG.error("Can't send confirm notification due to the value of"
                      " secret_key option is None")
            return
        url = "/%s/queues/%s/subscriptions/%s/confirm" % (api_version, queue,
                                                          subscription['id'])
        pre_url = urls.create_signed_url(key, [url],
                                         project=project,
                                         expires=expires,
                                         methods=['PUT'])
        message = None
        if is_unsubscribed:
            message_type = MessageType.UnsubscribeConfirmation.name
            message = ('You have unsubscribed successfully to the queue: %s, '
                       'you can resubscribe it by using confirmed=True.' %
                       queue)
        else:
            message_type = MessageType.SubscriptionConfirmation.name
            message = 'You have chosen to subscribe to the queue: %s' % queue

        messages = {}
        endpoint_dict = auth.get_public_endpoint()
        if endpoint_dict:
            wsgi_endpoint = endpoint_dict.get('zaqar')
            if wsgi_endpoint:
                wsgi_subscribe_url = urllib_parse.urljoin(wsgi_endpoint, url)
                messages['WSGISubscribeURL'] = wsgi_subscribe_url
            websocket_endpoint = endpoint_dict.get('zaqar-websocket')
            if websocket_endpoint:
                websocket_subscribe_url = urllib_parse.urljoin(
                    websocket_endpoint, url)
                messages['WebSocketSubscribeURL'] = websocket_subscribe_url
        messages.update({
            'Message_Type': message_type,
            'Message': message,
            'URL-Signature': pre_url['signature'],
            'URL-Methods': pre_url['methods'][0],
            'URL-Paths': pre_url['paths'][0],
            'X-Project-ID': pre_url['project'],
            'URL-Expires': pre_url['expires'],
            'SubscribeBody': {
                'confirmed': True
            },
            'UnsubscribeBody': {
                'confirmed': False
            }
        })
        s_type = urllib_parse.urlparse(subscription['subscriber']).scheme
        LOG.info(
            'Begin to send %(type)s confirm/unsubscribe notification.'
            ' The request body is %(messages)s', {
                'type': s_type,
                'messages': messages
            })

        self._execute(s_type, subscription, [messages], conf)
Example #38
 def discover(self, *args, **kwargs):
     rokus = []
     for device in discovery.discover(*args, **kwargs):
         o = urlparse(device.location)
         rokus.append(Roku(o.hostname, o.port))
     return rokus
Example #39
def _is_network_resource(uri):
    return urlparse.urlparse(uri).scheme.upper() in RAML_VALID_PROTOCOLS
Example #40
    def handle_sk(self, sess):
        response_page = BeautifulSoup(sess.text, 'html.parser')
        challenge_url = sess.url.split("?")[0]
        challenges_txt = response_page.find('input', {
            'name': "id-challenge"
        }).get('value')

        facet_url = urllib_parse.urlparse(challenge_url)
        facet = facet_url.scheme + "://" + facet_url.netloc

        keyHandleJSField = response_page.find('div', {
            'jsname': 'C0oDBd'
        }).get('data-challenge-ui')
        startJSONPosition = keyHandleJSField.find('{')
        endJSONPosition = keyHandleJSField.rfind('}')
        keyHandleJsonPayload = json.loads(
            keyHandleJSField[startJSONPosition:endJSONPosition + 1])

        keyHandles = self.find_key_handles(
            keyHandleJsonPayload,
            base64.urlsafe_b64encode(base64.b64decode(challenges_txt)))
        appId = self.find_app_id(str(keyHandleJsonPayload))

        # The challenge text sent for signing needs to be base64 URL encoded;
        # we also have to remove any base64 padding, because including it
        # will prevent Google from accepting the auth response
        challenges_txt_encode_pad_removed = base64.urlsafe_b64encode(
            base64.b64decode(challenges_txt)).strip('='.encode())

        u2f_challenges = [{
            'version':
            'U2F_V2',
            'challenge':
            challenges_txt_encode_pad_removed.decode(),
            'appId':
            appId,
            'keyHandle':
            keyHandle.decode()
        } for keyHandle in keyHandles]

        # Prompt the user up to attempts_remaining times to insert their U2F device.
        attempts_remaining = 5
        auth_response = None
        while True:
            try:
                auth_response_dict = u2f.u2f_auth(u2f_challenges, facet)
                auth_response = json.dumps(auth_response_dict)
                break
            except RuntimeWarning:
                logging.error("No U2F device found. %d attempts remaining",
                              attempts_remaining)
                if attempts_remaining <= 0:
                    break
                else:
                    input(
                        "Insert your U2F device and press enter to try again..."
                    )
                    attempts_remaining -= 1

        # If we exceed the number of attempts, raise an error and let the program exit.
        if auth_response is None:
            raise ExpectedGoogleException(
                "No U2F device found. Please check your setup.")

        payload = {
            'challengeId':
            response_page.find('input', {
                'name': 'challengeId'
            }).get('value'),
            'challengeType':
            response_page.find('input', {
                'name': 'challengeType'
            }).get('value'),
            'continue':
            response_page.find('input', {
                'name': 'continue'
            }).get('value'),
            'scc':
            response_page.find('input', {
                'name': 'scc'
            }).get('value'),
            'sarp':
            response_page.find('input', {
                'name': 'sarp'
            }).get('value'),
            'checkedDomains':
            response_page.find('input', {
                'name': 'checkedDomains'
            }).get('value'),
            'pstMsg':
            '1',
            'TL':
            response_page.find('input', {
                'name': 'TL'
            }).get('value'),
            'gxf':
            response_page.find('input', {
                'name': 'gxf'
            }).get('value'),
            'id-challenge':
            challenges_txt,
            'id-assertion':
            auth_response,
            'TrustDevice':
            'on',
        }
        return self.post(challenge_url, data=payload)
Example #41
 def _scrub_url(self, real_url, playback_url):
     real = urlparse(real_url)
     playback = urlparse(playback_url)
     self.scrubber.register_name_pair(real.netloc, playback.netloc)
Example #42
    def _get_cookie(self, netloc, ua, timeout):
        class NoRedirection(urllib.error.HTTPErrorProcessor):
            def http_response(self, request, response):
                return response

        def parseJSString(s):
            try:
                offset = 1 if s[0] == '+' else 0
                val = int(
                    eval(
                        s.replace('!+[]', '1').replace('!![]', '1').replace(
                            '[]', '0').replace('(', 'str(')[offset:]))
                return val
            except Exception:
                pass

        cookies = cookielib.LWPCookieJar()
        opener = urllib.request.build_opener(
            NoRedirection, urllib.request.HTTPCookieProcessor(cookies))
        opener.addheaders = [('User-Agent', ua)]
        try:
            response = opener.open(netloc, timeout=int(timeout))
            result = response.read()
        except urllib.error.HTTPError as response:
            result = response.read()
            try:
                encoding = response.info().getheader('Content-Encoding')
            except Exception:
                encoding = None
            if encoding == 'gzip':
                result = gzip.GzipFile(fileobj=BytesIO(result)).read()

        jschl = re.compile('name="jschl_vc" value="(.+?)"/>').findall(
            result)[0]
        init = re.compile('setTimeout\(function\(\){\s*.*?.*:(.*?)};').findall(
            result)[0]
        builder = re.compile(r"challenge-form\'\);\s*(.*)a.v").findall(
            result)[0]

        if '/' in init:
            init = init.split('/')
            decryptVal = parseJSString(init[0]) / float(parseJSString(init[1]))
        else:
            decryptVal = parseJSString(init)

        lines = builder.split(';')
        for line in lines:
            if len(line) > 0 and '=' in line:
                sections = line.split('=')
                if '/' in sections[1]:
                    subsecs = sections[1].split('/')
                    line_val = parseJSString(subsecs[0]) / float(
                        parseJSString(subsecs[1]))
                else:
                    line_val = parseJSString(sections[1])
                decryptVal = float(
                    eval('%.16f' % decryptVal + sections[0][-1] +
                         '%.16f' % line_val))

        answer = float('%.10f' % decryptVal) + len(urlparse(netloc).netloc)

        query = '%scdn-cgi/l/chk_jschl?jschl_vc=%s&jschl_answer=%s' % (
            netloc, jschl, answer)

        if 'type="hidden" name="pass"' in result:
            passval = re.findall('name="pass" value="(.*?)"', result)[0]
            query = '%scdn-cgi/l/chk_jschl?pass=%s&jschl_vc=%s&jschl_answer=%s' % (
                netloc, quote_plus(passval), jschl, answer)
            time.sleep(6)

        opener.addheaders = [
            ('User-Agent', ua), ('Referer', netloc),
            ('Accept',
             'text/html, application/xhtml+xml, application/xml, */*'),
            ('Accept-Encoding', 'gzip, deflate')
        ]

        response = opener.open(query)
        response.close()

        cookie = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
        if 'cf_clearance' in cookie: self.cookie = cookie
Example #43
def get_zookeeper_host_path():
    zk_url = load_system_paasta_config()['zookeeper']
    parsed = urlparse(zk_url)
    return ZookeeperHostPath(host=parsed.netloc, path=parsed.path)
Example #44
 def _update_dns_cache(func, machines):
     for url in machines:
         r = urlparse(url)
         port = r.port or (443 if r.scheme == 'https' else 80)
         func(r.hostname, port)
Example #45
 def send(request, **_):
     assert "/oauth2/v2.0/token" not in request.url, 'mock "request_token" should prevent sending a token request'
     parsed = urlparse(request.url)
     tenant = parsed.path.split("/")[1]
     return get_discovery_response("https://{}/{}".format(
         parsed.netloc, tenant))
Example #46
 def send(request, **_):
     parsed = urlparse(request.url)
     tenant = parsed.path.split("/")[1]
     assert tenant in (first_tenant, second_tenant), 'unexpected tenant "{}"'.format(tenant)
     token = first_token if tenant == first_tenant else second_token
     return mock_response(json_payload=build_aad_response(access_token=token, refresh_token="**"))
Example #47
 def _get_auth_client(self, authority=None, **kwargs):
     actual = urlparse(authority)
     assert actual.scheme == "https"
     assert actual.netloc == expected_netloc
Example #48
 def send(request, **_):
     parsed = urlparse(request.url)
     tenant = parsed.path.split("/")[1]
     token = expected_token if tenant == expected_tenant else expected_token * 2
     return mock_response(json_payload=build_aad_response(access_token=token, refresh_token="**"))
Example #49
def _decode_general_name(backend, gn):
    if gn.type == backend._lib.GEN_DNS:
        data = _asn1_string_to_bytes(backend, gn.d.dNSName)
        if data.startswith(b"*."):
            # This is a wildcard name. We need to remove the leading wildcard,
            # IDNA decode, then re-add the wildcard. Wildcard characters should
            # always be left-most (RFC 2595 section 2.4).
            decoded = u"*." + idna.decode(data[2:])
        else:
            # Not a wildcard, decode away. If the string has a * in it anywhere
            # invalid this will raise an InvalidCodePoint
            decoded = idna.decode(data)
            if data.startswith(b"."):
                # idna strips leading periods. Name constraints can have that
                # so we need to re-add it. Sigh.
                decoded = u"." + decoded

        return x509.DNSName(decoded)
    elif gn.type == backend._lib.GEN_URI:
        data = _asn1_string_to_ascii(backend, gn.d.uniformResourceIdentifier)
        parsed = urllib_parse.urlparse(data)
        hostname = idna.decode(parsed.hostname)
        if parsed.port:
            netloc = hostname + u":" + six.text_type(parsed.port)
        else:
            netloc = hostname

        # Note that building a URL in this fashion means it should be
        # semantically indistinguishable from the original but is not
        # guaranteed to be exactly the same.
        uri = urllib_parse.urlunparse(
            (parsed.scheme, netloc, parsed.path, parsed.params, parsed.query,
             parsed.fragment))
        return x509.UniformResourceIdentifier(uri)
    elif gn.type == backend._lib.GEN_RID:
        oid = _obj2txt(backend, gn.d.registeredID)
        return x509.RegisteredID(x509.ObjectIdentifier(oid))
    elif gn.type == backend._lib.GEN_IPADD:
        return x509.IPAddress(
            ipaddress.ip_address(_asn1_string_to_bytes(backend,
                                                       gn.d.iPAddress)))
    elif gn.type == backend._lib.GEN_DIRNAME:
        return x509.DirectoryName(
            _decode_x509_name(backend, gn.d.directoryName))
    elif gn.type == backend._lib.GEN_EMAIL:
        data = _asn1_string_to_ascii(backend, gn.d.rfc822Name)
        name, address = parseaddr(data)
        parts = address.split(u"@")
        if name or len(parts) > 2 or not address:
            # parseaddr has found a name (e.g. Name <email>) or the split
            # has found more than 2 parts (which means more than one @ sign)
            # or the entire value is an empty string.
            raise ValueError("Invalid rfc822name value")
        elif len(parts) == 1:
            # Single label email name. This is valid for local delivery. No
            # IDNA decoding can be done since there is no domain component.
            return x509.RFC822Name(address)
        else:
            # A normal email of the form local@domain. Let's attempt to
            # decode the domain component and return the entire address.
            return x509.RFC822Name(parts[0] + u"@" + idna.decode(parts[1]))
    else:
        # otherName, x400Address or ediPartyName
        raise x509.UnsupportedGeneralNameType(
            "{0} is not a supported type".format(
                x509._GENERAL_NAMES.get(gn.type, gn.type)), gn.type)
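
A small sketch of the wildcard branch above, assuming the idna package the snippet uses (the encoded name is hypothetical):

import idna

data = b'*.xn--bcher-kva.example'   # hypothetical wildcard dNSName
decoded = u'*.' + idna.decode(data[2:])
print(decoded)                      # *.bücher.example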
Example #50
 def get_scheme(self, path):
     scheme = urlparse(path).scheme
     return scheme if re.match("^[a-zA-Z]+$", scheme) else None
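
A couple of hedged examples (assuming the snippet's urlparse and re imports); note the [a-zA-Z]+ guard also rejects schemes containing digits (e.g. 's3'):

print(get_scheme('/local/path/data.csv'))  # None (empty scheme)
print(get_scheme('hdfs://namenode/data'))  # hdfs
print(get_scheme('s3://bucket/key'))       # None ('3' fails the regex)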
Example #51
def test_token_getter_setter(app_rest, monkeypatch):
    """Test token getter setter."""
    # Mock session id
    monkeypatch.setattr('invenio_oauthclient._compat._create_identifier',
                        lambda: '1234')
    monkeypatch.setattr('invenio_oauthclient.views.client._create_identifier',
                        lambda: '1234')

    oauth = app_rest.extensions['oauthlib.client']

    # Mock user
    user = MagicMock()
    user.id = 1
    user.get_id = MagicMock(return_value=1)
    user.is_anonymous = False

    with app_rest.test_client() as c:
        login_user_via_session(c, user)
        # First call login to be redirected
        res = c.get(
            url_for('invenio_oauthclient.rest_login', remote_app='full'))
        assert res.status_code == 302
        assert res.location.startswith(oauth.remote_apps['full'].authorize_url)
        state = parse_qs(urlparse(res.location).query)['state'][0]

        # Mock response class
        mock_response(app_rest.extensions['oauthlib.client'], 'full')

        # Imitate that the user authorized our request in the remote
        # application.
        c.get(
            url_for(
                'invenio_oauthclient.rest_authorized',
                remote_app='full',
                code='test',
                state=state,
            ))

        # Assert if everything is as it should be.
        from flask import session as flask_session
        assert flask_session['oauth_token_full'] == \
            ('test_access_token', '')

        t = RemoteToken.get(1, 'fullid')
        assert t.remote_account.client_id == 'fullid'
        assert t.access_token == 'test_access_token'
        assert RemoteToken.query.count() == 1

        # Mock a new authorized request
        mock_response(app_rest.extensions['oauthlib.client'],
                      'full',
                      data={
                          'access_token': 'new_access_token',
                          'scope': "",
                          'token_type': 'bearer'
                      })

        c.get(
            url_for('invenio_oauthclient.rest_authorized',
                    remote_app='full',
                    code='test',
                    state=state))

        t = RemoteToken.get(1, 'fullid')
        assert t.access_token == 'new_access_token'
        assert RemoteToken.query.count() == 1

        val = token_getter(
            app_rest.extensions['oauthlib.client'].remote_apps['full'])
        assert val == ('new_access_token', '')

        # Disconnect account
        res = c.get(
            url_for(
                'invenio_oauthclient.rest_disconnect',
                remote_app='full',
            ))
        assert res.status_code == 302
        expected_url_args = {
            "message": "Successfully disconnected.",
            "code": 200
        }
        check_response_redirect_url_args(res, expected_url_args)

        # Assert that remote account have been removed.
        t = RemoteToken.get(1, 'fullid')
        assert t is None
        # TODO: Figure out what is leaving session open & blocked
        db.session.close()
Example #52
def download(name, image, url):

    if url is None: return

    from resources.lib.modules import control, cleantitle

    try:
        headers = dict(urllib_parse.parse_qsl(url.rsplit('|', 1)[1]))
    except:
        headers = {}

    url = url.split('|')[0]

    content = re.compile('(.+?)\sS(\d*)E\d*$').findall(name)
    try:
        transname = name.translate(None, '\/:*?"<>|').strip('.')
    except:
        transname = name.translate(str.maketrans('', '',
                                                 '\/:*?"<>|')).strip('.')
    transname = cleantitle.normalize(transname)
    levels = ['../../../..', '../../..', '../..', '..']

    if len(content) == 0:
        dest = control.setting('movie.download.path')
        dest = control.transPath(dest)
        for level in levels:
            try:
                control.makeFile(os.path.abspath(os.path.join(dest, level)))
            except:
                pass
        control.makeFile(dest)
        dest = os.path.join(dest, transname)
        control.makeFile(dest)
    else:
        dest = control.setting('tv.download.path')
        dest = control.transPath(dest)
        for level in levels:
            try:
                control.makeFile(os.path.abspath(os.path.join(dest, level)))
            except:
                pass
        control.makeFile(dest)
        try:
            transtvshowtitle = content[0][0].translate(None,
                                                       '\/:*?"<>|').strip('.')
        except:
            transtvshowtitle = content[0][0].translate(
                str.maketrans('', '', '\/:*?"<>|')).strip('.')
        dest = os.path.join(dest, transtvshowtitle)
        control.makeFile(dest)
        dest = os.path.join(dest, 'Season %01d' % int(content[0][1]))
        control.makeFile(dest)

    ext = os.path.splitext(urllib_parse.urlparse(url).path)[1][1:]
    if ext not in ['mp4', 'mkv', 'flv', 'avi', 'mpg']: ext = 'mp4'
    dest = os.path.join(dest, transname + '.' + ext)

    sysheaders = urllib_parse.quote_plus(json.dumps(headers))

    sysurl = urllib_parse.quote_plus(url)

    systitle = urllib_parse.quote_plus(name)

    sysimage = urllib_parse.quote_plus(image)

    sysdest = urllib_parse.quote_plus(dest)

    script = inspect.getfile(inspect.currentframe())
    cmd = 'RunScript(%s, %s, %s, %s, %s, %s)' % (
        script, sysurl, sysdest, systitle, sysimage, sysheaders)

    xbmc.executebuiltin(cmd)
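
The trailing '|'-separated segment of url packs HTTP headers into the string as a query fragment. A minimal, hypothetical invocation (URL and names invented for illustration; a Kodi runtime providing xbmc and resources.lib.modules is assumed):

name = 'Some Movie (2020)'
image = 'http://example.com/poster.jpg'
url = ('http://example.com/video.mp4'
       '|User-Agent=Mozilla%2F5.0&Referer=http%3A%2F%2Fexample.com%2F')
download(name, image, url)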
Exemple #53
0
def parse(item, quality=None):
    if quality is None:
        quality = get_quality()
        if quality == QUALITY_CUSTOM:
            quality = int(settings.getFloat('max_bandwidth') * 1000000)
    else:
        quality = int(quality)

    if quality in (QUALITY_DISABLED, QUALITY_SKIP):
        return

    url = item.path.split('|')[0]
    parsed = urlparse(url.lower())

    if 'http' not in parsed.scheme:
        return

    parser = None
    if item.inputstream and item.inputstream.check():
        is_ia = True
        if item.inputstream.manifest_type == 'mpd':
            parser = MPD()
        elif item.inputstream.manifest_type == 'hls':
            parser = M3U8()
    else:
        is_ia = False
        if parsed.path.endswith(('.m3u', '.m3u8')):
            parser = M3U8()
            item.mimetype = 'application/vnd.apple.mpegurl'

    if not parser:
        return

    from .session import Session

    playlist_url = item.path.split('|')[0]

    try:
        resp = Session().get(playlist_url,
                             headers=item.headers,
                             cookies=item.cookies)
    except Exception as e:
        # Report the connection error directly; resp is unbound here
        log.exception(e)
        gui.ok(_(_.QUALITY_PARSE_ERROR, error=e))
        return

    if not resp.ok:
        gui.ok(
            _(_.QUALITY_PARSE_ERROR,
              error=_(_.QUALITY_HTTP_ERROR, code=resp.status_code)))
        return

    try:
        parser.parse(resp.text)
        qualities = parser.qualities()
    except Exception as e:
        log.exception(e)
        gui.ok(_(_.QUALITY_PARSE_ERROR, error=e))
        return

    if len(qualities) < 2:
        return

    if quality == QUALITY_ASK:
        quality = select_quality(qualities)
        if not quality:
            return False
        elif quality == QUALITY_SKIP:
            return

    min_bandwidth, max_bandwidth = parser.bandwidth_range(quality)
    set_settings(min_bandwidth, max_bandwidth, is_ia)
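
parse() relies on just two parser calls: qualities() and bandwidth_range(quality). A minimal stand-in written only to illustrate that contract (this is an assumption about the interface, not the add-on's real M3U8 implementation):

import re

class MinimalM3U8:
    # Collects BANDWIDTH values from an HLS master playlist.
    def parse(self, text):
        self._bandwidths = sorted(
            int(b) for b in re.findall(r'BANDWIDTH=(\d+)', text))

    def qualities(self):
        return self._bandwidths

    def bandwidth_range(self, quality):
        # Assumed semantics: no lower bound, cap at the chosen bandwidth.
        return 0, quality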
Exemple #54
0
    def url(self):
        url = self.sock.url(self.export_name)
        return urllib_parse.urlparse(url)
Exemple #55
0
def test_open_no_create(mode):
    with pytest.raises(OSError) as e:
        missing = urllib_parse.urlparse("file:/no/such/path")
        with file.open(missing, mode):
            pass
    assert e.value.errno == errno.ENOENT
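
The test counts on urlparse() handling single-slash file URLs; a quick, self-contained check of what the stdlib returns for that form:

from urllib.parse import urlparse

u = urlparse('file:/no/such/path')
assert u.scheme == 'file'
assert u.netloc == ''  # the single-slash form carries no authority part
assert u.path == '/no/such/path'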
Exemple #56
0
    def scroll_pages(self, url, params, headers, additional_tags):
        # type: (str, Dict[str, Any], Dict[str, str], List[str]) -> Dict[str, Event]
        page = 1
        events = {}  # type: Dict[str, Event]
        scroll = True
        sc_tags = ["api_url:{}".format(urlparse(self._api_url)[1])
                   ] + self._tags
        while scroll:
            params = copy.deepcopy(params)
            params["page"] = page
            self.log.debug("Fetching events page %s", page)
            try:
                res = self.http.get(url, params=params, headers=headers)
            except RequestException:
                self.log.exception(
                    "Error connecting to the Cloud Controller API")
                self.service_check(API_SERVICE_CHECK_NAME,
                                   CloudFoundryApiCheck.CRITICAL,
                                   tags=sc_tags)
                return events
            try:
                res.raise_for_status()
            except HTTPError:
                self.log.exception(
                    "Error querying list of events: response %s", res.text)
                self.service_check(API_SERVICE_CHECK_NAME,
                                   CloudFoundryApiCheck.CRITICAL,
                                   tags=sc_tags)
                return events
            try:
                payload = res.json()
            except ValueError:
                self.log.exception(
                    "Error decoding response from the Cloud Controller API: response %s",
                    res.text)
                self.service_check(API_SERVICE_CHECK_NAME,
                                   CloudFoundryApiCheck.CRITICAL,
                                   tags=sc_tags)
                return events

            # Memorize the last event guid at which we stopped in the previous check run
            # before we update it during this run
            last_event_guid = self._last_event_guid
            for cf_event in payload.get("resources", []):
                try:
                    dd_event, event_guid, event_ts = parse_event(
                        cf_event, self._api_version, additional_tags)
                except (ValueError, KeyError):
                    self.log.exception("Could not parse event %s", cf_event)
                    continue
                # Stop scrolling once we reach an already-fetched event or one older than MAX_LOOKBACK_SECONDS
                if event_guid == last_event_guid or int(
                        time.time()) - event_ts > MAX_LOOKBACK_SECONDS:
                    scroll = False
                    break
                # Store the event at which we want to stop on the next check run: the most recent of the current run
                if event_ts > self._last_event_ts:
                    self._last_event_guid = event_guid
                    self._last_event_ts = event_ts
                # Make sure we don't send duplicate events in case the pagination gets shifted by new events
                if event_guid not in events:
                    events[event_guid] = dd_event
            # Fetch next page if any
            next_url = get_next_url(payload, self._api_version)
            if not next_url or not scroll:
                break
            page += 1
        self.service_check(API_SERVICE_CHECK_NAME,
                           CloudFoundryApiCheck.OK,
                           tags=sc_tags)
        return events
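
get_next_url() lives outside this snippet. A plausible shape for it, based on the public Cloud Foundry API pagination formats (an assumption, not the integration's actual helper): v2 list responses expose a top-level next_url, while v3 nests the link under pagination -> next -> href.

def get_next_url(payload, api_version):
    # Hypothetical helper mirroring CF v2/v3 pagination.
    if api_version == 'v2':
        return payload.get('next_url')
    return ((payload.get('pagination') or {}).get('next') or {}).get('href')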
Exemple #57
0
        def _do_get(self,
                    url,
                    param_dict,
                    securityHandler=None,
                    header=None,
                    proxy_url=None,
                    proxy_port=None,
                    compress=True):
            """
            Performs a standard GET method.
            Inputs:
               url - string of URI
               param_dict - parameters dictionary that holds key/values for
                            each function call.
               handler - security Handler object
               header - optional headers to add to a call
               proxy_url - URI/IP of the proxy
               proxy_port - port of the proxy
               compress - compression of the call
            """
            url = self._urlsafe(url=url)
            handlers = []
            opener = None
            param_dict, handler, cj = self.__processHandler(
                securityHandler=securityHandler, param_dict=param_dict)
            if self._referer_url is None:
                from six.moves import urllib_parse as urlparse
                self._referer_url = urlparse.urlparse(self._url).netloc
            default_headers = {
                'Referer': self._referer_url,
                'User-Agent': self._useragent
            }

            if header is not None:
                if isinstance(header, (list, tuple)):
                    for h in header:
                        default_headers[h[0]] = h[1]
                elif isinstance(header, dict):
                    default_headers.update(header)
            if compress:
                default_headers['Accept-encoding'] = 'gzip'
            if proxy_url is not None:
                if proxy_port is None:
                    proxy_port = 80
                proxies = {
                    "http": "http://%s:%s" % (proxy_url, proxy_port),
                    "https": "https://%s:%s" % (proxy_url, proxy_port)
                }
                handlers.append(ProxyHandler(proxies))
            if handler is not None:
                handlers.append(handler)
            if cj is not None:
                handlers.append(HTTPCookieProcessor(cookiejar=cj))
            handlers.append(AGOLRedirectHandler())
            if len(handlers) > 0:
                opener = urllib.request.build_opener(*handlers)
                urllib.request.install_opener(opener)
            data = urllib.parse.urlencode(param_dict)
            url = url + "?%s" % data
            # Over-long requests fall back to POST to avoid URL length limits
            if len(data) + len(url) > 1999:
                resp = urllib.request.Request(url=url,
                                              data=None,
                                              headers=default_headers,
                                              method='POST')
            else:
                resp = urllib.request.Request(url=url,
                                              data=None,
                                              headers=default_headers,
                                              method='GET')
            response = urllib.request.urlopen(resp)
            return_headers = response.getheaders()
            if response.getheader('Content-Encoding') == 'gzip':
                bi = BytesIO(response.read())
                gf = gzip.GzipFile(fileobj=bi, mode="rb")
                resp_data = gf.read().decode('utf-8')
            else:
                resp_data = response.read().decode('utf-8')
            result = json.loads(resp_data)
            if 'error' in result:
                if result['error']['message'] == 'Request not made over ssl':
                    if url.startswith('http://'):
                        url = url.replace('http://', 'https://')
                        return self._do_get(url=url,
                                            param_dict=param_dict,
                                            securityHandler=securityHandler,
                                            proxy_url=proxy_url,
                                            proxy_port=proxy_port,
                                            compress=compress)
            return result
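
The gzip branch above can be exercised in isolation; a self-contained round-trip using only the stdlib reproduces the same BytesIO/GzipFile pattern:

import gzip
from io import BytesIO

raw = gzip.compress(b'{"ok": true}')
text = gzip.GzipFile(fileobj=BytesIO(raw), mode='rb').read().decode('utf-8')
assert text == '{"ok": true}'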
Exemple #58
0
def request(url,
            close=True,
            redirect=True,
            error=False,
            verify=True,
            proxy=None,
            post=None,
            headers=None,
            mobile=False,
            XHR=False,
            limit=None,
            referer=None,
            cookie=None,
            compression=True,
            output='',
            timeout='20'):
    try:
        if not url:
            return

        handlers = []

        if proxy is not None:
            handlers += [
                urllib.request.ProxyHandler({'http': '%s' % (proxy)}),
                urllib.request.HTTPHandler
            ]
            opener = urllib.request.build_opener(*handlers)
            urllib.request.install_opener(opener)

        if output == 'cookie' or output == 'extended' or not close:
            cookies = cookielib.LWPCookieJar()
            handlers += [
                urllib.request.HTTPHandler(),
                urllib.request.HTTPSHandler(),
                urllib.request.HTTPCookieProcessor(cookies)
            ]
            opener = urllib.request.build_opener(*handlers)
            urllib.request.install_opener(opener)

        try:
            import platform
            node = platform.node().lower()
        except BaseException:
            node = ''

        if not verify and sys.version_info >= (2, 7, 12):
            try:
                import ssl
                ssl_context = ssl._create_unverified_context()
                handlers += [urllib.request.HTTPSHandler(context=ssl_context)]
                opener = urllib.request.build_opener(*handlers)
                urllib.request.install_opener(opener)
            except BaseException:
                pass

        if verify and ((2, 7, 8) < sys.version_info <
                       (2, 7, 12) or platform.uname()[1] == 'XboxOne'):
            try:
                import ssl
                ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
                handlers += [urllib.request.HTTPSHandler(context=ssl_context)]
                opener = urllib.request.build_opener(*handlers)
                urllib.request.install_opener(opener)
            except BaseException:
                pass

        if url.startswith('//'):
            url = 'http:' + url

        _headers = {}

        try:
            _headers.update(headers)
        except BaseException:
            pass

        if 'User-Agent' in _headers:
            pass
        elif mobile:
            _headers['User-Agent'] = cache.get(randommobileagent, 1)
        else:
            _headers['User-Agent'] = cache.get(randomagent, 1)

        if 'Referer' in _headers:
            pass
        elif referer is not None:
            _headers['Referer'] = referer
        if 'Accept-Language' not in _headers:
            _headers['Accept-Language'] = 'en-US'
        if 'X-Requested-With' in _headers:
            pass
        elif XHR:
            _headers['X-Requested-With'] = 'XMLHttpRequest'
        if 'Cookie' in _headers:
            pass
        elif cookie is not None:
            _headers['Cookie'] = cookie
        if 'Accept-Encoding' in _headers:
            pass
        elif compression and limit is None:
            _headers['Accept-Encoding'] = 'gzip'

        if not redirect:

            class NoRedirectHandler(urllib.request.HTTPRedirectHandler):
                def http_error_302(self, req, fp, code, msg, headers):
                    infourl = addinfourl(fp, headers, req.get_full_url())
                    infourl.status = code
                    infourl.code = code
                    return infourl

                http_error_300 = http_error_302
                http_error_301 = http_error_302
                http_error_303 = http_error_302
                http_error_307 = http_error_302

            opener = urllib.request.build_opener(NoRedirectHandler())
            urllib.request.install_opener(opener)

            try:
                del _headers['Referer']
            except BaseException:
                pass

        url = utils.byteify(url)
        request = urllib.request.Request(url)

        if post is not None:
            if isinstance(post, dict):
                post = utils.byteify(post)
                post = urlencode(post)
            if len(post) > 0:
                if isinstance(post, str):
                    # Python 3: urlopen requires bytes for the request body
                    post = post.encode('utf-8')
                request = urllib.request.Request(url, data=post)
            else:
                request.get_method = lambda: 'POST'
                request.has_header = lambda header_name: (
                    header_name == 'Content-type' or urllib.request.Request.
                    has_header(request, header_name))

        if limit == '0':
            request.get_method = lambda: 'HEAD'

        _add_request_header(request, _headers)

        try:
            response = urllib.request.urlopen(request, timeout=int(timeout))
        except urllib.error.HTTPError as response:
            if response.code == 503:
                cf_result = response.read()
                try:
                    encoding = response.info().get('Content-Encoding')
                except BaseException:
                    encoding = None
                if encoding == 'gzip':
                    cf_result = gzip.GzipFile(
                        fileobj=BytesIO(cf_result)).read()

                if b'cf-browser-verification' in cf_result:
                    from cloudscraper2 import CloudScraper as cfscrape
                    _cf_lim = 0
                    while b'cf-browser-verification' in cf_result and _cf_lim <= 1:
                        _cf_lim += 1
                        netloc = '%s://%s/' % (urlparse(url).scheme,
                                               urlparse(url).netloc)
                        ua = _headers['User-Agent']

                        try:
                            cf = cache.get(cfscrape.get_cookie_string, 1,
                                           netloc, ua)[0]
                        except BaseException:
                            try:
                                cf = cfscrape.get_cookie_string(url, ua)[0]
                            except BaseException:
                                cf = None
                        finally:
                            _headers['Cookie'] = cf

                        request = urllib.request.Request(url, data=post)
                        _add_request_header(request, _headers)

                        try:
                            response = urllib.request.urlopen(
                                request, timeout=int(timeout))
                            cf_result = b'Success'
                        except urllib.error.HTTPError as response:
                            cache.remove(cfscrape.get_cookie_string, netloc,
                                         ua)
                            cf_result = response.read()
                else:
                    xbmc.log(
                        'Request-Error (%s): %s' % (str(response.code), url),
                        xbmc.LOGDEBUG)
                    if not error:
                        return
            else:
                xbmc.log('Request-Error (%s): %s' % (str(response.code), url),
                         xbmc.LOGDEBUG)
                if not error:
                    return

        if output == 'cookie':
            try:
                result = '; '.join(
                    ['%s=%s' % (i.name, i.value) for i in cookies])
            except BaseException:
                pass
            try:
                result = cf
            except BaseException:
                pass
            if close:
                response.close()
            return result

        elif output == 'geturl':
            result = response.geturl()
            if close:
                response.close()
            return result

        elif output == 'headers':
            result = response.headers
            if close:
                response.close()
            return result

        elif output == 'location':
            result = response.headers
            if close:
                response.close()
            return result['Location']

        elif output == 'chunk':
            try:
                content = int(response.headers['Content-Length'])
            except BaseException:
                content = (2049 * 1024)
            if content < (2048 * 1024):
                return
            result = response.read(16 * 1024)
            if close:
                response.close()
            return result

        elif output == 'file_size':
            try:
                content = int(response.headers['Content-Length'])
            except BaseException:
                content = 0
            response.close()
            return content

        if limit == '0':
            result = response.read(1 * 1024)
        elif limit is not None:
            result = response.read(int(limit) * 1024)
        else:
            result = response.read(5242880)

        try:
            encoding = response.headers['Content-Encoding']
        except BaseException:
            encoding = None
        if encoding == 'gzip':
            result = gzip.GzipFile(fileobj=BytesIO(result)).read()

        if b'sucuri_cloudproxy_js' in result:
            su = sucuri().get(result)

            _headers['Cookie'] = su

            request = urllib.request.Request(url, data=post)
            _add_request_header(request, _headers)

            response = urllib.request.urlopen(request, timeout=int(timeout))

            if limit == '0':
                result = response.read(224 * 1024)
            elif limit is not None:
                result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)

            try:
                encoding = response.info().get('Content-Encoding')
            except BaseException:
                encoding = None
            if encoding == 'gzip':
                result = gzip.GzipFile(fileobj=BytesIO(result)).read()

        if six.PY3 and isinstance(result, bytes):
            result = result.decode('utf-8')

        if output == 'extended':
            try:
                response_headers = dict([(item[0].title(), item[1])
                                         for item in response.info().items()])
            except BaseException:
                response_headers = response.headers
            response_code = str(response.code)
            try:
                cookie = '; '.join(
                    ['%s=%s' % (i.name, i.value) for i in cookies])
            except BaseException:
                pass
            try:
                cookie = cf
            except BaseException:
                pass
            if close:
                response.close()
            return (result, response_code, response_headers, _headers, cookie)
        else:
            if close:
                response.close()
            return result
    except Exception as e:
        xbmc.log('Request-Error: (%s) => %s' % (str(e), url), xbmc.LOGDEBUG)
        return
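
The NoRedirectHandler above returns the 3xx response itself by overriding each http_error_3xx hook; a related stdlib pattern (a sketch, not this module's code) refuses redirects at a single choke point, raising HTTPError instead of returning the response:

import urllib.request

class NoRedirect(urllib.request.HTTPRedirectHandler):
    def redirect_request(self, req, fp, code, msg, headers, newurl):
        return None  # returning None makes urlopen raise the HTTPError

opener = urllib.request.build_opener(NoRedirect())
# opener.open(some_url)  # any 3xx now surfaces as urllib.error.HTTPError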
Exemple #59
0
    def handle_sk(self, sess):
        response_page = BeautifulSoup(sess.text, 'html.parser')
        challenge_url = sess.url.split("?")[0]

        challenges_txt = response_page.find('input', {
            'name': "id-challenge"
        }).get('value')
        challenges = json.loads(challenges_txt)

        facet_url = urllib_parse.urlparse(challenge_url)
        facet = facet_url.scheme + "://" + facet_url.netloc
        app_id = challenges["appId"]
        u2f_challenges = []
        for c in challenges["challenges"]:
            c["appId"] = app_id
            u2f_challenges.append(c)

        # Prompt the user up to attempts_remaining times to insert their U2F device.
        attempts_remaining = 5
        auth_response = None
        while True:
            try:
                auth_response = json.dumps(u2f.u2f_auth(u2f_challenges, facet))
                break
            except RuntimeWarning:
                print("No U2F device found. {} attempts remaining.".format(
                    attempts_remaining))
                if attempts_remaining <= 0:
                    break
                else:
                    input(
                        "Insert your U2F device and press enter to try again..."
                    )
                    attempts_remaining -= 1

        # If we exceed the number of attempts, raise an error and let the program exit.
        if auth_response is None:
            raise ExpectedGoogleException(
                "No U2F device found. Please check your setup.")

        payload = {
            'challengeId': response_page.find('input', {'name': 'challengeId'}).get('value'),
            'challengeType': response_page.find('input', {'name': 'challengeType'}).get('value'),
            'continue': response_page.find('input', {'name': 'continue'}).get('value'),
            'scc': response_page.find('input', {'name': 'scc'}).get('value'),
            'sarp': response_page.find('input', {'name': 'sarp'}).get('value'),
            'checkedDomains': response_page.find('input', {'name': 'checkedDomains'}).get('value'),
            'pstMsg': response_page.find('input', {'name': 'pstMsg'}).get('value'),
            'TL': response_page.find('input', {'name': 'TL'}).get('value'),
            'gxf': response_page.find('input', {'name': 'gxf'}).get('value'),
            'id-challenge': challenges_txt,
            'id-assertion': auth_response,
            'TrustDevice': 'on',
        }
        return self.post(challenge_url, data=payload)
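
The nine hidden-input lookups that build payload all follow one pattern; a hypothetical helper (the name is invented here) could gather them in a single pass:

def hidden_fields(page, names):
    # page is the BeautifulSoup document; names are the input names to read.
    return {name: page.find('input', {'name': name}).get('value')
            for name in names}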
Exemple #60
0
def _build_network_relative_path(url):
    p = urlparse.urlparse(url)
    parse_result = urlparse.ParseResult(p.scheme, p.netloc,
                                        os.path.dirname(p.path) + '/', '', '',
                                        '')
    return urlparse.urlunparse(parse_result)
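
For reference, the function maps a resource URL onto the URL of its containing directory. An illustrative call with invented values:

# _build_network_relative_path('http://example.com/maps/service/layer.json')
# returns 'http://example.com/maps/service/'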