def test_create_flavor(self):
        content = self.get_token("admin", "admin")
        token = json.loads(content)["success"]["token"]
        data = {"name": "小型", "vcpus": 1, "ram": 64, "disk": 1, "ephemeral": 0, "swap": 0}
        h = httplib2.Http()
        resp, content = h.request(
            self.base_url + "flavors",
            "POST",
            urlencode(data),
            headers={"Content-Type": "application/x-www-form-urlencoded", "X-Auth-Token": token},
        )
        flavor = json.loads(content)["flavor"]
        self.assertTrue(flavor["id"], "test_create_flavor failed")

        data = {"name": "小型1", "vcpus": 11}
        resp, content = h.request(
            self.base_url + "flavors/" + "%s" % flavor["id"],
            "POST",
            urlencode(data),
            headers={"Content-Type": "application/x-www-form-urlencoded", "X-Auth-Token": token},
        )

        resp, content = h.request(
            self.base_url + "flavors/" + "%s" % flavor["id"],
            "DELETE",
            headers={"Content-Type": "application/x-www-form-urlencoded", "X-Auth-Token": token},
        )
def perform_uclan_request(uclan_email, uclan_password, view_state, event_validation, mon_date):
    request = urllib2.Request("https://apps13.uclan.ac.uk/WeeklyTimetable/")

    base64string = base64.encodestring("%s:%s" % (uclan_email, uclan_password)).replace("\n", "")
    request.add_header("Authorization", "Basic %s" % base64string)
    if view_state == "":
        params = urllib.urlencode(
            {"ctl00$ScriptManager1": "ctl00$MainContent$UpdatePanel1|ctl00$MainContent$dateChangeSubmit"}
        )
    else:
        params = urllib.urlencode(
            {
                "ctl00$ScriptManager1": "ctl00$MainContent$UpdatePanel1|ctl00$MainContent$dateChangeSubmit",
                "__EVENTTARGET": "",
                "__VIEWSTATEGENERATOR": "2912534F",
                "__LASTFOCUS": "",
                "__VIEWSTATE": view_state,
                "__EVENTVALIDATION": event_validation,
                "ctl00$MainContent$tbCurrentDate": mon_date,
                "ctl00$MainContent$dateChangeSubmit": "",
            }
        )
    response = urllib2.urlopen(request, params)
    html = response.read()
    result = BeautifulSoup(html)
    return result
Example #3
    def request(self, operation, parameters={}):
        """
        Sends the request to the Turk server and returns a response object.
        """

        if not self.signature or not self.accesskey:
            raise RuntimeError("Signature or access key missing")

        timestamp = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
        hmacstr = hmac.new(self.signature, "AWSMechanicalTurkRequester" + operation + timestamp, hashlib.sha1)
        hmacstr = base64.encodestring(hmacstr.digest()).strip()

        logger.info("Request to MTurk: {0}".format(operation))
        for paramk, paramv in parameters.items():
            logger.debug("  {0}: {1}".format(paramk, paramv))

        baseurl = "/?" + urllib.urlencode(
            {
                "Service": "AWSMechanicalTurkRequester",
                "AWSAccessKeyId": config.accesskey,
                "Version": "2008-08-02",
                "Operation": operation,
                "Signature": hmacstr,
                "Timestamp": timestamp,
            }
        )
        url = baseurl + "&" + urllib.urlencode(parameters)
        url = "https://" + self.server + url

        req = urllib2.Request(url=url)
        data = urllib2.urlopen(req)

        response = Response(operation, data)
        return response
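A minimal usage sketch for the method above; the `client` variable is assumed to be an instance of the class defining request(), and "GetAccountBalance" is a standard operation name in the 2008-08-02 MTurk Requester API:

# client is assumed to be an instance of the class above, with signature and access key set.
response = client.request("GetAccountBalance")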
Example #4
def auth_flickr(request):
    from agro.sources import utils
    api, secret, url = 'e22dd4a81125531e047036ed1ab2a9e7', '72a484d250375bdf', ''
    token = ''
    user_name, user_id = '', ''

    frob = request.GET.get('frob', '')

    if frob:
        api_sig = md5.new('%sapi_key%sfrob%smethodflickr.auth.getToken' % (secret, api, frob)).hexdigest()
        params = urllib.urlencode({'api_key':api, 'frob':frob, 'method':'flickr.auth.getToken', 'api_sig':api_sig})
        res = utils.get_remote_data("http://api.flickr.com/services/rest/?" + params)

        if res.get("stat", "") == "fail":
            log.error("flickr retrieve failed.")
            log.error("%s" % res.get("stat"))
            return False

        #token = res.get('auth')
        auth_res = res.getchildren()[0]
        token = auth_res.find('token').text
        user = auth_res.find('user')
        user_name = user.get('username')
        user_id = user.get('nsid')

    else:
        if request.method == 'POST':
            perms = 'read'
            api_sig = md5.new('%sapi_key%sperms%s' % (secret, api, perms)).hexdigest()
            params = urllib.urlencode({'api_key':api, 'perms':perms, 'api_sig':api_sig})
            return HttpResponseRedirect('http://flickr.com/services/auth/?%s' % params)
        else:
            pass

    return render_to_response(
        'flickr_auth.html',
        {'api': api, 'secret': secret, 'user_name': user_name, 'user_id': user_id, 'token': token},
        context_instance=RequestContext(request),
    )
Example #5
def generate_map(path, spos, tpos, path_encoded=True):
    markers = []
    if spos is not None:
        markers.append("markers=" + quote("label:A|" + spos))
    if tpos is not None:
        markers.append("markers=" + quote("label:B|" + tpos))

    def build_url(args):
        url = "http://maps.googleapis.com/maps/api/staticmap?" + urlencode(args)
        for marker in markers:
            url = url + "&" + marker
        return url

    base_args = {"size": "640x640", "sensor": "false"}
    if path:
        path_args = dict(base_args)
        path_args["path"] = "color:0xff0000ff|weight:5|" + ("enc:" if path_encoded else "") + path
        api = build_url(path_args)
    else:
        api = build_url(base_args)

    # Static Maps URLs are limited to 2048 characters; retry without the path if too long.
    if len(api) > 2048:
        api = build_url(base_args)
    return api if len(api) <= 2048 else None
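A short usage sketch, assuming the Google Static Maps endpoint above; the coordinate strings and the encoded polyline value are illustrative placeholders:

# Illustrative values: "lat,lng" strings for the A/B markers and a placeholder encoded polyline.
url = generate_map("abcd1234", "51.5074,-0.1278", "51.5155,-0.0922")
if url is not None:
    print url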
Example #6
 def get_login_url(self, next=None):
     if not next:
         next = self.request.full_url()
     if not next.startswith("http://") and not next.startswith("https://"):
         next = urlparse.urljoin(self.request.full_url(), next)
     if self.get_argument("code", None):
         return (
             "http://"
             + self.request.host
             + self.reverse_url("login")
             + "?"
             + urllib.urlencode({"next": next, "code": self.get_argument("code")})
         )
     redirect_uri = (
         "http://" + self.request.host + self.reverse_url("login") + "?" + urllib.urlencode({"next": next})
     )
     if self.get_argument("code", None):
         args["code"] = self.get_argument("code")
     return "https://www.facebook.com/dialog/oauth?" + urllib.urlencode(
         {
             "client_id": options.facebook_app_id,
             "redirect_uri": redirect_uri,
             "scope": "offline_access,publish_actions",
         }
     )
def renren_set(email, password, text):
    import urllib2, cookielib, urllib
    from BeautifulSoup import BeautifulSoup

    cj = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    urllib2.install_opener(opener)
    postdata = urllib.urlencode(
        {
            "email": email,
            "password": password,
            "captcha_type": "web_login",
            "icode": "",
            "origURL": "http://www.renren.com/indexcon",
            "domain": "renren.com",
            "key_id": "1",
        }
    )
    req = urllib2.Request(url="http://www.renren.com/PLogin.do", data=postdata)
    response = urllib2.urlopen(req)
    content = response.read()
    j = content[content.find("script") :]
    rtk = j[j.find("get_check_x:") + 13 : j.find("get_check_x:") + 21]  # find the rtk; rtk is a Renren verification code that changes with every login.
    newdata = urllib.urlencode({"_rtk": rtk, "channel": "renren", "content": text, "hostid": "729595072"})
    newreq = urllib2.Request(url="http://shell.renren.com/729595072/status", data=newdata)
    newcontent = urllib2.urlopen(newreq)
Example #8
    def test_POST_with_x_www_form_urlencoded_body_params_and_auth_header_unauthorized(self):
        """Test issue when user's request has authorization header and uses
        application/x-www-form-urlencoded content type with some
        request body parameters, but signature was generated without body
        params.
        """
        # get valid access token
        self._request_token()
        self._authorize_and_access_token_using_form()

        # init request params and headers
        get_params = {"foo": "bar"}
        body_params = {"some": "param", "other": "param"}
        content_type = "application/x-www-form-urlencoded"
        header = self._make_auth_header_with_HMAC_SHA1("post", "/oauth/photo/", get_params, {}, True)

        body = urllib.urlencode(body_params)

        response = self.c.post(
            # this is workaround to have both POST & GET params in this request
            "/oauth/photo/?%s" % urllib.urlencode(get_params),
            data=body,
            HTTP_AUTHORIZATION=header["Authorization"],
            content_type=content_type,
        )

        self.assertEqual(response.status_code, 401)
Example #9
    def test_vote_invalid_choices(self):

        self.login("jo@gmail.com", "secret4")

        poll_3 = config.orm.query(Poll).filter(Poll.start_dt == datetime.date(2020, 10, 1)).one()  # @UndefinedVariable
        self.assertFalse(poll_3.expired)
        self.assertEqual(len(poll_3.choices_by_user), 0)

        response = app.request(
            "/poll/vote",
            method="POST",
            data=urllib.urlencode({"poll_id": poll_3.id, "poll_user_choices": [0, 2]}, True),
        )  # @UndefinedVariable
        self.assertEqual(response.status, HTTP_FORBIDDEN)
        self.assertEqual(
            response.data, u"Un des entiers passes a la methode /poll/vote n'est pas compris dans l'intervalle [0, 1]"
        )

        response = app.request(
            "/poll/vote", method="POST", data=urllib.urlencode({"poll_id": poll_3.id, "poll_user_choices": [3]}, True)
        )  # @UndefinedVariable
        self.assertEqual(response.status, HTTP_FORBIDDEN)
        self.assertEqual(
            response.data, u"Un des entiers passes a la methode /poll/vote n'est pas compris dans l'intervalle [0, 1]"
        )

        self.assertEqual(len(poll_3.choices_by_user), 0)
def lookupCases(opener, name, court, division):
    cases = []

    data = urllib.urlencode({"category": division, "lastName": name, "courtId": court, "submitValue": "N"})
    cases_url = u"http://ewsocis1.courts.state.va.us/CJISWeb/Search.do"
    searchResults = opener.open(cases_url, data)
    html = searchResults.read()
    done = getCases(BeautifulSoup(html), name, cases)

    data = urllib.urlencode(
        {
            "courtId": court,
            "pagelink": "Next",
            "lastCaseProcessed": "",
            "firstCaseProcessed": "",
            "lastNameProcessed": "",
            "firstNameProcessed": "",
            "category": division,
            "firstCaseSerialNumber": 0,
            "lastCaseSerialNumber": 0,
            "searchType": "",
            "emptyList": "",
        }
    )

    count = 1
    search_url = u"http://ewsocis1.courts.state.va.us/CJISWeb/Search.do"
    while not done and count < 6:
        search_results = opener.open(search_url, data)
        html = search_results.read()
        done = getCases(BeautifulSoup(html), name, cases)
        count += 1
    return cases
Example #11
    def fql(self, query, args=None, post_args=None):
        """FQL query.
        Two reasons to have this method:
        1. Graph api does not expose some info fields of a user, e.g.
            a user's networks/affiliations, we have to fall back to old api.
        2. FQL is a strong tool.
        Example query: "SELECT affiliations FROM user WHERE uid = me()"
        """
        if not args:
            args = {}
        if self.access_token:
            if post_args is not None:
                post_args["access_token"] = self.access_token
            else:
                args["access_token"] = self.access_token
        post_data = None if post_args is None else urllib.urlencode(post_args)

        args["query"] = query
        args["format"] = "json"
        file = urllib2.urlopen("https://api.facebook.com/method/fql.query?" + urllib.urlencode(args), post_data)
        try:
            content = file.read()
            response = _parse_json(content)
            # Returns a list on success and a dictionary describing the error on failure
            if type(response) is dict and "error_code" in response:
                raise GraphAPIError(response["error_code"], response["error_msg"])
        finally:
            file.close()
        return response
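A usage sketch built from the docstring's own example query; `graph` is assumed to be an instance of this class with a valid access_token, and the shape of the returned rows is an assumption:

# graph is an instance of the class above, already holding an access token.
rows = graph.fql("SELECT affiliations FROM user WHERE uid = me()")
for row in rows:
    print row["affiliations"]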
Example #12
    def api_request(self, path, args=None, post_args=None):
        """Fetches the given path in the Graph API.

        We translate args to a valid query string. If post_args is given,
        we send a POST request to the given path with the given arguments.
        """
        if not args:
            args = {}
        if self.access_token:
            if post_args is not None:
                post_args["access_token"] = self.access_token
            else:
                args["access_token"] = self.access_token
        if self.api_key:
            if post_args is not None:
                post_args["api_key"] = self.api_key
            else:
                args["api_key"] = self.api_key
        if post_args is not None:
            post_args["format"] = "json-strings"
        else:
            args["format"] = "json-strings"
        post_data = None if post_args is None else urllib.urlencode(post_args)
        file = urllib.urlopen("https://api.facebook.com/method/" + path + "?" + urllib.urlencode(args), post_data)
        try:
            response = _parse_json(file.read())
        finally:
            file.close()
        if response and response.get("error"):
            raise GraphAPIError(response["error"]["type"], response["error"]["message"])
        return response
Example #13
 def post(self):
     try:
         name = self.request.POST["name"]
         topic = MicroTopic.all().filter("name =", name).get()
         if not topic:
             raise ReatiweError("Topic %s does not exist." % name)
         if self.request.POST["mode"]:
             mode = self.request.POST["mode"]
         else:
             mode = "subscribe"
         form_fields = {
             "hub.mode": mode,
             "hub.callback": "%s/callback/%s" % (settings.SITE_URL, topic.name),
             "hub.topic": topic.url,
             "hub.verify": "sync",
             "hub.verify_token": topic.name,
         }
         result = 200
         url = self.request.POST["hub"]
         req = urllib2.Request(url, urllib.urlencode(form_fields))
         o = urlparse.urlparse(url)
         # superfeedr support
         if o.username and o.password:
             base64string = base64.encodestring("%s:%s" % (o.username, o.password))[:-1]
             authheader = "Basic %s" % base64string
             new_url = "%s://%s%s" % (o.scheme, o.hostname, o.path)
             req = urllib2.Request(new_url, urllib.urlencode(form_fields))
             req.add_header("Authorization", authheader)
         urllib2.urlopen(req)
     except DownloadError, e:
         logging.error("DownloadError: %s" % repr(e))
         pass
Example #14
def XML_ReadFromURL(address, path):
    address = g_param["Addr_PMS"]
    # address[0]+':'+str(address[1])  <- address should be this. ReadFromURL gets called with bad parameters?
    xargs = PlexAPI_getXArgs()
    if path.find("?") >= 0:
        path = path + "&" + urlencode(xargs)
    else:
        path = path + "?" + urlencode(xargs)

    XMLstring = GetURL(address, path)
    if XMLstring == False:
        dprint(__name__, 0, "No Response from Plex Media Server")
        return False

    # parse from memory
    XMLroot = etree.fromstring(XMLstring)

    # XML root to ElementTree
    XML = etree.ElementTree(XMLroot)

    dprint(__name__, 1, "====== received XML-PMS ======")
    dprint(__name__, 1, XML_prettystring(XML))
    dprint(__name__, 1, "====== XML-PMS finished ======")

    return XML
Example #15
    def do_call(self, url, parameters):
        """ Do Zoho API call.

        @param url: URL to be called

        @param parameters: Optional POST parameters.
        """
        # Do not mutate the original dict
        parameters = parameters.copy()
        if self.ticket is not None:
            parameters["ticket"] = self.ticket
        parameters["authtoken"] = self.authtoken
        parameters["scope"] = self.scope

        stringify(parameters)

        if logger.getEffectiveLevel() == logging.DEBUG:
            # Output Zoho API call payload
            logger.debug("Doing ZOHO API call:" + url)
            for key, value in parameters.items():
                logger.debug(key + ": " + value)
        self.parameters = parameters
        self.parameters_encoded = urllib.urlencode(parameters)
        request = urllib2.Request(url, urllib.urlencode(parameters))
        response = urllib2.urlopen(request).read()

        if logger.getEffectiveLevel() == logging.DEBUG:
            # Output Zoho API call payload
            logger.debug("ZOHO API response:" + url)
            logger.debug(response)

        return response
Example #16
File: url.py Project: apolmig/inbox
def url_concat(url, args, fragments=None):
    """Concatenate url and argument dictionary regardless of whether
    url has existing query parameters.

    >>> url_concat("http://example.com/foo?a=b", dict(c="d"))
    'http://example.com/foo?a=b&c=d'
    """

    if not args and not fragments:
        return url

    # Strip off hashes
    while url[-1] == "#":
        url = url[:-1]

    fragment_tail = ""
    if fragments:
        fragment_tail = "#" + urlencode(fragments)

    args_tail = ""
    if args:
        if url[-1] not in ("?", "&"):
            args_tail += "&" if ("?" in url) else "?"
        args_tail += urlencode(args)

    return url + args_tail + fragment_tail
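A small sketch of the `fragments` argument, which the doctest above does not cover; the values are illustrative:

# Query arguments are appended first, then the urlencoded fragment.
print url_concat("http://example.com/foo", {"a": "b"}, fragments={"section": "2"})
# -> http://example.com/foo?a=b#section=2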
Example #17
def microsoft_translate(text, sourceLang, destLang):
    if MS_TRANSLATOR_CLIENT_ID == "" or MS_TRANSLATOR_CLIENT_SECRET == "":
        return gettext("Error: translation service not configured.")
    try:
        # get access token
        params = urlencode(
            {
                "client_id": MS_TRANSLATOR_CLIENT_ID,
                "client_secret": MS_TRANSLATOR_CLIENT_SECRET,
                "scope": "http://api.microsofttranslator.com",
                "grant_type": "client_credentials",
            }
        )
        conn = httplib.HTTPSConnection("datamarket.accesscontrol.windows.net")
        conn.request("POST", "/v2/OAuth2-13", params)
        response = json.loads(conn.getresponse().read())
        token = response[u"access_token"]

        # translate
        conn = httplib.HTTPConnection("api.microsofttranslator.com")
        params = {"appId": "Bearer " + token, "from": sourceLang, "to": destLang, "text": text.encode("utf-8")}
        conn.request("GET", "/V2/Ajax.svc/Translate?" + urlencode(params))
        response = json.loads('{"response":' + conn.getresponse().read().decode("utf-8") + "}")
        return response["response"]
    except:
        raise
def fetch_html(url, get=None, post=None, headers=None, cookie_jar=None):
    """
    Fetches and returns the html at the given *url*, optionally using *get*, *post*,
    *header*, and *cookie_jar*. No scraping occurs. This function is used internally
    by :func:`scrapemark.scrape`. For the behavior of ``headers['User-Agent']`` and *cookie_jar*, read
    the :func:`scrapemark.scrape` documentation.
    """
    if get:
        if type(get) == str:
            get = cgi.parse_qs(get)
        l = list(urlparse.urlparse(url))
        g = cgi.parse_qs(l[4])
        g.update(get)
        l[4] = urllib.urlencode(g)
        url = urlparse.urlunparse(l)
    if post and type(post) != str:
        post = urllib.urlencode(post)
    if cookie_jar == None:
        cookie_jar = cookielib.CookieJar()
    if not headers:
        headers = {"User-Agent": user_agent}
    else:
        if "User-Agent" not in headers:
            headers["User-Agent"] = user_agent
    if verbose:
        print "fetching", url, "..."
    request = urllib2.Request(url, post, headers)
    request.add_header("Accept", "text/html")
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_jar))
    res = opener.open(request).read()
    if verbose:
        print "DONE fetching."
    return res
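A usage sketch of the function above; the URL and query parameters are illustrative:

# The "get" dict is merged into the URL's query string before the request is sent.
html = fetch_html("http://example.com/search", get={"q": "urlencode"})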
Example #19
    def test_POST_with_x_www_form_urlencoded_body_params_and_auth_header(self):
        """Test issue when user's request has authorization header and uses
        application/x-www-form-urlencoded content type with some
        request body parameters.

        note: In this case both POST and GET parameters should be included in
        signature base string, so we test GET and POST together
        note: behaviour defined in http://tools.ietf.org/html/rfc5849#section-3.4.1.3.1
        """
        # get valid access token
        self._request_token()
        self._authorize_and_access_token_using_form()

        # init request params and headers
        get_params = {"foo": "bar"}
        body_params = {"some": "param", "other": "param"}
        content_type = "application/x-www-form-urlencoded"
        header = self._make_auth_header_with_HMAC_SHA1("post", "/oauth/photo/", get_params, body_params, True)

        body = urllib.urlencode(body_params)

        response = self.c.post(
            # this is workaround to have both POST & GET params in this request
            "/oauth/photo/?%s" % urllib.urlencode(get_params),
            data=body,
            HTTP_AUTHORIZATION=header["Authorization"],
            content_type=content_type,
        )

        self.assertEqual(response.status_code, 200)
Example #20
File: main.py Project: vsxed/megbot
        def definition(word=None):
            query = word.encode("utf-8")
            query = urllib.urlencode({"term": query})
            urbandictionary = "http://api.urbandictionary.com/v0/define?%s" % query
            response = urllib2.urlopen(urbandictionary)
            responseData = response.read()
            pData = json.loads(responseData)
            data = pData["list"]
            antworten = [
                "Denk dir doch keinen Schmarrn aus, du Opfer.",
                "Mann ey. Keine Ahnung, hab ich nicht gefunden…",
                "...aaaalter. Nicht jetzt, okay?",
                "Nope.",
                "Hab ich nicht gefunden.",
                "Gabs nicht.",
                "Weder auf Urban Dictionary, noch auf Wikipedia hab ich was finden können. Sicher, dass du das richtig geschrieben hast?",
                "Boah eh, geh doch den Frese damit nerven. Oder frag die Huppa. Die weiß das bestimmt.",
            ]

            if len(data) > 0 and not re.match("(?:\#){1,2}w[kpedia]*", word):
                first = data[0]
                reply(first["word"] + ": " + first["definition"])
            else:
                query = re.sub("(?:\-){1,2}w[kpedia]*", "", word)
                query = query.encode("utf-8")
                query = urllib.urlencode(
                    {
                        "format": "json",
                        "action": "query",
                        "exintro": "",
                        "redirects": 1,
                        "explaintext": "",
                        "prop": "extracts",
                        "titles": query,
                    }
                )
                wikipedia = "https://de.wikipedia.org/w/api.php?%s" % query
                response = urllib2.urlopen(wikipedia)
                responseData = response.read()
                pData = json.loads(responseData)
                data = pData["query"]
                data = data["pages"]
                data = data.values()[0]

                if len(data) == 4:
                    limit = 350
                    extract = data["extract"]
                    title = data["title"]
                    pageId = data["pageid"]
                    link = "https://de.wikipedia.org/?curid=" + str(pageId)

                    fact = (
                        (title + ": " + extract[:limit] + "... \n\nMehr? -> " + link)
                        if len(extract) >= limit
                        else title + ": " + extract
                    )

                    reply(fact)
                else:
                    reply(random.choice(antworten))
Example #21
def images_action():
    global Houses, Images

    queue = Queue.Queue()
    for i in range(5):
        t = ThreadUrl(queue)
        t.setDaemon(True)
        t.start()

    condition = Images.c.attempted == 0
    images = Images.select(condition).execute()
    for image in images:
        args = {"size": "f", "location": image["url"]}

        # fetch full-size image
        url = IMAGE_BASE + "?" + urllib.urlencode(args)
        queue.put(url)

        # fetch thumbnail
        if image["thumb"] == 1:
            args["size"] = "t"
            url = IMAGE_BASE + "?" + urllib.urlencode(args)
            queue.put(url)

        # set DB flag
        condition = Images.c.id == image["id"]
        Images.update().where(condition).values(attempted=1).execute()

    queue.join()
    images.close()
Example #22
    def _get_url(self, cmd, params, apikey):
        """Will prepare URL that connects to CloudByte."""

        if params is None:
            params = {}

        params["command"] = cmd
        params["response"] = "json"

        sanitized_params = {}

        for key in params:
            value = params[key]
            if value is not None:
                sanitized_params[key] = six.text_type(value)

        sanitized_params = urllib.urlencode(sanitized_params)
        url = "/client/api?%s" % sanitized_params

        LOG.debug("CloudByte URL to be executed: [%s].", url)

        # Add the apikey
        api = {}
        api["apiKey"] = apikey
        url = url + "&" + urllib.urlencode(api)

        return url
Example #23
def tiny_url(url):
    try:
        url = urlopen(ISGD % urlencode({"longurl": url})).read()
    except:
        url = urlopen(TINYURL % urlencode({"url": url})).read()
    finally:
        return url
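A usage sketch, assuming the ISGD and TINYURL format strings are defined at module level (they are not shown in this snippet):

# Tries is.gd first and falls back to TinyURL; any exception from the first call is swallowed.
short = tiny_url("http://example.com/some/very/long/path?with=params")
print short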
Example #24
def fetch_html(url, get=None, post=None, headers=None, cookie_jar=None):
    if get:
        if type(get) == str:
            get = cgi.parse_qs(get)
        l = list(urlparse.urlparse(url))
        g = cgi.parse_qs(l[4])
        g.update(get)
        l[4] = urllib.urlencode(g)
        url = urlparse.urlunparse(l)
    if post and type(post) != str:
        post = urllib.urlencode(post)
    if cookie_jar == None:
        cookie_jar = cookielib.CookieJar()
    if not headers:
        headers = {"User-Agent": user_agent}
    else:
        if "User-Agent" not in headers:
            headers["User-Agent"] = user_agent
    if verbose:
        print "fetching", url, "..."
    request = urllib2.Request(url, post, headers)
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_jar))
    res = opener.open(request).read()
    if verbose:
        print "DONE fetching."
    return res
Example #25
    def get_authentication_tokens(self):
        """ So, you want to get an authentication url?

            l = LinkedinAPI(YOUR_CONFIG)
            auth_props = l.get_authentication_tokens()
            auth_url = auth_props['auth_url']
            print auth_url
        """

        body = urllib.urlencode({"oauth_callback": self.callback_url}) if self.callback_url else None
        resp, content = self.client.request(self.request_token_url, "POST", body=body)

        status = int(resp["status"])
        if status != 200:
            raise LinkedinAuthError(
                "There was a problem authenticating you. Error: %s, Message: %s" % (status, content)
            )

        request_tokens = dict(parse_qsl(content))

        auth_url_params = {"oauth_token": request_tokens["oauth_token"]}

        request_tokens["auth_url"] = self.authorize_url + "?" + urllib.urlencode(auth_url_params)

        return request_tokens
Example #26
    def get(self):
        verification_code = self.request.get("code")
        args = dict(client_id=FACEBOOK_APP_ID, redirect_uri=self.request.path_url)
        if verification_code:
            args["client_secret"] = secret.FACEBOOK_APP_SECRET
            args["code"] = verification_code
            response = cgi.parse_qs(
                urllib.urlopen("https://graph.facebook.com/oauth/access_token?" + urllib.urlencode(args)).read()
            )
            access_token = response["access_token"][-1]

            # Download the user profile and cache a local instance of the
            # basic profile info
            profile = json.load(
                urllib.urlopen("https://graph.facebook.com/me?" + urllib.urlencode(dict(access_token=access_token)))
            )

            logging.info(profile)  # log users profile

            id = str(profile["id"])
            user = AuthToken(id=id, name=profile["name"], token=access_token, service="facebook")
            if "hometown" in profile:
                location = profile["hometown"]["name"]
            user.url = db.Link(profile["link"])
            user.put()

            self.redirect("/new?fb=" + access_token)
        else:
            self.redirect("https://graph.facebook.com/oauth/authorize?" + urllib.urlencode(args))
Example #27
    def _process(self):
        from MaKaC.common.Configuration import Config
        from MaKaC.externUsers import ExtUserHolder
        from urllib import urlencode

        euh = ExtUserHolder()
        ah = user.AvatarHolder()
        newIdentityList = []
        for id in self._identityList:
            newId = id
            for authId in Config.getInstance().getAuthenticatorList():
                if id[: len(authId)] == authId:
                    dict = euh.getById(authId).getById(id.split(":")[1])
                    av = user.Avatar(dict)
                    newId = ah.add(av)
                    identity = dict["identity"](dict["login"], av)
                    try:
                        dict["authenticator"].add(identity)
                    except:
                        pass
                    av.activateAccount()
            newIdentityList.append("selectedPrincipals=%s" % newId)
        if self._addURL.find("?") != -1:
            targetURL = self._addURL + "&" + urlencode(self._params) + "&" + "&".join(newIdentityList)
        else:
            targetURL = self._addURL + "?" + urlencode(self._params) + "&" + "&".join(newIdentityList)
        self._redirect(targetURL)
    def fetch(self, path, *args, **kwargs):
        parse = kwargs.pop("parse", lambda r: r["results"][0])
        kwargs["api-key"] = self.apikey

        if not path.lower().startswith(self.BASE_URI):
            url = self.BASE_URI + "%s.json?" % path
            url = (url % args) + urllib.urlencode(kwargs)
        else:
            url = path + "?" + urllib.urlencode(kwargs)

        resp, content = self.http.request(url)
        if resp.status not in (200, 304):
            content = json.loads(content)
            errors = "; ".join(e["error"] for e in content["errors"])
            if resp.status == 404:
                raise NytNotFoundError(errors)
            else:
                raise NytCongressError(errors)

        result = json.loads(content)

        if callable(parse):
            result = parse(result)
            if DEBUG:
                result["_url"] = url
        return result
Example #29
    def request(self, url, method="GET", parameters=None):
        request_url = self.server_url + "/v" + self.api_version + url
        req_file = None
        if parameters is None:
            parameters = {}
        parameters["token"] = self.api_token
        if method == "GET":
            opener = urllib2.build_opener()
            req_file = opener.open(request_url + "?" + urllib.urlencode(parameters))
        else:
            # check if there is an attachment
            if "attachment" in parameters and poster_loaded:
                opener = poster.streaminghttp.register_openers()
                datagen, headers = poster.encode.multipart_encode(parameters)
                req = urllib2.Request(request_url, datagen, headers)
            else:
                req = urllib2.Request(request_url, urllib.urlencode(parameters))
            req_file = urllib2.urlopen(req)

        # parse response
        try:
            json_response = str(req_file.read())
            response = json.loads(json_response)
            if req_file.code >= 400:
                raise TorApiException(response["error"])
        finally:
            req_file.close()

        # return
        return response
Example #30
def get_url(url, post={}, get={}):
    post_params = urllib.urlencode(post)
    get_params = urllib.urlencode(get)
    if get:
        get_params = "?" + get_params

    # add post, get data and headers
    url = "%s%s" % (url, get_params)
    if post_params:
        request = urllib2.Request(url, post_params)
    else:
        request = urllib2.Request(url)

    # for discogs
    request.add_header("Accept-Encoding", "gzip")
    request.add_header("User-Agent", USER_AGENT)

    url_sock = urllib2.urlopen(request)
    enc = get_encoding_from_socket(url_sock)

    # unzip the response if needed
    data = url_sock.read()
    if url_sock.headers.get("content-encoding", "") == "gzip":
        data = gzip.GzipFile(fileobj=StringIO(data)).read()
    url_sock.close()

    return data, enc