Example #1
0
def get_image(xmin, ymin, xsize, ysize, width, height):
  """Fetch a rendered tile from a backend renderer instance (ndb tasklet).

  Args:
    xmin, ymin: Tile origin coordinates.
    xsize, ysize: Tile extent.
    width, height: Output image dimensions in pixels.

  Returns (via tasklets.Return):
    (content, cost) -- the rendered image bytes and the int value of the
    backend's X-Operation-Cost response header.

  Raises:
    AssertionError: if no backend produced a 200 response after 3 tries.
  """
  params = urllib.urlencode({
      'xmin': xmin,
      'ymin': ymin,
      'xsize': xsize,
      'ysize': ysize,
      'width': width,
      'height': height,
  })
  # Bug fix: previously 'response' was unbound (NameError at the assert
  # below) if every attempt raised a deadline exception.
  response = None
  for i in range(3): # Retries
    # Shard requests across backends deterministically by parameter hash.
    instance_id = hash(params) % NUM_BACKENDS
    url = urlparse.urljoin(backends.get_url('renderer', instance=instance_id),
                           '/backend/render_tile?%s' % params)
    rpc = urlfetch.create_rpc(deadline=10.0)
    urlfetch.make_fetch_call(rpc, url)
    try:
      response = yield rpc
      # 500 and 0 indicate a backend-side failure worth retrying; any
      # other status (including non-200) is treated as final.
      if response.status_code not in (500, 0):
        break
    except (apiproxy_errors.DeadlineExceededError,
            urlfetch.DeadlineExceededError):
      pass
    logging.warn("Backend failed to render tile; retrying")
    # Wait a little before retrying.  NOTE(review): time.sleep blocks the
    # event loop inside a tasklet -- confirm this is acceptable here.
    time.sleep(0.2)
  assert response is not None and response.status_code == 200, \
      "Expected status 200, got %s" % (
          response.status_code if response is not None else 'no response')
  raise tasklets.Return(
      response.content,
      int(response.headers['X-Operation-Cost']))
Example #2
0
    def _foke_http_post(self, dry_run_fail_rate):
        """Issue a fake HTTP POST against the GCS fake server.

        Args:
            dry_run_fail_rate: float in [0.0, 1.0]; probability fed to
                true_false_pick to decide simulated success vs. failure.

        Returns:
            (status_code, body): (200, response content) on a simulated
            success, (400, canned JSON) on a simulated failure, or
            (400, error message) if the fetch itself failed.
        """
        rpc = urlfetch.create_rpc()
        urlfetch.make_fetch_call(rpc,
                                 url='http://104.154.53.75',
                                 method=urlfetch.POST)
        try:
            result = rpc.get_result()
        except urlfetch.DownloadError as e:
            return 400, e.message

        if true_false_pick(dry_run_fail_rate):
            return 200, result.content
        return 400, '{"message": "fake fail"}'
Example #3
0
def track_event(event, properties=None):
    """Asynchronously log an event to the mixpanel.com API on App Engine
    using an RPC URL Fetch object.

    @param event: The overall event/category to log this data under
    @param properties: A dictionary of key-value pairs that describe the event
    See http://mixpanel.com/api/ for further detail.
    @return Instance of RPC Object; caller may wait() on it or drop it.
    """
    # Idiom fix: compare to None with 'is', not '=='.
    if properties is None:
        properties = {}
    if 'token' not in properties:
        properties['token'] = token  # module-level Mixpanel token

    params = {"event": event, "properties": properties}

    # NOTE(review): this logs the API token along with the payload --
    # consider redacting before logging.
    logging.info(params)

    data = base64.b64encode(json.dumps(params))
    request = "http://api.mixpanel.com/track/?data=" + data

    rpc = urlfetch.create_rpc()
    urlfetch.make_fetch_call(rpc, request)

    return rpc
Example #4
0
def track(event, properties=None):
    """Asynchronously log an event to the mixpanel.com API on App Engine
    using an RPC URL Fetch object.

    @param event: The overall event/category to log this data under
    @param properties: A dictionary of key-value pairs that describe the event
    See http://mixpanel.com/api/ for further detail.
    @return Instance of RPC Object

    # Example usage:
    track("invite-friends",
         {"method": "email", "number-friends": "12", "ip": "123.123.123.123"})
    """
    # Idiom fix: compare to None with 'is', not '=='.
    if properties is None:
        properties = {}
    # NOTE(review): the leading apostrophe inside this token string looks
    # like a paste error -- verify against the real Mixpanel project token.
    token = "'0ea4f90f7b8157d6dec15b1b26c39b38"
    if "token" not in properties:
        properties["token"] = token

    params = {"event": event, "properties": properties}

    data = base64.b64encode(json.dumps(params))
    request = "http://api.mixpanel.com/track/?data=" + data

    rpc = urlfetch.create_rpc()
    urlfetch.make_fetch_call(rpc, request)

    return rpc
Example #5
0
    def urlfetch(
        self,
        url,
        payload=None,
        method="GET",
        headers=None,
        allow_truncated=False,
        follow_redirects=True,
        validate_certificate=None,
        deadline=None,
        callback=None,
    ):
        """Tasklet wrapper around App Engine urlfetch.

        Starts an async fetch, yields the RPC, and returns the fetch
        result via tasklets.Return.  Parameters mirror
        urlfetch.make_fetch_call.
        """
        from google.appengine.api import urlfetch

        # Bug fix: 'headers' previously defaulted to a shared mutable {}.
        # None now means "no extra headers"; callers passing a dict are
        # unaffected.
        if headers is None:
            headers = {}

        rpc = urlfetch.create_rpc(deadline=deadline, callback=callback)
        urlfetch.make_fetch_call(
            rpc,
            url,
            payload=payload,
            method=method,
            headers=headers,
            allow_truncated=allow_truncated,
            follow_redirects=follow_redirects,
            validate_certificate=validate_certificate,
        )
        result = yield rpc
        raise tasklets.Return(result)
 def query_by_bounds(self, query_list):
     """Run Yelp bounds searches for each query tuple, in parallel.

     Each element of query_list is a tuple of
     (sw_lat, sw_lon, ne_lat, ne_lon, offset, term, limit).

     Returns:
         List of raw response bodies for requests that returned 200;
         failed or non-200 requests are silently dropped.
     """
     # http://api.yelp.com/v2/search?term=food&bounds=37.900000,-122.500000|37.788022,-122.399797&limit=3
     signed_urls = []
     for (sw_lat, sw_lon, ne_lat, ne_lon,
          offset, term, limit) in query_list:
         params = {
             'sort': 2,
             'term': term.replace(' ', '+'),
             'bounds': "{},{}|{},{}".format(sw_lat, sw_lon, ne_lat, ne_lon),
             'limit': limit,
             'offset': offset,
         }
         signed_urls.append(self.sign_url(API_HOST, SEARCH_PATH, params))

     # Kick off every fetch asynchronously, then block until all finish.
     pending = []
     for target in signed_urls:
         rpc = urlfetch.create_rpc(deadline=10)
         urlfetch.make_fetch_call(rpc, target)
         pending.append(rpc)

     for rpc in pending:
         rpc.wait()

     # Collect only successful responses (get_result is cached post-wait).
     return [rpc.get_result().content for rpc in pending
             if rpc.get_result().status_code == 200]
Example #7
0
def RequestName(key):
    """Asynchronously POST *key* to the alarm service's Key2Name endpoint.

    Returns:
        The urlfetch RPC object so the caller can wait()/get_result().
        Previously the RPC was dropped, so the request could be aborted
        when the enclosing request handler finished before it completed.
    """
    URL = "%s/Key2Name/" % AlarmUrl()
    logging.info('Key request send for %s to URL %s' % (key, URL))
    rpc = urlfetch.create_rpc()
    message = key
    # send the request to an SL object
    urlfetch.make_fetch_call(rpc, URL, payload=message, method="POST")
    return rpc
Example #8
0
def unsubscribe(email):
    """Unsubscribes an existing user from the list.

    Issues a PATCH to the Mailchimp member endpoint setting status to
    "unsubscribed".

    Returns:
        The urlfetch result object (status logged for 200/400), or None
        if the request timed out or failed.
    """
    json_payload = {
        "status": "unsubscribed"
    }
    # NOTE(review): the credential on this line was redacted/corrupted in
    # the source; config.mailchimp_api_key is an assumption -- confirm the
    # real secret source before shipping.
    headers = {
        "Authorization": "Basic %s" % base64.b64encode(
            "username:" + config.mailchimp_api_key),
        "Content-Type": "application/x-www-form-urlencoded"
    }
    # Subscribers are stored as endpoints using MD5 hashing on lowercase emails
    formatted_email = email.lower()
    member_code = hashlib.md5(formatted_email).hexdigest()

    url = API_ROOT + 'lists/' + config.mailchimp_list_id + '/members/' + member_code
    rpc = urlfetch.create_rpc()
    urlfetch.make_fetch_call(
        rpc, url=url,
        payload=json.dumps(json_payload),
        method=urlfetch.PATCH,
        headers=headers)
    try:
        result = rpc.get_result()
        if result.status_code == 400:
            logging.warning(result.content)
        elif result.status_code == 200:
            logging.info('User unsubscribed to Mailchimp: {}'.format(email))
    except urlfetch.DownloadError:
        # Request timed out or failed.
        logging.error('Mailchimp list unsubscribe failed.')
        result = None
    return result
Example #9
0
File: utils.py Project: kaste/ndb-x
def urlfetch(url,
             payload=None,
             method='GET',
             headers={},
             allow_truncated=False,
             follow_redirects=True,
             validate_certificate=None,
             deadline=None,
             callback=None):
    fut = ndb.Future()
    rpc = _urlfetch.create_rpc(deadline=deadline, callback=callback)
    _urlfetch.make_fetch_call(rpc,
                              url,
                              payload=payload,
                              method=method,
                              headers=headers,
                              allow_truncated=allow_truncated,
                              follow_redirects=follow_redirects,
                              validate_certificate=validate_certificate)

    def _on_completion():
        try:
            result = rpc.get_result()
        except Exception, err:
            _, _, tb = sys.exc_info()
            fut.set_exception(err, tb)
        else:
Example #10
0
    def _fetch_scores(self):
        """Fetch the NFL live score strip and normalize its sloppy JSON.

        The feed contains empty slots (",,") which are filled with zeros
        so downstream parsing succeeds.

        Returns:
            The normalized response text on success, {} if the fetch did
            not return 200, or an error dict on DownloadError.
        """
        rpc = urlfetch.create_rpc()
        scores = {}
        urlfetch.make_fetch_call(rpc, 'http://www.nfl.com/liveupdate/scorestrip/scorestrip.json')

        # presumably ensures season 2012 / week 2 rows exist -- TODO confirm
        self._check_db(2012, 2)

        try:
            result = rpc.get_result()
            if result.status_code == 200:
                counter = 100
                length = 0
                text = result.content

                # Repeat until stable: each pass turns ",," into ",0,",
                # which can itself create new ",," pairs.
                while length != len(text):
                    length = len(text)
                    text = text.replace(',,', ',0,')

                    # Prevent infinite loops
                    if counter != 0:
                        counter -= 1
                    else:
                        break

                scores = text
        except urlfetch.DownloadError:
            # Typo fix: "occured" -> "occurred".
            scores = {"Error": "An unexpected error occurred"}

        return scores
Example #11
0
    def _fetch_async(self, urls):
        """Fetch the OAuth2 user-profile URL asynchronously and merge the
        decoded JSON response(s) into a single dict.

        Note: the *urls* argument is currently unused; targets come from
        self._OAUTH2_USERPROFILE_URL.
        """
        merged = {}
        pending = []

        def _on_done(rpc):
            resp = rpc.get_result()
            log.info(dir(resp))
            log.info(resp.__dict__)
            log.info(resp.content)
            merged.update(json.loads(resp.content).items())

        def _bind(rpc):
            # Bind now so each callback closes over its own RPC object.
            return lambda: _on_done(rpc)

        for scope_url in [self._OAUTH2_USERPROFILE_URL]:
            target = url_concat(scope_url, access_token=self.access_token)
            log.info(target)
            rpc = urlfetch.create_rpc()
            rpc.callback = _bind(rpc)
            urlfetch.make_fetch_call(rpc, target)
            pending.append(rpc)

        for rpc in pending:
            rpc.wait()

        return merged
def main():
    """Warm the Memcache by querying this application's own API.

    Issues one "maxtracks" request plus 20 paginated location requests per
    genre, all asynchronously, then waits for every RPC to complete.
    Intended to be called by a cron job every 3 minutes.
    """
    # Fetch API Locations with Genres
    genre_dict = utils.genres
    genre_dict.update({'all': ''})
    queries = []
    # The original inserted at an ever-increasing index, which is just an
    # append; the bookkeeping counters are unnecessary.
    for genre in genre_dict.keys():
        rpc = urlfetch.create_rpc()
        queries.append(rpc)
        urlfetch.make_fetch_call(
            rpc, settings.APPLICATION_URL +
            "/api/locations/maxtracks?genre=" + genre)
        for i in range(0, 20):
            rpc = urlfetch.create_rpc()
            queries.append(rpc)
            urlfetch.make_fetch_call(
                rpc, settings.APPLICATION_URL +
                "/api/locations/?&limit=10&offset=%i&genre=%s" % (i, genre))

    # Fetch top Cities
    # TODO

    for query in queries:
        query.wait()
def make_fetch(url, data=None, headers=None, method=None):
    """Perform a urlfetch with a 60s deadline and return the body on 200.

    Args:
        url: Target URL.
        data: POST payload (ignored for GET).
        headers: Header dict for POST requests.
        method: "POST" or "GET".

    Returns:
        The response content on HTTP 200, otherwise None (errors logged).

    Raises:
        EnvironmentError: if the App Engine APIs are unavailable.
        ValueError: if *method* is not "POST" or "GET".  Previously an
            unknown method skipped the fetch entirely and then crashed
            when reading an RPC that was never started.
    """
    try:
        from google.appengine.api import urlfetch
    except ImportError:
        raise EnvironmentError('The App Engine APIs are not available.')

    urlfetch.set_default_fetch_deadline(60)
    rpc = urlfetch.create_rpc()
    if method == "POST":
        urlfetch.make_fetch_call(rpc, url, method="POST",
                                 headers=headers, payload=data,
                                 validate_certificate=True)
    elif method == "GET":
        urlfetch.make_fetch_call(rpc, url)
    else:
        raise ValueError('Unsupported method: {!r}'.format(method))

    try:
        result = rpc.get_result()
        if result.status_code == 200:
            logging.info("Successful urlfetch for {}".format(method))
            return result.content
        logging.error('Returned status code {}'.format(result.status_code))
        logging.error(result)
    except urlfetch.DownloadError as e:
        logging.error(str(e))
    except urlfetch.Error as e:
        logging.error(str(e))
Example #14
0
def send(template_data={}, **kwargs):
    """Render an email and deliver it through the Mandrill send API.

    NOTE(review): template_data uses a mutable default dict; it is only
    read here (passed into render/format), but a None default would be
    safer.

    Args:
        template_data: dict of values interpolated into subject and body.
        **kwargs: must include 'subject' and 'to_address', plus either
            'body' (raw template string) or 'template' (template file).

    Returns:
        The urlfetch result object, or None when the HTTP call fails
        with DownloadError, or None (implicit) when delivery is
        suppressed in development.
    """
    # Determine if message should send
    if util.is_development() and not config.should_deliver_smtp_dev:
        logging.info('\n\n---------------------------------------------------')
        logging.info('Email not sent, config.should_deliver_smtp_dev False.')
        logging.info('Email params:')
        logging.info(kwargs)
        logging.info(template_data)
        if kwargs['template'] == 'authorship_email.html':
            logging.info(
                '/{entity_type}/{entity_id}/accept_authorship?uid={to_uid}&token={token}'
                .format(**template_data)
            )
        logging.info('\n---------------------------------------------------\n')
        return

    subject = render(kwargs['subject'], **template_data)

    # Determine if using html string or a template
    body = ''
    if 'body' in kwargs:
        body = render(kwargs['body'], **template_data)
    elif 'template' in kwargs:
        body = render_template(kwargs['template'], **template_data)

    # JSON for Mandrill HTTP POST request
    # The API key comes from the datastore if present, else config.
    sv = ndb.Key('SecretValue', 'mandrill_api_key').get()
    api_key = getattr(sv, 'value', config.mandrill_api_key)
    if not util.is_development() and api_key is config.mandrill_api_key:
        logging.error("No mandrill api_key set in production!")
    json_mandrill = {
        "key": api_key,
        "message": {
            "html": body,
            "subject": subject,
            "from_email": config.from_server_email_address,
            "from_name": "BELE Library",
            "inline_css": True,
            "to": format_to_address(kwargs['to_address'])
        }
    }

    # URL for Mandrill HTTP POST request
    url = "https://mandrillapp.com/api/1.0/messages/send.json"
    rpc = urlfetch.create_rpc()
    urlfetch.make_fetch_call(rpc, url=url,
        payload=json.dumps(json_mandrill),
        method=urlfetch.POST,
        headers={'Content-Type': 'application/x-www-form-urlencoded'})
    try:
        result = rpc.get_result()
        logging.info(u"...{}".format(result.status_code))
        logging.info(result.content)
        if result.status_code == 200:
            text = result.content
    except urlfetch.DownloadError:
        # Request timed out or failed.
        logging.error('Email failed to send.')
        result = None
    return result
Example #15
0
    def send(self):
        """ Attempt to send the WebhookRequest.

        Returns:
            NotificationResponse: content/status_code

        NOTE(review): the RPC result is never awaited -- a 200 response
        here only means the fetch was dispatched, not that the webhook
        endpoint accepted it.
        """
        # Build the request
        headers = {
            'Content-Type': 'application/json',
            'X-TBA-Version': '{}'.format(WEBHOOK_VERSION)
        }
        message_json = self.json_string()
        # Generate checksum
        headers['X-TBA-Checksum'] = self._generate_webhook_checksum(
            message_json)

        from google.appengine.api import urlfetch
        rpc = urlfetch.create_rpc()

        from tbans.models.requests.notifications.notification_response import NotificationResponse
        try:
            urlfetch.make_fetch_call(rpc,
                                     self.url,
                                     payload=message_json,
                                     method=urlfetch.POST,
                                     headers=headers)
            return NotificationResponse(200, None)
        except Exception, e:
            # https://cloud.google.com/appengine/docs/standard/python/refdocs/google.appengine.api.urlfetch
            return NotificationResponse(500, str(e))
    def get(self):
        apiKey = "e9554b45d938eba3af70e453dbfbc3c2"
        units = self.request.get("units")
        lat = self.request.get("lat", '0')
        lng = self.request.get("lng", '0')

        url = "https://api.forecast.io/forecast/" + apiKey + "/" + lat + "," + lng + "/?"
        if units == "si":
            url += "&units=si"
        elif units == "us":
            url += "&units=us"
        else:
            print "EzeServer: Unit not recognized"

        self.response.headers.add_header("Access-Control-Allow-Origin", "*")
        self.response.headers['Content-Type'] = 'application/json'

        rpc = urlfetch.create_rpc()
        urlfetch.make_fetch_call(rpc, url)

        try:
            result = rpc.get_result()
            if result.status_code == 200:
                weatherForecastText = result.content
                weatherForecastJson = json.loads(weatherForecastText)
                self.response.write(weatherForecastJson)
                                    
        except urlfetch.DownloadError:
            print "Download Error"
Example #17
0
 def get_result(self):
     """Generator: yield (result, url, args) for each successful fetch.

     Failed fetches are logged and skipped rather than yielded.  Each
     time an active RPC is consumed, one pending request (if any) is
     promoted into the active list and started, keeping a bounded
     number of fetches in flight.
     """
     # Generator: returns successful results one by one; failures are not
     # yielded, only recorded in the logs.
     while self.actives:
         rpc, url, args = self.actives.pop(0)
         success = False
         try:
             result = rpc.get_result()
         except apiproxy_errors.DeadlineExceededError:
             self.log.warn('async fetch timeout:%s' % url)
             continue
         except urlfetch.DownloadError as e:
             self.log.warn("async fetch error(%s):%s" % (str(e), url))
             continue  # Only log; no need to propagate the error upward.
         except Exception as e:
             self.log.warn('%s:%s' % (type(e), url))
             continue
         else:
             success = True
         finally:  # Start a new async request to replace the finished one.
             if self.pendings:
                 newrpc, newurl, newargs = self.pendings.pop(0)
                 self.actives.append((newrpc, newurl, newargs))
                 urlfetch.make_fetch_call(newrpc,
                                          newurl,
                                          allow_truncated=False,
                                          follow_redirects=True,
                                          validate_certificate=False)
         if success:
             yield result, url, args
Example #18
0
def alert_match_closed_async(match):
    """
    Send a message to slack when a match is closed (ndb tasklet)
    :param match: the match that was closed
    :return: (via ndb.Return) the urlfetch result, or None when posting
        is skipped on the dev appserver or the fetch raised.
    """
    if settings.IS_DEV_APPSERVER:
        # Don't post to slack if on local environment
        raise ndb.Return(None)

    headers = {
        'Content-type': 'application/json',
    }
    payload = json.dumps(get_message_data(match))

    rpc = urlfetch.create_rpc()
    # NOTE(review): the Slack webhook URL below is a hard-coded secret;
    # it should live in configuration, not source.
    urlfetch.make_fetch_call(
        rpc,
        'https://hooks.slack.com/services/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',
        payload=payload,
        method=urlfetch.POST,
        headers=headers
    )
    try:
        result = yield rpc
    except Exception as e:
        # not a huge deal if slack isn't notified, just don't bring down the rest of the request
        # NOTE(review): e.message is Python 2-only; use str(e) if porting.
        logging.error(e.message)
        result = None
    raise ndb.Return(result)
Example #19
0
    def _https_connection_gae(self,
                              method,
                              relative_url,
                              query_dict,
                              body=None):
        """Perform an OAuth-signed HTTPS request via App Engine urlfetch.

        Blocks on the RPC and returns the raw response body.  A 10s
        deadline is applied; a body implies a text/xml Content-Type.
        """
        from google.appengine.api import urlfetch
        # Map HTTP verb names to urlfetch constants; anything unrecognized
        # is passed through unchanged, matching the original if/elif chain.
        verb_map = {
            "GET": urlfetch.GET,
            "POST": urlfetch.POST,
            "PUT": urlfetch.PUT,
            "DELETE": urlfetch.DELETE,
        }
        method = verb_map.get(method, method)

        headers = {'Authorization': self._create_oauth_header(query_dict)}
        if body:
            headers["Content-Type"] = "text/xml"

        rpc = urlfetch.create_rpc(deadline=10.0)
        urlfetch.make_fetch_call(rpc,
                                 self._get_url(relative_url),
                                 method=method,
                                 headers=headers,
                                 payload=body)
        return rpc.get_result().content
Example #20
0
    def get(self):
        """Build a JSON catalog of streaming-video entries grouped by
        digital collection and write it to the response.

        Fetches the library catalog's collection facets, then issues one
        async search per collection, parsing each result's Kaltura player
        URL into a video entry.
        """
        obj = {'googlevideos':[]}
        url = "http://search.lib.virginia.edu/catalog.json?f%5Bformat_facet%5D%5B%5D=Online&f%5Bformat_facet%5D%5B%5D=Video&f%5Bformat_facet%5D%5B%5D=Streaming+Video&facet.limit=500"
        resp = urlfetch.fetch(url,deadline=60).content
        # Facet list alternates name, count; zip the halves into a dict.
        catlist = json.loads(resp)['facet_counts']['facet_fields']['digital_collection_facet']
        categories = dict(zip(catlist[0::2], catlist[1::2]))
        temp = {}

        def handle_result(rpc):
            # Parse one collection's search results into video entries.
            result = rpc.get_result()
            resultset = json.loads(result.content)['response']['docs']
            for doc in resultset:
                for coll in doc['digital_collection_facet']:
                    if coll in temp:
                        kalturaurl = doc['url_display'][0]
                        kalturainfo = {}
                        # Two known URL shapes: with and without uiconf_id.
                        if ('uiconf' in kalturaurl):
                            m = re.search(".*/wid/_(.*)/uiconf_id/(.*)/entry_id/(.*)", kalturaurl)
                            kalturainfo = {'wid':m.group(1),'uiconfid':m.group(2),'entryid':m.group(3),
                                           'card':'http://cdn.kaltura.com/p/0/thumbnail/entry_id/' + m.group(3) + '/width/80/height/80/type/1/quality/72'}
                        else:
                            m = re.search(".*/wid/_(.*)/entry_id/(.*)\|\|.*", kalturaurl)
                            kalturainfo = {'wid':m.group(1),'entryid':m.group(2),
                                           'card':'http://cdn.kaltura.com/p/0/thumbnail/entry_id/' + m.group(2) + '/width/80/height/80/type/1/quality/72'}
                        item = {'description':doc['date_coverage_display'][0],
                                'sources':['foo.mp4'],
                                'background':'bg.jpg',
                                'title':doc['title_display'][0],
                                'studio':doc['source_facet'][0]}
                        item.update(kalturainfo)
                        temp[coll]['videos'].append(item)

        # Use a helper function to define the scope of the callback.
        def create_callback(rpc):
            return lambda: handle_result(rpc)

        rpcs = []
        #for url in urls:
        for cat, count in categories.iteritems():
            temp[cat] = {"category":cat,"videos":[]}
            rpc = urlfetch.create_rpc()
            rpc.callback = create_callback(rpc)
            url = "http://search.lib.virginia.edu/catalog.json?f%5Bdigital_collection_facet%5D%5B%5D=" 
            url += cat.replace(' ','+')  
            url += "&f%5Bformat_facet%5D%5B%5D=Online&f%5Bformat_facet%5D%5B%5D=Video&f%5Bformat_facet%5D%5B%5D=Streaming+Video&facet.limit=500&search_field=keyword&sort=score+desc%2C+year_multisort_i+desc"
            urlfetch.make_fetch_call(rpc, url)
            rpcs.append(rpc)

        # ...

        # Finish all RPCs, and let callbacks process the results.
        for rpc in rpcs:
            rpc.wait()

        for cat,item in temp.iteritems():
            obj['googlevideos'].append(item)
        #http://search.lib.virginia.edu/catalog.json?f%5Bformat_facet%5D%5B%5D=Online&f%5Bformat_facet%5D%5B%5D=Video&f%5Bformat_facet%5D%5B%5D=Streaming+Video&facet.limit=500
        out = json.dumps(obj)
        #http://search.lib.virginia.edu/catalog.json?f%5Bformat_facet%5D%5B%5D=Online&f%5Bformat_facet%5D%5B%5D=Video&f%5Bformat_facet%5D%5B%5D=Streaming+Video&facet.limit=500
        self.response.out.write(out)
Example #21
0
def _StartBackendSearchCall(mr,
                            query_project_names,
                            shard_key,
                            invalidation_timestep,
                            deadline=None,
                            failfast=True):
    """Ask a backend to query one shard of the database."""
    shard_id, subquery = shard_key
    # Build the besearch backend URL carrying this shard's subquery.
    backend_url = 'http://%s%s' % (
        modules.get_hostname(module='besearch'),
        framework_helpers.FormatURL(
            mr,
            urls.BACKEND_SEARCH,
            projects=','.join(query_project_names),
            q=subquery,
            start=0,
            num=mr.start + mr.num,
            logged_in_user_id=mr.auth.user_id or 0,
            me_user_id=mr.me_user_id,
            shard_id=shard_id,
            invalidation_timestep=invalidation_timestep))
    logging.info('\n\nCalling backend: %s', backend_url)
    rpc = urlfetch.create_rpc(deadline=deadline or settings.backend_deadline)
    # follow_redirects=False is needed to avoid a login screen on googleplex.
    urlfetch.make_fetch_call(rpc, backend_url, follow_redirects=False,
                             headers=_MakeBackendRequestHeaders(failfast))
    return rpc
Example #22
0
    def get(self):
        """Start an async fetch of FEED_URL with a 3s deadline, let the
        attached callback process the result, then acknowledge."""
        fetch_rpc = urlfetch.create_rpc(deadline=3)
        fetch_rpc.callback = self._create_callback(fetch_rpc)
        urlfetch.make_fetch_call(fetch_rpc, FEED_URL)

        # Block until the fetch (and its callback) have run.
        fetch_rpc.wait()
        self.response.out.write('fetch blogs')
Example #23
0
    def send(self, msg):
        """Asynchronously PUT *msg* to self.addr.

        Returns:
            The urlfetch RPC so the caller can wait()/get_result().
            Previously the RPC was dropped, so the request could be
            aborted when the enclosing request handler finished first.
        """
        rpc = urlfetch.create_rpc()
        urlfetch.make_fetch_call(rpc,
                                 self.addr,
                                 payload=msg,
                                 method=urlfetch.PUT)
        return rpc
Example #24
0
def doAsyncFetch():
  """Demonstrate an async fetch whose deadline outlasts a long local wait.

  Starts a fetch against a slow test endpoint (server sleeps ~110s),
  idles 90s locally, then blocks on the result and logs the status code.
  """
  rpc = urlfetch.create_rpc(deadline=120) # 120 second deadline (the old comment wrongly said 1 second).
  urlfetch.make_fetch_call(rpc, 'http://fake-response.appspot.com/?sleep=110')
  logging.info('doing nothing')
  time.sleep(90)
  result = rpc.get_result() # Blocks here; raises if the 120s deadline is exceeded.
  logging.info(result.status_code)
Example #25
0
    def get(self):
        """Demo handler: async fetch with a callback, then a menu page.

        Starts a fetch of the blog feed, sleeps to simulate other work,
        waits for the RPC (running its callback), and writes links to the
        other demo endpoints plus the current time.
        """
        # Prepare RPC.
        #
        # We set the callback attribute of the RPC object after the RPC object
        # has been created, so we can pass the RPC object to
        # create_callback().
        rpc = urlfetch.create_rpc()
        rpc.callback = create_callback(self, rpc)
        urlfetch.make_fetch_call(rpc,
                                 'http://ae-book.appspot.com/blog/atom.xml')

        # Do other things.
        time.sleep(2)

        # Tell RPCs to finish up.
        rpc.wait()

        self.response.write('''
        <p>Try these:</p>
        <ul>
          <li><a href="/">a simple async call</a></li>
          <li><a href="/callbackobj">using a callback object</a></li>
          <li><a href="/callbackfunc">using a callback function</a></li>
        </ul>
        ''')

        self.response.write('<p>The time is: %s</p>' %
                            str(datetime.datetime.now()))
Example #26
0
    def upload_images(self):
        """Upload self.image (base64-encoded JPEG) to Cloud Storage.

        Refreshes the storage OAuth token if needed, then starts an async
        media upload into the images folder of the configured bucket.

        Returns:
            The in-flight urlfetch RPC (deadline 300s).
        """
        if self._storage_token is None:
            self._storage_token = self._refresh_token(self.storage_scope)

        # Bug fix: previously logged self.storage_scope while labelling it
        # "Token"; the sibling upload_input_html logs the actual token.
        # NOTE(review): logging a bearer token even at debug is risky.
        logger.debug("Token = {}".format(self._storage_token))

        rpc = create_rpc(deadline=300)  # TODO Change this to acceptable delay

        url = "https://www.googleapis.com/upload/storage/v1/b/{bucket}" \
              "/o?uploadType=media&name={name}".format(
                    bucket=self.storage_bucket,
                    name=self.storage_images + "/{}.jpeg".format(self.file_name)
                )

        make_fetch_call(rpc,
                        url,
                        method=POST,
                        headers={
                            "Content-Type":
                            "image/jpeg",
                            "Authorization":
                            self.token.format(access_token=self._storage_token)
                        },
                        payload=b64decode(self.image))

        return rpc
Example #27
0
  def make_async_request(self, url, token="", secret="", additional_params=None,
                         protected=False, method=urlfetch.GET, headers=None):
    """Make Request.

    Make an authenticated request to any OAuth protected resource.

    If protected is equal to True, the Authorization: OAuth header will be set.

    A urlfetch RPC object is returned (call get_result() for the response).

    Bug fix: headers previously defaulted to a shared mutable dict, so the
    Authorization header set by one protected call leaked into every later
    call that relied on the default.  None now means "no extra headers".
    """
    if headers is None:
      headers = {}

    payload = self.prepare_request(url, token, secret, additional_params,
                                   method)

    if method == urlfetch.GET:
      url = "%s?%s" % (url, payload)
      payload = None

    if protected:
      headers["Authorization"] = "OAuth"

    rpc = urlfetch.create_rpc(deadline=10.0)
    urlfetch.make_fetch_call(rpc, url, method=method, headers=headers,
                             payload=payload)
    return rpc
Example #28
0
    def upload_input_html(self):
        """Upload the captured HTML input (self.html_input) as JSON to the
        configured Cloud Storage bucket.

        Returns:
            The in-flight urlfetch RPC, or None when there is nothing to
            upload.
        """
        if self.html_input is None:
            logger.info("No input to HTML text found")
            return

        # Lazily refresh the storage OAuth token.
        if self._storage_token is None:
            self._storage_token = self._refresh_token(self.storage_scope)

        logger.debug("Token = {}".format(self._storage_token))
        rpc = create_rpc(deadline=300)  # TODO Change this to acceptable delay

        object_name = self.storage_html_input + "/{}.json".format(
            self.file_name)
        url = ("https://www.googleapis.com/upload/storage/v1/b/{bucket}"
               "/o?uploadType=media&name={name}").format(
                   bucket=self.storage_bucket, name=object_name)

        logger.info("make_fetch_all started...")
        request_headers = {
            "Content-Type":
            "application/json",
            "Authorization":
            self.token.format(access_token=self._storage_token)
        }
        make_fetch_call(rpc, url, method=POST, headers=request_headers,
                        payload=dumps(self.html_input))
        return rpc
Example #29
0
    def _fetch_scores(self):
        """Fetch the scoreboard feed and normalize its sloppy JSON.

        Empty slots (",,") in the feed are filled with zeros so downstream
        parsing succeeds.

        Returns:
            The normalized response text on success, a status-code dict on
            a non-OK response, or an error dict on DownloadError.
        """
        response = {}
        result = {}
        rpc = urlfetch.create_rpc()

        urlfetch.make_fetch_call(rpc, URL_SCOREBOARD)
        try:
            response = rpc.get_result()
            if response.status_code == HTTP_OK:
                counter = 100
                length = 0
                text = response.content

                # Repeat until stable: each pass turns ",," into ",0,",
                # which can itself create new ",," pairs.
                while length != len(text):
                    length = len(text)
                    text = text.replace(',,', ',0,')

                    # Prevent infinite loops
                    if counter != 0:
                        counter -= 1
                    else:
                        break

                result = text
            else:
                result = {'status_code': response.status_code}
        except urlfetch.DownloadError:
            result = {'Error': 'An unexpected error occurred.'}

        return result
Example #30
0
def send_message(user, message, async_rpc=None):
  """Deliver an IM to a user through the IMified bot API.

  Args:
    user: entity exposing a ``userkey`` attribute (IMified user key).
    message: the text to send.
    async_rpc: optional pre-created urlfetch RPC; when supplied, the
      request is issued asynchronously and nothing is returned.

  Returns:
    True on HTTP 200, False otherwise (synchronous path only).
  """
  url = "https://www.imified.com/api/bot/"

  payload = urllib.urlencode({
      "botkey": config['imbot_key'],    # Your bot key goes here.
      "apimethod": "send",  # the API method to call.
      "userkey": user.userkey,  # User Key to lookup with getuser.
      "msg": message,  # the message
  })

  # HTTP basic auth header for the IMified account.
  credentials = '%s:%s' % (config['imbot_username'], config['imbot_password'])
  headers = {'AUTHORIZATION': 'Basic %s' % base64.encodestring(credentials)[:-1]}

  if async_rpc:
    urlfetch.make_fetch_call(async_rpc, url=url, payload=payload,
                             method=urlfetch.POST, headers=headers)
    return

  response = urlfetch.fetch(url=url, payload=payload,
                            method=urlfetch.POST, headers=headers)
  if response.status_code != 200:
    logging.error('There was an error sending IM. status code: %s' % response.status_code)
    return False
  # all good
  logging.debug('IM send response (for message "%s") content: %s' % (message, response.content))
  return True
Example #31
0
    def get(self):
        """Demonstrate an async urlfetch whose result is handled by a callback."""
        # The callback closure needs the RPC object itself, so attach it
        # after construction via create_callback().
        rpc = urlfetch.create_rpc()
        rpc.callback = create_callback(self, rpc)
        urlfetch.make_fetch_call(rpc, "http://ae-book.appspot.com/blog/atom.xml")

        # Simulate doing unrelated work while the fetch is in flight.
        time.sleep(2)

        # Block until the RPC finishes; this also fires the callback.
        rpc.wait()

        links_html = """
        <p>Try these:</p>
        <ul>
          <li><a href="/">a simple async call</a></li>
          <li><a href="/callbackobj">using a callback object</a></li>
          <li><a href="/callbackfunc">using a callback function</a></li>
        </ul>
        """
        self.response.write(links_html)
        self.response.write("<p>The time is: %s</p>" % str(datetime.datetime.now()))
Example #32
0
    def load(self, realm, frealm, toonlist, data, groupstats):
        """Asynchronously load Blizzard profile data for every toon.

        Args:
            realm: normalized slug of the group's home realm.
            frealm: friendly (display) name of the home realm.
            toonlist: iterable of toon entities with name/realm/status/role.
            data: output list; one result dict per toon is appended here and
                later filled in by the per-RPC callbacks.
            groupstats: stats accumulator handed through to each callback.
        """

        classes = ClassEntry.get_mapping()
        oauth_headers = get_oauth_headers()

        # Request all of the toon data from the blizzard API and determine the
        # group's ilvls, armor type counts and token type counts.  subs are not
        # included in the counts, since they're not really part of the main
        # group. The Blizzard API has a limit of 100 calls per second. Keep a
        # count and if we hit 100 calls, we'll wait a half second before
        # continuing. If someone has more than 100 toons in their list, they
        # should be slapped.
        toon_count = 0
        for toon in toonlist:
            toonname = toon.name
            toonrealm = toon.realm
            # Toons on the group's home realm reuse frealm; others need a
            # datastore lookup for their friendly realm name.
            if toonrealm == realm:
                toonfrealm = frealm
            else:
                toonfrealm = Realm.query_realm(toonrealm)

            # TODO: this object can probably be a class instead of another dict
            newdata = dict()
            data.append(newdata)

            # a realm is received in the json data from the API, but we need to
            # pass the normalized value to the next stages.  ignore this field
            # from the data.
            newdata['toonrealm'] = toonrealm
            newdata['toonfrealm'] = toonfrealm
            newdata['status'] = toon.status
            newdata['role'] = toon.role

            url = 'https://us.api.blizzard.com/profile/wow/character/%s/%s?namespace=profile-us&locale=en_US' % (
                toonrealm, urllib.quote(toonname.encode('utf-8').lower()))

            # create the rpc object for the fetch method.  the deadline
            # defaults to 5 seconds, but that seems to be too short for the
            # Blizzard API site sometimes.  setting it to 10 helps a little
            # but it makes page loads a little slower.
            rpc = urlfetch.create_rpc(10)
            rpc.callback = self.create_callback(rpc, toonname, newdata,
                                                groupstats, classes)
            urlfetch.make_fetch_call(rpc, url, headers=oauth_headers)
            # keep the RPC with its result dict so the wait loop below can
            # find it again.
            newdata['rpc'] = rpc

            toon_count = toon_count + 1
            if toon_count > 100:
                time.sleep(0.5)
                toon_count = 0

        # Now that all of the RPC calls have been created, loop through the data
        # dictionary one more time and wait for each fetch to be completed. Once
        # all of the waits finish, then we have all of the data from the
        # Blizzard API and can loop through all of it and build the page.
        start = time.time()
        for entry in data:
            entry['rpc'].wait()
        end = time.time()
        logging.info("Time spent retrieving data: %f seconds" % (end - start))
Example #33
0
def check_sites(site_configurations):
    """Fetch every configured site in parallel and record a status check.

    Args:
        site_configurations: iterable of site entities, each exposing a
            ``target`` URL and a datastore ``key()``.

    Side effects:
        Writes one SiteCheck entity per site asynchronously and waits for
        all the puts to complete.
    """
    # Query all the sites we've been given to check.
    request_rpc_futures = []
    for site in site_configurations:
        rpc = urlfetch.create_rpc()
        urlfetch.make_fetch_call(rpc, site.target)
        request_rpc_futures.append((site, rpc))

    # Process the results and write out the status update.
    # BUG FIX: this loop previously iterated site_configurations (plain
    # site entities) instead of the (site, rpc) pairs collected above, so
    # the unpacking failed and the issued fetches were never consumed.
    status_futures = []
    for site, rpc in request_rpc_futures:
        status = SiteCheck(parent=site.key(), status=SiteStatus.ONLINE)

        try:
            result = rpc.get_result()

            if result.status_code != 200:
                status.status = SiteStatus.ERROR

            # Any non-empty body overrides the status (original behavior).
            if result.content != "":
                status.status = SiteStatus.UNEXPECTED

        except urlfetch.DownloadError:
            status.status = SiteStatus.OFFLINE

        if not SiteStatus.available(status.status):
            pass  # TODO Send an alert if required

        status_futures.append(status.put_async())

    ndb.Future.wait_all(status_futures)
Example #34
0
def track(event, properties=None):
    """Asynchronously log an event to the mixpanel.com API via an RPC fetch.

    Args:
        event: the overall event/category to log this data under.
        properties: optional dict of key-value pairs describing the event
            (see http://mixpanel.com/api/ for further detail). A "token"
            entry is added when missing.

    Returns:
        The urlfetch RPC object for the in-flight request.

    Example:
        track("invite-friends",
              {"method": "email", "number-friends": "12", "ip": "123.123.123.123"})
    """
    if properties is None:  # identity check; also avoids a shared default dict
        properties = {}
    # NOTE(review): the leading apostrophe inside this token literal looks
    # accidental -- confirm against the real Mixpanel project token.
    token = "'0ea4f90f7b8157d6dec15b1b26c39b38"
    if "token" not in properties:
        properties["token"] = token

    params = {"event": event, "properties": properties}

    # Mixpanel expects the payload base64-encoded in the query string.
    data = base64.b64encode(json.dumps(params))
    request = "http://api.mixpanel.com/track/?data=" + data

    rpc = urlfetch.create_rpc()
    urlfetch.make_fetch_call(rpc, request)

    return rpc
Example #35
0
    def __init__(self):
        """Scrape the homepage article list and fan out detail-page fetches.

        Fetches self.url, parses the '#hp-articles' block, appends one
        structured-event dict per article to self.structuredEvents, then
        issues an async fetch (with a per-RPC callback) for each event's
        detail page and waits for them all to finish.
        """
        rpc = urlfetch.create_rpc(deadline=60)
        urlfetch.make_fetch_call(rpc, self.url)

        # One RPC per article detail page; all awaited at the end.
        rpcs = []

        try:
            result = rpc.get_result()
            if result.status_code == 200:
                content = EncodingHelper.getEncodedContent(result)
                soup = BeautifulSoup(content)

                # Each <a> under #hp-articles is one event teaser.
                events = soup.find(id='hp-articles').findChildren('a')

                for event in events:
                    structuredEvent = {}
                    structuredEvent['source'] = self.url
                    structuredEvent['url'] = self.url+event.get('href')
                    structuredEvent['title'] = event.findChild('h2').string
                    structuredEvent['img'] = event.findChild('img').get('src')
                    structuredEvent['place'] = event.findChild('div', attrs={"class": "hp-article-title"}).string
                    self.structuredEvents.append(structuredEvent)

                    innerRpc = urlfetch.create_rpc(deadline=60)
                    innerRpc.callback = self.create_callback(innerRpc)
                    urlfetch.make_fetch_call(innerRpc, structuredEvent['url'], follow_redirects=False)
                    rpcs.append(innerRpc)

        except urlfetch.DownloadError:
            # NOTE(review): self.response inside __init__ is unusual --
            # presumably provided by a handler base class; confirm it exists.
            self.response.write("chyba stahovani")

        # Block until every detail-page fetch completes (callbacks fire here).
        for irpc in rpcs:
            irpc.wait()
    def post(self):
        """Refresh all station entities by fetching their XML in parallel.

        Request params:
            update_url: base webservice URL; '<update_url>/<id>' is fetched.
            update_ids: dash-separated station ids to refresh.

        On success memcache 'stations' is refreshed and an OK page is
        written; a fetch timeout yields a 500.

        NOTE(review): the original body mixed tab and space indentation
        (a TabError under Python 3); it is normalized here with no logic
        changes, and dead commented-out code was removed.
        """
        def handle_result(rpc, id):
            # Route one finished fetch: update on 200, mail the admins on
            # 403, otherwise log enough context to debug the webservice.
            result = rpc.get_result()
            if result.status_code == 200:
                update_station(id, result.content)
            elif result.status_code == 403:
                logging.error('403 fetching station')
                mail.send_mail("bug@" + app_identity.get_application_id() + ".appspotmail.com",
                               to="*****@*****.**",
                               subject="Access denied",
                               body="Access denied for app " + app_identity.get_application_id())
            else:
                logging.error(str(result.status_code) + ' fetching station')
                logging.error('Unable to reach webservice '
                              + str(result.status_code)
                              + ' for content : '
                              + result.content
                              + ' for station '
                              + id)

        # Use a helper function to define the scope of the callback.
        def create_callback(rpc, id):
            return lambda: handle_result(rpc, id)

        def update_station(id, content):
            # Parse the XML payload and refresh the in-memory station entity.
            soup = BeautifulStoneSoup(content)
            parsed_station = soup.station
            to_update = stations[int(id)]
            to_update.availableBikes = int(parsed_station.available.string)
            to_update.freeSlots = int(parsed_station.free.string)
            to_update.payment = bool(int(parsed_station.ticket.string))

        url = self.request.get('update_url')
        update_ids = [id for id in self.request.get('update_ids').split('-')]
        stations = get_stations()
        # Should not happen as we check before launching update
        if stations is None:
            return
        rpcs = []
        try:
            for id in update_ids:
                rpc = urlfetch.create_rpc(deadline=10)
                rpc.callback = create_callback(rpc, id)
                urlfetch.make_fetch_call(rpc, url + '/' + id)
                rpcs.append(rpc)
            for rpc in rpcs:
                rpc.wait()
            memcache.set('stations', stations)
        except urlfetch.DownloadError:
            logging.error('Time out fetching stations')
            self.error(500)
            return
        self.response.out.write("<html><body><p>OK</p></body></html>")
Example #37
0
 def post(self):
     """Turn a posted Twitter list URL into og/json/rss URLs and render.

     BUG FIX: ``list_url[-1]`` raised IndexError when the 'url' parameter
     was empty; ``endswith`` handles the empty string safely.
     """
     from google.appengine.api import urlfetch
     from google.appengine.api import memcache
     rpc = urlfetch.create_rpc()

     list_url = cgi.escape(self.request.get('url'))
     list_url = list_url.strip()
     # Safe on empty input, unlike indexing with [-1].
     if list_url.endswith("/"):
         list_url = list_url[:-1]
     if list_url.find("lists/") >= 0:
         list_url = list_url.replace("lists/", "")
     split = list_url.split("/")
     # NOTE(review): URLs with fewer than two path segments still raise
     # IndexError at split[-2] -- consider validating the input upstream.
     og = "http://twitter.com/%s/%s" % (split[-2], split[-1])
     json_url = "http://twitter.com/%s/lists/%s/statuses.json" % (split[-2], split[-1])
     rss_url = "http://twiterlist2rss.appspot.com/%s/lists/%s/statuses.rss" % (split[-2], split[-1])
     # Warm the RSS endpoint asynchronously; the response is not consumed.
     urlfetch.make_fetch_call(rpc, rss_url)
     template_values = {
         "posted": True,
         "og": og,
         "json_url": json_url,
         "rss_url": rss_url
     }
     path = os.path.join(os.path.dirname(__file__), 'templates', 'index.html')
     self.response.out.write(template.render(path, template_values))
Example #38
0
    def _fetch_scores(self):
        """Fetch the raw scoreboard feed and pad empty CSV fields.

        Returns:
            On HTTP 200: the response body with every empty field (',,')
            rewritten to ',0,'.
            On any other status: {'status_code': <code>}.
            On a download error: {'Error': <message>}.
        """
        result = {}
        rpc = urlfetch.create_rpc()

        urlfetch.make_fetch_call(rpc, URL_SCOREBOARD)
        try:
            response = rpc.get_result()
            if response.status_code == HTTP_OK:
                text = response.content
                # Overlapping ',,' runs need several passes; each pass that
                # substitutes grows the text. Bounded at 101 passes (same
                # cap as the old counter) to prevent pathological loops.
                for _ in range(101):
                    padded = text.replace(',,', ',0,')
                    if len(padded) == len(text):  # stable: nothing replaced
                        break
                    text = padded
                result = text
            else:
                result = {'status_code': response.status_code}
        except urlfetch.DownloadError:
            result = {'Error': 'An unexpected error occurred.'}

        return result
Example #39
0
    def get_mdp_data_async(self, access_token, endpoint):
        """Start an async fetch of a MYDIGIPASS OAuth endpoint; returns the RPC."""
        target = u'https://www.mydigipass.com/oauth/%s' % endpoint
        logging.debug('Creating RPC item for %s', target)

        bearer_header = {'Authorization': u'Bearer %s' % access_token}
        rpc = urlfetch.create_rpc(deadline=20)
        urlfetch.make_fetch_call(rpc, target, headers=bearer_header)
        return rpc
Example #40
0
def track_event(event, properties=None):
    """Asynchronously log an event to the mixpanel.com API via an RPC fetch.

    Args:
        event: the overall event/category to log this data under.
        properties: optional dict of key-value pairs describing the event
            (see http://mixpanel.com/api/ for further detail). The
            module-level ``token`` is added under "token" when missing.

    Returns:
        The urlfetch RPC object for the in-flight request.
    """
    if properties is None:  # identity check; never mutate a shared default
        properties = {}
    if 'token' not in properties:
        properties['token'] = token  # module-level Mixpanel token

    params = {"event": event, "properties": properties}

    logging.info(params)

    # Mixpanel expects the payload base64-encoded in the query string.
    data = base64.b64encode(json.dumps(params))
    request = "http://api.mixpanel.com/track/?data=" + data

    rpc = urlfetch.create_rpc()
    urlfetch.make_fetch_call(rpc, request)

    return rpc
Example #41
0
 def fetch_urls(cls, url_list):
     """Resolve final URLs for url_list using parallel HEAD requests.

     Args:
         url_list: iterable of URLs to probe.

     Returns:
         dict mapping each requested URL to its final (post-redirect)
         URL, or None when the fetch failed.
     """
     rpcs = []
     for url in url_list:
         rpc = urlfetch.create_rpc(deadline=5.0)
         urlfetch.make_fetch_call(rpc, url, method=urlfetch.HEAD)
         rpcs.append(rpc)

     result = {}
     while rpcs:  # harvest results in completion order
         rpc = apiproxy_stub_map.UserRPC.wait_any(rpcs)
         rpcs.remove(rpc)
         request_url = rpc.request.url()
         try:
             final_url = rpc.get_result().final_url
         except AttributeError:
             # No redirect occurred, so there is no final_url attribute.
             final_url = request_url
         except (DownloadError, InvalidURLError,
                 apiproxy_errors.DeadlineExceededError):
             final_url = None
         except UnicodeDecodeError:  # funky url with very evil characters
             final_url = unicode(rpc.get_result().final_url, 'utf-8')

         result[request_url] = final_url

     logging.info('Returning results: %s' % result)
     return result
Example #42
0
def check_failed_repos():
    """Check repository name consistency between Carto and GitHub.

    Returns:
        list of (orgname, reponame) pairs (or raw repo entries with a
        missing component) that have no matching GitHub repository.

    NOTE: the original called rpc.get_result() inside the creation loop,
    serializing every fetch and defeating the async API; the fetches are
    now issued first and collected afterwards. It also never returned the
    accumulated failures.
    """
    failed_repos = []
    all_repos = get_all_repos()
    headers = {
        'User-Agent': 'VertNet',  # Authenticate as VertNet
        'Accept': 'application/vnd.github.v3+json',
        'Authorization': 'token {0}'.format(apikey('gh'))
    }

    urlfetch.set_default_fetch_deadline(URLFETCH_DEADLINE)

    # Phase 1: issue all GitHub lookups asynchronously.
    pending = []  # (orgname, reponame, url, rpc)
    for repo in all_repos:
        orgname = repo[0]
        reponame = repo[1]

        if orgname is None or reponame is None:
            failed_repos.append(repo)
            continue

        url = '/'.join([ghb_url, 'repos', orgname, reponame])
        rpc = urlfetch.create_rpc()
        urlfetch.make_fetch_call(rpc, url, headers=headers)
        pending.append((orgname, reponame, url, rpc))

    # Phase 2: collect results; a response without a 'name' field means
    # the repository does not exist on GitHub.
    for orgname, reponame, url, rpc in pending:
        result = rpc.get_result()
        content = json.loads(result.content)
        if 'name' not in content:
            logging.info('GitHub repository %s not found' % url)
            failed_repos.append((orgname, reponame))

    return failed_repos
Example #43
0
def RequestName(key):
    """Asynchronously POST `key` to the SL Key2Name endpoint.

    Returns:
        The urlfetch RPC so the caller can wait()/get_result().
        Previously the RPC was dropped, so the in-flight request could be
        abandoned before completing.
    """
    URL = "%s/Key2Name/" % AlarmUrl()
    logging.info('Key request send for %s to URL %s' % (key, URL))
    rpc = urlfetch.create_rpc()
    message = key
    # send the request to an SL object
    urlfetch.make_fetch_call(rpc, URL, payload=message, method="POST")
    return rpc
    def _track_call(self, api_action, api_details=''):
        """Fire-and-forget a Google Analytics event for one API call."""
        # Creates asynchronous call
        rpc = urlfetch.create_rpc()

        analytics_id = Sitevar.get_by_id("google_analytics.id")
        if analytics_id is None:
            logging.warning("Missing sitevar: google_analytics.id. Can't track API usage.")
            return

        tracking_id = analytics_id.contents['GOOGLE_ANALYTICS_ID']
        payload = urllib.urlencode({
            'v': 1,
            'tid': tracking_id,
            'cid': '1',
            't': 'event',
            'ec': 'api',
            'ea': api_action,
            'el': api_details,
            'ev': 1,
            'ni': 1
        })

        # Sets up the call
        urlfetch.make_fetch_call(
            rpc=rpc,
            url='http://www.google-analytics.com/collect',
            payload=payload,
            method=urlfetch.POST,
            headers={'Content-Type': 'application/x-www-form-urlencoded'})
    def send(self):
        """ Attempt to send the WebhookRequest.

        Returns:
            NotificationResponse: content/status_code
        """
        message_json = self.json_string()
        # Request headers, including the payload checksum.
        headers = {
            'Content-Type': 'application/json',
            'X-TBA-Version': '{}'.format(WEBHOOK_VERSION),
            'X-TBA-Checksum': self._generate_webhook_checksum(message_json),
        }

        from google.appengine.api import urlfetch
        from tbans.models.requests.notifications.notification_response import NotificationResponse

        rpc = urlfetch.create_rpc()
        try:
            urlfetch.make_fetch_call(rpc, self.url, payload=message_json,
                                     method=urlfetch.POST, headers=headers)
        except Exception as e:
            # https://cloud.google.com/appengine/docs/standard/python/refdocs/google.appengine.api.urlfetch
            return NotificationResponse(500, str(e))
        return NotificationResponse(200, None)
 def requestDateInfo(self):
     """Start an async timezonedb lookup for self.timezone; returns the RPC."""
     endpoint = (
         'http://api.timezonedb.com/v2/get-time-zone?key=%s&format=json&by=zone&zone=%s'
         % (Crenditals.TIMEZONEDB_API_KEY, self.timezone))
     rpc = urlfetch.create_rpc()
     urlfetch.make_fetch_call(rpc, endpoint)
     return rpc
Example #47
0
 def __fetch(self, url):
     """Asynchronously GET base_url + url, returning a Promise that
     resolves with the urlfetch result."""
     rpc = urlfetch.create_rpc()
     promise = Promise(rpc.wait)
     rpc.callback = lambda: promise.resolve(rpc.get_result())
     urlfetch.make_fetch_call(rpc, self.base_url + url)
     return promise
Example #48
0
    def _pages(self, soup, html, page1_url):
        """Collect the viewer image URL from every page of a chapter.

        `soup`/`html` are the already-fetched page 1; the remaining pages
        are fetched in parallel from the <select> pagination options.
        """
        base_url = '/'.join(page1_url.split('/')[:-1]) + '/%s.html'

        # Each <option> of the pagination <select> points at one page.
        options = soup.find('select', class_='m').find_all('option')
        page_urls = [base_url % opt['value'] for opt in options]

        # Page 1 is already in `html` (save an http request), and the
        # last option is a comments page - drop both.
        page_urls = page_urls[1:-1]

        pending = [urlfetch.create_rpc() for _ in page_urls]
        for rpc, page_url in zip(pending, page_urls):
            urlfetch.make_fetch_call(rpc, page_url)

        pages_htmls = [html]
        for rpc in pending:
            result = rpc.get_result()
            if result.status_code != 200:
                # TODO: should retry instead of panicking
                raise PyError(result.content)
            pages_htmls.append(result.content)

        image_urls = []
        for page_html in pages_htmls:
            page_soup = BeautifulSoup(page_html)
            image_urls.append(
                page_soup.find('div', id='viewer').find('img').attrs['src'])
        return image_urls
Example #49
0
    def get(self):
        """Fan out three async fetches whose results are written by callbacks."""
        # [START urlfetch-rpc-callback]
        def handle_result(rpc):
            result = rpc.get_result()
            self.response.write(result.content)
            logging.info("Handling RPC in callback: result {}".format(result))

        targets = ('http://www.google.com',
                   'http://www.github.com',
                   'http://www.travis-ci.org')

        pending = []
        for target in targets:
            rpc = urlfetch.create_rpc()
            rpc.callback = functools.partial(handle_result, rpc)
            urlfetch.make_fetch_call(rpc, target)
            pending.append(rpc)

        # ... do other things ...

        # Finish all RPCs, and let callbacks process the results.
        for rpc in pending:
            rpc.wait()

        logging.info("Done waiting for RPCs")
Example #50
0
def get_image(xmin, ymin, xsize, ysize, width, height):
    """Render a map tile via a renderer backend (NDB tasklet).

    Retries up to three times on backend failure, then asserts a 200
    response and returns (content, operation cost) via tasklets.Return.

    BUG FIX: if every attempt raised a deadline error, `response` was
    never bound and the final assert crashed with NameError instead of a
    meaningful message; it is now initialized and checked explicitly.
    """
    params = urllib.urlencode({
        'xmin': xmin,
        'ymin': ymin,
        'xsize': xsize,
        'ysize': ysize,
        'width': width,
        'height': height,
    })
    response = None
    for i in range(3):  # Retries
        # Pin the tile to a backend instance so its cache stays warm.
        instance_id = hash(params) % NUM_BACKENDS
        url = urlparse.urljoin(
            backends.get_url('renderer', instance=instance_id),
            '/backend/render_tile?%s' % params)
        rpc = urlfetch.create_rpc(deadline=10.0)
        urlfetch.make_fetch_call(rpc, url)
        try:
            response = yield rpc
            if response.status_code not in (500, 0):
                break
        except (apiproxy_errors.DeadlineExceededError,
                urlfetch.DeadlineExceededError):
            pass
        logging.warn("Backend failed to render tile; retrying")
        # Wait a little before retrying
        time.sleep(0.2)
    status = response.status_code if response is not None else None
    assert status == 200, "Expected status 200, got %s" % status
    raise tasklets.Return(response.content,
                          int(response.headers['X-Operation-Cost']))
Example #51
0
  def make_async_request(self, url, token="", secret="", additional_params=None,
                         protected=False, method=urlfetch.GET, headers=None):
    """Make Request.

    Make an authenticated request to any OAuth protected resource.

    If protected is equal to True, the Authorization: OAuth header will be set.

    A urlfetch response object is returned.

    BUG FIX: `headers` previously defaulted to a shared mutable dict; the
    "Authorization" entry written below then leaked into every later call.
    It now defaults to None and a fresh dict is created per call.
    """
    if headers is None:
      headers = {}

    payload = self.prepare_request(url, token, secret, additional_params,
                                   method)

    if method == urlfetch.GET:
      url = "%s?%s" % (url, payload)
      payload = None

    if protected:
      headers["Authorization"] = "OAuth"

    rpc = urlfetch.create_rpc(deadline=10.0)
    urlfetch.make_fetch_call(rpc, url, method=method, headers=headers,
                             payload=payload)
    return rpc
Example #52
0
    def get_access_token(self, verifier=None):
        """
        After user has authorized the request token, get access token
        with user supplied verifier.
        """
        try:
            url = self._get_oauth_url('access_token')
            rpc = urlfetch.create_rpc(deadline=10)

            # Build and sign the OAuth request with the user's verifier.
            oauth_request = oauth.OAuthRequest.from_consumer_and_token(
                self._consumer,
                token=self.request_token, http_url=url,
                verifier=str(verifier))
            oauth_request.sign_request(self._sigmethod, self._consumer,
                                       self.request_token)

            # Fire the request and parse the token out of the response.
            urlfetch.make_fetch_call(rpc,
                                     url,
                                     headers=oauth_request.to_header())
            response = rpc.get_result()
            self.access_token = oauth.OAuthToken.from_string(response.content)
            return self.access_token
        except Exception as e:
            raise TweepError(e)
Example #53
0
 def set_up(self):
     """Start the asynchronous, certificate-validated POST for this request."""
     rpc = urlfetch.create_rpc(deadline=40)
     urlfetch.make_fetch_call(rpc,
                              self._url,
                              self._post_data,
                              method=urlfetch.POST,
                              validate_certificate=True)
     self._rpc = rpc
Example #54
0
    def get_xauth_access_token(self, username, password):
        """
        Get an access token from an username and password combination.
        In order to get this working you need to create an app at
        http://twitter.com/apps, after that send a mail to [email protected]
        and request activation of xAuth for it.
        """
        try:
            # xAuth token exchange must go over HTTPS.
            url = self._get_oauth_url('access_token', secure=True)
            rpc = urlfetch.create_rpc(deadline=10)

            xauth_params = {
                'x_auth_mode': 'client_auth',
                'x_auth_username': username,
                'x_auth_password': password,
            }
            oauth_request = oauth.OAuthRequest.from_consumer_and_token(
                oauth_consumer=self._consumer,
                http_method='POST', http_url=url,
                parameters=xauth_params)
            oauth_request.sign_request(self._sigmethod, self._consumer, None)

            urlfetch.make_fetch_call(rpc,
                                     url,
                                     headers=oauth_request.to_header())
            response = rpc.get_result()
            self.access_token = oauth.OAuthToken.from_string(response.content)
            return self.access_token
        except Exception as e:
            raise TweepError(e)
Example #55
0
    def make_async_request(self, url, token="", secret="", additional_params=None,
                   protected=False, method=urlfetch.GET):
        """Make Request.

        Make an authenticated request to any OAuth protected resource.

        If protected is equal to True, the Authorization: OAuth header will be set.

        A urlfetch response object is returned.

        BUG FIX: when the URL carried a query string and additional_params
        was left as None, additional_params.update(...) raised
        AttributeError; a fresh dict is now substituted in that case.
        """

        (scm, netloc, path, params, query, _) = urlparse.urlparse(url)
        url = None
        query_params = None
        if query:
            query_params = dict(parse_qsl(query))
            if additional_params is None:
                additional_params = {}
            additional_params.update(query_params)
        url = urlparse.urlunparse(('https', netloc, path, params, '', ''))

        payload = self.prepare_request(url, token, secret, additional_params, method)

        if method == urlfetch.GET:
            url = "%s?%s" % (url, payload)
            payload = None
        headers = {"Authorization": "OAuth"} if protected else {}

        rpc = urlfetch.create_rpc(deadline=10.0)
        urlfetch.make_fetch_call(rpc, url, method=method, headers=headers, payload=payload)
        return rpc
Example #56
0
def trackGARequests(path, remoteAddr, referer=''):
    logging.debug('trackRSSRequests: calling GA GIF service')

    var_utmac = AppConfig.googleAnalyticsKey  # enter the new urchin code
    var_utmhn = AppConfig.appDomain  # enter your domain
    var_utmn = str(random.randint(1000000000, 9999999999))  # random request number
    var_cookie = str(random.randint(10000000, 99999999))  # random cookie number
    var_random = str(random.randint(1000000000, 2147483647))  # number under 2147483647
    var_today = str(int(time.time()))  # today
    var_referer = referer  # referer url
    var_uservar = '-'  # enter your own user defined variable
    var_utmp = '%s/%s' % (path, remoteAddr)  # this example adds a fake page request to the (fake) rss directory (the viewer IP to check for absolute unique RSS readers)
    #build URL
    urchinUrl = MutableString()
    urchinUrl = 'http://www.google-analytics.com/__utm.gif?utmwv=1&utmn=' + var_utmn
    urchinUrl += '&utmsr=-&utmsc=-&utmul=-&utmje=0&utmfl=-&utmdt=-&utmhn='
    urchinUrl += var_utmhn + '&utmr=' + var_referer + '&utmp=' + var_utmp
    urchinUrl += '&utmac=' + var_utmac + '&utmcc=__utma%3D' + var_cookie
    urchinUrl += '.' + var_random + '.' + var_today + '.' + var_today + '.'
    urchinUrl += var_today + '.2%3B%2B__utmb%3D' + var_cookie
    urchinUrl += '%3B%2B__utmc%3D' + var_cookie + '%3B%2B__utmz%3D' + var_cookie
    urchinUrl += '.' + var_today
    urchinUrl += '.2.2.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B__utmv%3D'
    urchinUrl += var_cookie + '.' + var_uservar + '%3B'

    #async request to GA's GIF service
    rpcGA = None
    try:
        rpcGA = urlfetch.create_rpc()
        urlfetch.make_fetch_call(rpcGA, urchinUrl)
    except Exception, exT:
        logging.error('trackRSSRequests: Errors calling GA GIF service : %s' % exT)