def auth_flickr(request):
    """Django view driving the Flickr web-auth handshake.

    Three paths:
      * GET with a ``frob`` param (Flickr's callback): exchange the frob for
        an auth token via ``flickr.auth.getToken`` and render the result.
      * POST (user submitted the authorize form): redirect the browser to
        Flickr's auth page with a signed read-permission request.
      * plain GET: render the template with empty credentials.

    Returns an HttpResponse, except on a failed token exchange where it
    returns False (pre-existing contract, kept for truthiness-testing callers).
    """
    from agro.sources import utils
    import hashlib  # the old `md5` module is deprecated; hashlib.md5 yields the same digest

    api, secret = 'e22dd4a81125531e047036ed1ab2a9e7', '72a484d250375bdf'
    token = ''
    user_name, user_id = '', ''

    frob = request.GET.get('frob', '')
    if frob:
        # Flickr signing rule: md5(secret + concatenated sorted "namevalue" pairs).
        api_sig = hashlib.md5('%sapi_key%sfrob%smethodflickr.auth.getToken' % (secret, api, frob)).hexdigest()
        params = urllib.urlencode({'api_key': api, 'frob': frob, 'method': 'flickr.auth.getToken', 'api_sig': api_sig})
        res = utils.get_remote_data("http://api.flickr.com/services/rest/?" + params)
        if res.get("stat", "") == "fail":
            log.error("flickr retrieve failed.")
            log.error("%s" % res.get("stat"))
            return False
        # res is an ElementTree-like node; its first child is the <auth> element.
        auth_res = res.getchildren()[0]
        token = auth_res.find('token').text
        user = auth_res.find('user')
        user_name = user.get('username')
        user_id = user.get('nsid')
    elif request.method == 'POST':
        # Kick off the handshake: send the user to Flickr with a signed request.
        perms = 'read'
        api_sig = hashlib.md5('%sapi_key%sperms%s' % (secret, api, perms)).hexdigest()
        params = urllib.urlencode({'api_key': api, 'perms': perms, 'api_sig': api_sig})
        return HttpResponseRedirect('http://flickr.com/services/auth/?%s' % params)

    return render_to_response('flickr_auth.html',
                              {'api': api, 'secret': secret, 'user_name': user_name,
                               'user_id': user_id, 'token': token,},
                              context_instance=RequestContext(request))
def retrieve(force, **args):
    # Pull recent Last.fm scrobbles for one account.
    # args carries 'account' (username) and 'api_key'.
    # NOTE(review): this block appears truncated in the visible source —
    # song_resp is fetched but never processed here.
    username = args['account']
    api_key,secret = args['api_key']  # presumably a (key, secret) pair — TODO confirm at call site
    # user.getrecenttracks, capped at 1000 entries per request.
    url = "http://ws.audioscrobbler.com/2.0/?method=user.getrecenttracks&user=%s&api_key=%s&limit=%s" % (username, api_key, 1000)
    song_resp = utils.get_remote_data(url)
    # Default watermark: the epoch, i.e. treat everything as new.
    last_update = datetime.datetime.fromtimestamp(0)
    if force:
        log.info("Forcing update of all available songs.")
    else:
        try:
            # Timestamp of the newest stored Song for this user, if any.
            last_update = Song.objects.filter(owner_user=username)[0].timestamp
        except Exception, e:
            log.debug("%s", e)
def exe_method(self, method, **kwargs):
    """Invoke a Flickr REST method and return the decoded JSON response.

    ``method`` is the short method name; it is prefixed with ``self.method``
    to form the full Flickr method string. Remaining keyword arguments are
    passed through as request parameters. When ``self.signed`` is set, the
    request is signed (adding ``auth_token``) via ``self.encode_and_sign``.

    Returns the parsed JSON response, or False if Flickr reports failure.
    """
    kwargs['method'] = '%s.%s' % (self.method, method)
    kwargs['api_key'] = self.api_key
    kwargs['format'] = self.format
    kwargs['nojsoncallback'] = self.nojsoncallback
    url = "http://api.flickr.com/services/rest/?"
    # (removed a no-op loop that reassigned every kwargs item to itself)
    if self.signed:
        kwargs['auth_token'] = self.token
        params = self.encode_and_sign(**kwargs)
        res = utils.get_remote_data(url + params, rformat='json')
    else:
        res = utils.get_remote_data(url + urllib.urlencode(kwargs), rformat='json')
    if res.get("stat", "") == "fail":
        log.error("flickr retrieve failed.")
        log.error("%s" % res.get("stat"))
        return False
    return res
def reverse_geocode(lat, lng):
    """Resolve (lat, lng) to a street address via the Geonames service.

    Returns the 5-tuple (address, zip_code, city, state, country), or five
    False values when the remote lookup fails.

    Geonames response
    -----------------
    {'address': {'distance': '0.03', 'countryCode': 'US', 'placename': 'Lawrence',
                 'lat': '38.946397849397954', 'street': 'W 21st St',
                 'streetNumber': '1446', 'postalcode': '66046',
                 'lng': '-95.25217028671905', 'adminName2': 'Douglas',
                 'adminCode1': 'KS', 'adminCode2': '045', 'adminName1': 'Kansas'}}
    -----------------
    """
    kwargs = {'lat': lat, 'lng': lng}
    res = utils.get_remote_data(NEARBY_ADDRESS_GEONAMES_URL + urllib.urlencode(kwargs), rformat='json')
    if not res:
        log.error("geonames failed.")
        return False, False, False, False, False
    # Hoist the repeated 'address' lookup.
    addr = res['address']
    address = "%s %s" % (addr['streetNumber'], addr['street'])
    zip_code = addr['postalcode']  # renamed: 'zip' shadowed the builtin
    city = addr['placename']
    state = addr['adminCode1']
    country = addr['countryCode']
    return address, zip_code, city, state, country
# Fragment of a feed-retrieval routine (enclosing def not visible in this view).
if rformat == 'atom':
    # Atom timestamps need the iso8601 parser.
    dateparse = iso8601.parse_date
if args['processors']:
    processors = args['processors']
# Default watermark: the epoch, i.e. process every entry.
last_update = datetime.datetime.fromtimestamp(0)
if force:
    log.info("Forcing update of all entries available.")
else:
    try:
        # Timestamp of the newest stored entry for this user.
        last_update = MODEL.objects.filter(owner_user=username).order_by('-timestamp')[0].timestamp
    except Exception, e:
        log.debug('%s', e)
e = utils.get_remote_data(url, rformat=rformat)
if e:
    for entry in e['entries']:
        dt = dateparse(entry['published'])
        if rformat == 'atom':
            # these are time-zone aware, where everything else is time-zone naive,
            # to compare, we must strip the TZ
            dt = datetime.datetime(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
        if dt > last_update:
            log.info("working with entry => %s" % entry['title'])
            # NOTE(review): the get_or_create call continues past the visible
            # source — this block is truncated here.
            model_entry, created = MODEL.objects.get_or_create(
                timestamp = dt,
                title = entry['title'],
                source_type = source_type,
# Fragment of the Twitter retrieval routine (enclosing def not visible here;
# the bare `return` below shows this runs inside a function).
# NOTE(review): 'username' arrives as a (username, password) pair and is unpacked.
username, password = username
url = "http://twitter.com/statuses/user_timeline/%s.json" % username
# Default watermark: id 0, i.e. treat every tweet as new.
last_id = 0
if force:
    log.info("Forcing update of all tweets available.")
else:
    try:
        # Highest tweet id already stored for this user.
        last_id = Tweet.objects.filter(owner_user=username).order_by('-tweet_id')[0].tweet_id
    except Exception, e:
        log.debug('%s', e)
log.debug("Last id processed: %s", last_id)
# Authenticated fetch only when a password was supplied.
if not password:
    tweets = utils.get_remote_data(url, rformat="json", username=username)
else:
    tweets = utils.get_remote_data(url, rformat="json", username=username, password=password)
if not tweets:
    log.warning('no tweets returned, twitter possibly overloaded.')
    return
for t in tweets:
    if t['id'] > last_id:
        log.info("Working with %s.", t['id'])
        tweet_text = smart_unicode(t['text'])
        owner_user = smart_unicode(t['user']['screen_name'])
        # Permalink for this status; loop body appears to continue past the
        # visible source — truncated here.
        url = "http://twitter.com/%s/statuses/%s" % (owner_user, t['id'])
# Fragment of the del.icio.us bookmark retrieval routine (def not visible here).
rformat = 'json'
# Default watermark: the epoch, i.e. fetch everything.
last_update = datetime.datetime.fromtimestamp(0)
if force:
    if password:
        # With credentials, the authenticated full-dump endpoint is used (RSS format).
        url = "https://api.del.icio.us/v1/posts/all"
        rformat = "rss"
    log.info("Forcing update of all bookmarks available.")
else:
    try:
        # Timestamp of the newest stored Bookmark for this user.
        last_update = Bookmark.objects.filter(owner_user=username).order_by('-timestamp')[0].timestamp
    except Exception, e:
        log.debug('%s', e)
if force and password:
    marks = utils.get_remote_data(url, rformat=rformat, username=username, password=password)
else:
    marks = utils.get_remote_data(url, rformat=rformat)
if marks:
    for mark in marks:
        if password and force:
            # Full RSS dump: every entry is processed, no watermark check.
            _handle_rss_bookmark(mark, username)
            continue
        dt = utils.parsedate(mark['dt'])
        if dt > last_update:
            _handle_bookmark(mark, dt, username)
        else:
            # Assumes the feed is newest-first, so the first stale entry
            # ends the run — TODO confirm ordering guarantee.
            log.warning("No more bookmarks, stopping...")
            break