# Imports used by the functions in this section. `request` presumably comes
# from Flask (it is read as request.args below) and `gmail` matches the PyPI
# "gmail" package; both of those attributions are assumptions.
import hashlib
import json
import os
import pickle
import smtplib
from datetime import datetime
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

import gmail
import requests
from flask import request

def get_name_for_user_job(user, job):
    """Return the stored name of `job` for this user, or "ERR" if not found."""
    keys = conn.keys(get_redis_key(user.id, "*"))
    for key in keys:
        # Values are stored as "<job_id>:<role>"; compare the job id part.
        job_id = conn.get(key).split(":")[0]
        if job_id == str(job.id):
            # Keys look like "<prefix>:<user_id>:<name>"; return the name part.
            return key.split(":")[2]
    return "ERR"

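# `conn` and `get_redis_key` are used throughout this section but never
# defined in it. A minimal sketch under assumed conventions: the connection
# parameters and the "job" prefix are guesses; only the
# "<prefix>:<user_id>:<name>" key shape and the "<job_id>:<role>" value shape
# are implied by get_name_for_user_job and get_job_info.
import redis

# Note: redis-py returns bytes under Python 3; the split()/pickle handling in
# this section was written for Python 2, where str and bytes coincide.
conn = redis.StrictRedis(host="localhost", port=6379)

def get_redis_key(user_id, name):
    # Hypothetical key layout: "job:<user_id>:<name>".
    return "job:{}:{}".format(user_id, name)
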
def overrideLatestTweetOUT(latesttweetid):
    # ?sudo=1 forces the override.
    sudo = request.args.get('sudo', default=0, type=int)
    response = ''
    latesttweetid = int(latesttweetid)
    # Get the current value from Redis.
    try:
        LATEST_TWEET_REDIS = int(conn.get('LATEST_TWEET_OUT_PROCESSED'))
        print("Reading from Redis - LATEST_TWEET_OUT_PROCESSED: %d" % LATEST_TWEET_REDIS)
        if latesttweetid > LATEST_TWEET_REDIS or sudo == 1:
            # Store the newer value in Redis.
            conn.set('LATEST_TWEET_OUT_PROCESSED', latesttweetid)
            response += "OK -- set %d as latest tweet OUT on Redis" % latesttweetid
        else:
            response += "Equally or more recent tweet OUT found on Redis: %d" % LATEST_TWEET_REDIS
    # If the latest tweet couldn't be retrieved, set it anyway.
    except Exception as err:
        conn.set('LATEST_TWEET_OUT_PROCESSED', latesttweetid)
        response += "Couldn't retrieve latest tweet OUT from Redis -- %s" % str(err)
        response += "<hr>"
        response += "Set %d as latest tweet OUT on Redis" % latesttweetid
    return response

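# overrideLatestTweetOUT reads Flask's `request.args`, so it is presumably
# mounted as a route. A hypothetical wiring, left commented out because the
# `app` object and the route path are assumptions, not part of the original:
#
# from flask import Flask
# app = Flask(__name__)
#
# @app.route('/latest_tweet_out/<latesttweetid>')
# def latest_tweet_out(latesttweetid):
#     # GET /latest_tweet_out/1234?sudo=1 force-sets the id even if Redis
#     # already holds a more recent one.
#     return overrideLatestTweetOUT(latesttweetid)
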
def scan_db():
    # %l (space-padded 12-hour clock) is a glibc extension to strftime.
    the_time = datetime.now().strftime("%A, %d %b %Y %l:%M %p")
    text = "<h1>{}</h1>".format(the_time)
    for key in conn.keys("scd:*"):
        line = conn.get(key)
        text += "<p>{} | {}</p>".format(key, line)
    return text

def getLatestTweetInFromRedis():
    # Get the value from Redis.
    try:
        LATEST_TWEET_IN_PROCESSED = int(conn.get('LATEST_TWEET_IN_PROCESSED'))
        print("Reading from Redis - LATEST_TWEET_IN_PROCESSED: %d" % LATEST_TWEET_IN_PROCESSED)
        return LATEST_TWEET_IN_PROCESSED
    except Exception as err:
        print("Unable to retrieve latest tweet IN from Redis -- %s" % str(err))
        return None

def get_job_info(user, name):
    """Return (Job, role) for the stored "<job_id>:<role>" value, or False."""
    value = conn.get(get_redis_key(user.id, name))
    if value is None:
        return False
    parts = value.split(":")
    role = JOB_WORKER if parts[1] == "2" else JOB_OWNER
    return (Job.objects.get(id=parts[0]), role)

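# JOB_OWNER and JOB_WORKER are role constants referenced above but defined
# elsewhere. Hedged placeholders: only "2 means worker" is implied by the
# comparison in get_job_info; the owner value is a guess.
JOB_OWNER = 1
JOB_WORKER = 2
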
def get_results():
    try:
        # Build one {score, team} entry per "scd:*" key.
        teams = []
        for key in conn.keys("scd:*"):
            entry = {'score': conn.get(key), 'team': key}
            teams.append(entry)
        # Convert to JSON and return it.
        jsonStr = json.dumps(teams)
        return jsonStr
    except Exception as e:
        print(str(e))

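# scan_db and get_results read "scd:*" keys that nothing in this section
# writes. A hypothetical seeding snippet showing the assumed shape (team name
# in the key, score as the value); the names and scores are made up:

def seed_scores_example():
    conn.set("scd:red_team", 42)
    conn.set("scd:blue_team", 37)
    # get_results() would then return something like:
    # '[{"score": "42", "team": "scd:red_team"}, {"score": "37", "team": "scd:blue_team"}]'
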
def check_email():
    """Check the mail account and lint new mail."""
    server = smtplib.SMTP("smtp.gmail.com", 587)
    server.ehlo()
    server.starttls()
    server.ehlo()
    server.login(user, password)
    g = gmail.login(user, password)

    # Check for unread messages.
    unread = g.inbox().mail(unread=True)

    # Submit a job to lint each email sent to [email protected]. Record the
    # resulting job_ids somewhere (in Redis, I suppose), keyed by a hash of the
    # email.
    for u in unread:
        u.fetch()
        signature = (u.fr.decode('utf-8') +
                     u.subject.decode('utf-8') +
                     u.body.decode('utf-8'))
        hash = hashlib.sha256(signature.encode('utf-8')).hexdigest()
        if user_to in u.to or user_to in u.headers.get('Cc', []):
            job_id = conn.get(hash)
            if not job_id:
                # If the email hasn't been sent for processing, send it.
                r = requests.post(api_url, data={"text": u.body})
                conn.set(hash, r.json()["job_id"])
                print("Email {} sent for processing.".format(hash))
            else:
                # Otherwise, check whether the results are ready, and if so,
                # reply with them.
                r = requests.get(api_url, params={"job_id": job_id})
                if r.json()["status"] == "success":
                    reply = quoted(u.body)
                    errors = r.json()['data']['errors']
                    reply += "\r\n\r\n".join([json.dumps(e) for e in errors])
                    msg = MIMEMultipart()
                    msg["From"] = "{} <{}>".format(name, user)
                    msg["To"] = u.fr
                    msg["Subject"] = "Re: " + u.subject
                    if u.headers.get('Message-ID'):
                        msg.add_header("In-Reply-To", u.headers['Message-ID'])
                        msg.add_header("References", u.headers['Message-ID'])
                    body = reply + "\r\n\r\n--\r\n" + tagline + "\r\n" + url
                    msg.attach(MIMEText(body, "plain"))
                    text = msg.as_string()
                    server.sendmail(user, u.fr, text)
                    # Mark the email as read.
                    u.read()
                    u.archive()
                    print("Email {} has been replied to.".format(hash))

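# `quoted` is called in check_email but not defined in this section. A
# minimal sketch, assuming it produces a conventional "> "-prefixed quote of
# the original body followed by a blank line:

def quoted(body):
    # Prefix each line of the original message with "> ", email-quote style.
    return "\r\n".join("> " + line for line in body.splitlines()) + "\r\n\r\n"
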
def getFolloweeData(user_id, client_id, access_token):
    # Try the cache first.
    cache_try = conn.get('ig_cache:' + user_id)
    if cache_try is not None:
        results = pickle.loads(cache_try)
        results['from_cache'] = True
        return results

    results = {}
    # Call the IG servers.
    payload = {
        'client_id': client_id,
        'access_token': access_token,
    }
    r = requests.get('https://api.instagram.com/v1/users/' + user_id + '/media/recent/',
                     params=payload)

    # Extract recent media from the response. If the status is not 200, this
    # user is probably visible to followers only.
    recent_media = []
    if r.status_code == 200:
        for item in r.json()['data']:
            recent_media.append({'time': float(item['created_time']),
                                 'user': item['user']['username'],
                                 'likes': int(item['likes']['count'])})
        results['last_photo_time'] = recent_media[0]['time']

        # Calculate photos per day.
        days = {}
        most_days_back = (datetime.now().date() -
                          datetime.fromtimestamp(recent_media[-1]['time']).date())
        most_days_back = most_days_back.days
        for day in range(0, most_days_back):
            days[day] = 0
        for photo in recent_media:
            photo_day = datetime.now().date() - datetime.fromtimestamp(photo['time']).date()
            photo_day = photo_day.days
            if photo_day < most_days_back:
                days[photo_day] += 1
        num_of_photos = 0
        for day in range(0, most_days_back):
            num_of_photos += days[day]
        results['photos_per_day'] = format(num_of_photos / float(most_days_back), '.2f')

        # Calculate likes per photo.
        likes = 0
        for photo in recent_media:
            likes += photo['likes']
        results['likes_per_photo'] = format(likes / float(len(recent_media)), '.2f')

        # Additional metadata.
        results['username'] = recent_media[0]['user']
        results['user_id'] = user_id

    conn.set('ig_cache:' + user_id, pickle.dumps(results))
    conn.expire('ig_cache:' + user_id, os.environ['CACHE_LIFE'])
    return results

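# A hypothetical call to getFolloweeData -- the ids and token below are
# placeholders, and CACHE_LIFE must be set in the environment (it is passed
# to conn.expire as a TTL in seconds):
#
# os.environ.setdefault('CACHE_LIFE', '3600')  # cache results for an hour
# stats = getFolloweeData('123456', IG_CLIENT_ID, IG_ACCESS_TOKEN)
# print(stats['photos_per_day'], stats['likes_per_photo'])
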
def get_last_work_request(user):
    return conn.get(get_last_work_request_key(user.id))

def user_has_last_work_request(user):
    return conn.get(get_last_work_request_key(user.id)) is not None

def user_has_work(user):
    return conn.get(get_has_work_key(user.id)) is not None

def user_has_name(user, name):
    return conn.get(get_redis_key(user.id, name)) is not None

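# get_last_work_request_key and get_has_work_key are key builders used above
# but not defined in this section. Minimal sketches under an assumed naming
# scheme -- the literal prefixes are guesses; only the per-user keying is
# implied by the call sites:

def get_last_work_request_key(user_id):
    # Hypothetical key: "last_work_request:<user_id>".
    return "last_work_request:{}".format(user_id)

def get_has_work_key(user_id):
    # Hypothetical key: "has_work:<user_id>".
    return "has_work:{}".format(user_id)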