def emailJob():
    print("**** SENDING EMAIL *****")
    keywordDicts = dict()
    for kwdict in keywordCounters:
        keywordDicts[kwdict] = keywordCounters[kwdict].getKeywordCount()
    message = emailer.getMessageFromKeywords(keywordDicts)
    emailer.sendEmail(config.email['to'], message)
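# A minimal sketch of the globals emailJob assumes: keywordCounters maps a keyword to a
# counter object exposing getKeywordCount(), and config.email['to'] names the recipient.
# KeywordCounter below is hypothetical and exists only to make that call chain concrete;
# it is not the project's real counter class.
class KeywordCounter:
    def __init__(self):
        self.count = 0

    def increment(self):
        self.count += 1

    def getKeywordCount(self):
        return self.count

keywordCounters = {'python': KeywordCounter(), 'golang': KeywordCounter()}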
def send_emails(student_dict):
    '''
    INPUT: Dictionary of student data
    OUTPUT: None (emails sent)
    '''
    for student_email in student_dict.keys():
        sendEmail(student_dict[student_email][0], student_email, student_dict[student_email][2])
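# A hedged usage sketch for send_emails. The record layout is an assumption read off the
# indexing above: the dict key is the recipient address, and items 0 and 2 of each record
# are forwarded to sendEmail (here treated as a display name and a message body). The
# addresses and values are purely illustrative.
example_students = {
    'ada@example.com': ['Ada Lovelace', 'cohort-1', 'Your grade report is ready.'],
    'alan@example.com': ['Alan Turing', 'cohort-2', 'Your grade report is ready.'],
}
send_emails(example_students)  # one sendEmail call per student; returns None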
def openTradeTrailing(instrument):
    ## Current price chunk
    rawCandle = requests.get(accountDetails.getBaseURL() + "/instruments/" + instrument + "/candles",
                             headers=accountDetails.getHeaders(),
                             params=createQuery("M5", 3))
    parsedCandle = json.loads(json.dumps(rawCandle.json()))
    ## currentPrice1 is the live price -- only using completed candles ##
    currentPrice1 = float(parsedCandle["candles"][2]["mid"]["c"])
    bestStopFloat = float(backtestStream.getBestStop(candles, RSIs))
    takeProfitPrice = "{0:.4f}".format(bestStopFloat + currentPrice1)
    mktOrderParamsTrailing["instrument"] = instrument
    mktOrderParamsTrailing["order"]["takeProfitOnFill"]["price"] = takeProfitPrice
    result = requests.post(accountDetails.getBaseURL() + "/accounts/" + accountDetails.accountID + "/orders",
                           headers=accountDetails.getHeaders(),
                           json=mktOrderParamsTrailing)
    print(result)
    print("Trade entered with the following parameters:")
    ## get entry price ##
    resultBody = json.loads(json.dumps(result.json()))
    print(resultBody)
    entryPrice = float(resultBody["orderFillTransaction"]["price"])
    print("Entry price: " + str(entryPrice))
    print("Take Profit: " + str(float(resultBody["orderCreateTransaction"]["takeProfitOnFill"]["price"]) - entryPrice))
    print("Stop Loss: " + str(float(resultBody["orderCreateTransaction"]["stopLossOnFill"]["distance"])))
    ## get tradeId ##
    tradeId = resultBody["orderFillTransaction"]["id"]
    tradeIsOpen = True
    currentTime = time.time()  # time.clock() was deprecated and removed in Python 3.8
    exitTime = currentTime + 60
    emailer.sendEmail(instrument, entryPrice)
    while tradeIsOpen:
        time.sleep(400)
        tradeIsOpenA = checkForOpen(instrument)
        if not tradeIsOpenA[0]:
            tradeIsOpen = False
        else:
            print("Trade is open. PL: " + str(tradeIsOpenA[1]))
    print(result)
def exit_handler():
    error_msg = 'Server has interrupted harvesting process. \n'
    error_msg = error_msg + 'Process terminated at: ' + time.strftime('%c') + '\n'
    error_msg = error_msg + 'Location: ' + settings.location + ', Quadrant: ' + settings.region_quadrant + '\n'
    error_msg = error_msg + 'VM: ' + settings.vm_ip + '\n'
    error_msg = error_msg + 'Process Id: ' + str(proc_id) + '\n'
    error_msg = error_msg + 'Total tweets received: %d' % listnerTweet.tweet_count
    writeLog(error_msg)
    emailer.sendEmail(message=str(error_msg))
    writeLog("--------------------------------------")
def exit_handler():
    error_msg = 'Server has interrupted harvesting process. \n'
    error_msg = error_msg + 'Process terminated at: ' + time.strftime('%c') + '\n'
    error_msg = error_msg + 'Location: ' + settings.location + ', Quadrant: ' + settings.region_quadrant + '\n'
    error_msg = error_msg + 'VM: ' + settings.vm_ip + '\n'
    error_msg = error_msg + 'Process Id: ' + str(proc_id) + '\n'
    error_msg = error_msg + 'Total tweets received: %d' % listnerTweet.tweet_count
    writeLog(error_msg)
    try:
        emailer.sendEmail(message=str(error_msg))
    except Exception as err:
        sys.stdout.write("Error sending mail; proceeding with harvesting without notification")
def on_data(self, data):
    # Load JSON from the Twitter API
    tweet = json.loads(data)
    try:
        tweet["_id"] = str(tweet['id'])  # Get the ID
        doc = db.save(tweet)  # Save tweet into CouchDB
        # print("Obtained Tweet ID: " + str(tweet['id']))
        self.tweet_count += 1
        if (self.tweet_count % 10000 == 0):
            # Notify when 10000 new tweets have been stored on the database
            msg_update = '10K new tweets on database: ' + settings.database
            emailer.sendEmail(message=str(msg_update))
    except:
        writeLog("Twitter API error")
    return True
def on_data(self, data):
    # Load JSON from the Twitter API
    tweet = json.loads(data)
    try:
        tweet["_id"] = str(tweet['id'])  # Get the ID
        lang = tweet['lang']
        name = tweet['user']['name']
        # Gender analysis
        name_list = name.split()
        name = name_list[0]
        gender = Genderizer.detect(firstName=name)
        tweet['user']['gender'] = gender
        # Sentiment analysis
        analysed_result = classifier.doSentimentAnalysis(str(tweet['text']))
        if str(lang) == 'en':
            # only analyse English texts
            if not hasAlreadySentiment(tweet):
                tweet = updateSentimentDoc(tweet,
                                           analysed_result["sentiment"],
                                           analysed_result["polarity"],
                                           analysed_result["subjectivity"])
                self.processed_tweets += 1
            else:
                self.ignored_tweets += 1
        else:
            # otherwise ignore it!
            self.ignored_tweets += 1
        # Update place coordinates to work with GeoJSON
        tweet = updatePlaceDoc(tweet)
        doc = db.save(tweet)  # Save tweet into CouchDB
        # print("Obtained Tweet ID: " + str(tweet['id']))
        self.tweet_count += 1
        if (self.tweet_count % 10000 == 0):
            # Notify when 10000 new tweets have been stored on the database
            msg_update = '10K new tweets on database: ' + settings.database
            emailer.sendEmail(message=str(msg_update))
    except:
        writeLog("Twitter API error")
    return True
def open_trade(instrument, params):
    try:
        result = requests.post(accountDetails.getBaseURL() + "/accounts/" + accountDetails.accountID + "/orders",
                               headers=accountDetails.getHeaders(),
                               json=params)
        resultBody = json.loads(json.dumps(result.json()))
        print(resultBody)
        entryPrice = float(resultBody["orderFillTransaction"]["price"])
        emailer.sendEmail(instrument, entryPrice)
        print("Entry price: " + str(entryPrice))
    except:
        print("Unable to enter trade...")
        traceback.print_exc()
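# A hedged sketch of the params dict open_trade posts to the orders endpoint. The nesting
# simply mirrors the keys touched in openTradeTrailing above (a top-level "instrument" plus
# an "order" with takeProfitOnFill and stopLossOnFill); the type, units, prices, and distance
# are placeholder values, not the author's settings, and the broker API may require more fields.
exampleOrderParams = {
    "instrument": "EUR_USD",
    "order": {
        "type": "MARKET",
        "instrument": "EUR_USD",
        "units": "100",
        "takeProfitOnFill": {"price": "1.1050"},
        "stopLossOnFill": {"distance": "0.0030"},
    },
}
# open_trade("EUR_USD", exampleOrderParams)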
# i, j index the depth/velocity-reduction grid cell set by the enclosing loops (not shown in this fragment)
for ind, arr in enumerate(arrivals_ScS):
    ScS_arr = arr.time
    print(ScS_arr)
    dt = ScS_arr - ScS_ref
    dt_arr[i][j] = dt
    print(i, j)
del arrivals_ScS

dir = '../Data/PeakData/'
with open(dir + name + '_' + str(int(distance)) + '_2d_dt.csv', 'w') as csvfile:
    writer = csv.writer(csvfile)
    writer.writerows(dt_arr)

message = """Your code has finished computing the traveltime grid for:
Depth Range: 5 - %s km (Steps: %s km)
Velocity reduction: %s - %s %% (Steps: %s %%)
Distance: %s degrees
""" % (str(height), str(heightStep), str(maxdv * 100), str(mindv * 100), str(dvStep * 100), str(distance))
emailer.sendEmail(email, message)
plt.show()
def runScrapper():
    sendEmail(neweggParser('gpu'))
    print("scraping")
    # end of the enclosing results-page loop (loop header not shown in this fragment)
    itemsOnPage += 120
    if itemsOnPage > totItemsAsInt:
        processSoup(soup)
        break

# NOTE: you still have to deal with parameter passing from the command line
counter = 0
items.sort(key=lambda Item: float(Item.price))
for item in items:
    if float(item.price) <= float(maxPrice) and query.lower() in item.title.lower():
        if item.city is None:
            print(item.title + ",$" + item.price + "," + "None", file=newItemFile)
        else:
            print(item.title + ",$" + item.price + "," + item.city, file=newItemFile)
        counter += 1
        if counter >= int(numProducts):
            break
newItemFile.close()

if not filecmp.cmp('newFile.txt', 'oldFile.txt', shallow=False):
    with open('newFile.txt', 'r') as file:
        message = file.read()
    sendEmail(message)
    with open('newFile.txt', 'r') as firstfile, open('oldFile.txt', 'w') as secondfile:
        for line in firstfile:
            secondfile.write(line)
atexit.register(exit_handler)
signal(SIGTERM, exit_handler)

# API authentication
auth = OAuthHandler(settings.consumer_key, settings.consumer_secret)
auth.set_access_token(settings.access_token, settings.access_secret)
twitterStream = Stream(auth, listnerTweet)

server = couchdb.Server(settings.server)
server.resource.credentials = (settings.admin_user, settings.admin_pass)
try:
    # Create the DB if it does not exist
    db = server.create(settings.database)
    writeLog("Database: " + settings.database + " doesn't exist. Proceeding with creation...")
except:
    # Just use the existing DB
    db = server[settings.database]

notice_msg = 'Server has initiated harvesting process \n'
notice_msg = notice_msg + 'Process initiated at: ' + time.strftime('%c') + '\n'
notice_msg = notice_msg + 'Location: ' + settings.location + ', Quadrant: ' + settings.region_quadrant + '\n'
notice_msg = notice_msg + 'Process Id: ' + str(proc_id) + '\n'
notice_msg = notice_msg + 'Server: ' + settings.server + '\n'
notice_msg = notice_msg + 'Database: ' + settings.database + '\n'
notice_msg = notice_msg + 'VM: ' + settings.vm_ip + '\n'
writeLog(notice_msg)
emailer.sendEmail(message=str(notice_msg))

# Streams do not terminate unless the connection is closed, which blocks the thread.
# Tweepy offers a convenient async parameter on filter so the stream can run on a new thread.
twitterStream.filter(locations=settings.locations)
def emailBack():
    emailer.sendEmail(True)
def emailThere():
    emailer.sendEmail(False)
class listener(StreamListener):
    sys.stdout.write("listener launched...")
    # Statistics
    tweet_count = 0
    processed_tweets = 0
    ignored_tweets = 0

    def on_data(self, data):
        sys.stdout.write("loading data from one tweet")
        writeLog("loading data from one tweet")
        # Load JSON from the Twitter API
        tweet = json.loads(data)
        try:
            tweet["_id"] = str(tweet['id'])  # Get the ID
            lang = tweet['lang']
            name = tweet['user']['name']
            # Gender analysis
            name_list = name.split()
            name = name_list[0]
            sys.stdout.write("Gender analysis success")
            writeLog("Gender analysis success")
            gender = Genderizer.detect(firstName=name)
            tweet['user']['gender'] = gender
            # Sentiment analysis
            # analysed_result = classifier.doSentimentAnalysis(str(tweet['text']))
            sys.stdout.write("sentiment analysis success")
            writeLog("sentiment analysis success")
            if str(lang) == 'es':
                # only analyse Spanish texts
                if not hasAlreadySentiment(tweet):
                    # tweet = updateSentimentDoc(tweet, analysed_result["sentiment"], analysed_result["polarity"], analysed_result["subjectivity"])
                    self.processed_tweets += 1
                else:
                    self.ignored_tweets += 1
            else:
                # otherwise ignore it!
                self.ignored_tweets += 1
            # Update place coordinates to work with GeoJSON
            tweet = updatePlaceDoc(tweet)
            writeLog("place updated")
            tweet = updateCoordinate(tweet)
            writeLog("coordinates updated")
            # Update date fields for better reporting
            tweet = updateDateDay(tweet)
            writeLog("day updated")
            # Update sentiment
            try:
                tweet = updateSentiment(tweet)
                writeLog("sentiment updated")
            except Exception as err:
                writeLog("Error updating tweet sentiment")
            # Only keep tweets from Quito or Ecuador
            try:
                # writeLog(str(tweet))
                if settings.locations != []:  # check if the node is harvesting by coordinates
                    writeLog("Obtained by location Tweet")
                    if (str(tweet["place"]["name"]) == 'Quito'
                            or str(tweet["place"]["name"]) == 'Ecuador'
                            or str(tweet["place"]["name"]) == 'Pichincha'):
                        doc = db.save(tweet)  # Save tweet into CouchDB
                        print("Obtained by location Tweet ID: " + str(tweet['id']))
                        writeLog("Obtained by location Tweet from: " + str(tweet["place"]["name"]) + " id: " + str(tweet['id']) + " text: " + str(tweet['text']))
                    else:
                        sys.stdout.write("Tweet discarded due to wrong place mention, place: " + str(tweet["place"]["name"]) + " Discarded Tweet ID: " + str(tweet['id']))
                else:
                    # save if harvesting by followers or by tracks
                    writeLog("Obtained by follower or tracks")
                    doc = db.save(tweet)  # Save tweet into CouchDB
                    print("Obtained by follower or tracks Tweet ID: " + str(tweet['id']))
                    writeLog("Obtained by follower or tracks Tweet: " + str(tweet['id']))
            except Exception as err:
                writeLog("Error: " + str(err) + " Tweet already stored: " + str(tweet['id']))
                # writeLog(str(err) + str(tweet["place"]) + "Error determining the tweet origin, city: " + str(tweet["place"]["name"]))
            self.tweet_count += 1
            if (self.tweet_count % 10000 == 0):
                # Notify when 10000 new tweets have been stored on the database
                msg_update = '10K new tweets on database: ' + settings.database
                try:
                    emailer.sendEmail(message=str(msg_update))
                except Exception as err:
                    sys.stdout.write("Error sending mail; proceeding with harvesting without notification")
        except Exception as err:
            writeLog("Twitter API error")
        return True