def main():
    # Console entry point for the steganography tool: option 1 hides a text
    # file's message inside an image (optionally emailing the result); option
    # 2 extracts a hidden message. Python 2 (raw_input / print statement).
    # The trailing backslashes inside the prompts are line continuations,
    # not literal characters in the runtime strings.
    user_input = int(raw_input("Welcome \
to steganography.\nEnter 1 for encoding \
an image or 2 for decoding an image.\n"))
    if user_input == 1:
        # Encoder limit: the message length must fit in a single byte.
        print "Message length must have a length less than 255."
        encode_image = raw_input("Please enter the \
image you would like to encode your message \
into.\n")
        hidden_text = raw_input("Please enter the \
text file that contains the message you \
would like to send.\n")
        dest_image = raw_input("Please enter the \
filename you would like the encoded image \
to be called.\n")
        encode(encode_image, hidden_text, dest_image)
        # NOTE(review): entering a non-digit here raises ValueError despite the
        # "enter any other character" prompt — confirm intended behavior.
        user_email = int(raw_input("Would you like to send this in an email?\n\
If so, enter 1. If not, enter any other character.\n"))
        if user_email == 1:
            sendEmail(dest_image);
    elif user_input == 2:
        decode_image = raw_input("Please enter the \
image you would like to decode.\n")
        decode(decode_image)
    else:
        print "You have entered an invalid option."
def writeSuggestions(self):
    """Append energy-management suggestions to suggestions.txt.

    Compares the two most recent (cloudcover, energy) forecast entries and
    warns about very low supply (also emailing an alert and entering disaster
    mode), sharply increasing cloud cover, or a large drop in forecast energy,
    suggesting that the heaviest current consumer be switched off.
    """
    with open('suggestions.txt', 'a+') as f:
        # Need at least two forecast points to compute trends.
        if len(self.cloudcover_energy) > 1:
            # Absolute low-supply alert: under 30 units expected.
            if self.cloudcover_energy[-1][1] < 30:
                f.write(
                    'HIGH ALERT!!! Very low power supply for the next 3 hours' + '\n')
                sendEmail.sendEmail(
                    'HIGH ALERT!!! Going into disaster mode... ')
                self.mode = 4  # disaster mode
            # Cloud cover dropping sharply (change beyond -0.3).
            if self.cloudcover_energy[-1][0] - self.cloudcover_energy[-2][0] < -0.3:
                # max() works on the list directly; the original's [:] copy was redundant.
                max_usage = max(self.usage_modes[-1])
                if max_usage > 20:
                    node = self.usage_modes[-1].index(max_usage)
                    f.write(
                        'Cloudy hours ahead! Maybe you could think about switching off '
                        + str(self.nodeToEquip[node + 1]) + '\n')
            # Forecast energy dropping by more than 20 units.
            if self.cloudcover_energy[-1][1] - self.cloudcover_energy[-2][1] < -20:
                max_usage = max(self.usage_modes[-1])
                if max_usage > 10:
                    node = self.usage_modes[-1].index(max_usage)
                    f.write(
                        'Expect a drop in power supply... Maybe you could think about switching off '
                        + str(self.nodeToEquip[node + 1]) + '\n')
        # NOTE: the explicit f.flush()/f.close() calls were removed — the
        # `with` block already flushes and closes the file on exit.
def handler(event, context):
    """Lambda entry point: route an incoming question to a matching expert.

    Extracts key phrases from the question, looks up an expert by skill,
    persists the updated question, emails the expert, and returns a summary
    dict with the expert, skills and question uid.
    """
    if not event:
        raise Exception("Json Error")

    jsonObject = jsonHelper.jsonBuilder(event)

    # Use Comprehend to pull the relevant skills out of the question text.
    keyPhrases = comprehend.findKeyPhrase(jsonObject['question'])
    if not keyPhrases:
        raise Exception("NoSkillFound")
    print('XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX', keyPhrases)

    # Find an expert whose skills match the extracted key phrases.
    expertToQuery = expert.getBySkill(keyPhrases)
    print('ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ',
          expertToQuery)

    # Fold the skills/expert back into the payload and store the question.
    jsonObject = jsonHelper.jsonUpdate(jsonObject, keyPhrases, expertToQuery)
    question.createNewQuestion(jsonObject)

    # Notify the expert with the question and a link to the form.
    sendEmail.sendEmail(expertToQuery, jsonObject)

    return {
        'expert': expertToQuery,
        'skills': keyPhrases,
        'uid': jsonObject['uid'],
    }
def main():
    # Build one combined report email from the individual sub-reports and
    # send it. Python 2. `appendReport`, the *Report callables, `datestr`,
    # `TESTMODE` and `sendEmail` come from module scope.
    subject = ''
    report = ''
    # Each call runs one report generator and folds its output into the
    # running (subject, report) pair.
    subject, report = appendReport(subject, report, 'Site Readiness report', siteReadinessReport)
    subject, report = appendReport(subject, report, 'CSCS storage report', newcscs.newcscs_report)
    subject, report = appendReport(subject, report, 'T3 Storage report', t3StorageReport)
    subject, report = appendReport(subject, report, 'Storage element file operations report', testSEOperationsReport)
    subject = 'Report | ' + subject
    subject += ' ({0})'.format(datestr)
    if TESTMODE:
        # Dry run: echo the content and flag the email as a test send.
        print '\nPrint of subject:\n'
        print subject
        print '\nPrint of report:\n'
        print report
        print '\nSubmitting test email:\n'
        sendEmail(subject, report, test=True)
    else:
        sendEmail(
            subject,
            report,
        )
def sendTheDamnEmail(f):
    """
    Extract the web-parameter values and send an email to the receiver...
    The client expects 'success' if it worked out and 'error' if not.
    @param f {Object} The cgi FieldStorage object.
    """
    # Python 2 CGI handler. fromEmail, password, smtp, port, logPath and ip
    # are presumably module-level configuration — TODO confirm at module top.
    subject = f["subject"].value
    toEmails = f["toEmail"].value
    msg = f["msg"].value
    #try:
    #mimeMsg = MIMEText(msg, "plain", "utf-8")
    #mimeMsg['Subject'] = subject
    #mimeMsg['From'] = fromEmail
    #mimeMsg['To'] = toEmails
    # Build a multipart message with UTF-8 encoded headers.
    mimeMsg = MIMEMultipart('alternative')
    mimeMsg['Subject'] = Header(subject, 'UTF-8').encode()
    mimeMsg['To'] = Header(toEmails, 'UTF-8').encode()
    mimeMsg['From'] = Header(fromEmail, 'UTF-8').encode()
    part1 = MIMEText(msg, 'plain', "utf-8")
    #part2 = MIMEText(msg, 'html') # If you want to send a fancy HTML email, use this one also
    mimeMsg.attach(part1)
    sendEmail.sendEmail(fromEmail, password, toEmails,\
                        smtp, port=port, msg=mimeMsg)
    # Log the post unless logging is disabled (configured as the string "null").
    if logPath!="null":
        logger = logEmail.EmailLogger(logPath)
        stored = logger.storePost(ip, msg, toEmails)
        print "stored"
    # The client treats this stdout token as the success flag.
    print "success"
def main():
    # Build the "Top 30 advertisers cash" report and email it as an
    # attachment. (Comments translated from Chinese; SQL and email strings
    # are runtime data and preserved verbatim.)
    # Query 1: advertiser basic info (username, advertiser, credit score, region).
    sql1 = "SELECT 用户名, 广告主, 信誉成长值, 区域 FROM basicInfo"
    # Query 2: quarter-to-date cash totals between ST_DAT and ED_DAT.
    sql2 = "SELECT * FROM getCashSUM('%s', '%s')" % (ST_DAT, ED_DAT)
    # Query 3: cash spend over the last three weeks (21 days).
    sql3 = '''SELECT * FROM getThrWeekCash(%s)''' % dat(21)
    # Run all three queries against the same URL endpoint.
    basicInfo, qtd, data = (getData(sql1, URL), getData(sql2, URL), getData(sql3, URL))
    # Aggregate daily data into weeks.
    w = week(data)
    # Calculate P4P (pay-for-performance) figures in place.
    getP4P(w)
    # Merge the three datasets into one frame.
    df = merge(basicInfo, qtd, w)
    # Group by advertiser / secondary industry / region and sum metrics.
    df = df.groupby(['广告主', '二级行业', '区域']).sum()
    df.reset_index(inplace=True)
    # Rank advertisers.
    rank(df)
    # Period-over-period ("ring") ratio.
    ringRatio(df)
    # Format the output; fmt returns the written file's path.
    path = fmt(df)
    # Send the report by email (subject/body are Chinese, kept verbatim).
    from sendEmail import sendEmail
    sendEmail('Top 30广告主现金', ' 见附件。', [path])
def emailAdmin(ip, nrLoggedEmails, lastLog):
    """
    Email the administrator that someone might be trying to spam.

    @param ip              IP address that exceeded the send limit.
    @param nrLoggedEmails  Number of logged emails (unused here).
    @param lastLog         Last log entry; index 2 holds the recipient address.
    """
    # FIX: the original assigned `msg = lastLog[1]` and then immediately
    # overwrote it below — the dead store has been removed.
    toEmail = lastLog[2]
    # Warning body (Swedish), preserved verbatim.
    msg = "VARNING! En dator med IP-nummer %s har skickat fler än max-antal e-postmeddelanden under angivet tidsintervall.\n\n" % (ip)
    msg += "Utdrag från senaste loggade mejlet:\n\nIP: %s\nMottagare: %s\n\nFör mer info, kolla loggfilen på: %s\n\n"\
            % (ip, toEmail, logPath)
    msg += "----------------------------------------------------------\n"
    msg += "Detta mejl har genererats automatiskt av sMap's email-log\n"
    msg += "----------------------------------------------------------\n"
    # Add a log summary
    logger = logEmail.EmailLogger(logPath)
    logSummary = logger.getLogSummary(secondsBack=None, asText=True)
    msg = msg + "\nUtdrag från loggen (visar alla sända mejl uppdelat på IP-nummer):\n\n%s" % (logSummary)
    mimeMsg = MIMEText(msg, "plain", "utf-8")
    mimeMsg['Subject'] = "Varning från sMaps e-post"
    mimeMsg['From'] = fromEmail
    mimeMsg['To'] = adminEmails
    """for debuggning from localhost:
    sendEmail.sendEmail("*****@*****.**", "asdf1234", adminEmails,\
        port=port, msg=mimeMsg)"""
    sendEmail.sendEmail(fromEmail, password, adminEmails.split(","),\
                        smtp, port=port, msg=mimeMsg)
    # Store data that warning has been sent so that it won't
    # create what it tries to prevent - spamming!!
    blocked = logger.setBlock(ip)
def main(database):
    """For each stored (encrypted) address, fetch a random Wikipedia article
    and email its title, summary and URL to the address.

    BUG FIX: the retry loop previously ran only while BOTH title and summary
    were None — if the title was scraped but wikipedia.summary() raised, the
    loop exited and a None summary was sent. Both values are now reset on
    failure and the loop retries until both are present. The bare `except:`
    was also narrowed to `except Exception` so Ctrl-C still works.
    """
    import time
    cursor = database.cursor()
    cursor.execute('SELECT * FROM emails')
    for row in cursor:
        startTime = time.time()
        # The row prints as a tuple; pull the quoted payload back out.
        row = str(row).split('\'')[1]
        row = row.encode()
        email = emailStorage.Email(row)
        # Fernet key comes from the environment, stored in repr() form.
        email.decryption(
            Fernet((os.environ.get('Key').split('\'')[1]).encode()))
        adress = email.email
        title, summary = None, None
        while title is None or summary is None:
            try:
                request = requests.get(RANDOMLINK)
                content = BeautifulSoup(request.content, 'html.parser')
                title = content.find(id='firstHeading').text
                summary = wikipedia.summary(title)
            except Exception:
                # Retry on any scrape/summary failure; reset both so a
                # partial result from this attempt is not reused.
                title, summary = None, None
        url = 'https://en.wikipedia.org/wiki/%s' % title.replace(' ', '_')
        print('Email took: ', time.time() - startTime)
        sendEmail(title, summary, url, TEMPLATE, adress)
def runScraper():
    """Read scraper config/creds, query results per recipient, email any hits.

    Config and credential paths come from CPA_SCAPER_CONFIG_PATH /
    CPA_SCAPER_CREDS_PATH (note: 'SCAPER' spelling is the deployed env-var
    name, kept as-is).
    """
    queryTake = 5
    querySkip = 0
    configs = getConfigs.getConfigs(
        os.getenv('CPA_SCAPER_CONFIG_PATH', './cpa-scraper-config.yaml'))
    creds = getConfigs.getCreds(
        os.getenv('CPA_SCAPER_CREDS_PATH', './cpa-scraper-creds.yaml'))
    print(configs)
    print(creds)
    for user in configs["recipients"]:
        # Per-recipient query window and search circle.
        queryStartDate = "{}T00:00:00".format(user["startDate"])
        queryEndDate = "{}T00:00:00".format(user["endDate"])
        queryCoords = "{},{}".format(str(user["latCenter"]),
                                     str(user["lonCenter"]))
        maxDistance = int(user["maxDistance"])
        email = user["email"]
        testID = user["testID"]
        key = getKey()
        locations = queryForResults(queryStartDate, queryEndDate, queryCoords,
                                    queryTake, querySkip, maxDistance, key,
                                    testID)
        # Idiomatic emptiness test (was `if not len(locations) == 0`).
        if locations:
            print("Now sending email to " + email + "...")
            sendEmail.sendEmail(locations, email, creds["senderAddress"],
                                creds["password"])
def checkNewOffer():
    """Compare today's scraped offers with yesterday's and email each new one.

    FIX: the bare `except:` around the pickle load (which also swallowed
    KeyboardInterrupt/SystemExit) was narrowed to `except Exception`; a
    missing file on the first run remains the expected, handled case.
    """
    # Load yesterday's offers if the scraper has already been run.
    try:
        listOldOffers = pickle.load(open("listOldOffers.pickle", "rb"))
    except Exception:
        print(
            "listOldOffers couldn't be found. it's normal if you use the function for the first time"
        )
        listOldOffers = []
    # Load today's offers.
    listNewOffers = np.array(pickle.load(open("listNewOffers.pickle", "rb")))
    # isNew flags offers that were not already there yesterday.
    isNew = np.array(
        [newOffer not in listOldOffers for newOffer in listNewOffers])
    # If there is a new offer...
    if any(isNew):
        newOffers = listNewOffers[isNew]
        for newOffer in newOffers:
            # ...send an email with the name of the new offer.
            sendEmail(newOffer)
    # Update the old-offer snapshot for tomorrow's comparison.
    pickle.dump(listNewOffers, open("listOldOffers.pickle", "wb"))
def check():
    """Evaluate every configured price rule; email when one fires and print
    each result as JSON."""
    for entry in rules:
        stock = Stock(entry[0], entry[1])
        price_rule = PriceRule(entry[2], entry[3])
        result = stock.fitPrice(price_rule)
        if result['code'] == CODE_SEND_EMAIL:
            sendEmail(result['msg'], 'aaaaa')
        print(json.dumps(result, ensure_ascii=False))
def processEmail():
    # Email the current stock recommendations to users.
    db = Database()
    stocks = db.get_recommendation()
    emails = db.get_user_emails()
    # Keep only the ticker symbols from the recommendation rows.
    stocks = [d['Ticker'] for d in stocks]
    for email in emails:
        sendEmail(stocks, email)
        # NOTE(review): this `break` stops after the FIRST user — looks like a
        # leftover testing guard; confirm whether all users should be emailed.
        break
def sendEmailAlert(self, destEmailAddress, feedType, liveStatus, smtpUser, smtpPass):
    # Email an alert that a team's feed changed live status, then shell out to
    # /usr/bin/sendemail, retrying until the command reports success.
    messageSubject = self.teamName + ' ' + feedType + ' ' + liveStatus
    messageBody = self.videoName + str(self.bestCompression) + self.fileNameEnd
    # NOTE(review): this call passes no subject/body (trailing comma), and the
    # names used below (smtpUsername, smtpPassword, smtpString, fromAddress,
    # toAddress) are not parameters of this method — presumably module-level
    # globals; confirm they exist, otherwise this raises NameError.
    sendEmail.sendEmail(smtpUser, smtpPass, destEmailAddress, )
    # this is the literal command that is executed by the OS shell
    compiledSendEmailCommand = str.join(' ', ('/usr/bin/sendemail -xu', smtpUsername,
                                              '-xp', smtpPassword, '-s', smtpString,
                                              '-f', fromAddress, '-t', toAddress,
                                              '-u', messageSubject, '-m', messageBody))#, '>> /dev/null'))
    # this is the execution of that command
    exitStatus = int(1)
    # Retry the shell command until it exits with status 0.
    while not exitStatus == 0:
        exitStatus = os.system(compiledSendEmailCommand)
def trigger():
    # Webhook-style entry: when the shared-secret key matches, regenerate the
    # data and charts and email the notification list.
    # NOTE(review): `key` is not defined in this function — presumably set at
    # module level from the incoming request; confirm.
    if key == "1q5gdePANXci8enuiS4oHUJxcxC13d6bjMRSicakychE":
        print("run processData.py")
        # Importing these modules runs their top-level processing as a side
        # effect (data refresh and chart generation).
        import processData
        import charts
        sendEmail(
            "The Australian coronavirus tracking googledoc has been updated and new feeds and charts have been created",
            "Australian coronavirus tracking", [
                "*****@*****.**", "*****@*****.**",
                "*****@*****.**"
            ])
def waterIsLow(channel):
    # GPIO callback: the digital water-level sensor fired on `channel`.
    #Turn Blue indicator LED on
    relay.relayOnOff(blueLEDPin, "on")
    # Raw email payload: the "Subject:" line doubles as the SMTP subject
    # header, with the body following it.
    message = """
    Subject: Mushroom Water level is Low

    The Digital water level sensor was triggered,
    Refill the mushroom humidifier water.
    """
    #send email
    sendEmail.sendEmail(emailRecipients, message)
def scrape(page, courseCode):
    """Scan the timetable HTML for `courseCode` and email once if it is Open.

    The status for a course row lives in the row following the anchor row,
    hence the double next_sibling hop below.

    FIX: `(anchorTag) != None` replaced with the idiomatic `is not None`.
    """
    soup = BeautifulSoup(page.data, 'html.parser')
    regex = re.compile(".*{}.*".format(courseCode))
    rows = soup.find_all("tr")
    for row in rows:
        anchorTag = row.find("a", {"name": regex})
        if anchorTag is not None:
            # twice because has white text
            sibling = row.next_sibling.next_sibling
            if ("Open" in str(sibling)):
                print("{} is Open".format(courseCode))
                sendEmail(courseCode)
                break
def request(self, r):
    #attempts to handle V20 rate limit errors
    # Submit `r` through the OANDA v20 client. On a V20Error an alert email
    # is sent; if it is specifically a rate-limit error, wait one second and
    # retry once, otherwise re-raise. Any other exception propagates as-is.
    try:
        self.client.request(r)
    except V20Error as e:
        # NOTE(review): `recipient` is not a parameter — presumably a
        # module-level address; confirm.
        sendEmail('AutoTrader', recipient,
                  self.account_name + '\n' + 'V20Error ' + str(e))
        if 'Requests per second exceeded' in str(e):
            time.sleep(1)
            self.client.request(r)
        else:
            raise
    except:
        raise
def searchPath(startname, endname, op=2, maxL=3, m=10,
               receivers=['*****@*****.**', '*****@*****.**']):
    # Find and rank paths between two named graph nodes by shelling out to the
    # external `findPath` binary, then optionally email the ranked results.
    # Returns the json-formatted path list (output5).
    # NOTE(review): `receivers` is a mutable default argument — shared across
    # calls; harmless while it is only read, but worth confirming.
    file1 = "node2id.txt"
    sep1 = " "
    skip1 = 1
    idx1 = (1, 0)
    path2 = "Dicts/"
    file2 = "all.txt"
    sep2 = "-->"
    idx2 = (0, 1)
    skip2 = 0
    pathname = "pathfinder/"
    # Build name -> node and node -> id maps from the two dictionary files.
    node2id = getMap(path2 + file1, idx1, sep1, skip1)
    name2node = getMap(path2 + file2, idx2, sep2, skip2)
    #print "Ask for start node"
    (startid, startnode) = name2id(name2node, node2id, startname)
    #print "Ask for end node"
    (endid, endnode) = name2id(name2node, node2id, endname)
    # External path finder: mode, endpoint ids, maximum path length.
    commandline = "./findPath " + str(op) + " " + str(startid) + " " + str(
        endid) + " " + str(maxL)
    #print commandline
    output1 = os.popen(commandline)
    output2 = reformatePath(output1)
    output1.close()
    # Rank the raw paths, resolve ids back to names, and convert to js/json;
    # `m` limits how many paths go into the json output.
    mypath = rankPath(output2, sep=" ", ntopic=50)
    output3 = writeDict(mypath, reverse=False, sep='__', n=len(mypath))
    output4 = getPathName(output3, path2 + file2)
    output5 = path2js(output4, n=m)
    if len(receivers) > 0:
        title = "Ranked Paths from " + startname + " to " + endname
        body = "*************************\nRanked Paths:\n*************************\n"
        body += "\n".join(output4)
        body += "\n\n\n*************************\njson.txt:\n*************************\n"
        body += "\n".join(output5)
        sendEmail.sendEmail(receivers, title, body,
                            sender='*****@*****.**',
                            usr='******', password='******',
                            servername="smtp.gmail.com", cc=True)
    return output5
def forgotpassword():
    """Reset a user's password to a random string and email it to them.

    Renders a confirmation/error page in every case. FIX: the original bound
    the row-update call's return value to an unused local `admin`; the
    binding was removed (the update itself is kept).
    """
    form = ForgotPassword()
    user_name = form.user_name.data
    user = User.query.filter_by(username=user_name).first()
    if user is not None:
        # Random 6-12 character alphanumeric temporary password.
        # NOTE(review): choice/randint come from `random` — `secrets` would be
        # preferable for anything security-sensitive; flagged, not changed.
        allchar = string.ascii_letters + string.digits
        newpassword = "".join(choice(allchar) for x in range(randint(6, 12)))
        msg = "your new password is" + newpassword
        print(msg)
        print(user.email)
        hashedpass = generate_password_hash(newpassword)
        # Persist the new hash; .update() returns a row count we don't need.
        User.query.filter_by(username=user_name).update(
            dict(password=hashedpass))
        db.session.commit()
        message = "Your new Password is " + newpassword
        msg = messageBody(message=message)
        if sendEmail(user.email, "Password Reset", msg):
            return render_template('/error.html',
                                   message="Email sent to " + user.email)
        else:
            return render_template('/error.html',
                                   message="Error when email sent to " +
                                   user.email)
    else:
        return render_template('/error.html', message="User does not Exsist")
def main():
    """CLI entry point: convert a CSV file to xlsx and email the result.

    Required options: -f (csv file), -s (xlsx output directory),
    -e (recipient address). FIX: `== None` comparisons replaced with the
    idiomatic `is None` identity checks.
    """
    parser = optparse.OptionParser(
        'usage %prog -f <csv file name> -s <save xlsx file directory> -e <send e-mail address>'
    )
    parser.add_option('-f',
                      dest='fileName',
                      type='string',
                      help='specify csv file name')
    parser.add_option('-s',
                      dest='saveFile',
                      type='string',
                      help='specify save xlsx file directory')
    parser.add_option('-e',
                      dest='emailAddress',
                      type='string',
                      help='specify send e-mail address')
    (options, args) = parser.parse_args()
    fileName = options.fileName
    saveFile = options.saveFile
    emailAddress = options.emailAddress
    if fileName is None or saveFile is None or emailAddress is None:
        # Missing any required option: show usage and stop.
        print(parser.usage)
        exit(0)
    else:
        transCodingObject = transCoding(fileName, saveFile)
        transCodingObject.start()
        sendEmailObject = sendEmail(saveFile, emailAddress)
        sendEmailObject.start()
def rates(self, accountID, instruments, **params):
    """Stream prices until disconnected, dispatching PRICE ticks to on_success.

    A ChunkedEncodingError is reported by email and the outer while-loop
    reconnects; any other exception is reported and re-raised.
    FIX: removed the redundant `pass` that followed the sendEmail call in the
    ChunkedEncodingError handler.
    """
    self.connected = True
    params = params or {}
    while self.connected:
        try:
            response = self.client.request(self)
            for tick in response:
                # Stop promptly if something cleared the connected flag.
                if not self.connected:
                    break
                if tick['type'] == 'PRICE':
                    self.on_success(tick)
        except ChunkedEncodingError:
            # Transient streaming hiccup: alert, then let the loop reconnect.
            sendEmail('AutoTrader', recipient,
                      self.account_name + '\n' + 'ChunkedEncodingError')
        except:
            sendEmail('AutoTrader', recipient,
                      self.account_name + '\n' + 'An exception occured')
            raise
def makeEmail(args,directory, errorSamples, missingSamples):
    """Make an email message and send it using the sendEmail.py module in this
    directory. Expects a dictionary with samples as keys and a list of errors
    as values. Loop over samples and add the errors to the message then send.
    """
    addrs = args.email
    subject = "Data Validation Script Result for "+args.htag+":"+directory
    # Assemble the body as a list of fragments, joined once at the end.
    parts = ["Data Validation Script Completed for {0}:{1}. Results below:\n".format(directory, args.htag)]
    parts.append("\nSamples Missing from eos:\n")
    for missing in missingSamples:
        parts.append(" {0}\n".format(missing))
    parts.append("\n---== Detailed Results: ==---\n")
    if not errorSamples:
        parts.append("No Errors detected!\n")
    for sample in errorSamples:
        parts.append("\nErrors for sample {0}:\n".format(sample))
        parts.extend(" -- {0}\n".format(err) for err in errorSamples[sample])
    sendEmail(addrs, subject, "".join(parts))
def main():
    # Health check: HEAD-request a known static asset and email the response
    # details if the status is anything but 200. Python 2 / httplib.
    try:
        # 2-second timeout on the connection.
        conn = httplib.HTTPConnection("xx.domain.com", 80, True, 2.0)
        conn.request("HEAD", "/js/lib/jquery.min.js")
        # conn.sock.settimeout(2.0)
        res = conn.getresponse()
        status = res.status
        conn.close()
        if status == 200:
            print get_nowtime(), 200, res.reason
        else:
            # Non-200: log, report by email, and stop the script.
            print get_nowtime(), res.status, res.reason, res.getheaders()
            sendEmail(res.status, res.reason, res.getheaders())
            quit()
    except Exception as e:
        # Distinguish plain timeouts from other failures in the log output.
        if str(e) == 'timed out':
            print get_nowtime(), 'time out'
        else:
            print get_nowtime(), 'catch other error', e
def processOnce(settings):
    # One scheduling pass: pull F2F/agenda events from whichever source is
    # configured, push updates to IMAT and/or Google Calendar, and email a
    # summary of what happened (or would happen, on a dry run).
    info = ''
    try:
        # Event-source priority: HTTP schedule, then F2F Excel, then agenda Excel.
        if settings.defined('f2fScheduleURL'):
            events = getf2fhttpEvents(settings)
        elif settings.defined('f2fExcelFile'):
            events = getf2fExcelEvents(settings)
        elif settings.defined('agendaExcelFile'):
            events = getAgendaEvents(settings)
        else:
            assert (False),"No source for F2F or Agenda schedule"
        # Update IMAT
        if settings.defined('imatUser'):
            info += updateIMAT(settings, events)
        # Update the online Google Calendar
        if settings.defined('calendarID'):
            info += updateGoogleCalendar(settings, events)
    except KeyboardInterrupt:
        raise # Pass exception on to main loop
    except:
        if settings.loop:
            # Summarise the exception, but keep executing
            import sys
            info = "Unexpected exception:" + str(sys.exc_info())
        else:
            raise # Report the exception and break execution
    if len(info) > 0:
        # Prefix a warning when running in dry-run (no-update) mode.
        if not settings.update:
            info = "WARNING: Dry run. No changes made. \n" + info
        print info
        sendEmail(settings, info)
def makeEmail(args, directory, errorSamples, missingSamples):
    """Make an email message and send it using the sendEmail.py module in this
    directory.

    Expects a dictionary with samples as keys and a list of errors as values.
    Loop over samples and add the errors to the message then send.
    """
    addrs = args.email
    subject = "Data Validation Script Result for " + args.htag + ":" + directory
    message = "Data Validation Script Completed for {0}:{1}. Results below:\n".format(
        directory, args.htag)
    # Section 1: samples absent from eos.
    message += "\nSamples Missing from eos:\n"
    for sample in missingSamples:
        message += " {0}\n".format(sample)
    # Section 2: per-sample error details, or an all-clear line.
    message += "\n---== Detailed Results: ==---\n"
    if not errorSamples:
        message += "No Errors detected!\n"
    for sample in errorSamples:
        message += "\nErrors for sample {0}:\n".format(sample)
        for error in errorSamples[sample]:
            message += " -- {0}\n".format(error)
    sendEmail(addrs, subject, message)
def main():
    """Check EVENT_URL for a release: email an alert (credentials taken from
    argv[1]/argv[2]) when the new marker appears or the old one disappears."""
    hotness_re = re.compile(NEW_HOTNESS, re.IGNORECASE)
    busted_re = re.compile(OLD_BUSTED, re.IGNORECASE)
    pageContents = urllib2.urlopen(EVENT_URL).read()

    newHotnessFound = hotness_re.search(pageContents)
    oldBustedGone = not busted_re.search(pageContents)
    if not (newHotnessFound or oldBustedGone):
        return

    # Prefer the "old marker gone" wording when both conditions hold.
    if oldBustedGone:
        condition = '"' + OLD_BUSTED + '" was not found'
    else:
        condition = 'Found "' + NEW_HOTNESS + '"'
    msg = condition + ' at ' + EVENT_URL + ' at ' + \
        time.asctime(time.gmtime()) + ' UTC'
    # print "message: ", msg
    sendEmail.sendEmail(
        sys.argv[1], sys.argv[2], NEW_HOTNESS + ' is OUT!!!', msg)
def sendAPI(url): # we will totall try 3 times if send api fails retryCount = 3 # set timeout 10s, over 10s, we assume it fails. http = httplib2.Http(timeout=10) email = "*****@*****.**" password = "******" auth = base64.encodestring(email + ':' + password) headers = { "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Cache-Control": "no-cache", "Authorization": "Basic " + auth, "Connection": "Keep-Alive", } while True: try: res, data = http.request(url, "GET", headers=headers) if not parseResponse(res): raise Exception("Response is not correct.") sleep(300) except httplib2.ServerNotFoundError, e: if e.find("Name or service not known") != -1: sleep(300) continue except Exception, e: if retryCount > 0: retryCount -= 1 logger.warning("Exception: %s" % e) logger.warning("Send API retry...") sleep(30) http = httplib2.Http(timeout=10) continue logger.error("Send API error.") logger.error("Exception: %s" % e) msg = "There is server API error. The URL is: %s.\r\n Exception is: %s." % (url, e) sendEmail(msg) # we need to reset retryCount here retryCount = 3 sleep(3600) continue
def sendAPI(url): # we will totall try 3 times if send api fails retryCount = 3 # set timeout 10s, over 10s, we assume it fails. http = httplib2.Http(timeout=10) email = "*****@*****.**" password = "******" auth = base64.encodestring(email + ":" + password) headers = { "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Cache-Control": "no-cache", "Authorization": "Basic " + auth, "Connection": "Keep-Alive", } while True: try: res, data = http.request(url, "GET", headers=headers) if not parseResponse(res): raise Exception("Response is not correct.") sleep(300) except httplib2.ServerNotFoundError, e: if e.find("Name or service not known") != -1: sleep(300) continue except Exception, e: if retryCount > 0: retryCount -= 1 logger.warning("Exception: %s" % e) logger.warning("Send API retry...") sleep(30) http = httplib2.Http(timeout=10) continue logger.error("Send API error.") logger.error("Exception: %s" % e) msg = "There is server API error. The URL is: %s.\r\n Exception is: %s." % (url, e) sendEmail(msg) # we need to reset retryCount here retryCount = 3 sleep(3600) continue
def main(): '''Crea un email con un reporte de los pedidos en los cuales no se ha ingresado el ccinputdate. Aplica solo para los commonRegimes de SC Se debe ejecutar todos los dias a las 10 am. ''' #abrir base de datos client = MongoClient(SERVERDB) db = client.supplyChain collection = db['commonRegimes'] noccdate = ccinputdate(collection) norefrendo = noRefrendo(collection) nocost = noCost(collection) htmpending = htmPending(collection) notregularized = notRegularized(collection) #enviar el mensaje por correo #consolidar mensaje consolidate = noccdate + norefrendo + nocost + htmpending + notregularized sendEmail('*****@*****.**', 'Incomplete shipments', mensaje = consolidate)
def printBuildsMenu(): while True: print """ [1] Scan for old directories, saving list to oldDirectories.txt. [2] Read directories from oldDirectories.txt and email a warning. [3] Move directories to /dev_area/removal [4] Delete logs. [5] Sync Database. [6] Back """ input = raw_input("Your Choice: ") if input == '1': cls() dbi.dumpAllKeepers() lt_os.system('./findOldDirs.py > oldDirectories.txt', True) lt_os.system( './findDirs.py --path /mvista/dev_area/foundation >> oldDirectories.txt', True) cls() print "Old non-exempt directories written to oldDirectories.txt" elif input == '2': cls() sendEmail() print "Email sent to [email protected]." elif input == '3': cls() moveDirs.main() elif input == '4': cleanup.cleanupLogs() elif input == '5': cleanup.cleanupDB() elif input == '6': cls() printMainMenu()
def main(): url = 'http://bm.ruankao.org.cn/sign/welcome' # python3 # page = urllib.request.urlopen(url) # html = page.read().decode('utf-8') # page.close() # python2 urllib2 # req = urllib2.Request(url) # res = urllib2.urlopen(req) # code = res.code # html = res.read() # res.close() # python2 request res = requests.get(url, {}) code = res.status_code html = res.content if code != 200: return list = get_list(html) # 循环查看是否有2019年的数据 is_in_2019 = False for item in list: if re.match(r'2019', item[1]) is not None: is_in_2019 = True break if is_in_2019: print u'开始报名' cont = get_mail_cont(list) # save_file(str, './temp/test.html') sendEmail(cont) quit() else: print get_nowtime(), u'未开始'
def main():
    # Email the "upcoming shipments" notice: regimes 10/91 not yet delivered
    # to the warehouse, ordered by warehouse ETA, rendered through a Jinja
    # template that lives next to this script.
    client = MongoClient(host=SERVERDB, port=27017 )
    collection = client.supplyChain['commonRegimes']
    parameters = {
        'delivery_to_warehouse':None,
        'regime':{'$in':['10', '91']},
        'status':{'$ne':'OTHER'}
    }
    query_result = collection.find(parameters).sort('eta_warehouse', ASCENDING)
    applicationFolder = os.path.dirname(__file__)
    template = Template(open(os.path.join(applicationFolder, 'templates','nextArrivals.html')).read())
    message = template.render(query = query_result)
    # sendEmail takes Spanish keyword args: destinatario=to, titulo=subject,
    # mensaje=body, bc=bcc, desplegar=display flag.
    sendEmail(destinatario=ARRIVALNOTICECONTACTS+";[email protected]; [email protected]; [email protected]", \
              titulo='Customs Clearance: Upcoming Shipments',\
              mensaje= message, bc='*****@*****.**', \
              desplegar=False)
def main(): #first update the report print "Updating cclearance excel report." clearanceReportCreateExcel.main() #setup message men = open(DAILYREPORTMSGTEMPLATE).read() #then if it is 12pm, d = datetime.now() if d.hour == 12: if d.isoweekday() == 3: #send to all print "Sending report to all region." sendEmail(destinatario = GLOBAL, titulo = "Customs Clearance Report Update", mensaje = men, bc = '*****@*****.**') else: #send to ecuador print "Sending report locally." sendEmail(destinatario = LOCAL, titulo = "Customs Clearance Report Update", mensaje = men, bc = '*****@*****.**') else: #send to ecuador print "Sending report locally." sendEmail(destinatario = LOCAL, titulo = "Customs Clearance Report Update", mensaje = men, bc = '*****@*****.**')
for price in html_price: if price.text.strip(): #print(tag.text.strip()) #print("Promotional price " + price.text.strip()) price1.append(price.text.strip()) title.append(tag.text.strip()) return price1,title # loop through the urls and parse the data while count < len(urls): # call the function for the specified url will= getprices(urls[count]) #print(will[0]) #using paqdas to format the output print(urls[count]) print(pd.DataFrame({'Product': will[1], \ 'Promo': will[0]})) returnData += urls[count] returnData += pd.DataFrame({'Product': will[1], 'Promo': will[0]}).to_html() count += 1 sendEmail.sendEmail(f""" <html> <head></head> <body> {returnData} </body> </html> """)
try: sendcard.run(logger, phoneFile, optSeq) logger.info("发券完成 success") print("发券完成") except Exception as e: print e print("发券失败 failed") logger.info(e) logger.info("发券失败 failed") else: print '号码文件为空,本次程序终止!' return else: print '号码文件不存在,本次程序终止!' if __name__ == "__main__": for actName in ['xj15', 'xj35', 'xj75']: rr = runMe(actName) print rr.pcardName print rr.getOptSeq() rr.run() print rr.resData, rr.phoneFileName try: sendEmail.sendEmail(rr.logger, rr.resData, rr.pcardName) except Exception as e: print e print "邮件发送失败" rr.logger.info("邮件发送失败")
def main(): parser = argparse.ArgumentParser(description="Generate Gerrit table") parser.add_argument( "-g", "--gerrits", action="store", dest="gerrit_list", nargs="+", help="<Required> List of Gerrits", required=True, ) parser.add_argument( "-e", "--email", action="store", dest="email_addresses", nargs="+", help="<Required> Email addresses", required=False, ) results = parser.parse_args() em_addrs = [] gerrit_list = results.gerrit_list email_addresses = results.email_addresses localtime = time.asctime(time.localtime(time.time())) if email_addresses: for address in email_addresses: if "@" in address: em_addrs.append(address + ",") else: em_addrs.append(address + "@qca.qualcomm.com,") email_addresses = em_addrs email_body = """\ <html> <head></head> <body> <p> Report Time: %s <br> Generated gerrit validation table<br> </p> %s </body> </html> """ % ( localtime, getGerritTable(gerrit_list), ) fd = open("gerritVTable.html", "w+") fd.write("%s" % email_body) fd.close() if email_addresses: sendEmail(email_body, "Gerrit Validation", "".join(email_addresses)) if status == "FAIL": print "Gerrit Validation FAIL" else: sys.exit(0)
def addParkingSpace():
    '''
    This function gets executed with HTTP call: accepts user supplied details
    and stores in the back-end database.
    :return: method endpoint returns a dictionary telling about the success or failure of action
    '''
    #Parking space will only be added if user supplies his authentication token with every request
    auth = isAuthenticated(request.json['idToken'])
    # Check if user is authenticated
    if (auth['status'] == True):
        #If user gets authenticated, accept all details from HTTP call and parse them
        parkingspace_label = request.json['parkingSpaceLabel']
        parkingspace_description = request.json['parkingSpaceDescription']
        address_line1 = request.json['addrLine1']
        address_line2 = request.json['addrLine2']
        # addrLine3 = request.forms.get('addrLine3')
        city = request.json['city']
        state = request.json['state']
        country = request.json['country']
        zipcode = int(request.json['zip'])
        disabled_parking_flag = bool(request.json['disabledParkingFlag'])
        parkingspace_photo = request.json['parkingSpacePhoto']
        parkingspace_lat = request.json['parkingSpaceLat']
        parkingspace_long = request.json['parkingSpaceLong']
        '''
        Generating a parking ID, which is a random string from uuid module
        '''
        # urn form is "urn:uuid:<hex>"; slice [9:] strips the "urn:uuid:" prefix.
        tempstring = uuid.uuid4().urn
        parkingspace_id = tempstring[9:]
        '''
        Below is the [qrcode = parkingspace_id + zipcode]
        The concatenation here can be a performance hit when there are large number
        of parking spaces being added.
        this qrcode string will be sent to the user
        '''
        qrcode = (parkingspace_id + str(zipcode)).upper()
        '''insert into database as they are received, initialize default variables which are not coming from front-end
        default parking rate until user sets it in another end-point is 0.00
        start_datetime = None [inserted here as default in DB ]
        end_datetime = None [inserted here as default in DB]
        parking_space_availability_flag = False , this will the default value since the user has just added it
        '''
        parking_space_availability_flag = False
        start_datetime = None
        end_datetime = None
        parkingspace_rate = 0.00
        '''
        Geting users private feilds from auth token return dictionary of auth function callback
        sub: returns a unique big integer which can be used as a unique user id for storing user details
        email: returns the user id, corresponding to loggedin session
        phone: currently we are not storing users phone number
        '''
        user_id = auth['sub']
        user_email = auth['email']
        user_phone = None
        is_owner = True
        is_parker = False
        '''
        Create connection with the database and insert all details in the respective relations
        remember %s here used saves us from SQL injections, in case any hacker is in mood to waste his time you know :)
        '''
        try:
            con = connection.get_db_connection()
            #con = psycopg2.connect(database=parketconstants.PARKET_DATABASE_NAME, user=parketconstants.PARKET_DATABASE_USER, password=parketconstants.PARKET_DATABASE_PASSWORD)#constant name required here
            cur = con.cursor()
            '''
            TODO: IF user passes same details again, stopping him from doing so is reuired, we will do it later
            as of now we are handling it from front-end
            '''
            '''
            1. Insert all parking space related details to PARKING_SPACE table
            '''
            insert_detail_data = (parkingspace_id, address_line1, address_line2,
                                  city, country, state, zipcode,
                                  parkingspace_label, parkingspace_description,
                                  parkingspace_photo, disabled_parking_flag,
                                  parking_space_availability_flag,
                                  start_datetime, end_datetime,
                                  parkingspace_rate, qrcode,)
            cur.execute(parketconstants.INSERT_INTO_PARKING_SPACE, insert_detail_data)
            '''
            2. Insert parking space location details to PARKING_SPACE_LOCATION table
            '''
            insert_location_data = (parkingspace_id, parkingspace_lat, parkingspace_long,)
            cur.execute(parketconstants.INSERT_INTO_PARKING_SPACE_LOCATION, insert_location_data)
            '''
            3. Fetch list of all users from user-detail relation so as to check if this user has already any parking space in db
            '''
            cur.execute(parketconstants.GET_ALL_USERS_FROM_USER_DETAILS)
            existing_users_list = cur.fetchall()
            #con.commit()
            #this will insert when user is a new user
            if (user_id,) not in existing_users_list:
                print user_id+ ' EXISTS'
                insert_user_details_data = (user_email, user_phone, user_id, is_owner, is_parker,)
                cur.execute(parketconstants.INSERT_INTO_USER_DETAILS, insert_user_details_data)
                con.commit()
            # Owner link row for the new space. NOTE(review): reconstructed
            # as running for BOTH new and existing users — confirm against the
            # original indentation.
            insert_parkingspace_owner_data = (parkingspace_id, user_id,)
            cur.execute(parketconstants.INSERT_INTO_PARKING_SPACE_OWNER,insert_parkingspace_owner_data)
            con.commit()
            connection.close_db_connection(con) #close the db connection
            #print cur.fetchone()
            con.commit()
            #print (cur.execute("SELECT * from parketb.parking_space"))
        except:
            # NOTE(review): bare except silently swallows every DB failure and
            # the endpoint still returns 201 below — handle connection errors here.
            pass
        return_dict = {}
        return_dict["parkingSpaceId"] = parkingspace_id
        #Sending Email with the QR code to the authenticated user.
        email.sendEmail(auth['email'], qrcode)
        response.status = 201
        return return_dict
    else:
        return_dict = {'status' : 'authentication failure'}
        response.status = 400
        return return_dict
from sendEmail import sendEmail
from crawler import crawler
from keywords import keywords

# Script body: crawl PubMed's trending page and email the result along with
# the configured keywords. "Cha'" is the first argument passed to sendEmail.
sendEmail("Cha'", crawler("https://www.ncbi.nlm.nih.gov/pubmed/trending/"),
          keywords)
pass else: raise # then we do the backend videos # inside a loop because things are unreliable attempts = 0 succeeded = False while attempts < 3 and succeeded == False: try: currentBackendVideos = getCurrentBackendVideos(currentTeam) oldBackendVideos = getOldVideosFromFile(oldBackendVideosFile, currentTeam) if feedType == 'backend' or feedType == 'both': releaseAppropriateVideos(currentBackendVideos, oldBackendVideos, destEmailAddress, 'backend', currentTeam, logFile, pathToContentDirectories, downloadOrNot) updateOldVideosFile(oldBackendVideosFile, currentBackendVideos) except HTTPError as error: # the ravens locked us out of their backend if error.code == 403 and currentTeam.name == 'ravens': pass else: raise except: exc_type, exc_obj, tb = sys.exc_info() f = tb.tb_frame lineno = tb.tb_lineno filename = f.f_code.co_filename linecache.checkcache(filename) line = linecache.getline(filename, lineno, f.f_globals) errorMessage = str('EXCEPTION IN ({}\n LINE {} "{}"): {}'.format(filename, lineno, line.strip(), exc_obj)) sendEmail.sendEmail(smtpUser, smtpPass, reportingAddress, 'feedalert error', errorMessage) print('unhandled exception')
def handleFiles(self,fname,modified,root): """ # eg file IMOS_SOOP-SST_T_20081230T000900Z_VHW5167_FV01.nc # IMOS_<Facility-Code>_<Data-Code>_<Start-date>_<Platform-Code>_FV<File-Version>_<Product-Type>_END-<End-date>_C-<Creation_date>_<PARTX>.nc """ theFile = root+"/"+fname file = fname.split("_") #print theFile if file[0] != "IMOS": self.ignoredCount += 1 return facility = file[1] # <Facility-Code> # the file name must have at least 6 component parts to be valid if len(file) > 5: year = file[3][:4] # year out of <Start-date> # check for the code in the ships code = file[4] if code in self.ships: platform = code+"_"+ self.ships[code] if facility == "SOOP-ASF": if file[2] in self.data_codes: product = self.data_codes[file[2]] targetDir = self.destDir+"/"+facility+"/"+platform+"/"+product+"/"+year targetDirBase = self.destDir+"/"+facility else: err = "Unknown Data Code "+product+" for "+facilty+". Add it to this script. File ignored" print err self.errorFiles.append(err) # common error that needs our attention email = sendEmail.sendEmail() email.sendEmail("*****@*****.**","SOOP Filesorter - Unrecognised Data code",err) self.ignoredCount += 1 return False else: targetDir = self.destDir+"/"+facility+"/"+platform+"/"+year targetDirBase = self.destDir+"/"+facility # files that contain '1-min-avg.nc' get their own sub folder if "1-min-avg" in fname: targetDir = targetDir+ "/1-min-avg" error = None if(not os.path.exists(targetDir)): try: os.makedirs(targetDir) #od = os.popen("chmod -R g+w " + targetDirBase) #os.popen("chgrp -R " +self.datasetGroup+" " + targetDirBase) except: print "Failed to create directory " + targetDir self.errorFiles.append("Failed to create directory " + targetDir ) error = 1 # blacklist check if fname in self.blackList: print "Ignoreing Blacklisted file " + fname self.errorFiles.append("Ignoreing Blacklisted file " + fname ) error = 1 if not error: targetFile = targetDir+'/'+fname # see if file exists if(not os.path.exists(targetFile)): #try: 
shutil.copy(theFile,targetFile ) print theFile +" created in -> "+ targetDir os.popen("chmod g+w " + targetFile).readline() #os.popen("chgrp "+self.datasetGroup +" " + targetFile).readline() self.newCount += 1; #except: # print "Failed to create file (" + theFile + "). check permissions and file name" # self.errorFiles.append("Failed to create file (" + theFile + "). check permissions and file name") # copy if more recent or rubbish file elif (modified > os.stat(targetFile)[stat.ST_MTIME] + self.timezone_offset) or (os.path.getsize(targetFile) == 0): try: if os.path.getsize(targetFile) == 0: print "Zero sized file found: " + targetFile #shutil.copy dosent seem to overwrite so delete then write try: os.remove(targetFile) except os.error: print "remove wasnt successfull" try: shutil.copy(theFile,targetFile ) print theFile +" updated in -> "+ targetDir self.updatedCount += 1; except: print "copy of " + theFile + " wasnt successfull" except Exception, e: msg = "Failed to update file (" + theFile + " " + time.ctime() + ") " + str(e) self.errorFiles.append(msg) else: #print theFile +" checked ok -> "+ targetDir self.checkedCount += 1; #os.popen("chmod g+w " + targetFile).readline() #os.popen("chgrp "+self.datasetGroup +" " + targetFile).readline() else: if code != "SOFS": # SOFS = bogus files writen by CSIRO. ignore them err = "Unrecognised file "+ root+"/"+ fname + " with code '" + code + "' found by the filesorter" self.errorFiles.append(err) # common error that needs our attention email = sendEmail.sendEmail() email.sendEmail(self.emailAddress,"SOOP File sorter- Unrecognised ship code",err)
#"", #), #("http://sandiego.craigslist.org/search/apa?query=north+park&srchType=A&minAsk=&maxAsk=1400&bedrooms=2&format=rss", #"[email protected];[email protected]", #"", #), #] # If file called as a script from the command line, run this if __name__ == "__main__": testfeed = ("Chickens", "http://sandiego.craigslist.org/search/gra?hasPic=1&query=chickens&srchType=A&format=rss", "Clare", "*****@*****.**") random.seed() # unpack the tuple for arguments myfeed = Feed(*testfeed) myfeed.fillTable() images = [] if 'hasPic=1' in myfeed.feedURL: random_listing = random.choice(myfeed.feedData) images = myfeed.getAllImages(random_listing['link']) images = [os.path.basename(img) for img in images] #get credentials here sendEmail.smtp(username, password) sendEmail.sendEmail([myfeed.feedOwnerEmail], "Cool Craiglist Stuff", myfeed.createEmailBody(random_listing), images)
from sendEmail import sendEmail
from string import Template

# Render the arrival-notice HTML template with sample shipment data and
# send the result by email.
# BUGFIX: the template file handle was previously left open
# (Template(open(...).read())); a context manager closes it deterministically.
with open('D:/myScripts/ccreport/reports/ArrivalNotice.html') as template_file:
    messageTemplate = Template(template_file.read())

# Sample consignment values substituted into the template placeholders.
data = {'process_id': 'HODORPL',
        'cases': 1000,
        'gross_weight': 5030,
        'volume': 3.5,
        'container': 'loose cargo'}

finalMessage = messageTemplate.substitute(data)

# sendEmail keyword names are Spanish: destinatario=recipient,
# titulo=subject, mensaje=body, desplegar=display flag.
sendEmail(destinatario="*****@*****.**",
          titulo="email de pruba",
          mensaje=finalMessage,
          desplegar=True)
# -*- coding: utf-8 -*-
import unittest
from suite import CreateSuite
from server import Server
from report import report
from sendEmail import sendEmail

if __name__ == '__main__':
    # Start the app server, run the SG suite, write the report, stop the
    # server, then mail the results.
    app_server = Server()
    app_server.startServer()
    sg_suite = unittest.TestSuite()
    sg_suite.addTest(CreateSuite().add_sg_case())
    report().sgReport(sg_suite)
    app_server.stopServer()
    sendEmail().sgEmail()
def main():
    """Publish, update or delete one ArcGIS map service for a 'smallKey'
    payload, driven by parameters that FileSystemWatcher passes in via
    arcpy.GetParameterAsText.  publishStatus selects the action:
    "1"=NEW, "2"=UPDATE, "0"=do nothing, "3"=DELETE.  On failure the payload
    zip is moved to the Bad_Payloads folder and a log email is sent.
    """
    #Parameters retrieved by FileSystemWatcher from
    #filesystemwatcher_config.json
    #logs to hold all log information for current smallkey publication
    logs =[]
    #Parameters retrieved by FileSystemWatcher from
    #filesystemwatcher_config.json
    smallKey = arcpy.GetParameterAsText(0)
    smallKeyFolder = arcpy.GetParameterAsText(1)
    server = arcpy.GetParameterAsText(2)
    port = arcpy.GetParameterAsText(3)
    pubTemplate = arcpy.GetParameterAsText(4)
    connPath = arcpy.GetParameterAsText(5)
    publishStatus = arcpy.GetParameterAsText(6)
    folder = arcpy.GetParameterAsText(7)
    geocatUrl = arcpy.GetParameterAsText(8)
    geocatUsername = arcpy.GetParameterAsText(9)
    geocatPassword = arcpy.GetParameterAsText(10)
    agsUser = arcpy.GetParameterAsText(11)
    agsPassword = arcpy.GetParameterAsText(12)
    smtpserver = arcpy.GetParameterAsText(13)
    fromaddr = arcpy.GetParameterAsText(14)
    toaddrs = arcpy.GetParameterAsText(15)
    metaDataUrl = arcpy.GetParameterAsText(16)
    webAdaptorName = arcpy.GetParameterAsText(17)
##    print("smallKey ="+ smallKey)
##    print("smallKeyFolder="+ smallKeyFolder)
##    print("server ="+ server)
##    print("port ="+ port)
##    print("pubTemplate ="+ pubTemplate)
##    print("connPath ="+ connPath)
##    print("publishStatus="+ publishStatus)
##    print("folder ="+folder)
##    print("geocatUrl ="+ geocatUrl)
##    print("geocatUsername ="******"geocatPassword ="******"agsUser ="******"agsPassword ="******"smtpserver ="+smtpserver)
##    print("fromaddr="+ fromaddr)
##    print("toaddrs ="+ toaddrs)
##    print("metaDataUrl ="+metaDataUrl)
    try:
        serviceName = smallKey
        mapServiceFullName = folder + "/" + serviceName
        serviceNameDelete = serviceName + ".MapServer"
        #Folder to move payload zip files to after they are published
        (payloadFolder, sk) = os.path.split(smallKeyFolder)
        payloadZip = os.path.join(payloadFolder, smallKey + ".zip")
        publishedFolderName = "Published"
        publishedFolderPath = os.path.join(payloadFolder, publishedFolderName)
        badLoadsFolderName = "Bad_Payloads"
        badLoadsFolderPath = os.path.join(payloadFolder, badLoadsFolderName)
        #check error
        errReturns= checkError.errorValidation(smallKey,smallKeyFolder,publishStatus,geocatUrl, geocatUsername,geocatPassword,metaDataUrl,logs)
        if errReturns == 1:
            #fatal error
            sys.exit(1)
        serviceExists = False
        #Get the list of existing map service
        agsServiceList = publishService.getCatalog(server, port,logs)
        #Check if the map service already exists
        if mapServiceFullName in agsServiceList:
            serviceExists = True
        if publishStatus in ("1", "2"): #NEW or UPDATE
            if publishStatus == "1": #NEW
                # refuse to clobber an existing service on a NEW request
                if serviceExists:
                    checkError.printLog(logs,"")
                    checkError.printLog(logs,mapServiceFullName + " already exists. System exit.")
                    moveFileToFolder(payloadZip, badLoadsFolderPath,logs)
                    sys.exit(0)
            else: #UPDATE
                checkError.printLog(logs,"")
                checkError.printLog(logs,"Attempting to update the service: " + mapServiceFullName)
                if not serviceExists:
                    checkError.printLog(logs,"Service does not exist. Publishing as new service.")
                    checkError.printLog(logs,"")
                else:
                    # drop the old service, then fall through to re-publish
                    deleteService.deleteService(server, serviceNameDelete, agsUser, agsPassword, folder, port)
            #Publish the new service
            shpFolder = os.path.join(smallKeyFolder, smallKey)
            pMXD = publishService.createMXD(shpFolder, pubTemplate,logs)
            try:
                publishService.publishMXD(shpFolder, pMXD, connPath, serviceName, folder,logs)
            finally:
                # release the arcpy MXD object even if publishing fails
                del pMXD
            #Check publishing status
            status = publishService.serviceStatus(mapServiceFullName, smallKey, smallKeyFolder,server, port,geocatUrl, geocatUsername, geocatPassword,logs)
            #If the service is published successfully, make the
            #descriptor file, otherwise exit
            if status == 'SUCCESS':
                publishService.addFileSizeToJson(smallKey, smallKeyFolder, shpFolder)
                moveFileToFolder(payloadZip, publishedFolderPath,logs)
                if onlineResources.updateOnlineResources(smallKey, smallKeyFolder, webAdaptorName, folder, geocatUrl, geocatUsername, geocatPassword,logs)==1:
                    sys.exit(1)
            elif status == 'ERROR':
                sys.exit(1)
            ##cleanUp(smallKeyFolder, smallKey,logs)
        elif publishStatus == "0": #NOTHING
            checkError.printLog(logs,"")
            checkError.printLog(logs,"Status code 0 ignored.")
        elif publishStatus == "3": #DELETE
            checkError.printLog(logs,"")
            checkError.printLog(logs,"Attempting to delete the service: " + mapServiceFullName)
            if serviceExists:
                deleteService.deleteService(server, serviceNameDelete, agsUser, agsPassword, folder, port)
                checkError.printLog(logs,mapServiceFullName + " map service has been deleted")
                # also remove both copies of the payload zip
                publishedZipPath = os.path.join(payloadFolder, publishedFolderName, smallKey + ".zip")
                checkError.printLog(logs,"Deleted: " + publishedZipPath)
                if os.path.isfile(publishedZipPath):
                    os.remove(publishedZipPath)
                checkError.printLog(logs,"Deleted: " + payloadZip)
                if os.path.isfile(payloadZip):
                    os.remove(payloadZip)
            else:
                checkError.printLog(logs,"Service does not exist. Exiting.")
                sys.exit(0)
        else:
            checkError.printLog(logs,"Unknown publish status: " + publishStatus)
        if errReturns == 2: #warning error
            #add at last to avoid duplicated emails
            sendEmail.sendEmail(smtpserver, fromaddr, toaddrs, smallKey, smallKeyFolder, logs)
    except:
        # bare except also catches the SystemExit raised by the sys.exit()
        # calls above -- the hasattr(..., 'code') check below distinguishes
        # SystemExit (which has .code) from other exceptions.
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        pymsg = "\n\nERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \
            str(sys.exc_type) + ": " + str(sys.exc_value) + "\n"
        if hasattr(sys.exc_value, 'code'):
            if sys.exc_value.code !=0: #only for un-normal exit
                moveFileToFolder(payloadZip, badLoadsFolderPath,logs)
                checkError.printLog(logs,pymsg)
                sendEmail.sendEmail(smtpserver, fromaddr, toaddrs, smallKey, smallKeyFolder, logs)
        else:
            moveFileToFolder(payloadZip, badLoadsFolderPath,logs)
            checkError.printLog(logs,pymsg)
            sendEmail.sendEmail(smtpserver, fromaddr, toaddrs, smallKey, smallKeyFolder, logs)
from lxml import etree, html
from sendEmail import sendEmail
import requests, time, random, datetime

# Poll the Black Hat USA 2018 training index and email as soon as the
# "Adversary Tactics: Red Team Ops" course stops showing as sold out.
timeLow = 180
timeHigh = 480
while True:
    try:
        response = requests.get(
            'https://www.blackhat.com/us-18/training/index.html')
        tree = etree.HTML(response.text)
        css_classes = tree.xpath(
            '//*[@id="adversary-tactics-red-team-ops"]/div[1]/div[2]/@class')
        if "course-sold-out-graphic" in css_classes:
            now = datetime.datetime.now()
            print('[{:%H:%M %m/%d/%y}]Status unchanged. Sleeping...'.format(
                now))
        else:
            CN = "Train"
            msg = 'https://www.blackhat.com/us-18/training/adversary-tactics-red-team-ops.html'
            sendEmail("Blackhat training now available!", msg, CN)
    except Exception as exc:
        print(exc)
    finally:
        # randomized 3-8 minute pause between polls
        time.sleep(random.randint(timeLow, timeHigh))
#os.system("mpg123 \"" + filename + "\"") else: print("Name is empty") query = 'UPDATE `ConnectedHosts` SET `Status` = \'In\',`LastSeen`=NOW(),`lastIP` = \''+h['ip']+'\' WHERE `MAC` = \''+h['mac']+'\'' #print(query) cur.execute(query) else: query = 'UPDATE `ConnectedHosts` SET `Status` = \'Out\', `LastLeft`=NOW() WHERE NOT `MAC` = \'C4:17:FE:65:1E:8A\' AND `Status` = \'In\'' #print(query) cur.execute(query) rows = cur.fetchall() if h['mac']: mail = sendEmail.sendEmail() mail.fromAddress(emailFrom) mail.toAddress(emailTo) mail.subject("Unknown device detected") mail.message("Unknown device: "+h['mac']) mail.send() query = "INSERT INTO `test`.`ConnectedHosts` (`Name`, `Device`, `MAC`, `LastSeen`, `LastLeft`, `Status`, `lastIP`) VALUES (\'"+h['mac']+"\', \'Unknown\', \'"+h['mac']+"\', NOW(), NOW(), \'In\', \'"+h['ip']+"\');"; print(query) cur.execute(query) rows = cur.fetchall() beep() if isAlert: pass#beep()
# Read the fields posted by the CGI form.
username = form.getvalue('username')
code = form.getvalue('code')
#Strip quotes around values
# NOTE(review): str.translate(None, "'") is the Python 2 "deletechars" form;
# it raises TypeError on Python 3.
username = str(username)
username = username.translate(None, "'")
code = str(code)
code = code.translate(None, "'")
#Code is a string at this point (not a number)
#Check if username exists in database
if username in users:
    #determine method to contact
    # first character of the stored contact encodes the channel
    # ('p' = phone/SMS, 'e' = email); the remainder is the address itself
    if users[username][0] == 'p':
        number = users[username][1:]
        sendTxt(number,code)
    if users[username][0] == 'e':
        #email address must be a list for SMTP
        address = [users[username][1:]]
        sendEmail(address,code)

# Emit the CGI response.
# NOTE(review): username is echoed into the HTML unescaped -- potential XSS;
# consider escaping this value before printing.
print "Content-type:text/html\r\n\r\n"
print "<html>"
print "<head />"
print "<body>"
print "<h2>%s %s</h2>" % (username, code)
print "</body>"
print "</html>"
def apiTemp_sendEmail():
    """POST endpoint: forward a (loginID, subscribe) pair to sendEmail.sendEmail.

    Each value comes from the JSON request body when present, otherwise from
    the logged-in user's session.  The helper's result is returned as JSON.
    Non-POST requests fall through and return None, as before.
    """
    if request.method == "POST":
        formJson = request.get_json()
        # BUGFIX: dict.has_key() was removed in Python 3; the `in` operator
        # is equivalent (and also works on Python 2).
        loginID = escape(formJson["loginID"]) if "loginID" in formJson \
            else session["user"]["loginID"]
        subscribe = formJson["subscribe"] if "subscribe" in formJson \
            else session["user"]["subscribe"]
        return jsonify(sendEmail.sendEmail(loginID, subscribe))
def exportCsv(currrentYear):
    """Export per-stock key figures plus dividend/EPS-derived payout columns
    to export\\financials.csv, then send a notification email.

    currrentYear: upper bound (inclusive) of the 5-year dividend/payout
    window; when None, the current calendar year is used.
    NOTE(review): the parameter name is misspelt, but it is part of the
    public signature and therefore kept.
    """
    if currrentYear is None:
        today = datetime.datetime.now()
        currrentYear = today.year
    db = DatabaseManager()
    mydb = db.setDBConnection('stockInfo')
    # separate cursors so the per-ISIN dividend/EPS queries do not disturb
    # the outer key-figures result set
    myCursor = mydb.cursor()
    divCursor = mydb.cursor()
    epsCursor = mydb.cursor()
    #Financials
    # Latest key figures + latest timeseries value + concatenated index
    # membership for every stock in BasicData.
    selectKeyFigures = """ SELECT bd.Isin, bd.yahooTicker, bd.Name, bd.currency, bd.Sector, bd.Industry,
        keyFigures.Hight_52, keyFigures.Low_52, keyFigures.PERatio, keyFigures.BookValue, keyFigures.PriceToBook,
        keyFigures.LastDividend, keyFigures.LastDividendYield, keyFigures.LastDividendDate,
        timeseries.TSValue , keyFigures.KeyDate, timeseries.TSDate, indexInfo.indexName
        FROM stockInfo.BasicData AS bd
        INNER JOIN ( SELECT kf.isin, kf.Hight_52, kf.Low_52, kf.PERatio , kf.BookValue, kf.PriceToBook,
        kf.LastDividend, kf.LastDividendYield, kf.LastDividendDate, kf.KeyDate
        FROM stockInfo.KeyFigures AS kf
        INNER JOIN (SELECT isin, max(KeyDate) as kfDate FROM stockInfo.KeyFigures GROUP BY isin ) maxKF
        ON maxKF.isin=kf.isin AND maxKF.kfDate=kf.keyDate ) keyFigures
        ON keyFigures.isin = bd.isin
        INNER JOIN (SELECT ts.isin, ts.TSDate, ts.TSValue FROM stockInfo.Timeseries AS ts
        INNER JOIN (SELECT isin, max(TSDate) as tsDate FROM stockInfo.Timeseries GROUP BY isin ) maxTS
        on maxTS.isin = ts.isin AND maxTS.tsDate=ts.tsDate ) timeseries
        ON timeseries.isin=bd.isin
        INNER JOIN (SELECT bd.isin, group_concat(inf.indexname SEPARATOR ', ') AS indexName
        FROM stockInfo.BasicData AS bd
        INNER JOIN stockInfo.IndexDetails AS ind ON ind.isin=bd.isin
        INNER JOIN stockInfo.IndexInformation AS inf ON inf.idIndexInformation=ind.indexId
        GROUP BY bd.isin ) indexInfo
        ON indexInfo.isin = bd.isin """
    selectDividends = """ SELECT DIVYear, DIVValue FROM stockInfo.DivFigures WHERE ISIN='{0}' """
    selectEPS = """ SELECT EPSYear, EPSValue FROM stockInfo.EPSFigures WHERE ISIN='{0}' ORDER BY EPSYear ASC """
    # five-year window ending at currrentYear (inclusive)
    yearRange = range(currrentYear - 4, currrentYear + 1)
    keyTitleRow = [
        'ISIN', 'YahooTicker', 'Name', 'Currency', 'Sector', 'Industry',
        'Hight_52', 'Low_52',
        'PeRatio', 'BookValue', 'PriceToBook', 'LastDividend',
        'LastDividendYield', 'LastDividendDate', 'TSValue', 'KeyDate',
        'TSDate',
        # NOTE(review): 'IndexName§ 0 ' looks garbled/mojibake -- confirm the
        # intended CSV column header before changing it.
        'IndexName§ 0 '
    ]
    divTitleRow = [
        'FirstDivYear', 'LastDivYear', 'DivIncreas', 'DivDecrease',
        'DivGrowthSi', 'DivGrowthLast7', 'DivGrowthLast5', 'DivGrowthlast3',
        'DivGrowthLast'
    ]
    divYearTitleRow = []
    payoutYearTitleRow = []
    for year in yearRange:
        divYearTitleRow.append('DivYear_{0}'.format(year))
        payoutYearTitleRow.append('PayoutYear_{0}'.format(year))
    curPath = os.path.dirname(os.path.realpath(__file__))
    # NOTE(review): backslash path separator is Windows-only
    csvPath = os.path.join(curPath, 'export\\financials.csv')
    titleRow = keyTitleRow + divTitleRow + divYearTitleRow + payoutYearTitleRow
    with open(csvPath, mode='w', newline='') as fin_file:
        fin_writer = csv.writer(fin_file, delimiter=';', quotechar='"',
                                quoting=csv.QUOTE_MINIMAL)
        fin_writer.writerow(titleRow)
        myCursor.execute(selectKeyFigures)
        allFinancials = myCursor.fetchall()
        for sqlRow in allFinancials:
            isin = sqlRow[0]
            # per-ISIN dividend history -> growth/summary columns
            divCursor.execute(selectDividends.format(isin))
            divRow, divYearDict = calcDividends(divCursor.fetchall(), yearRange)
            # per-ISIN EPS history -> payout ratio columns
            epsCursor.execute(selectEPS.format(isin))
            payoutRow, divYearRow = calcPayout(epsCursor.fetchall(), divYearDict, yearRange)
            dataRow = list(sqlRow) + divRow + divYearRow + payoutRow
            fin_writer.writerow(dataRow)
    # #Dividends
    # selectDiv = """ SELECT bd.isin, di.DIVFiguresType, di.DIVFiguresYear, di.DIVFiguresValue FROM stockInfo.DivFigures AS di
    #     RIGHT JOIN stockInfo.BasicData AS bd
    #     ON di.ISIN=bd.isin """
    # titleRow = ['ISIN','DIVFiguresType', 'DIVFiguresYear', 'DIVFiguresValue']
    # with open('export/dividends.csv', mode='w', newline='') as div_file:
    #     div_writer = csv.writer(div_file, delimiter=';', quotechar='"', quoting=csv.QUOTE_MINIMAL)
    #     div_writer.writerow(titleRow)
    #     myCursor.execute(selectDiv)
    #     allDividends = myCursor.fetchall()
    #     for sqlRow in allDividends:
    #         div_writer.writerow(sqlRow)
    myCursor.close()
    divCursor.close()
    epsCursor.close()
    sendEmail()
def sendPasswdToEmail():
    """Deliver the password notification via the sendEmail helper module."""
    from sendEmail import sendEmail as _send
    _send()
def main():
    """Publish, update or delete an ArcGIS map service for one 'smallKey'
    payload.  All inputs arrive via arcpy.GetParameterAsText (set up by
    FileSystemWatcher); publishStatus selects the action: "1"=NEW,
    "2"=UPDATE, "0"=no-op, "3"=DELETE.  Failures move the payload zip to
    Bad_Payloads and email the accumulated log.
    """
    #Parameters retrieved by FileSystemWatcher from
    #filesystemwatcher_config.json
    #logs to hold all log information for current smallkey publication
    logs = []
    #Parameters retrieved by FileSystemWatcher from
    #filesystemwatcher_config.json
    smallKey = arcpy.GetParameterAsText(0)
    smallKeyFolder = arcpy.GetParameterAsText(1)
    server = arcpy.GetParameterAsText(2)
    port = arcpy.GetParameterAsText(3)
    pubTemplate = arcpy.GetParameterAsText(4)
    connPath = arcpy.GetParameterAsText(5)
    publishStatus = arcpy.GetParameterAsText(6)
    folder = arcpy.GetParameterAsText(7)
    geocatUrl = arcpy.GetParameterAsText(8)
    geocatUsername = arcpy.GetParameterAsText(9)
    geocatPassword = arcpy.GetParameterAsText(10)
    agsUser = arcpy.GetParameterAsText(11)
    agsPassword = arcpy.GetParameterAsText(12)
    smtpserver = arcpy.GetParameterAsText(13)
    fromaddr = arcpy.GetParameterAsText(14)
    toaddrs = arcpy.GetParameterAsText(15)
    metaDataUrl = arcpy.GetParameterAsText(16)
    webAdaptorName = arcpy.GetParameterAsText(17)
##    print("smallKey ="+ smallKey)
##    print("smallKeyFolder="+ smallKeyFolder)
##    print("server ="+ server)
##    print("port ="+ port)
##    print("pubTemplate ="+ pubTemplate)
##    print("connPath ="+ connPath)
##    print("publishStatus="+ publishStatus)
##    print("folder ="+folder)
##    print("geocatUrl ="+ geocatUrl)
##    print("geocatUsername ="******"geocatPassword ="******"agsUser ="******"agsPassword ="******"smtpserver ="+smtpserver)
##    print("fromaddr="+ fromaddr)
##    print("toaddrs ="+ toaddrs)
##    print("metaDataUrl ="+metaDataUrl)
    try:
        serviceName = smallKey
        mapServiceFullName = folder + "/" + serviceName
        serviceNameDelete = serviceName + ".MapServer"
        #Folder to move payload zip files to after they are published
        (payloadFolder, sk) = os.path.split(smallKeyFolder)
        payloadZip = os.path.join(payloadFolder, smallKey + ".zip")
        publishedFolderName = "Published"
        publishedFolderPath = os.path.join(payloadFolder, publishedFolderName)
        badLoadsFolderName = "Bad_Payloads"
        badLoadsFolderPath = os.path.join(payloadFolder, badLoadsFolderName)
        #check error
        errReturns = checkError.errorValidation(smallKey, smallKeyFolder,
                                                publishStatus, geocatUrl,
                                                geocatUsername, geocatPassword,
                                                metaDataUrl, logs)
        if errReturns == 1:
            #fatal error
            sys.exit(1)
        serviceExists = False
        #Get the list of existing map service
        agsServiceList = publishService.getCatalog(server, port, logs)
        #Check if the map service already exists
        if mapServiceFullName in agsServiceList:
            serviceExists = True
        if publishStatus in ("1", "2"):  #NEW or UPDATE
            if publishStatus == "1":  #NEW
                # refuse to clobber an existing service on a NEW request
                if serviceExists:
                    checkError.printLog(logs, "")
                    checkError.printLog(
                        logs, mapServiceFullName + " already exists. System exit.")
                    moveFileToFolder(payloadZip, badLoadsFolderPath, logs)
                    sys.exit(0)
            else:  #UPDATE
                checkError.printLog(logs, "")
                checkError.printLog(
                    logs,
                    "Attempting to update the service: " + mapServiceFullName)
                if not serviceExists:
                    checkError.printLog(
                        logs, "Service does not exist. Publishing as new service.")
                    checkError.printLog(logs, "")
                else:
                    # drop the old service, then fall through to re-publish
                    deleteService.deleteService(server, serviceNameDelete,
                                                agsUser, agsPassword, folder,
                                                port)
            #Publish the new service
            shpFolder = os.path.join(smallKeyFolder, smallKey)
            pMXD = publishService.createMXD(shpFolder, pubTemplate, logs)
            try:
                publishService.publishMXD(shpFolder, pMXD, connPath,
                                          serviceName, folder, logs)
            finally:
                # release the arcpy MXD object even if publishing fails
                del pMXD
            #Check publishing status
            status = publishService.serviceStatus(mapServiceFullName, smallKey,
                                                  smallKeyFolder, server, port,
                                                  geocatUrl, geocatUsername,
                                                  geocatPassword, logs)
            #If the service is published successfully, make the
            #descriptor file, otherwise exit
            if status == 'SUCCESS':
                publishService.addFileSizeToJson(smallKey, smallKeyFolder,
                                                 shpFolder)
                moveFileToFolder(payloadZip, publishedFolderPath, logs)
                if onlineResources.updateOnlineResources(
                        smallKey, smallKeyFolder, webAdaptorName, folder,
                        geocatUrl, geocatUsername, geocatPassword, logs) == 1:
                    sys.exit(1)
            elif status == 'ERROR':
                sys.exit(1)
            ##cleanUp(smallKeyFolder, smallKey,logs)
        elif publishStatus == "0":  #NOTHING
            checkError.printLog(logs, "")
            checkError.printLog(logs, "Status code 0 ignored.")
        elif publishStatus == "3":  #DELETE
            checkError.printLog(logs, "")
            checkError.printLog(
                logs,
                "Attempting to delete the service: " + mapServiceFullName)
            if serviceExists:
                deleteService.deleteService(server, serviceNameDelete, agsUser,
                                            agsPassword, folder, port)
                checkError.printLog(
                    logs, mapServiceFullName + " map service has been deleted")
                # also remove both copies of the payload zip
                publishedZipPath = os.path.join(payloadFolder,
                                                publishedFolderName,
                                                smallKey + ".zip")
                checkError.printLog(logs, "Deleted: " + publishedZipPath)
                if os.path.isfile(publishedZipPath):
                    os.remove(publishedZipPath)
                checkError.printLog(logs, "Deleted: " + payloadZip)
                if os.path.isfile(payloadZip):
                    os.remove(payloadZip)
            else:
                checkError.printLog(logs, "Service does not exist. Exiting.")
                sys.exit(0)
        else:
            checkError.printLog(logs,
                                "Unknown publish status: " + publishStatus)
        if errReturns == 2:  #warning error
            #add at last to avoid duplicated emails
            sendEmail.sendEmail(smtpserver, fromaddr, toaddrs, smallKey,
                                smallKeyFolder, logs)
    except:
        # bare except also catches the SystemExit raised by sys.exit() above;
        # the hasattr(..., 'code') check below distinguishes SystemExit
        # (which carries .code) from other exceptions.
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
        pymsg = "\n\nERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n " + \
            str(sys.exc_type) + ": " + str(sys.exc_value) + "\n"
        if hasattr(sys.exc_value, 'code'):
            if sys.exc_value.code != 0:
                #only for un-normal exit
                moveFileToFolder(payloadZip, badLoadsFolderPath, logs)
                checkError.printLog(logs, pymsg)
                sendEmail.sendEmail(smtpserver, fromaddr, toaddrs, smallKey,
                                    smallKeyFolder, logs)
        else:
            moveFileToFolder(payloadZip, badLoadsFolderPath, logs)
            checkError.printLog(logs, pymsg)
            sendEmail.sendEmail(smtpserver, fromaddr, toaddrs, smallKey,
                                smallKeyFolder, logs)
def writeSQL(self):
    """ creates a SQL insert file for the current day

    Builds xbt_sbd_<Month-DD-YYYY>.sql under self.sbddataFolder/sql from the
    parsed SBD records in self.fileOutput: one parent soop.xbt_realtime row
    plus one soop.xbt_realtime_measurements row per depth/temperature pair,
    wrapped in a BEGIN/COMMIT transaction.  Exits the script on an unknown
    ship callsign or on any file write error.
    """
    if len(self.fileOutput) > 0:
        # NOTE(review): commonData appears unused below -- getExtras() may be
        # called for its side effects; confirm.
        commonData = self.getExtras()
        sqlDir = self.sbddataFolder + "/sql"
        if(not os.path.exists(sqlDir)):
            os.mkdir(sqlDir)
        os.chdir(sqlDir)
        self.sql_filename = "xbt_sbd" +"_"+ time.strftime('%B-%d-%Y') + ".sql"
        schema_table = "soop.xbt_realtime"
        parent_table = schema_table + " (uuid,callsign,lat,lon,max_depth,drop_id,measurements,csv_name,sbd_name,create_date)"
        values_table = "soop.xbt_realtime_measurements (pkid,uuid,callsign,temperature,depth)"
        print "writing to: " + sqlDir + "/" + self.sql_filename
        try:
            f = open(self.sql_filename,'w+')
            f.write("-- this sql created on " + self.script_time + " for XBT Realtime from sbd files created by the CSIRO Iridium Devil Transmitters\r\n\r\n")
            f.write("BEGIN;\r\n\r\n");
            for x in self.fileOutput:
                # NOTE: local name 'datetime' shadows the stdlib module name
                datetime = x['year'] +"/"+ x['month'] +"/"+ x['day'] + ":"+ x['hour'] +":"+ x['minute']
                # deepest reading of the drop
                max_depth = x['depths'][len(x['depths'])-1]
                # fname like: /var/lib/python_cron_data/sbddata/300034012430490_001012.sbd
                filename = x['fname'].replace(".sbd","")
                stringArr = filename.split("_")
                filename = stringArr[len(stringArr)-1]
                imos_filedate = x['year'] + x['month'] + x['day'] + "T" + x['hour'] + x['minute'] + "00Z"
                shipname = x['callsign']
                if (x['callsign'] in self.ships):
                    shipname = self.ships[x['callsign']]
                else:
                    err = "ERROR: The ship callsign '" + shipname + "' is not known. Enter the new ship into this script and re-run. Script terminating."
                    # big error that needs attention
                    email = sendEmail.sendEmail()
                    email.sendEmail("*****@*****.**","SOOP Realtime Process BSD",err)
                    print err
                    self.errorFiles.append(err)
                    self.writetoLog()
                    sys.exit()
                # IMOS-convention CSV name recorded in the csv_name column
                filename = x['callsign'] + "_" + shipname + "/" + x['year'] + "/IMOS_SOOP-XBT_T_" + imos_filedate + "_" + x['callsign'] + "_" + filename + "_FV00.csv"
                print x['fname'] + " makes -> " + filename
                # should delete cascade. scv_name is enforced unique
                f.write("\r\ndelete from " + schema_table + " where csv_name like '" + filename +"' ;\r\n\r\n")
                sql = "insert into " + parent_table + " values (" +\
                    "nextval('soop.xbt_realtime_seq'::regclass) ," + \
                    "'"+ x['callsign'] + "'," +\
                    x['lat'] + "," +\
                    x['lon'] + "," +\
                    max_depth + "," +\
                    x['drop_id'] + "," +\
                    x['points'] + ",'" +\
                    filename + "','" +\
                    x['fname'] + "'," +\
                    "'"+ datetime + "');\r\n"
                f.write(sql)
                self.newCount += 1
                # debuging
                if x.get('incorrectHeaders'):
                    #print sql
                    pass
                # one measurements row per depth/temperature pair, linked to
                # the parent row via currval of the parent sequence
                for index in range(len(x['depths'])):
                    f.write("insert into " + values_table + " values (" +\
                        "nextval('soop.xbt_realtime_measurements_seq'::regclass) ," + \
                        "currval('soop.xbt_realtime_seq'::regclass) ," + \
                        "'"+x['callsign'] + "'," +\
                        x['temps'][index]+","+ x['depths'][index] + ");\r\n")
            # always need to update the geom column
            f.write("UPDATE " +schema_table+" SET geom = PointFromText('POINT(' || lon || ' ' || lat || ')',4326);\r\n")
            f.write("COMMIT;\r\n\r\n");
            f.close()
            os.popen("chmod g+w " + self.sql_filename).readline()
            #os.popen("chgrp "+self.datasetGroup +" " + self.sql_filename).readline()
        except Exception, e:
            err= "ERROR: problem opening or writing the SQL file. exiting.. " + str(e)
            print err
            self.errorFiles.append(err)
            self.writetoLog()
            sys.exit()
def match():
    """Match today's menu items against users' favorited dining items.

    Loads today.json, looks each menu item up in users_item (exact id plus a
    Levenshtein-based fuzzy match restricted to items with identical dietary
    flags), and builds a dict mapping each netid to a list of
    (item, dhall, meal, similar_name) tuples -- similar_name is None for an
    exact favorite and supersedes any fuzzy match on the same item.  The
    result is handed to sendEmail().
    """
    matches = dict()
    # connect with the database
    con = MySQLdb.connect(user='******', db='db_mysql',
                          passwd='princetoncos333', host='localhost')
    # read and evaluate today.json
    global today
    file = open('today.json')
    today = json.load(file)
    file.close()
    # fetch IDs for dining items corresponding with today's menu items
    with con:
        for dhall in today:
            for meals in dhall['menus']:
                item = meals['name']
                cur = con.cursor()
                # BUGFIX: all queries are now parameterized.  Item names
                # regularly contain apostrophes (e.g. "Shepherd's Pie"),
                # which broke the old string-concatenated SQL and was an
                # injection vector.
                cur.execute(
                    "SELECT id, isVegan, isVegetarian, isPork, hasNuts "
                    "FROM users_item WHERE name = %s", (item,))
                print(item)
                row = cur.fetchone()
                if row is None:
                    # BUGFIX: a menu item missing from users_item previously
                    # crashed with TypeError on fetchone()[0]
                    continue
                item_ID, item_isVegan, item_isVegetarian, item_isPork, item_hasNuts = row
                cur.execute("SELECT * FROM users_item")
                for food in cur.fetchall():
                    # check to make sure potential similar item has same
                    # allergen attributes
                    if (item_isVegan != food[1] or item_isVegetarian != food[2]
                            or item_isPork != food[3] or item_hasNuts != food[4]):
                        continue
                    similarity = Levenshtein.ratio(food[6], item.encode('utf-8'))
                    if len(food[6]) > len(item):
                        ratio = float(len(item)) / float(len(food[6]))
                    else:
                        ratio = float(len(food[6])) / float(len(item))
                    # similar but not identical names
                    if similarity > 0.8 and 0.5 < ratio < 1.0:
                        cur.execute("SELECT id FROM users_item WHERE name = %s",
                                    (food[6],))
                        similarItem_ID = cur.fetchone()
                        cur.execute(
                            "SELECT netid_id FROM users_netid_favorites "
                            "WHERE item_id = %s", (similarItem_ID[0],))
                        for similarNetID_ID in cur.fetchall():
                            # fetch the netid corresponding with the netID ID
                            cur.execute(
                                "SELECT netid FROM users_netid WHERE id = %s",
                                (similarNetID_ID[0],))
                            similarNetID = cur.fetchone()[0]
                            matches.setdefault(similarNetID, []).append(
                                (item, dhall['dhall'], dhall['meal'], food[6]))
                # fetch netID IDs of users who favorited this exact item
                cur.execute(
                    "SELECT netid_id FROM users_netid_favorites "
                    "WHERE item_id = %s", (item_ID,))
                for netID_ID in cur.fetchall():
                    # fetch the netid corresponding with the netID ID
                    cur.execute("SELECT netid FROM users_netid WHERE id = %s",
                                (netID_ID[0],))
                    netID = cur.fetchone()[0]
                    entry = (item, dhall['dhall'], dhall['meal'], None)
                    if netID not in matches:
                        matches[netID] = [entry]
                        continue
                    # a direct favorite supersedes a fuzzy match on the same
                    # item; an existing direct entry is left alone.
                    # BUGFIX: previously an existing direct entry was not
                    # marked as handled, so a duplicate tuple was appended.
                    handled = False
                    for foodtup in matches[netID]:
                        if foodtup[0] == item:
                            if foodtup[3] is not None:
                                matches[netID].remove(foodtup)
                                matches[netID].append(entry)
                            handled = True
                            break
                    if not handled:
                        matches[netID].append(entry)
    sendEmail(matches)
def motion(channel):
    """Motion-sensor callback: send the alert email, then log to stdout.

    channel: the GPIO channel number supplied by the event system (unused).
    """
    sendEmail()
    print("MOTION DETECTED")