def single_run(course_loc_list, initial_registrant_counts):
    """One polling pass: download the course pages, compare current
    registrant counts against the recorded baselines, and email an alert
    when a seat has opened up.

    :param course_loc_list: course location identifiers to download
    :param initial_registrant_counts: dict course_id -> baseline registrant
        count; mutated in place (new courses recorded, baselines lowered
        after an alert fires)
    """
    # download course
    course_loc_text_dict = get_multipage_info_in_dict(course_loc_list)
    # analysis
    for subject_id in target_courses:
        print(f"{subject_id} 확인중")
        # Look up every watched course number and collect the results
        course_nos = target_courses[subject_id]
        # print(course_nos)
        records = course_no_to_records(subject_id, course_nos, course_loc_text_dict)
        # print(records)
        for record in records:
            course_id = course_identifier(subject_id, record['강좌번호'])
            registrant_count = int(record['수강신청인원'])
            # First time we see this course: record the count as the baseline.
            if course_id not in initial_registrant_counts:
                initial_registrant_counts[course_id] = registrant_count
            else:
                # Count dropped below the baseline -> a seat opened; alert.
                if registrant_count < initial_registrant_counts[course_id]:
                    emailer.send(
                        f"[{record['교과목명']}] 빈자리 알림",
                        f"[{record['교과목명']}]의 ({record['강좌번호']}) 분반에 자리가 확인되었습니다.\n\n"
                        f"현재상태:\n 수강신청인원: {registrant_count} "
                        f"// 정원(재학생): {record['정원(재학생)']}"
                        f"\n\nsugang.snu.ac.kr")
                    print('Message Sent!')
                    # lower the baseline so the same opening isn't re-alerted
                    initial_registrant_counts[course_id] = registrant_count
    print(initial_registrant_counts)
def main():
    """Run the cmdMail main loop.

    Polls the inbox for commands every ``loop_delay`` seconds and runs the
    periodic jobs every ``regular_interval_delay`` seconds.  On an
    unhandled error, logs and emails the stack trace, then exits with
    status 1.
    """
    on_startup()

    # Delays measured in seconds.
    loop_delay = 10
    regular_interval_delay = 15 * 60
    loop_count = 0

    # Continuously monitor email for new commands, pausing every 30 seconds
    try:
        while True:
            # Check for commands in emails every loop
            read_commands()
            # On a regular interval, run certain jobs
            if ((loop_count * loop_delay) % regular_interval_delay == 0):
                regular_interval()
            time.sleep(loop_delay)
            loop_count += 1
    except Exception:
        # Was a bare "except:", which also swallowed KeyboardInterrupt and
        # SystemExit — a Ctrl-C during time.sleep() sent a bogus crash email
        # and exited 1.  "except Exception" only traps real errors.
        trace_string = traceback.format_exc()
        # log it locally in case internet is down
        utilities.log("Something happened, I have crashed:\n" + trace_string)
        # Build and send an email
        sub = "cmdMail crashed"
        msg = "Something went wrong with cmdMail, here is the stack trace:\n\n" + trace_string
        emailer.send(sub, msg)
        # Exit the program with error code 1
        exit(1)
def run(self):
    """Auto-reply to self.sender with the current NYC weather report.

    Fetches the National Weather Service DWML feed for NYC, pulls out the
    worded-forecast text, and emails it back to the original sender.
    """
    # get the current weather for NYC from the National Weather Service feed
    weather_xml = get_url(
        'http://forecast.weather.gov/MapClick.php?lat=40.71980&lon=-73.99300&FcstType=dwml'
    )
    if weather_xml is None:
        # there was an error gettting the weather data
        send('NYC Weather',
             'Sorry, this service is temporarily unavailable',
             recipient_list=[self.sender],
             sender=server_auto_email)
    else:
        # parse the report from the xml and auto-reply with it as the message body
        doc = etree.fromstring(weather_xml)
        # find the human-readable text report in the xml
        report = []
        for elem in doc.xpath('//wordedForecast'):
            for subelem in elem.getchildren():
                if subelem.tag == 'text':
                    report.append(subelem.text)
        # send it back to the sender
        send('NYC Weather',
             ' '.join(report),
             recipient_list=[self.sender],
             sender=server_auto_email)
def get_credentials(self, reauth):
    """
    Get a credentials object initialised from file
    :param reauth: [True/False] - force reauthentication
                   if False stored details will be used where possible
    :return: Credentials for use in GMail API connections
    """
    self.log.info('gmailconnector::get_credentials called')
    self.log.info('getting credential store')
    store = self.get_cred_store()
    self.log.info('getting credentials from store')
    credentials = store.get()
    self.log.info('checking credentials are valid')
    if credentials.access_token_expired:
        self.log.warning('credentials expired, trying to refresh credentials')
        try:
            # Successful refresh returns immediately; a failed refresh is
            # silently swallowed and falls through to the reauth path below.
            return credentials.authorize(httplib2.Http())
        except:
            pass
    if not credentials \
            or credentials.invalid \
            or reauth:
        self.log.info('Failed to find credentials, or existing credentials'
                      ' are invalid, sending email to get new credentials')
        try:
            # Email the user an OAuth consent URL, then abort — the flow
            # cannot continue until they re-authorise out-of-band.
            url = self.get_auth_url()
            emailer.send(self.conf['AUTH_ADD'], url)
            raise PermissionError('Authentication failed')
        except errors.HttpError, error:  # Python 2 except syntax
            self.log.error(error)
        except PermissionError:
            self.log.warning('Authentication failed')
            raise
def sendInviteEmail(invitee, veriCode):
    """Email the invitee their Polydojo invitation link."""
    inviteLink = genInviteLink(invitee, veriCode)
    emailer.send(
        toList=[invitee.email],
        subject="Polydojo Invitation Link",
        body=inviteLink,
        subtype="plain",
    )
def get_credentials(self, reauth):
    """
    Get a credentials object initialised from file
    :param reauth: [True/False] - force reauthentication
                   if False stored details will be used where possible
    :return: Credentials for use in GMail API connections
    """
    self.log.info("gmailconnector::get_credentials called")
    self.log.info("getting credential store")
    store = self.get_cred_store()
    self.log.info("getting credentials from store")
    credentials = store.get()
    self.log.info("checking credentials are valid")
    if credentials.access_token_expired:
        self.log.warning("credentials expired, trying to refresh credentials")
        try:
            # Successful refresh returns immediately; a failed refresh is
            # silently swallowed and falls through to the reauth path below.
            return credentials.authorize(httplib2.Http())
        except:
            pass
    if not credentials or credentials.invalid or reauth:
        self.log.info(
            "Failed to find credentials, or existing credentials"
            " are invalid, sending email to get new credentials"
        )
        try:
            # Email the user an OAuth consent URL, then abort — the flow
            # cannot continue until they re-authorise out-of-band.
            url = self.get_auth_url()
            emailer.send(self.conf["AUTH_ADD"], url)
            raise PermissionError("Authentication failed")
        except errors.HttpError, error:  # Python 2 except syntax
            self.log.error(error)
        except PermissionError:
            self.log.warning("Authentication failed")
            raise
def simple_query_tool():
    """Handle a simple-query-tool request: look up the POSTed DOIs, write
    jsonl + csv result files, and email them to the requester.

    Returns JSON echoing the email address and every DOI handled
    (found in the db or returned as a placeholder).
    """
    body = request.json
    # de-dupe and drop empty entries
    dirty_dois_list = {d for d in body["dois"] if d}

    clean_dois = [
        c for c in [clean_doi(d, return_none_if_error=True) for d in dirty_dois_list]
        if c
    ]

    q = db.session.query(pub.Pub.response_jsonb).filter(
        pub.Pub.id.in_(clean_dois))
    rows = q.all()

    # skip rows whose stored response is NULL
    pub_responses = [row[0] for row in rows if row[0]]
    pub_dois = [r['doi'] for r in pub_responses]
    # every input DOI appears in the output: unknown ones get placeholders
    missing_dois = [
        d for d in dirty_dois_list
        if clean_doi(d, return_none_if_error=True) not in pub_dois
    ]
    placeholder_responses = [
        pub.build_new_pub(d, None).to_dict_v2() for d in missing_dois
    ]
    responses = pub_responses + placeholder_responses

    # save jsonl
    with open("output.jsonl", 'wb') as f:
        for response_jsonb in responses:
            f.write(json.dumps(response_jsonb, sort_keys=True))
            f.write("\n")

    # save csv
    csv_dicts = [
        pub.csv_dict_from_response_dict(my_dict) for my_dict in responses
    ]
    csv_dicts = [my_dict for my_dict in csv_dicts if my_dict]
    fieldnames = sorted(csv_dicts[0].keys())
    # "doi" always comes first
    fieldnames = ["doi"] + [name for name in fieldnames if name != "doi"]
    with open("output.csv", 'wb') as f:
        writer = unicodecsv.DictWriter(f, fieldnames=fieldnames, dialect='excel')
        writer.writeheader()
        for my_dict in csv_dicts:
            writer.writerow(my_dict)

    # prep email
    email_address = body["email"]
    email = create_email(email_address, "Your Unpaywall results",
                         "simple_query_tool", {"profile": {}},
                         ["output.csv", "output.jsonl"])
    send(email, for_real=True)

    return jsonify({"got it": email_address, "dois": pub_dois + missing_dois})
def pass_through(email_dict, sender, target, subject, text, html=None):
    """Treat this email as a regular incoming message and pass it along
    to the intended inbox (target email address)"""
    # persist any attachments to disk so they can be re-sent
    saved_files = save_attachments(email_dict['attachments'])

    # forward the message to its intended mailbox
    send(subject,
         text,
         recipient_list=[target],
         html=html,
         files=saved_files,
         sender=sender)

    # the files went out with the message above, so clean them up again
    delete_attachments(saved_files)
    # TO-DO: check that all were actually removed and cleanup, if necessary
def run(self):
    """Auto-reply to self.sender with the current time from the US Naval
    Observatory Master Clock, as both text and HTML."""
    # get the time as an html page result from the US Naval Observatory Master Clock
    time_html = load_url('http://tycho.usno.navy.mil/cgi-bin/timer.pl')
    if time_html is None:
        # there was an error gettting the time data
        send('The Current Time',
             'Sorry, this service is temporarily unavailable',
             recipient_list=[self.sender],
             sender=server_auto_email)
    else:
        # auto-reply with both the text and html versions of the time report
        # (strip the DOCTYPE first so the text extraction isn't confused)
        time_txt = get_text_from_html(time_html.replace('<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final"//EN>', ''))
        send('The Current Time',
             time_txt,
             recipient_list=[self.sender],
             html=time_html,
             sender=server_auto_email)
def report(job):
    """Email an error notification about the node identified by job[0].

    TODO: report error on a given node — for now it will just send an email.
    """
    logging.warning('going to report the node!!!')
    body = constants.EMAILER_NODE_ERROR_BODY % job[0]
    emailer.send(constants.EMAILER_SENDER,
                 constants.EMAILER_SENDER_PASSWD,
                 constants.EMAILER_RECIPIENTS,
                 constants.EMAILER_NODE_ERROR_SUBJECT,
                 body)
    return True
def consortium_calculate():
    """Worker loop: poll the scenario-update queue forever, recompute each
    queued consortium scenario, mark it complete, and optionally email the
    requester when done."""
    # command = "truncate jump_scenario_computed_update_queue"
    # print command
    # with get_db_cursor() as cursor:
    #     cursor.execute(command)

    while True:
        # random order so concurrent workers don't all grab the same rows
        command = "select * from jump_scenario_computed_update_queue where completed is null order by random()"
        # print command
        with get_db_cursor() as cursor:
            cursor.execute(command)
            rows = cursor.fetchall()

        for row in rows:
            start_time = time()
            print "in consortium_calculate, starting recompute_journal_dicts for scenario_id {}".format(
                row["scenario_id"])
            my_consortium = Consortium(row["scenario_id"])
            my_consortium.recompute_journal_dicts()
            print "in consortium_calculate, done recompute_journal_dicts for scenario_id {} took {}s".format(
                row["scenario_id"], elapsed(start_time))

            print "updating jump_scenario_computed_update_queue with completed"
            # NOTE(review): scenario_id is interpolated directly into the SQL;
            # safe only while it never contains untrusted text — confirm.
            command = "update jump_scenario_computed_update_queue set completed=sysdate where scenario_id='{}' and completed is null".format(
                row["scenario_id"])
            # print command
            with get_db_cursor() as cursor:
                cursor.execute(command)

            if row["email"]:
                print "SENDING EMAIL"
                done_email = create_email(row["email"], u'Unsub update complete', 'update_done', {
                    'data': {
                        'consortium_name': row.get("consortium_name", ""),
                        'package_name': row.get("package_name", ""),
                        'start_time': row.get("created", ""),
                        'end_time': datetime.datetime.utcnow().isoformat(),
                        'institution_id': row.get("institution_id", ""),
                        'package_id': row.get("package_id", ""),
                        'scenario_id': row["scenario_id"]
                    }})
                send(done_email, for_real=True)
                print "SENT EMAIL DONE"

            print "DONE UPDATING", row["scenario_id"]

        # short randomised pause between polling passes
        sleep(2 * random.random())
def run(self):
    """Auto-reply to self.sender with the current weather at the sender's
    position, geolocated from the IP address in the email headers and
    looked up via the forecast.io API."""
    # determine the sender's ip address from the email headers
    ip_address = None
    try:
        headers = self.email_dict['headers']
        for hdr in [
                'X-Originating-IP',  # preferred header order to use
                'X-Source-IP',
                'X-Source',
                'Received'
        ]:
            if headers.has_key(hdr):
                match = ip_pattern.search(headers[hdr])
                if match is not None:
                    # strip the [brackets] some headers put around the IP
                    ip_address = match.group().strip().replace('[', '').replace(
                        ']', '')
                    break
    except KeyError:
        pass
    if ip_address is not None:
        # use the ip address to get the geographic location
        location = get_location(ip_address)
        try:
            lat = location['Latitude']
            lng = location['Longitude']
            # use the latitude and longitude to get the current report from the forecast.io API
            weather_url = 'https://api.forecast.io/forecast/' + forecast_io_key + '/' + lat + ',' + lng
            weather_data = get_url(weather_url)
            if weather_data is not None:
                data = json.loads(weather_data)
                report = data["currently"]["summary"] + '\n\n' + data[
                    "hourly"]["summary"]
                send('Current Weather',
                     report,
                     recipient_list=[self.sender],
                     sender=server_auto_email)
                return
        except KeyError:
            pass
    # the default reply, in case the location or weather for that location can't be found
    send(
        'Current Weather',
        'Sorry, this service could not determine the weather for your geographic location',
        recipient_list=[self.sender],
        sender=server_auto_email)
def send_announcement_email(my_endpoint): my_endpoint_id = my_endpoint.id email_address = my_endpoint.email repo_name = my_endpoint.repo.repository_name institution_name = my_endpoint.repo.institution_name print my_endpoint_id, email_address, repo_name, institution_name # prep email email = create_email(email_address, "Update on your Unpaywall indexing request (ref: {} )".format(my_endpoint_id), "repo_pulse", {"data": {"endpoint_id": my_endpoint_id, "repo_name": repo_name, "institution_name": institution_name}}, []) send(email, for_real=True)
def read_commands():
    """Poll the inbox and act on any recognised remote commands."""
    messages = str(emailer.read()).lower()

    # "raspi stop listening" shuts cmdMail down cleanly.
    if "raspi stop listening" in messages:
        utilities.log("No longer listening.")
        sub = "Stopped"
        msg = "I am no longer listening."
        emailer.send(sub, msg)
        exit(0)

    # "raspi home ip" replies with the external IP address, off-thread so
    # the command loop isn't blocked.
    if "raspi home ip" in messages:
        threading.Thread(target=ip_address.send_ip_email).start()
def main(debug=False):
    """Generate the periodic PDF report and email it.

    :param debug: if True, use emailer.nsend (dry run) instead of sending
                  the generated document for real.
    """
    """ Read the configuration file """
    config.init(CONFIGFILE)
    ltime = config.get('Run', 'time')
    ntime = now()
    if ltime is not None:
        print 'Last time was {}.'.format(ltime)
        otime = asc2time(ltime)
    else:
        print 'Never run before.'
        # default to a 30-day lookback on the first ever run
        otime = ntime - 30*24*60*60
    print 'Current time is {}.'.format(time2asc(ntime))
    if ntime - otime < 86400:
        # less than a day since the last run: widen the window to the last
        # 24 hours, aligned down to the hour
        otime = (int(ntime/3600)-24)*3600
    print 'Reading since {}.'.format(time2asc(otime))
    """ Read the database """
    d = {'time': [ntime, otime], 'data': {}, 'keys': {}}
    with getdb() as db:
        d = db.run(d)
    """ Form the document """
    docfn = "{}_{}.pdf".format(
        config.get('Report', 'namebase', 'report'),
        time2fmt('%Y-%m-%d(%H)', ntime))
    print docfn
    #try:
    report(docfn, d)
    #except Exception, exc:
    #    config.close()
    #    raise exc
    """ Email the document """
    txt = EMAILMSG.format(
        config.get('Site', 'name'),
        time2esk(ntime),
        time2hms(ntime),
        time2esk(conf2time('Run', 'from', otime)),
        time2hms(conf2time('Run', 'from', otime)),
        time2esk(conf2time('Run', 'to', ntime)),
        time2hms(conf2time('Run', 'to', ntime))
    )
    if debug:
        emailer.nsend(txt)
    else:
        emailer.send(txt, [docfn])
    """ Saving configuration """
    # remember this run's timestamp for the next invocation
    config.set('Run', 'time', time2asc(ntime))
    config.close()
def send_email_report(profile, now=None):
    """Build a notification report for the profile and email it if there
    is anything to report.

    :param profile: profile row (merged into the session)
    :param now: timestamp recorded as last_email_sent; defaults to utcnow
    :return: status string describing what happened
    """
    status = "started"
    if not now:
        now = datetime.datetime.utcnow()
    db.session.merge(profile)
    report = notification_report.make(profile)
    if report["cards"]:
        # only email real users in production; everything else goes to the
        # test address
        if os.getenv("ENVIRONMENT", "testing") == "production":
            email = profile.email
        else:
            email = "*****@*****.**"
        profile.last_email_sent = now
        try:
            db.session.commit()
        except InvalidRequestError:
            # session was in a bad state; roll back and retry once
            logger.info(u"rollback, trying again to update profile object in send_email_report for {url_slug}".format(url_slug=profile.url_slug))
            db.session.rollback()
            db.session.commit()
        msg = emailer.send(email, "Your latest research impacts", "report", report)
        status = "email sent"
        logger.info(u"SENT EMAIL to {url_slug}!!".format(url_slug=profile.url_slug))
    else:
        status = "not emailed, no cards made"
        # logger.info(u"not sending email, no cards made for {url_slug}".format(url_slug=profile.url_slug))
    return status
def renew_member_mail():
    """Queue task invoked when a member has been renewed.

    Sends appropriate welcome emails.
    """
    logging.info('tasks.renew_member_mail hit')

    member_dict = gapps.validate_queue_task(flask.request)

    logging.info(member_dict)

    #
    # Send welcome email
    #

    member_name = '%s %s' % (
        member_dict[config.SHEETS.member.fields.first_name.name],
        member_dict[config.SHEETS.member.fields.last_name.name])
    member_email = member_dict[config.SHEETS.member.fields.email.name]

    with open('templates/tasks/email-renew-member-subject.txt', 'r') as subject_file:
        subject = subject_file.read().strip()

    body_html = flask.render_template('tasks/email-renew-member.jinja',
                                      app_config=config)

    if not emailer.send((member_email, member_name), subject, body_html, None):
        # Log and carry on
        # TODO: Should we instead return non-200 and let the task retry?
        logging.error(f'failed to send renew-member email to {member_email}')
    else:
        logging.info(f'sent renew-member email to {member_email}')

    return flask.make_response('', 200)
def simple_query_tool():
    """Handle a simple-query-tool request: look up the POSTed DOIs, write
    jsonl + csv result files, and email them to the requester.

    Returns JSON echoing the email address and the cleaned DOI list.
    """
    body = request.json
    dirty_dois_list = body["dois"]

    clean_dois = [
        clean_doi(dirty_doi, return_none_if_error=True)
        for dirty_doi in dirty_dois_list
    ]
    clean_dois = [doi for doi in clean_dois if doi]

    q = db.session.query(pub.Pub.response_jsonb).filter(
        pub.Pub.id.in_(clean_dois))
    rows = q.all()
    # Fix: skip rows whose stored response_jsonb is NULL — they previously
    # emitted useless "null" lines in the jsonl and broke csv generation
    # (the other variants of this endpoint already filter them).
    pub_responses = [row[0] for row in rows if row[0]]

    # save jsonl
    with open("output.jsonl", 'wb') as f:
        for response_jsonb in pub_responses:
            f.write(json.dumps(response_jsonb, sort_keys=True))
            f.write("\n")

    # save csv
    csv_dicts = [
        pub.csv_dict_from_response_dict(my_dict) for my_dict in pub_responses
    ]
    csv_dicts = [my_dict for my_dict in csv_dicts if my_dict]
    fieldnames = sorted(csv_dicts[0].keys())
    # "doi" always comes first
    fieldnames = ["doi"] + [name for name in fieldnames if name != "doi"]
    with open("output.csv", 'wb') as f:
        writer = unicodecsv.DictWriter(f, fieldnames=fieldnames, dialect='excel')
        writer.writeheader()
        for my_dict in csv_dicts:
            writer.writerow(my_dict)

    # prep email
    email_address = body["email"]
    email = create_email(email_address, "Your Unpaywall results",
                         "simple_query_tool", {"profile": {}},
                         ["output.csv", "output.jsonl"])
    send(email, for_real=True)

    # @todo make sure in the return dict that there is a row for every doi
    # even those not in our db
    return jsonify({"got it": email_address, "dois": clean_dois})
def simple_query_tool():
    """Handle a simple-query-tool request: look up the POSTed DOIs, write
    jsonl + csv result files, and email them to the requester.

    Returns JSON echoing the email address and every DOI handled
    (found in the db or returned as a placeholder).
    """
    body = request.json
    # de-dupe and drop empty entries
    dirty_dois_list = {d for d in body["dois"] if d}

    clean_dois = [c for c in [clean_doi(d, return_none_if_error=True) for d in dirty_dois_list] if c]

    q = db.session.query(pub.Pub.response_jsonb).filter(pub.Pub.id.in_(clean_dois))
    rows = q.all()

    # Fix: skip rows whose stored response_jsonb is NULL — they previously
    # emitted useless "null" lines in the jsonl and broke csv generation
    # (parity with the sibling variant of this endpoint).
    pub_responses = [row[0] for row in rows if row[0]]
    pub_dois = [r['doi'] for r in pub_responses]
    # every input DOI appears in the output: unknown ones get placeholders
    missing_dois = [d for d in dirty_dois_list if clean_doi(d, return_none_if_error=True) not in pub_dois]
    placeholder_responses = [pub.build_new_pub(d, None).to_dict_v2() for d in missing_dois]
    responses = pub_responses + placeholder_responses

    # save jsonl
    with open("output.jsonl", 'wb') as f:
        for response_jsonb in responses:
            f.write(json.dumps(response_jsonb, sort_keys=True))
            f.write("\n")

    # save csv
    csv_dicts = [pub.csv_dict_from_response_dict(my_dict) for my_dict in responses]
    csv_dicts = [my_dict for my_dict in csv_dicts if my_dict]
    fieldnames = sorted(csv_dicts[0].keys())
    # "doi" always comes first
    fieldnames = ["doi"] + [name for name in fieldnames if name != "doi"]
    with open("output.csv", 'wb') as f:
        writer = unicodecsv.DictWriter(f, fieldnames=fieldnames, dialect='excel')
        writer.writeheader()
        for my_dict in csv_dicts:
            writer.writerow(my_dict)

    # prep email
    email_address = body["email"]
    email = create_email(email_address, "Your Unpaywall results",
                         "simple_query_tool", {"profile": {}},
                         ["output.csv", "output.jsonl"])
    send(email, for_real=True)

    return jsonify({"got it": email_address, "dois": pub_dois + missing_dois})
def test_emailer():
    """Send a fixed test email and return the emailer's response as JSON."""
    context = {"title": "my wonderful paper about rabbits"}
    result = emailer.send(
        "*****@*****.**",
        "this is a test email",
        "card",
        context,
    )
    return json_resp_from_thing(result)
def send_announcement_email(my_endpoint):
    """Build and send the repo-pulse status email for one endpoint."""
    endpoint_id = my_endpoint.id
    recipient = my_endpoint.email
    repo_name = my_endpoint.repo.repository_name
    institution = my_endpoint.repo.institution_name
    print(endpoint_id, recipient, repo_name, institution)

    # prep email
    subject = "Update on your Unpaywall indexing request (ref: {} )".format(endpoint_id)
    template_data = {
        "data": {
            "endpoint_id": endpoint_id,
            "repo_name": repo_name,
            "institution_name": institution,
        }
    }
    email = create_email(recipient, subject, "repo_pulse", template_data, [])
    send(email, for_real=True)
def mint_stripe_coupon(stripe_token, email, cost, num_subscriptions):
    """Mint a 100%-off Stripe coupon good for num_subscriptions
    Impactstory subscriptions, charge the card once for the full cost,
    and email the purchaser their coupon code.

    :param stripe_token: card token from Stripe checkout
    :param email: purchaser's email address
    :param cost: total price in dollars (charged once, up front)
    :param num_subscriptions: coupon max_redemptions
    """
    coupon_code = "MS_" + random_alpha_str()
    print "making a stripe coupon with this code: ", coupon_code
    descr = "Coupon {coupon_code}: {num_subscriptions} Impactstory subscriptions for ${cost}".format(
        coupon_code=coupon_code,
        num_subscriptions=num_subscriptions,
        cost=cost
    )

    # mint a coupon from stripe
    print "making a stripe coupon with this code: ", coupon_code
    coupon_resp = stripe.Coupon.create(
        id=coupon_code,
        percent_off=100,
        duration="repeating",
        duration_in_months=12,
        max_redemptions=num_subscriptions,
        metadata={"email": email}
    )

    # charge the card one time (Stripe amounts are in cents, hence *100)
    charge_resp = stripe.Charge.create(
        amount=cost*100,
        currency="USD",
        card=stripe_token,
        description=descr,
        statement_description="Impactstory",
        receipt_email=email,
        metadata={
            "coupon": coupon_code
        }
    )

    # email them their coupon code
    emailer.send(
        address=email,
        subject="Your code for Impactstory subscriptions",
        template_name="multi-subscribe",
        context={
            "num_subscriptions": num_subscriptions,
            "coupon_code": coupon_code
        }
    )
def run(self):
    """Auto-reply to self.sender with the current weather at the sender's
    position, geolocated from the IP address in the email headers and
    looked up via the forecast.io API."""
    # determine the sender's ip address from the email headers
    ip_address = None
    try:
        headers = self.email_dict['headers']
        for hdr in ['X-Originating-IP',  # preferred header order to use
                    'X-Source-IP',
                    'X-Source',
                    'Received']:
            if headers.has_key(hdr):
                match = ip_pattern.search(headers[hdr])
                if match is not None:
                    # strip the [brackets] some headers put around the IP
                    ip_address = match.group().strip().replace('[', '').replace(']', '')
                    break
    except KeyError:
        pass
    if ip_address is not None:
        # use the ip address to get the geographic location
        location = get_location(ip_address)
        try:
            lat = location['Latitude']
            lng = location['Longitude']
            # use the latitude and longitude to get the current report from the forecast.io API
            weather_url = 'https://api.forecast.io/forecast/'+forecast_io_key+'/'+lat+','+lng
            weather_data = get_url(weather_url)
            if weather_data is not None:
                data = json.loads(weather_data)
                report = data["currently"]["summary"] + '\n\n' + data["hourly"]["summary"]
                send('Current Weather',
                     report,
                     recipient_list=[self.sender],
                     sender=server_auto_email)
                return
        except KeyError:
            pass
    # the default reply, in case the location or weather for that location can't be found
    send('Current Weather',
         'Sorry, this service could not determine the weather for your geographic location',
         recipient_list=[self.sender],
         sender=server_auto_email)
def run(self):
    """Auto-reply to self.sender with the current NYC weather report.

    Fetches the National Weather Service DWML feed for NYC, pulls out the
    worded-forecast text, and emails it back to the original sender.
    """
    # get the current weather for NYC from the National Weather Service feed
    weather_xml = load_url('http://forecast.weather.gov/MapClick.php?lat=40.71980&lon=-73.99300&FcstType=dwml')
    if weather_xml is None:
        # there was an error gettting the weather data
        send('NYC Weather',
             'Sorry, this service is temporarily unavailable',
             recipient_list=[self.sender],
             sender=server_auto_email)
    else:
        # parse the report from the xml and auto-reply with it as the message body
        doc = etree.fromstring(weather_xml)
        # find the human-readable text report in the xml
        report = []
        for elem in doc.xpath('//wordedForecast'):
            for subelem in elem.getchildren():
                if subelem.tag == 'text':
                    report.append(subelem.text)
        # send it back to the sender
        send('NYC Weather',
             ' '.join(report),
             recipient_list=[self.sender],
             sender=server_auto_email)
def run(self):
    """Auto-reply to self.sender with the current time from the US Naval
    Observatory Master Clock, as both text and HTML."""
    # get the time as an html page result from the US Naval Observatory Master Clock
    time_html = get_url('http://tycho.usno.navy.mil/cgi-bin/timer.pl')
    if time_html is None:
        # there was an error gettting the time data
        send('The Current Time',
             'Sorry, this service is temporarily unavailable',
             recipient_list=[self.sender],
             sender=server_auto_email)
    else:
        # auto-reply with both the text and html versions of the time report
        # (strip the DOCTYPE first so the text extraction isn't confused)
        time_txt = get_text_from_html(
            time_html.replace(
                '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final"//EN>', ''))
        send('The Current Time',
             time_txt,
             recipient_list=[self.sender],
             html=time_html,
             sender=server_auto_email)
def send_tng_email(profile, now=None):
    """Send the TNG announcement email for a profile, choosing the subject
    line based on live/trial status and whether an orcid is linked.

    :param profile: profile row; last_email_sent is updated to `now`
    :param now: timestamp to record; defaults to utcnow
    :return: status string
    """
    print "at top of send_tng_email"
    status = "started"
    if not now:
        now = datetime.datetime.utcnow()

    # only email real users in production; otherwise use the test address
    if os.getenv("ENVIRONMENT", "testing") == "production":
        email = profile.email
    else:
        email = "*****@*****.**"

    profile.last_email_sent = now
    db.session.merge(profile)
    try:
        db.session.commit()
    except InvalidRequestError:
        # session was in a bad state; roll back and retry once
        logger.info(u"rollback, trying again to update profile object in send_email_report for {url_slug}".format(url_slug=profile.url_slug))
        db.session.rollback()
        db.session.commit()

    print "here now"
    report = tng_report.make(profile)
    if report["profile"]:
        # subject varies on (live vs trial) x (orcid vs no orcid)
        if profile.is_live:
            if profile.orcid_id:
                msg = emailer.send(email, "News about live Impactstory; you have an orcid", "tng_announcement", report)
            else:
                msg = emailer.send(email, "News about live Impactstory; you don't have an orcid", "tng_announcement", report)
        else:
            if profile.orcid_id:
                msg = emailer.send(email, "News about trial Impactstory; you have an orcid", "tng_announcement", report)
            else:
                msg = emailer.send(email, "News about trial Impactstory; you don't have an orcid", "tng_announcement", report)
        status = "email sent"
        logger.info(u"SENT EMAIL to {url_slug}!!".format(url_slug=profile.url_slug))
    return status
def forgotpassword():
    """Password-reset endpoint: generate an OTP for the posted email
    address, store (or overwrite) it, and email it to the user."""
    data = request.get_json()

    user = Users.query.filter_by(email=data['email']).first()

    # user not found
    if not user:
        return jsonify({'success': False, 'message': 'User not found!'}), 401

    # token = jwt.encode(
    #     {
    #         'public_id': user.public_id,
    #         'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=10)
    #     },
    #     app.config['SECRET_KEY']
    # )

    OTP = OTP_generator()
    # one OTP row per user: insert on the first request, overwrite after
    row = OTPS.query.filter_by(user_id=user.id).first()
    if not row:
        new_otp = OTPS(user_id=user.id, email=data['email'], otp=OTP)
        db.session.add(new_otp)
        db.session.commit()
    else:
        row.otp = OTP
        db.session.commit()

    # Call to send email function in emailer.py
    status = send(data['email'], OTP)

    # token_str = token.decode("utf-8")
    lnk = 'http://127.0.0.1:5000/reset-password?token='
    if status == 202:
        # NOTE(review): returning the OTP in the API response exposes the
        # secret to whoever issued the request, bypassing the email check —
        # confirm this is debug-only behaviour.
        return jsonify({
            'success': True,
            'message': 'Email sent succesfully!!',
            'OTP': OTP,
            'Link': lnk
        }), 200
    else:
        return jsonify({
            'success': False,
            'message': 'Please Try Again!!',
        }), 200
def send_drip_email(profile, drip_milestone):
    """Send the drip-campaign email for the given milestone to a profile.

    Returns a status string describing what happened.
    """
    status = "started"
    # outside production, all mail goes to the test address
    if os.getenv("ENVIRONMENT", "testing") == "production":
        recipient = profile.email
    else:
        recipient = "*****@*****.**"

    context = drip_email_context(profile, drip_milestone)
    msg = emailer.send(recipient, context["subject"], context["template"], context)
    status = "email sent"
    logger.info(u"SENT EMAIL to {url_slug}!!".format(url_slug=profile.url_slug))
    return status
def on_startup():
    """Validate email configuration, wait for the network, then send a
    boot-notification email.  Exits with code 1 if unconfigured."""
    utilities.log("Starting up.")

    # Ensure the user has setup the script
    unconfigured = (config.email_user_name == ""
                    or config.email_send_to == ""
                    or config.email_password == "")
    if unconfigured:
        utilities.log("Email variables are not setup. Exiting.")
        exit(1)

    utilities.log("Waiting for startup delay.")
    # Start by sending a boot up email.
    # 2 minute delay to allow drivers and internet connection to get going
    time.sleep(120)

    ip = ip_address.check_against_current()

    # Build and send the email
    sub = "Startup complete"
    msg = "I have successfully booted up.\n" + "Home IP Address: " + ip
    emailer.send(sub, msg)
    utilities.log("Starting normal loop.")
def renewal_reminder_emails():
    """Sends renewal reminder emails to members who are nearing their
    renewal date.
    """
    logging.debug('tasks.renewal_reminder_emails: hit')

    gapps.validate_cron_task(flask.request)

    expiring_rows = gapps.get_members_expiring_soon()
    if not expiring_rows:
        logging.debug('tasks.renewal_reminder_emails: no expiring members')
        return flask.make_response('', 200)

    logging.debug('tasks.renewal_reminder_emails: found %d expiring members',
                  len(expiring_rows))

    # load both subject variants once, before the per-member loop
    with open('templates/tasks/email-renewal-reminder-subject.txt', 'r') as subject_file:
        subject_noauto = subject_file.read().strip()
    with open('templates/tasks/email-renewal-reminder-auto-subject.txt', 'r') as subject_file:
        subject_auto = subject_file.read().strip()

    for row in expiring_rows:
        member_first_name = row.dict.get(
            config.SHEETS.member.fields.first_name.name)
        member_name = '%s %s' % (
            member_first_name,
            row.dict.get(config.SHEETS.member.fields.last_name.name))
        member_email = row.dict.get(config.SHEETS.member.fields.email.name)

        # Right now we use a Paypal button that does one-time purchases;
        # that is, members pay for a year and then need to manually pay
        # again the next year. But previously we used a "subscription"
        # Paypal button, so there are still some members who automatically
        # pay each year. These two groups will get different reminder
        # emails.
        auto_renewing = str(
            row.dict.get(
                config.SHEETS.member.fields.paypal_auto_renewing.name))
        if auto_renewing.lower().startswith('y'):
            # Member is auto-renewing (i.e., is a Paypal "subscriber")
            subject = subject_auto
            body_html = flask.render_template(
                'tasks/email-renewal-reminder-auto.jinja',
                app_config=config,
                member_first_name=row.dict.get(
                    config.SHEETS.member.fields.first_name.name))
            logging.info(
                'tasks.renewal_reminder_emails: sending auto-renewing reminder to %s',
                member_email)
        else:
            # Member is year-to-year
            subject = subject_noauto
            body_html = flask.render_template(
                'tasks/email-renewal-reminder.jinja',
                app_config=config,
                member_first_name=row.dict.get(
                    config.SHEETS.member.fields.first_name.name))
            logging.info(
                'tasks.renewal_reminder_emails: sending non-auto-renewing reminder to %s',
                member_email)

        emailer.send((member_email, member_name), subject, body_html, None)

    return flask.make_response('', 200)
def log_control(size=100000):
    '''limit the number of lines of logfile to size//2 once size is reached'''
    path_log = Constants().files().get("logs.log")
    with open(path_log) as pf:
        data = pf.readlines()
    if len(data) > size:
        # negate-and-halve so data[size:] keeps only the newest size//2 lines
        size //= -2
        with open(path_log, "w+") as pf:
            pf.writelines(data[size:])


if __name__ == "__main__":
    logger.info("Start time")
    Updater(logger).run()
    emailer.send(logger=logger)
    log_control()

    interval = 60    # run every X seconds
    c = 10
    dampening = 60   # 60 minutes
    t = time()
    while True:
        if set_reg():
            # activity detected: poll faster and reset the dampening window
            interval = 5
            dampening = 60
            logger.warning(f"Decreasing validation time to {interval=} (seconds)")
        else:
            if interval == 5:
                # count down the dampening window while in fast-poll mode
                dampening -= (interval/60)
                if dampening < 0:
                    logger.warning(f"Validation restored to {interval=} (seconds)")
# Main monitoring loop: every 5 seconds, re-query each watched course and
# email the subscriber — once when monitoring starts, and again whenever a
# seat frees up (registrant count drops below the recorded baseline).
while True:
    for subject_id in target_courses:
        print(f"{subject_id} 확인중")
        # Look up every watched course number and collect the results
        course_nos, target_mail = target_courses[subject_id]
        records = course_no_to_records(subject_id, course_nos)
        for record in records:
            course_id = course_identifier(subject_id, record['강좌번호'])
            registrant_count = int(record['수강신청인원'])
            # First time we see this course: record the baseline count and
            # confirm to the subscriber that monitoring has started.
            if course_id not in initial_registrant_counts:
                initial_registrant_counts[course_id] = registrant_count
                emailer.send(
                    target_mail,
                    f"[{record['교과목명']}]의 빈자리 감지가 시작되었습니다.",
                    f"목표: [{record['교과목명']}]의 ({record['강좌번호']}) 분반\n\n"
                    f"현재상태:\n 수강신청인원: {registrant_count} "
                    f"// 정원(재학생): {record['정원(재학생)']}"
                    f"\n\nsugang.snu.ac.kr")
            else:
                # Count dropped below the baseline -> a seat opened; alert.
                if registrant_count < initial_registrant_counts[course_id]:
                    emailer.send(
                        target_mail,
                        f"[{record['교과목명']}] 빈자리 알림",
                        f"[{record['교과목명']}]의 ({record['강좌번호']}) 분반에 자리가 확인되었습니다.\n\n"
                        f"현재상태:\n 수강신청인원: {registrant_count} "
                        f"// 정원(재학생): {record['정원(재학생)']}"
                        f"\n\nsugang.snu.ac.kr")
                    print('Message Sent!')
                    # lower the baseline so the same opening isn't re-alerted
                    initial_registrant_counts[course_id] = registrant_count
    print(initial_registrant_counts)
    sleep(5)  # delay between checks
import glob
import subprocess
import emailer
import os

# Convert each raw telephone comment recording to ogg, email it as an
# attachment, then delete both the raw and converted files.
files = glob.glob('comments/*.raw')
for file in files:
    newfile = file.replace(".raw", ".ogg")
    # raw input format: 44.1kHz, 32-bit float, mono
    subprocess.check_call(["sox", "-r", "44.1k", "-e", "float", "-b", "32", "-c", "1", file, newfile])
    # the telephone id is encoded in the filename, prefixed with "5"
    telephoneid = "5" + newfile.split("_")[0].split("/")[-1]
    emailer.send("*****@*****.**",
                 "Alexander Bar Telephone System <*****@*****.**>",
                 "Comment from Telephone %s" % (telephoneid),
                 "Test",
                 html="Hi",
                 attachments=[(newfile, "audio/ogg")])
    os.unlink(file)
    os.unlink(newfile)
#
import glob
import subprocess
import emailer
import os

# Convert each raw telephone comment recording to ogg, email it as an
# attachment, then delete both the raw and converted files.
files = glob.glob('comments/*.raw')
for file in files:
    newfile = file.replace(".raw", ".ogg")
    # raw input format: 44.1kHz, 32-bit float, mono
    subprocess.check_call([
        "sox", "-r", "44.1k", "-e", "float", "-b", "32", "-c", "1", file,
        newfile
    ])
    # the telephone id is encoded in the filename, prefixed with "5"
    telephoneid = "5" + newfile.split("_")[0].split("/")[-1]
    emailer.send(
        "*****@*****.**",
        "Alexander Bar Telephone System <*****@*****.**>",
        "Comment from Telephone %s" % (telephoneid),
        "Test",
        html="Hi",
        attachments=[(newfile, "audio/ogg")])
    os.unlink(file)
    os.unlink(newfile)
#
try: grade = float(row.find("div", {"class": "percent"}).text.replace('%', '')) # if class doesn't have a grade except AttributeError: grade = 'None' course_name = str(row.find("td", {"class": "course"}).a.text) new_data.append([course_name, grade]) try: old_data = pickle.load(open('data.txt', 'rb')) # if data.txt is missing, load it from new_data and then quit # everything will work normally the next time the program is run except IOError, EOFError: pickle.dump(new_data, open('data.txt', 'wb')) pickle.dump(new_data, open('data.txt', 'wb')) sys.exit() if new_data != old_data: pickle.dump(new_data, open('data.txt', 'wb')) emailer.send(emailer.format(new_data, old_data), args[3], args[4], args[5], args[6]) try: while True: main() time.sleep(60) except KeyboardInterrupt: print "User terminated the program, shutting down..."
# get the winning times for i in range(max_winners): winning_times.append(find_lowest_time(times)) # create a list of winners # create an inverted dict for finding winning times time_to_contender = dict(map(reversed, contenders.items())) for t in winning_times: winner_list.append(str(time_to_contender[t])) winner_string += str(time_to_contender[t]) winner_string += '; ' # create a list of runner ups (for future development) for t in times: runner_up_list.append(str(time_to_contender[t])) runner_up_string += str(time_to_contender[t]) runner_up_string += '\n' # PM winners for champ in winner_list: pm_sender(champ, winner_subject, winner_message) # send email to client subject = 'Reddit Game ' + date_start body = [ 'The game has ended.', 'Here are the winners: ' + winner_string, 'Here are the runner ups: ' + runner_up_string ] emailer.send('*****@*****.**', subject, body)
def order(r): # Ascertains generic client info re POST ip = r.META['REMOTE_ADDR'] tz = timezone.now().strftime('%Y-%m-%d %H:%M:%S') # Checks POST method if r.method == 'POST': # Instantiate form f = OrderForm(r.POST) # Checks if form submitted was valid if f.is_valid(): data = r.POST username = data['username'] email = data['email'] amount = data['amount'] # For debugging purposes print '\n' print 'Successful POST' print '--> Date/Time: ' + tz print '--> IP: ' + ip print '--> Data' print ' --> Username: '******' --> Email address: ' + email print ' --> GC Amount: ' + amount print '\n' agent.create('camus') # Fetch Amazon GC code with open(PWD+'codes','r') as f: codes = f.read().splitlines() if len(codes) == 0: return HttpResponse('Order failed.') code = codes[0] del codes[0] codes = '\n'.join(codes) f.close() # Rewrite code file to reflect the removed GC code with open(PWD+'codes', 'w') as f: f.write(codes) f.close() # Sends GC code to customer # email = their email address # username = their chosen username # amount = the GC amount # code = Amazon GC code emailer.send(email, username, amount, code) return HttpResponse('Order success!') else: # For debugging purposes print '\n' print 'Unsuccessful POST' print '--> Date/Time: ' + tz print '--> IP: ' + ip print '\n' # Tra-la-f*****g-la return HttpResponse('<!--Of the wide world I stand alone, and think, / Till Love and Fame to nothingness do sink.-->')
def send_ip_email():
    """Fetch the current home IP address, log the lookup, and mail it out."""
    current_ip = check_against_current()
    log_message = "Home IP requested. Found " + current_ip
    utilities.log(log_message)
    emailer.send("Home IP", current_ip)
def simple_query_tool():
    """Flask endpoint: batch-look-up DOIs and email the results.

    Reads ``{"dois": [...], "email": ..., "formats": [...]}`` from the
    request JSON, fetches known publication records from the DB, builds
    placeholder records for unknown DOIs, writes the requested export
    files (jsonl/csv/xlsx), and emails them to the caller.
    """
    body = request.json
    # De-duplicate and drop empty entries from the submitted DOI list.
    dirty_dois_list = {d for d in body["dois"] if d}
    cleaned = [clean_doi(d, return_none_if_error=True) for d in dirty_dois_list]
    clean_dois = [c for c in cleaned if c]
    q = db.session.query(pub.Pub.response_jsonb).filter(pub.Pub.id.in_(clean_dois))
    rows = q.all()
    pub_responses = [row[0] for row in rows if row[0]]
    pub_dois = [r['doi'] for r in pub_responses]
    # DOIs we couldn't find get placeholder records so every submitted DOI
    # is reflected in the output.
    missing_dois = [d for d in dirty_dois_list if clean_doi(d, return_none_if_error=True) not in pub_dois]
    placeholder_responses = [pub.build_new_pub(d, None).to_dict_v2() for d in missing_dois]
    responses = pub_responses + placeholder_responses
    formats = body.get("formats", []) or ["jsonl", "csv"]
    files = []
    if "jsonl" in formats:
        # save jsonl (Py2-style: str written to a binary handle)
        with open("output.jsonl", 'wb') as f:
            for response_jsonb in responses:
                f.write(json.dumps(response_jsonb, sort_keys=True))
                f.write("\n")
        files.append("output.jsonl")
    csv_dicts = [pub.csv_dict_from_response_dict(my_dict) for my_dict in responses]
    csv_dicts = [my_dict for my_dict in csv_dicts if my_dict]
    # BUG FIX: guard the empty case — csv_dicts[0] used to raise IndexError
    # whenever no DOI produced a csv row (e.g. an all-invalid input list).
    if csv_dicts:
        fieldnames = sorted(csv_dicts[0].keys())
        # "doi" always leads; keep the remaining columns sorted.
        fieldnames = ["doi"] + [name for name in fieldnames if name != "doi"]
    else:
        fieldnames = ["doi"]
    if "csv" in formats:
        # save csv
        with open("output.csv", 'wb') as f:
            writer = unicodecsv.DictWriter(f, fieldnames=fieldnames, dialect='excel')
            writer.writeheader()
            for my_dict in csv_dicts:
                writer.writerow(my_dict)
        files.append("output.csv")
    if "xlsx" in formats:
        book = Workbook()
        sheet = book.worksheets[0]
        sheet.title = "results"
        # Header row, then one row per record (openpyxl cells are 1-based).
        for col_idx, field_name in enumerate(fieldnames):
            sheet.cell(column=col_idx + 1, row=1, value=field_name)
        for row_idx, row in enumerate(csv_dicts):
            for col_idx, field_name in enumerate(fieldnames):
                sheet.cell(column=col_idx + 1, row=row_idx + 2, value=row[field_name])
        book.save(filename="output.xlsx")
        files.append("output.xlsx")
    # prep email
    email_address = body["email"]
    email = create_email(email_address, "Your Unpaywall results", "simple_query_tool", {"profile": {}}, files)
    send(email, for_real=True)
    return jsonify({"got it": email_address, "dois": pub_dois + missing_dois})
def new_member_mail():
    """Queue task invoked when a member has been newly registered.

    Sends appropriate welcome emails.
    """
    logging.info('tasks.new_member_mail hit')

    member_dict = gapps.validate_queue_task(flask.request)
    logging.info(member_dict)

    member_fields = config.SHEETS.member.fields
    member_name = '%s %s' % (
        member_dict[member_fields.first_name.name],
        member_dict[member_fields.last_name.name])
    member_email = member_dict[member_fields.email.name]

    #
    # Send welcome email
    #

    with open('templates/tasks/email-new-member-subject.txt', 'r') as subject_file:
        subject = subject_file.read().strip()

    body_html = flask.render_template('tasks/email-new-member.jinja', app_config=config)

    if emailer.send((member_email, member_name), subject, body_html, None):
        logging.info(f'sent new-member email to {member_email}')
    else:
        # Send failure is logged but never aborts the task.
        logging.error(f'failed to send new-member email to {member_email}')

    #
    # Send email to volunteer-interest-area reps
    #

    interest_reps = gapps.get_volunteer_interest_reps_for_member(member_dict)
    if interest_reps:
        subject = flask.render_template(
            'tasks/email-volunteer-interest-rep-subject.jinja',
            app_config=config,
            join_type='member').strip()

        rep_fields = config.SHEETS.volunteer_interest.fields
        for interest, reps in interest_reps.items():
            # One body per interest area, shared by all of its reps.
            body_html = flask.render_template(
                'tasks/email-volunteer-interest-rep.jinja',
                app_config=config,
                join_type='member',
                interest=interest,
                member_name=member_name,
                member_email=member_email)

            for rep in reps:
                rep_email = rep.get(rep_fields.email.name)
                rep_name = rep.get(rep_fields.name.name)
                if emailer.send((rep_email, rep_name), subject, body_html, None):
                    logging.info(
                        f'sent new-member-volunteer-interest email to {rep_email}'
                    )
                else:
                    logging.error(
                        f'failed to send new-member-volunteer-interest email to {rep_email}'
                    )

    return flask.make_response('', 200)
# NOTE(review): fragment — `row`, `course_name`, `grade`, `new_data`,
# `args`, `update_time`, and main() come from an enclosing scope not shown;
# the indentation below is a best-effort reconstruction of the flattened
# Python 2 source.
# Reorder the scraped "Last, First" teacher name into "First Last".
teacher_name = str(
    row.find("td", {
        "class": "teacher co-teacher"
    }).a.text).strip().split(", ")[1] + " " + str(
        row.find("td", {
            "class": "teacher co-teacher"
        }).a.text).strip().split(", ")[0]
new_data.append([course_name, grade, teacher_name])
try:
    old_data = pickle.load(open('data.txt', 'rb'))
# if data.txt is missing, load it from new_data and then quit
# everything will work normally the next time the program is run
# NOTE(review): in Python 2, `except IOError, EOFError:` catches ONLY
# IOError and binds the exception instance to the name EOFError; a tuple
# `except (IOError, EOFError):` was almost certainly intended.
# NOTE(review): the pickle.dump line below is duplicated — looks accidental.
except IOError, EOFError:
    pickle.dump(new_data, open('data.txt', 'wb'))
    pickle.dump(new_data, open('data.txt', 'wb'))
    sys.exit()
if new_data != old_data:
    # Grades changed since the last run: persist the new snapshot and
    # email a formatted diff (credentials/recipients come from args).
    pickle.dump(new_data, open('data.txt', 'wb'))
    emailer.send(emailer.format(new_data, old_data), args[3], args[4], args[5], args[6])
try:
    # Poll for changes every `update_time` seconds until interrupted.
    while True:
        main()
        time.sleep(update_time)
except KeyboardInterrupt:
    print "User terminated the program, shutting down..."