def send_email(self, to_email, from_email, body, subject):
    """
    METHOD FOR SENDING A NOTIFICATION EMAIL TO A CA USER THAT A NEW ITEM
    IS IN OUR REDDIT INBOX
    :param to_email: LIST OF RECIPIENT ADDRESSES (JOINED WITH ", " FOR THE To: HEADER)
    :param from_email: SENDER ADDRESS (STR)
    :param body: PLAIN-TEXT MESSAGE BODY (STR)
    :param subject: SUBJECT LINE (STR)
    :return:
    """
    # TODO: FIX SUBJECT TO INCLUDE REDDIT MESSAGE ID AND USERNAME
    # DATE_FORMAT = "%d/%m/%Y"
    EMAIL_TO = to_email
    EMAIL_FROM = from_email
    EMAIL_SUBJECT = subject
    EMAIL_SPACE = ", "

    MSG = MIMEMultipart()
    MSG['Subject'] = EMAIL_SUBJECT  # + " %s" % (date.today().strftime(DATE_FORMAT))
    MSG['To'] = EMAIL_SPACE.join(EMAIL_TO)
    MSG['From'] = EMAIL_FROM
    MSG.attach(MIMEText(body, 'plain'))

    try:
        mail = smtplib.SMTP(self.SMTP_SERVER, self.SMTP_PORT)
        mail.starttls()
        mail.login(self.username, self.password)
        mail.sendmail(EMAIL_FROM, EMAIL_TO, MSG.as_string())
        mail.quit()
    except Exception:
        logger.critical("FAILED TO SEND EMAIL IN REDDIT READER. EXITING")
        exit(1)
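# A minimal standalone sketch of the same MIME + STARTTLS pattern used above,
# not part of the original module. The host, port, credentials and addresses
# are placeholders, and `send_plain_email` is a hypothetical helper name.
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText


def send_plain_email(recipients, sender, body, subject,
                     smtp_server="smtp.example.com", smtp_port=587,
                     username="user", password="secret"):
    msg = MIMEMultipart()
    msg['Subject'] = subject
    msg['To'] = ", ".join(recipients)      # the To: header wants a single string
    msg['From'] = sender
    msg.attach(MIMEText(body, 'plain'))

    with smtplib.SMTP(smtp_server, smtp_port) as mail:
        mail.starttls()                    # upgrade the connection before login
        mail.login(username, password)
        # sendmail() takes the recipient list, not the joined header string
        mail.sendmail(sender, recipients, msg.as_string())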
def start_https(host, port, chain, key):
    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    try:
        ctx.load_cert_chain(chain, key)
    except FileNotFoundError:
        logger.critical("SSL FILES ARE NOT FOUND. ABORTING LAUNCH.")
        sys.exit(2)

    web.run_app(
        app,
        host=host,
        port=port,
        ssl_context=ctx,
    )
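# A standalone sketch of building the server-side SSL context used above; the
# certificate paths are placeholders and `build_ssl_context` is a hypothetical
# helper. PROTOCOL_TLS_SERVER selects sensible defaults for a server socket.
import ssl


def build_ssl_context(chain="/etc/ssl/fullchain.pem",
                      key="/etc/ssl/privkey.pem"):
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    ctx.load_cert_chain(chain, key)   # raises FileNotFoundError if a path is missing
    return ctx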
def crop_preprocessor_update_single(instance_id=None, data=None, **kw):
    """Create a Crop-specific PATCH_SINGLE and PUT_SINGLE preprocessor.

    Accepts two arguments, `instance_id`, the primary key of the instance
    of the model to patch, and `data`, the dictionary of fields to change
    on the instance.
    """
    logger.info('`crop_preprocessor_update_single` used for endpoint')

    if request.args.get('access_token', '') or \
            request.headers.get('Authorization'):
        authorization = verify_authorization()
        resource = Model.query.get(instance_id)

        if check_roles('admin', authorization.roles):
            logger.info('User %d accessed Crop UPDATE_SINGLE '
                        'as %s' % (authorization.id, 'admin'))
            pass
        else:
            logger.critical('User %d attempted to access Crop UPDATE_SINGLE '
                            'with no valid role' % (authorization.id))
            abort(403)

        """Role checking complete.

        Role checking has passed and if no abort messages have been emitted
        then we can move on to the auto assignment of our user and date
        information. We don't want to allow users at any level to override
        these fields (e.g., creator_id, created_on, modified_on) because this
        is how we track user activity the entire way up the chain and if we
        allowed overriding them we would end up with an unauthenticated
        representation of data.
        """
        data['modified_on'] = datetime.now().isoformat()
        data['last_modified_by_id'] = authorization.id
    else:
        logger.info('Anonymous user attempted to access Crop '
                    'UPDATE_SINGLE')
        abort(403)
def _do_bearer_auth():
    # Validate the bearer token before resolving the user
    try:
        verify_jwt_in_request()
    except ExpiredSignatureError:
        raise errors.TokenExpired
    except JWTExtendedException as err:
        logger.critical(
            'JWTExtendedException: {}'.format(err), exc_info=True)
        raise errors.InvalidAuthToken
    except PyJWTError as err:
        logger.critical('PyJWTError: {}'.format(err), exc_info=True)
        raise errors.InvalidAuthToken
    return User.get_for_auth(username=get_jwt_identity())
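# A minimal sketch of the same guard pattern using flask_jwt_extended; the
# secret key, the app and the /whoami route are illustrative placeholders,
# not part of the original project. When the token is missing or invalid,
# verify_jwt_in_request() raises and flask_jwt_extended's registered error
# handlers turn that into a 401/422 JSON response.
from flask import Flask, jsonify
from flask_jwt_extended import (JWTManager, get_jwt_identity,
                                verify_jwt_in_request)

demo_app = Flask(__name__)
demo_app.config["JWT_SECRET_KEY"] = "change-me"   # placeholder secret
jwt = JWTManager(demo_app)


@demo_app.route("/whoami")
def whoami():
    verify_jwt_in_request()                       # validates the bearer token
    return jsonify(identity=get_jwt_identity())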
def message_user(self, reddit_user, subject, message, sender_email):
    # TODO: CREATE MODULE TO ENCAPSULATE REDDIT FUNCTIONALITY
    """
    SEND A REDDIT USER A MESSAGE
    :param reddit_user: USERS NAME (STR)
    :param subject: MESSAGE SUBJECT (STR)
    :param message: MESSAGE TO SEND (STR)
    :param sender_email: EMAIL ADDRESS OF THE SENDING CA USER (STR)
    :return: True IF THE MESSAGE WAS SENT AND RECORDED, False OTHERWISE
    """
    try:
        self.reddit_conn.redditor(reddit_user).message(subject, message)
    except Exception:
        logger.critical('----------Error encountered----------')
        tb = traceback.format_exc()
        logger.critical(tb)
        logger.critical('-------------------------------------')
        return False

    # PERSIST THE OUTBOUND MESSAGE
    record = Messages(sender=sender_email,
                      recipient=reddit_user,
                      body=message,
                      subject=subject,
                      date_sent=datetime.datetime.now(),
                      has_been_sent_for_processing=0,
                      ca_user_id_assoc='parent')
    Messages.add_record(record)
    db.session.commit()
    return True
def add_flagged_comment_to_db(flagged_comment, primary_keywords,
                              secondary_keywords):
    """
    FUNCTION FOR ADDING A FLAGGED COMMENT WITH ITS RESPECTIVE PKS AND SKS
    TO THE DB
    :param flagged_comment: FlaggedComments INSTANCE TO PERSIST
    :param primary_keywords: PRIMARY KEYWORD RECORDS TO MAP TO THE COMMENT
    :param secondary_keywords: SECONDARY KEYWORD RECORDS (MAY BE EMPTY)
    :return:
    """
    try:
        db.session.add(flagged_comment)
        db.session.commit()
        for keyword in primary_keywords:
            keyword.fc_pk_mapping.append(flagged_comment)
        # MAKE SURE LIST IS POPULATED. PRIMARY_KEYWORDS CHECK IS DONE IN CALLER
        if secondary_keywords:
            for keyword in secondary_keywords:
                keyword.fc_sk_mapping.append(flagged_comment)
        db.session.commit()
        logger.info("--FLAGGED COMMENTS ADDED TO DB--")
    except (IntegrityError, FlushError):
        db.session.rollback()
        if FlaggedComments.query.filter_by(
                username=flagged_comment.username).count() > 0:
            logger.info("COMMENT ALREADY PRESENT FOR UN: {username}".format(
                username=flagged_comment.username))
        else:
            logger.critical("COULD NOT ADD {} TO DB".format(
                flagged_comment.reddit_link))
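# A standalone sketch of the add/commit/rollback-on-IntegrityError pattern
# used above, with a throwaway SQLite model standing in for FlaggedComments.
# All names here are illustrative, not from the original project.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Comment(Base):
    __tablename__ = 'comment'
    id = Column(Integer, primary_key=True)
    username = Column(String, unique=True)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Comment(username='someone'))
    session.commit()
    try:
        session.add(Comment(username='someone'))   # violates the unique constraint
        session.commit()
    except IntegrityError:
        session.rollback()                         # keep the session usable
        already_there = session.query(Comment).filter_by(
            username='someone').count() > 0
        print('duplicate detected:', already_there)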
def run_reddit_scraper(self):
    logger.info("REQUESTING COMMENT STREAM")
    while True:
        # logger.info(threading.current_thread())
        try:
            comments = self.reddit_conn.subreddit('all').stream.comments(
                pause_after=-1)
            for comment in comments:
                self.comments.append(self.build_comment(comment))
        except Exception as e:
            if isinstance(e, AttributeError):
                # AttributeError TYPICALLY MEANS THE STREAM YIELDED None
                # (pause_after=-1), I.E. NO NEW COMMENTS; FLUSH THE BUFFER
                # AND START A NEW BATCH
                self.add_reddit_comments_to_db()
                self.comments = []
            else:
                logger.critical('----------Error encountered----------')
                tb = traceback.format_exc()
                logger.critical(tb)
                logger.critical('-------------------------------------')
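# A standalone sketch of the pause_after=-1 idiom above: with pause_after=-1
# the PRAW comment stream yields None whenever it has no new comments, which
# is the natural point to flush a batch (the original reaches the same point
# via the AttributeError branch). The credentials are placeholders.
import praw

reddit = praw.Reddit(client_id="CLIENT_ID", client_secret="CLIENT_SECRET",
                     user_agent="comment-scraper-sketch")

batch = []
for comment in reddit.subreddit("all").stream.comments(pause_after=-1):
    if comment is None:          # stream is idle: flush what we have so far
        if batch:
            print("flushing {} comments".format(len(batch)))
            batch.clear()
        continue
    batch.append(comment.body)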
def run(self):
    """
    :return:
    """
    logger.info("ENTERED EMAIL INBOX SCRAPER")
    while True:
        # CONNECT TO EMAIL SERVER
        try:
            # TRY TO LOGIN
            self.server = imaplib.IMAP4_SSL(self.imap_ssl_host,
                                            self.imap_ssl_port)
            self.server.login(self.username, self.password)
        except Exception:
            logger.critical(
                "FAILED TO LOGIN TO EMAIL SERVER. RETRYING IN 20 SECONDS")
            time.sleep(20)
            continue

        self.server.select('INBOX')
        (retcode, messages) = self.server.search(None, '(UNSEEN)')
        if retcode == 'OK':
            for num in messages[0].split():
                typ, data = self.server.fetch(num, '(RFC822)')
                for response_part in data:
                    if isinstance(response_part, tuple):
                        message = email.message_from_bytes(
                            response_part[1])
                        _from = message['Reply-to']
                        _subject = message['Subject']
                        # STRIP THE REPLY PREFIX BEFORE PARSING THE
                        # JSON-ENCODED SUBJECT
                        if _subject.lower().startswith('re:'):
                            _subject = _subject[3:].strip()
                        _subject = json.loads(_subject)
                        message_id = _subject['message_id']
                        # EXTRACT REDDIT USERNAME
                        reddit_user = _subject['username']
                        body = EmailReader.get_first_text_block(message)
                        try:
                            # MESSAGE HAS A SIGNATURE, ALSO A POSSIBLE
                            # REPLY W SIGNATURE
                            body_without_signature = body.split(
                                "=E2=80=94")[1]
                        except IndexError:
                            # MESSAGE IS A REPLY
                            body_without_signature = body.split(
                                "=EF=BB=BF")[0]
                        # parent_message = Messages.query.filter_by(
                        #     recipient=reddit_user).filter_by(
                        #     ca_user_id_assoc='parent').first()
                        self.reddit.reddit_conn.inbox.message(
                            message_id).reply(body_without_signature)
                        self.server.store(num, '+FLAGS', '\\Seen')
                        message = Messages(
                            sender=_from,
                            recipient=reddit_user,
                            subject=_subject['subject'],
                            body=body_without_signature,
                            date_sent=datetime.datetime.now(),
                            has_been_sent_for_processing=1,
                            ca_user_id_assoc=message_id)
                        Messages.add_record(message)
                        db.session.commit()
        self.server.logout()
        # logger.info("NOTHING NEW IN EMAIL INBOX--SLEEPING")
        time.sleep(20)
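# A standalone sketch of the IMAP polling pattern above: search for UNSEEN,
# fetch RFC822, parse with the email package, then mark the message as seen.
# Host and credentials are placeholders.
import email
import imaplib

server = imaplib.IMAP4_SSL("imap.example.com", 993)
server.login("user@example.com", "secret")
server.select("INBOX")

retcode, messages = server.search(None, "(UNSEEN)")
if retcode == "OK":
    for num in messages[0].split():
        typ, data = server.fetch(num, "(RFC822)")
        for response_part in data:
            if isinstance(response_part, tuple):
                msg = email.message_from_bytes(response_part[1])
                print(msg["Subject"], msg["Reply-to"])
        server.store(num, "+FLAGS", "\\Seen")   # mark as read
server.logout()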
def start_socket(sock):
    web.run_app(app, path=sock)


if __name__ == "__main__":
    socket = config.get("Webserver", "socket", fallback=None)
    if socket:
        start_socket(socket)
    else:
        host = config.get("Webserver", "host")
        port = config.getint("Webserver", "port")
        if config.getboolean("Webserver", "ssl"):
            try:
                chain = config.get("Webserver", "ssl_fullchain")
                key = config.get("Webserver", "ssl_privkey")
            except configparser.NoOptionError:
                logger.critical(
                    "SSL IS NOT CORRECTLY CONFIGURED. ABORTING LAUNCH."
                )
                sys.exit(2)
            start_https(host, port, chain, key)
        else:
            start_http(host, port)
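# A standalone sketch of the [Webserver] configuration lookups performed in
# the __main__ block; the option names mirror the ones used above and the
# values are illustrative.
import configparser

demo_config = configparser.ConfigParser()
demo_config.read_string("""
[Webserver]
host = 0.0.0.0
port = 8080
ssl = true
ssl_fullchain = /etc/ssl/fullchain.pem
ssl_privkey = /etc/ssl/privkey.pem
""")

sock = demo_config.get("Webserver", "socket", fallback=None)   # None: option absent
host = demo_config.get("Webserver", "host")
port = demo_config.getint("Webserver", "port")
use_ssl = demo_config.getboolean("Webserver", "ssl")
try:
    chain = demo_config.get("Webserver", "ssl_fullchain")
    key = demo_config.get("Webserver", "ssl_privkey")
except configparser.NoOptionError:
    chain = key = None
print(sock, host, port, use_ssl, chain, key)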
def crop_preprocessor_delete_single(instance_id=None, **kw):
    """Create a Crop-specific DELETE_SINGLE preprocessor.

    Accepts a single argument, `instance_id`, which is the primary key of
    the instance which will be deleted.
    """
    logger.info('`crop_preprocessor_delete_single` used for endpoint')

    if request.args.get('access_token', '') or \
            request.headers.get('Authorization'):
        authorization = verify_authorization()
        resource = Model.query.get(instance_id)

        if not hasattr(resource, 'id'):
            logger.warning('User %s attempted to delete a non-existent '
                           'resource with id %s' % (authorization.id,
                                                    instance_id))
            abort(404)

        if check_roles('grantee', authorization.roles):
            logger.warning('User %d accessed Crop DELETE_SINGLE '
                           'endpoint as %s' % (authorization.id, 'grantee'))
            if (authorization.id == resource.creator_id) or \
                    (is_group_member(authorization.id, resource.members)):
                logger.info('Group Member %s deleting resource %s' %
                            (authorization.id, instance_id))
                pass
            else:
                logger.critical('User %s not authorized to delete '
                                'resource %s' % (authorization.id,
                                                 instance_id))
                abort(401)
        elif check_roles('manager', authorization.roles):
            if authorization.id == resource.creator_id:
                logger.info('User %s deleting resource %s' %
                            (authorization.id, instance_id))
                pass
            else:
                if is_group_member(authorization.id, resource.members):
                    logger.info('Group Member %s deleting resource %s' %
                                (authorization.id, instance_id))
                    pass
                else:
                    logger.critical('User %s not authorized to delete '
                                    'resource %s' % (authorization.id,
                                                     instance_id))
                    abort(401)
        elif check_roles('admin', authorization.roles):
            logger.info('User %d accessed Crop DELETE_SINGLE '
                        'as %s' % (authorization.id, 'admin'))
            pass
        else:
            logger.critical('User %d attempted to access Crop DELETE_SINGLE '
                            'with no valid role' % (authorization.id))
            abort(403)
    else:
        logger.info('Anonymous user attempted to access Crop '
                    'DELETE_SINGLE')
        abort(403)
            for f in h.filters:
                print(f" f = {f} {id(f)}")
            print(f" when {h.when} every {h.interval} secs")
            print(
                f" rollover at {h.rolloverAt} "
                f"{datetime.datetime.fromtimestamp(h.rolloverAt)}"
            )
            try:
                print(f' name {h.name} level {h.level} mode {h.mode}')
            except AttributeError:
                # HANDLERS WITHOUT A mode ATTRIBUTE (E.G. StreamHandler)
                print(f' name {h.name} level {h.level}')
    logger.trace("trace Init butler Web Server Execution")
    logger.debug("debug Init butler Web Server Execution")
    logger.info("info Init butler Web Server Execution")
    logger.warning("warning Init butler Web Server Execution")
    logger.error("error Init butler Web Server Execution")
    logger.critical("critical Init butler Web Server Execution")
    logger.audit("audit Init butler Web Server Execution")
    logger.trace("os.environ=%s" % os.environ)
    logger.trace("app.config=%s" % app.config)
    logger.info("*****************************************")
    print(" *****************************************")
else:
    print("****************************************")
    print("**** WARNING **** No logger defined ****")
    print("****************************************")

print(f" * Will execute app here ({run_mode})")

# GV 20200217 LOCATION OPPORTUNITY CHANGE DUE TO CONFIG ISSUES
from emtec.butler.db.flask_models import User
from emtec.butler.db.flask_models import Role
print(" * logger is %s" % (logger)) if logger.getEffectiveLevel() < logging.INFO: print("*** logger.handlers are :" % logger.handlers) for h in logger.handlers: print(" handler", h, id(h)) print(" format", h.format, id(h.format)) print(" formatter", h.formatter, id(h.formatter)) print(" filter", h.filter, id(h.filter)) print(" filters", h.filters, id(h.filters)) print(' name', h.name, 'level', h.level, 'mode', h.mode) logger.trace("trace Init Collector Web Server Execution") logger.debug("debug Init Collector Web Server Execution") logger.info("info Init Collector Web Server Execution") logger.warning("warning Init Collector Web Server Execution") logger.error("error Init Collector Web Server Execution") logger.critical("critical Init Collector Web Server Execution") logger.audit("audit Init Collector Web Server Execution") logger.trace("os.environ=%s" % os.environ) logger.trace("app.config=%s" % app.config) logger.info("*****************************************") print("*****************************************") else: print("****************************************") print("**** WARNING **** No logger defined ****") print("****************************************") print(f" * Will execute app {app} here") # GV 20200217 LOCATION OPORTINITY CHANGE DUE TO CONFIG ISSUES ------ from emtec.collector.db.flask_models import User, Role # GV --------------------------------------------------------------- # GV Will be replaced by embedded Green Unicorn HTTP Server