def scan_blueprints(app):
    """Discover blueprint factories matching `get_<name>_bp` under the
    current directory and register each blueprint on *app* with the url
    prefix `/<name>`."""
    scanner = SourceScanner('.', r'get_(\w+)_bp')
    for blueprint, name in scanner.apply_scanned_function():
        prefix = '/' + name
        app.register_blueprint(blueprint, url_prefix=prefix)
        logger.info('Register blueprint: {} for url_prefix: {}'.format(
            blueprint, prefix))
def log_policy_request(smtp_session_data, action, start_time=None, end_time=None):
    """Log one policy request together with the final action taken.

    The "route" portion of the message encodes authentication state:
      `sender -> recipient`                    sender not authenticated
      `sender => recipient`                    sasl username equals sender
      `sasl_username => sender -> recipient`   authenticated, different sender

    :param smtp_session_data: dict of Postfix policy attributes.
    :param action: the SMTP action string being returned.
    :param start_time: optional `time.time()` value taken before processing.
    :param end_time: optional `time.time()` value taken after processing.
    """
    sasl_username = smtp_session_data.get('sasl_username', '')
    sender = smtp_session_data.get('sender', '')
    recipient = smtp_session_data.get('recipient', '')
    client_address = smtp_session_data['client_address']
    protocol_state = smtp_session_data['protocol_state']
    helo = smtp_session_data.get('helo_name', '')
    client_name = smtp_session_data.get('client_name', '')
    reverse_client_name = smtp_session_data.get('reverse_client_name', '').lstrip('[').rstrip(']')

    if not sasl_username:
        route = "{} -> {}".format(sender, recipient)
    elif sasl_username == sender:
        route = "{} => {}".format(sasl_username, recipient)
    else:
        route = "{} => {} -> {}".format(sasl_username, sender, recipient)

    elapsed = ''
    if start_time and end_time:
        elapsed = "{:.4f}s".format(end_time - start_time)

    # Log final action. RCPT state carries the full client detail.
    if protocol_state == 'RCPT':
        logger.info("[{}] {}, {}, "
                    "{} [sasl_username={}, sender={}, "
                    "client_name={}, "
                    "reverse_client_name={}, "
                    "helo={}, "
                    "encryption_protocol={}, "
                    "encryption_cipher={}, "
                    "server_port={}, "
                    "process_time={}]".format(
                        client_address, protocol_state, route, action,
                        sasl_username, sender, client_name,
                        reverse_client_name, helo,
                        smtp_session_data.get('encryption_protocol', ''),
                        smtp_session_data.get('encryption_cipher', ''),
                        smtp_session_data.get('server_port', ''),
                        elapsed))
    else:
        logger.info("[{}] {}, {}, "
                    "{} [recipient_count={}, "
                    "size={}, process_time={}]".format(
                        client_address, protocol_state, route, action,
                        smtp_session_data.get('recipient_count', 0),
                        smtp_session_data.get('size', 0), elapsed))

    return None
def __update_list_param(mail, param, value, param_file=None, is_email=False):
    """Write a list-type mlmmj parameter file (one value per line).

    :param mail: mail address of the mailing list account.
    :param param: parameter name, e.g. 'listaddress'.
    :param value: either a pre-split list of values or a raw web form string.
    :param param_file: optional explicit path of the parameter file.
    :param is_email: True if values must be parsed as email addresses.
    :returns: (True, ) on success, (False, error) on failure.
    """
    if not param_file:
        param_file = __get_param_file(mail=mail, param=param)

    if isinstance(value, (str, unicode)):
        _values = __convert_web_param_value_to_list(value=value, is_email=is_email)
    else:
        _values = value

    if _values:
        try:
            # Fix: a redundant second `__get_param_file()` call here
            # clobbered any caller-supplied `param_file`; removed.
            if param == 'listaddress':
                # Remove primary address(es), then prepend the primary
                # address: it must always be the first entry.
                _values = [v for v in _values if v != mail]
                _values = [mail] + _values

            with open(param_file, 'w') as f:
                f.write('\n'.join(_values) + '\n')

            logger.info("[{0}] {1}, updated: {2} -> {3}".format(
                web.ctx.ip, mail, param, ', '.join(_values)))
        except Exception as e:
            logger.error(
                "[{0}] {1}, error while updating (list) parameter: {2} -> {3}, {4}"
                .format(web.ctx.ip, mail, param, value, e))
            return (False, repr(e))

    # Fix: previously returned None on success; siblings like
    # __update_normal_param return a (success, ...) tuple that callers index.
    return (True, )
def add_maillist(mail, form, conn=None):
    """Add required SQL records to add a mailing list account.

    :param mail: email address of the new mailing list.
    :param form: dict-like web form with optional keys: name,
        only_moderator_can_post, only_subscriber_can_post, moderators, owner.
    :param conn: optional existing SQL connection; created when omitted.
    :returns: (True,) on success, (False, error_code_or_repr) on failure.
    """
    mail = str(mail).lower()
    (listname, domain) = mail.split('@', 1)

    if not utils.is_email(mail):
        return (False, 'INVALID_EMAIL')

    if not conn:
        _wrap = SQLWrap()
        conn = _wrap.conn

    if not is_domain_exists(domain=domain):
        return (False, 'NO_SUCH_DOMAIN')

    if is_email_exists(mail=mail):
        return (False, 'ALREADY_EXISTS')

    params = {
        'active': 1,
        'address': mail,
        'domain': domain,
        'name': form.get('name', ''),
        'transport': '%s:%s/%s' % (settings.MTA_TRANSPORT_NAME, domain, listname),
        'mlid': __get_new_mlid(conn=conn),
        'maxmsgsize': form_utils.get_max_message_size(form),
    }

    if 'only_moderator_can_post' in form:
        params['accesspolicy'] = 'moderatorsonly'
    elif 'only_subscriber_can_post' in form:
        params['accesspolicy'] = 'membersonly'

    try:
        conn.insert('maillists', **params)

        params = {
            'active': 1,
            'address': mail,
            'domain': domain,
            'forwarding': mail,
            'dest_domain': domain,
        }
        conn.insert('forwardings', **params)

        # Get moderators, store in SQL table `vmail.moderators`.
        # Fix: previously `qr` was checked once after both calls, so it was
        # unbound (NameError) when neither key was present, and a failed
        # moderators reset was silently ignored when 'owner' was also set.
        if 'moderators' in form:
            qr = __reset_moderators(mail=mail, form=form, conn=conn)
            if not qr[0]:
                return qr

        if 'owner' in form:
            qr = __reset_owners(mail=mail, form=form, conn=conn)
            if not qr[0]:
                return qr

        logger.info('Created: {0}.'.format(mail))
        return (True,)
    except Exception as e:
        logger.error('Error while creating {0}: {1}'.format(mail, e))
        return (False, repr(e))
def delete_ml(mail, archive=True):
    """Delete a mailing list account.

    If archive is True or 'yes', account is 'removed' by renaming its data
    directory.
    """
    _ml_dir = __get_ml_dir(mail=mail)

    # No data directory: nothing to remove on the file system.
    if not os.path.exists(_ml_dir):
        logger.info("[{0}] {1}, removed (no data on file system).".format(
            web.ctx.ip, mail))
        return (True, )

    if archive in [True, 'yes']:
        return __archive_ml(mail=mail)

    try:
        shutil.rmtree(_ml_dir)
        logger.info("[{0}] {1}, removed without archiving.".format(
            web.ctx.ip, mail))
    except Exception as e:
        logger.error(
            "[{0}] {1}, error while removing list from file system: {2}"
            .format(web.ctx.ip, mail, repr(e)))
        return (False, repr(e))

    return (True, )
def restriction(**kwargs):
    """Accept or reject a client based on white/blacklisted reverse DNS names.

    The client's rDNS name, plus every dot-prefixed suffix form of it, is
    checked against the `wblist_rdns` table: a whitelist hit bypasses
    filtering, a blacklist hit rejects.
    """
    rdns_name = kwargs['smtp_session_data']['reverse_client_name']
    client_address = kwargs['smtp_session_data']['client_address']

    # Bypass outgoing emails.
    if kwargs['sasl_username']:
        logger.debug('Found SASL username, bypass rDNS check for outbound.')
        return SMTP_ACTIONS['default']

    if rdns_name == 'unknown':
        logger.debug('No reverse dns name, bypass.')
        return SMTP_ACTIONS['default']

    if is_trusted_client(client_address):
        return SMTP_ACTIONS['default']

    # Exact name plus each dot-prefixed suffix:
    # 'a.b.c' -> ['a.b.c', '.a.b.c', '.b.c', '.c'].
    _policy_rdns_names = [rdns_name]
    _labels = rdns_name.split('.')
    while _labels:
        _policy_rdns_names.append('.' + '.'.join(_labels))
        _labels.pop(0)

    logger.debug('All policy rDNS names: %s' % repr(_policy_rdns_names))

    conn = kwargs['conn_iredapd']

    # Query whitelist
    sql = """SELECT rdns FROM wblist_rdns WHERE rdns IN %s AND wb='W' LIMIT 1""" % sqlquote(_policy_rdns_names)
    logger.debug('[SQL] Query whitelisted rDNS names: \n%s' % sql)
    record = conn.execute(sql).fetchone()
    if record:
        rdns = str(record[0]).lower()
        logger.info("[{}] Reverse client hostname is whitelisted: {}.".format(
            client_address, rdns))
        # better use 'DUNNO' instead of 'OK'
        return SMTP_ACTIONS['default']

    # Query blacklist
    sql = """SELECT rdns FROM wblist_rdns WHERE rdns IN %s AND wb='B' LIMIT 1""" % sqlquote(_policy_rdns_names)
    logger.debug('[SQL] Query blacklisted rDNS names: \n%s' % sql)
    record = conn.execute(sql).fetchone()
    if record:
        rdns = str(record[0]).lower()
        logger.info("[{}] Reverse client hostname is blacklisted: {}".format(
            client_address, rdns))
        return reject_action

    return SMTP_ACTIONS['default']
def __sendmail(conn,
               user,
               client_address,
               throttle_tracking_id,
               throttle_name,
               throttle_value,
               throttle_kind,
               throttle_info,
               throttle_value_unit=None):
    """Construct and send a notification email about an exceeded throttle.

    :param conn: SQL connection cursor.
    :param user: user email address.
    :param client_address: client IP address.
    :param throttle_tracking_id: value of sql column `throttle_tracking.id`.
    :param throttle_name: name of throttle settings: msg_size, max_quota, max_msgs.
    :param throttle_value: value of the throttle setting.
    :param throttle_kind: one of throttle kinds: inbound, outbound.
    :param throttle_info: detailed throttle setting.
    :param throttle_value_unit: unit of throttle setting. e.g 'bytes' for
        max_quota and msg_size.
    :returns: (True, ) on success, (False, error) on failure.
    """
    if not throttle_value_unit:
        throttle_value_unit = ''

    try:
        _subject = 'Throttle quota exceeded: %s, %s=%d %s' % (
            user, throttle_name, throttle_value, throttle_value_unit)

        # Fix: this line was corrupted to `'- User: '******'\n'` (a
        # credential-scrubbing artifact, not valid Python); reconstructed
        # as the intended concatenation with the user address.
        _body = '- User: ' + user + '\n'
        _body += '- Client IP address: ' + client_address + '\n'
        _body += '- Throttle type: ' + throttle_kind + '\n'
        _body += '- Throttle setting: ' + throttle_name + '\n'
        _body += '- Limit: %d %s\n' % (throttle_value, throttle_value_unit)
        _body += '- Detailed setting: ' + throttle_info + '\n'

        utils.sendmail(subject=_subject, mail_body=_body)
        logger.info(
            'Sent notification email to admin(s) to report quota exceed: user=%s, %s=%d.'
            % (user, throttle_name, throttle_value))

        if throttle_tracking_id:
            _now = int(time.time())

            # Update last_notify_time.
            _sql = """UPDATE throttle_tracking
                         SET last_notify_time=%d
                       WHERE id=%d;""" % (_now, throttle_tracking_id)

            try:
                conn.execute(_sql)
                logger.debug('Updated last notify time.')
            except Exception as e:
                logger.error(
                    'Error while updating last notify time of quota exceed: %s.'
                    % (repr(e)))

        return (True, )
    except Exception as e:
        logger.error('Error while sending notification email: %s' % repr(e))
        return (False, repr(e))
def DeleteMedia(self):
    """Delete Stalker videos whose Plex counterpart no longer exists."""
    marked = self.db.queryIfcDB(queries.Ifc.videoToDeleteInStalker)
    # A bool result means the query matched no records to delete.
    if isinstance(marked, bool):
        return
    for row in marked:
        stalker_video.StalkerVideo.DeleteVideo(self.db, row[0])
        logger.info("ifc_media: deleting non exisitng in Plex video with plex_metadata_item_id = {}".format(row[1]))
        self.db.queryIfcDB(queries.Ifc.deleteVideo.format(row[1]))
def __init__(self, interval="Manual"):
    """Initialise the energy reader.

    :param interval: polling interval label; defaults to "Manual".
    """
    logger.info(
        f"Initialising energy reader with interval '{interval}'...")
    self.interval = interval
    # Previous reading per meter — presumably for change detection
    # between polls; confirm against the readers that use it.
    self.prev_readings = {}
    self.readings_cache = self.init_readings_cache()
    # Modbus TCP client configured from the MODBUS_GW settings mapping.
    self.modbus_client = ModbusTcpClient(**MODBUS_GW)
    # Data points pending publication to InfluxDB (name suggests; verify).
    self.publish_to_influx_lst = []
def connect_modbus(self, retries=0):
    """Open the Modbus TCP connection, retrying up to 3 times (1s apart).

    :param retries: current retry count (internal, used by the recursion).
    :raises Exception: when no connection could be established after retries.
    """
    connection = self.modbus_client.connect()
    if not connection:
        if retries < 3:
            time.sleep(1)
            # Fix: previously called the non-existent `self._connect(self, ...)`
            # (wrong method name AND `self` passed twice), which raised
            # AttributeError instead of retrying.
            self.connect_modbus(retries + 1)
            # The recursive call already logged success (or raised);
            # return to avoid logging "connected" a second time.
            return
        raise Exception('cannot establish connection to gateway')
    logger.info('connected to Modbus gateway')
def srs_forward(self, addr, domain):
    """Rewrite *addr* to an SRS forward address, unless *domain* is exempt.

    Bypassed (reply 'not_exist') when the domain is the configured
    srs_domain, this server's hostname, a locally hosted mail domain, or
    listed in the `srs_exclude_domains` SQL table.

    :param addr: the sender address to rewrite.
    :param domain: the domain to test for exemption.
    :returns: a TCP_REPLIES-prefixed reply string.
    """
    # if domain is hostname, virtual mail domain or srs_domain, do not rewrite.
    if domain == settings.srs_domain:
        reply = TCP_REPLIES['not_exist'] + 'Domain is srs_domain, bypassed.'
        return reply
    elif domain == fqdn:
        reply = TCP_REPLIES['not_exist'] + 'Domain is server hostname, bypassed.'
        return reply
    else:
        # Lookup failures are treated as "not local" and processing continues.
        _is_local_domain = False
        try:
            conn_vmail = self.db_conns['conn_vmail']
            _is_local_domain = is_local_domain(conn=conn_vmail, domain=domain)
        except Exception as e:
            logger.error("{} Error while verifying domain: {}".format(self.log_prefix, repr(e)))

        if _is_local_domain:
            reply = TCP_REPLIES['not_exist'] + 'Domain is a local mail domain, bypassed.'
            return reply
        else:
            # Build every suffix of the domain, in both plain and
            # dot-prefixed form, to match wildcard-style exclude entries.
            # NOTE(review): for i == 0, `[-0:]` yields the full domain
            # (plain and dot-prefixed) — confirm that is intended.
            possible_domains = []
            _splited_parts = domain.split('.')
            _length = len(_splited_parts)

            for i in range(_length):
                _part1 = '.'.join(_splited_parts[-i:])
                _part2 = '.' + _part1
                possible_domains += [_part1, _part2]

            conn_iredapd = self.db_conns['conn_iredapd']
            sql = """SELECT id FROM srs_exclude_domains WHERE domain IN %s LIMIT 1""" % sqlquote(list(possible_domains))
            logger.debug("{} [SQL] Query srs_exclude_domains: {}".format(self.log_prefix, sql))

            try:
                qr = conn_iredapd.execute(sql)
                sql_record = qr.fetchone()
                logger.debug("{} [SQL] Query result: {}".format(self.log_prefix, sql_record))
            except Exception as e:
                # On SQL error: bypass without rewriting.
                logger.debug("{} Error while querying SQL: {}".format(self.log_prefix, repr(e)))
                reply = TCP_REPLIES['not_exist']
                return reply

            if sql_record:
                reply = TCP_REPLIES['not_exist'] + 'Domain is explicitly excluded, bypassed.'
                return reply
            else:
                try:
                    new_addr = str(self.srslib_instance.forward(addr, settings.srs_domain))
                    logger.info("{} rewrote: {} -> {}".format(self.log_prefix, addr, new_addr))
                    reply = TCP_REPLIES['success'] + new_addr
                    return reply
                except Exception as e:
                    logger.debug("{} Error while generating forward address: {}".format(self.log_prefix, repr(e)))
                    # Return original address.
                    reply = TCP_REPLIES['not_exist']
                    return reply
def log_policy_request(smtp_session_data, action, start_time=None, end_time=None):
    """Log a policy request and its final action.

    The "route" part of the message encodes authentication state:
      `sender -> recipient`                    sender not authenticated
      `sender => recipient`                    sasl username equals sender
      `sasl_username => sender -> recipient`   authenticated, different sender

    :param smtp_session_data: dict of Postfix policy attributes.
    :param action: the SMTP action string being returned.
    :param start_time: optional `time.time()` taken before processing.
    :param end_time: optional `time.time()` taken after processing.
    """
    session = smtp_session_data
    sasl_username = session.get('sasl_username', '')
    sender = session.get('sender', '')
    recipient = session.get('recipient', '')
    client_address = session['client_address']
    protocol_state = session['protocol_state']
    helo = session.get('helo_name', '')
    client_name = session.get('client_name', '')
    reverse_client_name = session.get('reverse_client_name', '').lstrip('[').rstrip(']')

    if not sasl_username:
        route = f"{sender} -> {recipient}"
    elif sasl_username == sender:
        route = f"{sasl_username} => {recipient}"
    else:
        route = f"{sasl_username} => {sender} -> {recipient}"

    elapsed = ''
    if start_time and end_time:
        elapsed = f"{end_time - start_time:.4f}s"

    # Log final action. RCPT state carries the full client detail.
    if protocol_state == 'RCPT':
        logger.info(
            f"[{client_address}] {protocol_state}, {route}, "
            f"{action} [sasl_username={sasl_username}, sender={sender}, "
            f"client_name={client_name}, "
            f"reverse_client_name={reverse_client_name}, "
            f"helo={helo}, "
            f"encryption_protocol={session.get('encryption_protocol', '')}, "
            f"encryption_cipher={session.get('encryption_cipher', '')}, "
            f"server_port={session.get('server_port', '')}, "
            f"process_time={elapsed}]")
    else:
        logger.info(
            f"[{client_address}] {protocol_state}, {route}, "
            f"{action} [recipient_count={session.get('recipient_count', 0)}, "
            f"size={session.get('size', 0)}, process_time={elapsed}]")

    return None
def restriction(**kwargs):
    """Reject messages that are SASL-authenticated but carry a null sender.

    An authenticated session with an empty envelope sender is treated as a
    spam indicator.
    """
    sender = kwargs['sender']
    sasl_username = kwargs['sasl_username']

    # Only the authenticated + null-sender combination is suspicious.
    if not sasl_username or sender:
        return SMTP_ACTIONS['default']

    logger.info(
        'Possible spam (authenticated as %s but sender address is null).'
        % sasl_username)
    return SMTP_ACTIONS['reject_null_sender']
def total_energy_now(self, meter):
    """Read a meter's lifetime energy counter and package it for Influx.

    :param meter: dict with at least 'meter_id' and 'influx_measure_base_name'.
    :returns: dict with meter_id, measurement_total name and kWh value.
    """
    meter_id = meter.get('meter_id')
    raw = self.read_modbus_registers(meter_id)
    payload = BinaryPayloadDecoder.fromRegisters(raw.registers, byteorder=Endian.Big)
    # Raw 32-bit unsigned counter divided by 100 — presumably the register
    # holds centi-kWh; confirm against the meter's register map.
    energy_kwh = payload.decode_32bit_uint() / 100
    influx_measure = meter.get('influx_measure_base_name')
    logger.info(f"{influx_measure}total = {energy_kwh} kWh")
    return dict(meter_id=meter_id,
                measurement_total=influx_measure + "Total",
                value_total=energy_kwh)
def add_subscribers(mail, subscribers, subscription='normal', require_confirm=True):
    """Add subscribers to given subscription version of mailing list.

    :param mail: mail address of mailing list account
    :param subscribers: a list/tuple/set of subscribers' email addresses
    :param subscription: subscription version: normal, nomail, digest.
    :param require_confirm: if True, send a confirmation mail instead of
        writing subscriber files directly. (Fix: the original docstring
        repeated the `subscription` description here.)
    """
    mail = mail.lower()
    # Keep only valid addresses, normalized to lower case.
    subscribers = [str(i).lower() for i in subscribers if utils.is_email(i)]
    if not subscribers:
        return (True, )

    if require_confirm:
        qr = __add_subscribers_with_confirm(mail=mail,
                                            subscribers=subscribers,
                                            subscription=subscription)
        if not qr[0]:
            logger.error('[{0}] {1} Failed to add subscribers (require '
                         'confirm): error={2}'.format(web.ctx.ip, mail, qr[1]))
        return qr
    else:
        # mlmmj stores subscribers in one file per first letter; group the
        # addresses accordingly. (Idiom: dict.setdefault replaces the
        # manual membership-test-then-append accumulation.)
        grouped_subscribers = {}
        for i in subscribers:
            grouped_subscribers.setdefault(i[0], []).append(i)

        _dir = __get_ml_subscribers_dir(mail=mail, subscription=subscription)

        for letter in grouped_subscribers:
            # Get file stores the subscriber.
            path = os.path.join(_dir, letter)
            qr = __add_lines_in_file(f=path, lines=grouped_subscribers[letter])
            if not qr[0]:
                logger.error('[{0}] {1} Failed to add subscribers to file: '
                             'error={2}'.format(web.ctx.ip, mail, qr[1]))
                return qr

        logger.info(
            '[{0}] {1}, added subscribers without confirming: {2}.'.format(
                web.ctx.ip, mail, ', '.join(subscribers)))
        return (True, )
def sendTemplateResponse(self, templatePath, data=None):
    """
    Generates the template response back to the client.

    :param templatePath: template file name, resolved via getTemplatePath().
    :param data: optional dict of template variables; 'url' is added to it.
    """
    # Fix: the default was a mutable `data={}`, and the mutation below
    # wrote into that shared dict, leaking state across calls.
    if data is None:
        data = {}
    # Systematically adding the request url to the data because it is used (at least) in the meta tags
    data["url"] = self.request.url
    logger.info(self, "Sending back a template-based response to the client: " + templatePath)
    pageContent = template.render(self.getTemplatePath(templatePath), data)
    self.response.out.write(pageContent)
def create_model(self, form):
    """Encrypt the submitted password (if any), then delegate creation.

    Forms without a `password` field are passed through unchanged.
    """
    try:
        org_pwd = form.password.data
        encrypted_pwd = encrypt_password(org_pwd)
        # Security fix: never log credentials. The previous message wrote
        # both the plaintext and the encrypted password to the log.
        logger.info('Encrypted submitted password before storing.')
        form.password.data = encrypted_pwd
    except AttributeError:
        # Form has no password field; nothing to encrypt.
        pass
    super(AccessCheckView, self).create_model(form)
def add_subscribers(mail, subscribers, conn=None):
    """Add subscribers to mailing list."""
    mail = str(mail).lower()
    (listname, domain) = mail.split('@', 1)

    if not utils.is_email(mail):
        return (False, 'INVALID_EMAIL')

    if not conn:
        _wrap = SQLWrap()
        conn = _wrap.conn

    if not is_domain_exists(domain=domain):
        return (False, 'NO_SUCH_DOMAIN')

    if not is_email_exists(mail=mail):
        return (False, 'MAILLIST_NOT_EXIST')

    if not subscribers or not isinstance(subscribers, (list, tuple, set)):
        return (False, 'NO_SUBSCRIBERS')

    try:
        # Delete existing members first, then add them with one SQL statement.
        # To avoid inserting rows one by one, and have to handle duplicate
        # record error.
        conn.delete('maillist_members',
                    vars={'address': mail, 'members': subscribers},
                    where='address=$address AND member IN $members')

        subscribers = __exclude_non_existing_addresses(domain=domain,
                                                       addresses=subscribers,
                                                       conn=conn)

        if subscribers:
            rows = [{'address': mail,
                     'domain': domain,
                     'member': addr,
                     'dest_domain': addr.split('@', 1)[-1]}
                    for addr in subscribers]
            conn.multiple_insert('maillist_members', rows)

        logger.info('Added subscribers: {0}.'.format(mail))
        return (True,)
    except Exception as e:
        logger.error('Error while adding members {0}: {1}'.format(mail, e))
        return (False, repr(e))
def get_token():
    """Fetch a WeChat Work (WeCom) API access token for the configured corp.

    Uses the module-level `corpid`/`corpsecret`. Exits the process on
    HTTP errors.

    :returns: the access token string.
    """
    get_token_url = ('https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid='
                     + corpid + '&corpsecret=' + corpsecret)
    try:
        token_content = request.urlopen(get_token_url)
    except request.HTTPError as e:
        logger.info(e.code)
        logger.info(e.read().decode("utf8"))
        exit()

    token_data = token_content.read().decode('utf-8')
    token_json = json.loads(token_data)
    # Fix: removed a dead `token_json.keys()` statement whose result was
    # discarded.
    token = token_json['access_token']
    return token
def WriteVideoRecords(self, movies_or_series = "movies"):
    "Query Plex and run Stalker update queries"
    # Returns the list of Stalker video ids that were inserted or updated.
    recordsIDs=[]
    # Flag substituted into the query template: 0 = movies, 1 = series.
    # NOTE(review): `in` here is a substring test ('' would match too);
    # confirm an equality test was not intended.
    is_series = 0 if movies_or_series in "movies" else 1
    #get the Plex items not exisiting in Stalker (w/o stalker_video_id in ifc table)
    newPlexData = self.db.queryIfcDB(queries.Ifc.toUpdatePlexMediaData.format(is_series,is_series,''))
    # queryIfcDB returns a list of rows on hits; a bool means no records.
    if isinstance(newPlexData, list):
        #new moviews or series exist
        logger.info("{} new Plex {} will be added as Stalker videos.".format(len(newPlexData), movies_or_series))
        #query to insert new items into Stalker
        insertQueryStalker = queries.Stalker.insertVideo
        parametersList = self.BuildInsertQueryParams(newPlexData, movies_or_series)
        #run the query and collect the new stalker video ids
        recordsIDs = self.UpdateStalkerVideo(insertQueryStalker, parametersList, "new")
    else:
        logger.info("No new Plex {} to add into Stalker videos table.".format(movies_or_series))
    #get the existing Plex items where update is needed
    toUpdatePlexData = self.db.queryIfcDB(queries.Ifc.toUpdatePlexMediaData.format(is_series,is_series,'NOT'))
    if isinstance(toUpdatePlexData, list):
        logger.info("{} Plex {} will be updated into Stalker.".format(len(toUpdatePlexData), movies_or_series))
        #query to update the Stalker records
        updateQueryStalker = queries.Stalker.updateVideo
        parametersList = self.BuildInsertQueryParams(toUpdatePlexData, movies_or_series)
        #run the query and collect the updated stalker video ids
        recordsIDs = recordsIDs + self.UpdateStalkerVideo(updateQueryStalker, parametersList, "update")
    else:
        logger.info("No Plex {} to update into Stalker videos table.".format(movies_or_series))
    return recordsIDs
def lambda_handler(event, _) -> object:
    """AWS Lambda: delete a word from a user's dictionary in DynamoDB.

    Expects `userId` and `word` query string parameters. Returns an API
    Gateway style response: 200 on delete, 404 when the item was absent,
    400 on missing parameters, 500 on unexpected errors.
    """
    try:
        logger.info(f'Received an event: ${event}')
        query_params = event['queryStringParameters']
        validate_query_params(query_params)
        user_id = query_params['userId']
        word = query_params['word']
        logger.info(f'Deleting word: ${word} from user ${user_id} dictionary')
        table = dynamo_db.Table(os.environ['DYNAMODB_TABLE'])
        result = table.delete_item(
            Key={
                'userId': user_id,
                'word': word,
            },
            ReturnValues='ALL_OLD'
        )
        logger.info(f'Response from dynamo_db: ${result}')
        attributes = 'Attributes'
        if attributes not in result:
            # No old attributes returned: the item did not exist.
            return create_response(404)
        # Fix: previously logged the literal key name ('Attributes')
        # instead of the deleted item's attributes.
        logger.info(f'Successfully deleted item: ${result[attributes]}')
        return create_response(200)
    except ValueError:
        # Raised by validate_query_params on missing parameters.
        return create_response(400, {'message': 'Missing required parameters: userId, word'})
    except Exception as err:
        logger.error(err)
        return create_response(500, {'message': 'Unknown error occurred!'})
def deploy_product_data(): logger.info('Deploy blog info') # step_1:insert basic blog info BlogInfo.insert_blog_info() # step_2:insert system default setting ArticleTypeSetting.insert_system_setting() # step_3:insert default article sources Source.insert_sources() # step_4:insert default article_type ArticleType.insert_system_article_type() # step_5:insert system plugin Plugin.insert_system_plugin() # step_6:insert blog view BlogView.insert_view()
def apply_outbound_wblist(conn, sender_ids, recipient_ids):
    """Apply outbound white/blacklist rules for a sender/recipient pair.

    :param conn: SQL connection used to query the `outbound_wblist` table.
    :param sender_ids: sender address ids, ordered by matching priority.
    :param recipient_ids: recipient address ids, ordered by matching priority.
    :returns: an SMTP action string; a whitelist hit appends the matched
        rule to the default action, a blacklist hit returns `reject_action`.
    """
    # Return if no valid sender or recipient id.
    if not (sender_ids and recipient_ids):
        logger.debug("No valid sender id or recipient id.")
        return SMTP_ACTIONS["default"]

    # Bypass outgoing emails.
    if settings.WBLIST_BYPASS_OUTGOING_EMAIL:
        logger.debug(
            "Bypass outgoing email as defined in WBLIST_BYPASS_OUTGOING_EMAIL."
        )
        return SMTP_ACTIONS["default"]

    # Get wblist
    sql = """SELECT rid, sid, wb FROM outbound_wblist WHERE sid IN %s AND rid IN %s""" % (sqlquote(sender_ids), sqlquote(recipient_ids))
    logger.debug("[SQL] Query outbound wblist: \n{}".format(sql))
    qr = conn.execute(sql)
    wblists = qr.fetchall()

    if not wblists:
        # no wblist
        logger.debug("No wblist found.")
        return SMTP_ACTIONS["default"]

    logger.debug("Found outbound wblist: {}".format(wblists))

    # Check sender addresses
    # rids/recipients are orded by priority
    for sid in sender_ids:
        for rid in recipient_ids:
            # Whitelist wins over blacklist for the same pair.
            if (rid, sid, "W") in wblists:
                logger.info(
                    "Whitelisted: outbound_wblist=({}, {}, 'W')".format(
                        rid, sid))
                return SMTP_ACTIONS[
                    "default"] + " outbound_wblist=({}, {}, 'W')".format(
                        rid, sid)

            if (rid, sid, "B") in wblists:
                logger.info(
                    "Blacklisted: outbound_wblist=({}, {}, 'B')".format(
                        rid, sid))
                return reject_action

    return SMTP_ACTIONS["default"]
def UpdateInterfaceStalkerRecords(cls, db, recordIDs):
    "writes back the IDs of the new stalker.video records into ifc_media table"
    #get ifc_media data where update flag is 1
    ifcPlexMedia = db.queryIfcDB(queries.Ifc.toWriteStalkerIDs)
    # Guard added for consistency with sibling methods: queryIfcDB returns
    # a bool when no records match, which is not iterable below.
    if isinstance(ifcPlexMedia, bool):
        return
    #write stalker video ids against the matching plex metadata_item_ids
    for r in recordIDs:
        # Fix: the original `if (pm[1] == r[2] for pm in ifcPlexMedia):`
        # tested a generator object, which is always truthy, so the branch
        # ran for every record; `any()` performs the intended match check.
        if any(pm[1] == r[2] for pm in ifcPlexMedia):
            if r[3] in "new":
                logger.info("writing stalker_video_id {} against plex_metadata_id {}".format(r[0], r[2]))
            else:
                logger.info("stalker_video_id {} exists against plex_metadata_id {}".format(r[0], r[2]))
            updateQuery = queries.Ifc.updateStalkerVideoID.format(r[0],r[2])
            db.queryIfcDB(updateQuery)
def delete_ml(mail, archive=True):
    """Delete a mailing list account.

    If archive is True or 'yes', account is 'removed' by renaming its data
    directory.

    :returns: (True, ) or the archive result on success,
        (False, error) on failure.
    """
    _ml_dir = __get_ml_dir(mail=mail)

    if os.path.exists(_ml_dir):
        if archive in [True, 'yes']:
            qr = __archive_ml(mail=mail)
            return qr
        else:
            try:
                shutil.rmtree(_ml_dir)
                logger.info("[{0}] {1}, removed.".format(web.ctx.ip, mail))
            except Exception as e:
                # Fix: modernized Py2-only `except Exception, e` syntax.
                return (False, repr(e))

    # Fix: the function previously fell off the end and returned None;
    # callers expect a (success, ...) tuple as the sibling functions return.
    return (True, )
def apply_outbound_wblist(conn, sender_ids, recipient_ids):
    """Check outbound white/blacklist rules for the given sender/recipient ids.

    Ids are ordered by matching priority; the first matching rule wins.
    A whitelist hit returns the default action annotated with the matched
    rule; a blacklist hit returns the reject action.
    """
    # Return if no valid sender or recipient id.
    if not (sender_ids and recipient_ids):
        logger.debug('No valid sender id or recipient id.')
        return SMTP_ACTIONS['default']

    # Bypass outgoing emails.
    if settings.WBLIST_BYPASS_OUTGOING_EMAIL:
        logger.debug(
            'Bypass outgoing email as defined in WBLIST_BYPASS_OUTGOING_EMAIL.'
        )
        return SMTP_ACTIONS['default']

    # Get wblist
    sql = """SELECT rid, sid, wb FROM outbound_wblist WHERE sid IN {0} AND rid IN {1}""".format(
        sqlquote(sender_ids), sqlquote(recipient_ids))
    logger.debug('[SQL] Query outbound wblist: \n{0}'.format(sql))
    rows = conn.execute(sql).fetchall()

    if not rows:
        # no wblist
        logger.debug('No wblist found.')
        return SMTP_ACTIONS['default']

    logger.debug(f'Found outbound wblist: {rows}')

    # Check sender addresses
    # rids/recipients are orded by priority
    for sid in sender_ids:
        for rid in recipient_ids:
            if (rid, sid, 'W') in rows:
                logger.info(
                    f"Whitelisted: outbound_wblist=({rid}, {sid}, 'W')")
                return SMTP_ACTIONS[
                    'default'] + " outbound_wblist=({0}, {1}, 'W')".format(
                        rid, sid)

            if (rid, sid, 'B') in rows:
                logger.info(
                    f"Blacklisted: outbound_wblist=({rid}, {sid}, 'B')")
                return reject_action

    return SMTP_ACTIONS['default']
def __archive_ml(mail):
    """Archive a mailing list's data directory by renaming/moving it.

    The directory gets a timestamp suffix; when settings.MLMMJ_ARCHIVE_DIR
    is set, it is moved under that directory instead of renamed in place.

    :returns: (True, new_path) on success, (True, ) when no data directory
        exists, (False, error) on failure.
    """
    _dir = __get_ml_dir(mail=mail)

    if __has_ml_dir(mail=mail, path=_dir):
        _timestamp = time.strftime('-%Y%m%d%H%M%S', time.gmtime())
        _new_dir = _dir + _timestamp

        if settings.MLMMJ_ARCHIVE_DIR:
            # Move to archive directory.
            __base_dir = _new_dir.replace(settings.MLMMJ_SPOOL_DIR, settings.MLMMJ_ARCHIVE_DIR)
            # NOTE(review): if __base_dir is absolute after the replace,
            # os.path.join returns it unchanged — confirm the join is needed.
            _new_dir = os.path.join(settings.MLMMJ_ARCHIVE_DIR, __base_dir)

            # Create parent directory
            if _new_dir.endswith('/'):
                _new_dir = os.path.dirname(_new_dir)

            # If new directory exists, append one more timestamp
            if os.path.exists(_new_dir):
                _new_dir = _new_dir + _timestamp

            # Create archive directory
            try:
                os.makedirs(_new_dir, mode=settings.MLMMJ_FILE_PERMISSION)
            except Exception as e:
                _msg = "error while creating directory under archive directory ({0}), {1}".format(
                    _new_dir, repr(e))
                logger.error("[{0}] {1}, {2}".format(web.ctx.ip, mail, _msg))
                return (False, _msg)

        try:
            # Don't use `os.rename()` to handle this move, it raises error
            # if src and dest directories are not on same disk partition.
            # NOTE(review): when _new_dir was pre-created above, shutil.move
            # places _dir *inside* it as a subdirectory — confirm that
            # nesting is intended.
            shutil.move(_dir, _new_dir)
            logger.info("[{0}] {1}, archived: {2} -> {3}".format(
                web.ctx.ip, mail, _dir, _new_dir))

            # Return new directory path
            return (True, _new_dir)
        except Exception as e:
            logger.error(
                "[{0}] {1}, error while archiving: {2} ({3} -> {4})".format(
                    web.ctx.ip, mail, repr(e), _dir, _new_dir))
            return (False, repr(e))

    return (True, )
def load_config():
    """Load the config class selected by the MODE environment variable.

    PRODUCTION and TESTING map to their configs; anything else (including
    unset) falls back to development; import failures fall back to the
    bundled default Config.
    """
    # Fix: os.environ.get('MODE') returns None when unset, which made the
    # string concatenation below raise TypeError; default to ''.
    mode = os.environ.get('MODE', '')
    logger.info('Current mode: ' + mode)
    try:
        if mode == 'PRODUCTION':
            from .production import ProductionConfig
            return ProductionConfig
        elif mode == 'TESTING':
            from .testing import TestingConfig
            return TestingConfig
        else:
            from .development import DevelopmentConfig
            return DevelopmentConfig
    except ImportError:
        # Environment-specific config module missing: use the default.
        from .default import Config
        return Config
def __update_normal_param(mail, param, value, param_file=None, is_email=False):
    """Write a single-value mlmmj parameter file; empty values remove it.

    :param mail: mail address of the mailing list account.
    :param param: parameter name.
    :param value: new value (int or string); a falsy value removes the file.
    :param param_file: optional explicit path of the parameter file.
    :param is_email: True when the value must be a valid email address.
    :returns: (True, ) on success, (False, error) on failure.
    """
    # Although we write all given value, but only first line is used by mlmmj.
    if not param_file:
        param_file = __get_param_file(mail=mail, param=param)

    if param == 'maxmailsize':
        # Coerce to int; anything unparsable counts as "no limit" (0).
        try:
            value = int(value)
        except:
            value = 0

        if not value:
            # Remove param file.
            qr = __remove_file(path=param_file)
            return qr

    if value:
        if is_email:
            value = str(value).lower()
            if not utils.is_email(value):
                return (False, 'INVALID_EMAIL')

        try:
            if isinstance(value, int):
                value = str(value)

            # NOTE(review): under Python 3 this yields bytes, and
            # `bytes + '\n'` below raises TypeError in text mode — this
            # path looks Python-2 only; confirm before running on Py3.
            value = value.encode('utf-8')

            with open(param_file, 'w') as f:
                f.write(value + '\n')
        except Exception as e:
            logger.error(
                "[{0}] {1}, error while updating (normal) parameter: {2} -> {3}, {4}"
                .format(web.ctx.ip, mail, param, value, e))
            return (False, repr(e))
    else:
        # Empty value: drop the parameter file entirely.
        qr = __remove_file(path=param_file)
        if not qr[0]:
            return qr

    logger.info("[{0}] {1}, updated (normal) parameter: {2} -> {3}".format(
        web.ctx.ip, mail, param, value))
    return (True, )
def srs_reverse(self, addr):
    """Reverse an SRS-rewritten address back to its original form.

    Non-SRS addresses, and any reversal failure, are answered with a
    'not_exist' reply (the caller keeps the original address).
    """
    # if address is not srs address, do not reverse.
    if not self.srslib_instance.is_srs_address(addr, strict=True):
        return TCP_REPLIES['not_exist'] + 'Not a valid SRS address, bypassed.'

    # Reverse
    try:
        new_addr = str(self.srslib_instance.reverse(addr))
        logger.info("{} reversed: {} -> {}".format(self.log_prefix, addr, new_addr))
        return TCP_REPLIES['success'] + new_addr
    except Exception as e:
        logger.debug("{} Error while generating reverse address: {}".format(self.log_prefix, repr(e)))
        # Return original address.
        return TCP_REPLIES['not_exist']
def __update_boolean_param(mail, param, value, param_file=None, touch_instead_of_create=False):
    """Create or remove parameter file for boolean type parameter.

    The file's existence is the flag: value 'yes' creates it, any other
    value removes it.

    :param mail: mail address of the mailing list account.
    :param param: parameter name.
    :param value: 'yes' enables the parameter; anything else disables it.
    :param param_file: optional explicit path of the parameter file.
    :param touch_instead_of_create: touch parameter file instead of
        re-create it.
    :returns: (True, ) on success, (False, error) on failure.
    """
    if not param_file:
        param_file = __get_param_file(mail=mail, param=param)

    if value == 'yes':
        try:
            if touch_instead_of_create:
                open(param_file, 'a').close()
            else:
                open(param_file, 'w').close()

            # Avoid some conflicts: subscriber-only and moderator-only
            # posting are mutually exclusive.
            if param == 'subonlypost':
                __remove_param_file(mail=mail, param='modonlypost')

            if param == 'modonlypost':
                __remove_param_file(mail=mail, param='subonlypost')

            # Create 'control/moderated' also
            # NOTE(review): this runs for EVERY boolean param set to 'yes',
            # not only the moderation-related ones — confirm intended.
            _f = __get_param_file(mail=mail, param='moderated')
            open(_f, 'a').close()
        except Exception as e:
            logger.error(
                "[{0}] {1}, error while updating (boolean) parameter: {2} -> {3}, {4}"
                .format(web.ctx.ip, mail, param, value, e))
            return (False, repr(e))
    else:
        qr = __remove_file(path=param_file)
        if not qr[0]:
            return qr

    logger.info("[{0}] {1}, updated (boolean) parameter: {2} -> {3}".format(
        web.ctx.ip, mail, param, value))
    return (True, )
def get(self, indexB64):
    """Serve the page stored at the given base64-encoded index, or 404."""
    index = pb64.decodeB64Padless(indexB64)
    if not index:
        self.send404Response()
        return

    logger.info(self, "Getting page at index: " + indexB64 + " (b64: " + str(index) + ")")
    pagedata = modelaccess.getPageData(index)
    if not pagedata:
        self.send404Response()
        return

    # Pull the raw template out of the stored data before serializing the
    # rest to JSON, then hand both to the page template.
    template = pagedata["template"]
    pagedata["template"] = None
    jsondata = simplejson.dumps(pagedata, cls=jsondateutils.JsonDatesEncoder)
    pagedata.update({"json": jsondata, "html": template})
    modelaccess.incrementPageUsage(index)
    self.sendTemplateResponse("page.html", pagedata)
def getTemplatePath(self, tplName):
    """ Returns the complete path of a django template, based on its file name only """
    # Fix: the path expression was computed twice (once for the log line,
    # once for the return); hoist it into a local.
    path = os.path.join(os.path.dirname(__file__), "../templates/" + tplName)
    logger.info(self, path)
    return path