def check_domain(form, field):
    """Validate that the domain part of an email address is hosted locally.

    :param form: the form being validated (unused directly)
    :param field: field whose ``data`` is an email address
    :raises validators.ValidationError: when the domain is not in the DB
    """
    domain = field.data.split("@")[1]
    try:
        # .one() raises NoResultFound when the domain is unknown
        Session.query(Domain).filter(Domain.name == domain).one()
    except NoResultFound:
        msg = _("The domain: %(dom)s is not local") % dict(dom=domain)
        raise validators.ValidationError(msg)
def get_tagged_addrs(user):
    """Generate a list of tagged addresses for a user"""
    # Distinct to/from addresses the user has sent or received mail with.
    query1 = Session.query(Message.to_address)
    query2 = Session.query(Message.from_address)
    # Plain addresses (no '+*'/'-*' tag wildcards) plus the primary email.
    addrs = [addr.address for addr in user.addresses
             if '+*' not in addr.address and '-*' not in addr.address]
    addrs.append(user.email)
    # Wildcard-tagged addresses are matched with SQL LIKE instead of IN.
    tagged_addrs = [addr.address for addr in user.addresses
                    if '+*' in addr.address or '-*' in addr.address]
    if tagged_addrs:
        # TAGGED_RE rewrites the tag wildcard into a SQL '%' pattern.
        # NOTE(review): func._ appears to emit a grouping/underscore SQL
        # function — confirm against the dialect in use.
        tagged_opts1 = func._(or_(*[Message.to_address
                              .like(TAGGED_RE.sub(r'\g<one>%', taddr))
                              for taddr in tagged_addrs]))
        tagged_opts2 = func._(or_(*[Message.from_address
                              .like(TAGGED_RE.sub(r'\g<one>%', taddr))
                              for taddr in tagged_addrs]))
        query1 = query1.filter(func._(
            or_(tagged_opts1, Message.to_address.in_(addrs))))
        query2 = query2.filter(func._(
            or_(tagged_opts2, Message.from_address.in_(addrs))))
    else:
        query1 = query1.filter(Message.to_address.in_(addrs))
        query2 = query2.filter(Message.from_address.in_(addrs))
    query1 = query1.distinct()
    query2 = query2.distinct()
    to_addrs = [val.to_address for val in query1]
    from_addrs = [val.from_address for val in query2]
    # Deduplicate and return CRC32 checksums as strings.
    all_addrs = set(to_addrs + from_addrs)
    return [str(crc32(val)) for val in all_addrs]
def create_content_ruleset():
    """Create content ruleset"""
    def set_attrs(obj, dom=None):
        """Set attrs"""
        # Resolve each non-zero policy id to its policy name and stash
        # the derived '<attr>-name' conf filename on the settings object.
        for key in POLICY_SETTINGS_MAP:
            attr = POLICY_SETTINGS_MAP[key]
            value = getattr(obj, attr)
            if value != 0:
                policy = Session.query(Policy.name)\
                    .filter(Policy.id == value)\
                    .one()
                setattr(obj, '%s-name' % attr, "%s.conf" % policy.name)
        if dom:
            # Attach the owning domain's name/aliases for templating.
            setattr(obj, 'domain_name', dom.name)
            setattr(obj, 'domain_aliases', dom.aliases)
        return obj
    # Global (id=1) policy settings, then per-domain ones for active domains.
    global_policy = Session.query(PolicySettings).get(1)
    set_attrs(global_policy)
    dpsq = Session.query(DomainPolicy, Domain)\
        .filter(DomainPolicy.domain_id == Domain.id)\
        .filter(Domain.status == true())
    domain_policies = [set_attrs(dps[0], dps[1]) for dps in dpsq]
    # One ruleset file per policy type (1-4).
    for policy_type in [1, 2, 3, 4]:
        kwargs = dict(gps=global_policy, dps=domain_policies,
                      policy_type=POLICY_SETTINGS_MAP[policy_type],
                      default="%s.conf" % POLICY_FILE_MAP[policy_type])
        write_ruleset(POLICY_FILE_MAP[policy_type], kwargs,
                      'content.protection.ruleset')
def delete(self, orgid): "Delete an organization" org = self._get_org(orgid) if not org: abort(404) c.form = DelOrgForm(request.POST, org, csrf_context=session) c.form.domains.query = Session.query(Domain) c.form.admins.query = Session.query(User).filter( User.account_type == 2) c.id = org.id if request.POST and c.form.validate(): org_name = org.name if c.form.delete_domains.data: for domain in org.domains: Session.delete(domain) Session.delete(org) Session.commit() info = DELETEORG_MSG % dict(o=org_name) audit_log(c.user.username, 4, info, request.host, request.remote_addr, datetime.now()) flash(_('The organization has been deleted')) redirect(url(controller='organizations')) else: flash( _('The organization: %(s)s will be deleted,' ' This action is not reversible') % dict(s=org.name)) return render('/organizations/delete.html')
def update_queue_stats(hostname): "Update queue stats" inqdir = get_config_option('IncomingQueueDir') outqdir = get_config_option('OutgoingQueueDir') allids, inqueue = process_queue(inqdir, 1) tmpids, outqueue = process_queue(outqdir, 2) allids.extend(tmpids) dbids = [item.messageid for item in Session.query(MailQueueItem.messageid) .filter(MailQueueItem.hostname == hostname.decode('utf-8')).all()] remids = [item for item in dbids if item not in allids] preids = [item for item in dbids if item not in remids] if remids: print >> sys.stderr, ("== Deleting %(items)d queue " "items from DB ==" % dict(items=len(remids))) Session.query(MailQueueItem)\ .filter(MailQueueItem.messageid.in_(remids))\ .delete(synchronize_session='fetch') Session.commit() populate_db(inqueue, inqdir, 1, preids) populate_db(outqueue, outqdir, 2, preids)
def edit(self, orgid): "Edit an organization" org = self._get_org(orgid) if not org: abort(404) c.form = OrgForm(request.POST, org, csrf_context=session) c.form.domains.query = Session.query(Domain) c.form.admins.query = Session.query(User).filter( User.account_type == 2) c.id = org.id if request.POST and c.form.validate(): updated = False for field in c.form: if (field.name != 'csrf_token' and field.data != getattr(org, field.name)): setattr(org, field.name, field.data) updated = True if updated: try: Session.add(org) Session.commit() info = UPDATEORG_MSG % dict(o=org.name) audit_log(c.user.username, 2, info, request.host, request.remote_addr, datetime.now()) flash(_('The organization has been updated')) except IntegrityError: Session.rollback() flash(_('The organization could not be updated')) else: flash_info(_('No changes made, Organization not updated')) redirect(url(controller='organizations')) return render('/organizations/edit.html')
def evaluate(self, environ, credentials): "Evaluate" identity = environ.get('repoze.who.identity') user = identity['user'] if not user.is_superadmin or not user.active: try: varbs = self.parse_variables(environ) if 'domainid' in varbs['named_args']: domainid = varbs['named_args'].get('domainid') if 'destinationid' in varbs['named_args']: destinationid = varbs['named_args'].get('destinationid') dest = Session.query(DeliveryServer.domain_id)\ .filter(DeliveryServer.id == destinationid)\ .one() domainid = dest.domain_id if 'authid' in varbs['named_args']: authid = varbs['named_args'].get('authid') authsvr = Session.query(AuthServer.domain_id)\ .filter(AuthServer.id == authid).one() domainid = authsvr.domain_id if 'sigid' in varbs['named_args']: sigid = varbs['named_args'].get('sigid') sig = Session.query(DomSignature.domain_id)\ .filter(DomSignature.id == sigid).one() domainid = sig.domain_id if not check_domain_ownership(user.id, domainid): self.unmet() except NoResultFound: self.unmet()
def get_list_data(list_type): "Return lists" # email to any email2any = Session.query(List).filter(List.list_type == list_type)\ .filter(List.from_addr_type == 1)\ .filter(List.to_address == u'any') email2any = windowed_query(email2any, List.id, 500) # non email to any nonemail2any = Session.query(List).filter(List.list_type == list_type)\ .filter(List.from_addr_type != 1)\ .filter(List.to_address == u'any') nonemail2any = windowed_query(nonemail2any, List.id, 500) # email to non any email2nonany = Session.query(List).filter(List.list_type == list_type)\ .filter(List.from_addr_type == 1)\ .filter(List.to_address != u'any') email2nonany = windowed_query(email2nonany, List.id, 500) # nonemail to non any nonemail2nonany = Session.query(List).filter(List.list_type == list_type)\ .filter(List.from_addr_type != 1)\ .filter(List.to_address != u'any') nonemail2nonany = windowed_query(nonemail2nonany, List.id, 500) kwargs = dict(email2any=email2any, nonemail2any=nonemail2any, email2nonany=email2nonany, nonemail2nonany=nonemail2nonany) return kwargs
def update_queue_stats(hostname): "Update queue stats" inqdir = get_config_option('IncomingQueueDir') outqdir = get_config_option('OutgoingQueueDir') allids, inqueue = process_queue(inqdir, 1) tmpids, outqueue = process_queue(outqdir, 2) allids.extend(tmpids) dbids = [item.messageid for item in Session.query(MailQueueItem.messageid)\ .filter(MailQueueItem.hostname == hostname)\ .all()] remids = [item for item in dbids if not item in allids] preids = [item for item in dbids if not item in remids] if remids: print >> sys.stderr, ("== Deleting %(items)d queue " "items from DB ==" % dict(items=len(remids))) Session.query(MailQueueItem)\ .filter(MailQueueItem.messageid.in_(remids))\ .delete(synchronize_session='fetch') Session.commit() populate_db(inqueue, inqdir, 1, preids) populate_db(outqueue, outqdir, 2, preids)
def add_address(row, user, requester): "Add address" session_dict = {} dummy = AddressForm(dict2mdict({}), csrf_context=session_dict) fields = getkeys(row, 'af') post_data = dict2mdict(fields) post_data.add('csrf_token', dummy.csrf_token.current_token) form = AddressForm(post_data, csrf_context=session_dict) if form.validate(): try: if requester.is_domain_admin: # check if they own the domain domainname = form.address.data.split('@')[1] Session.query(Domain).options( joinedload('organizations')).join( domain_owners, (oa, domain_owners.c.organization_id == \ oa.c.organization_id))\ .filter(oa.c.user_id == user.id)\ .filter(Domain.name == domainname).one() addr = Address(address=form.address.data) addr.enabled = form.enabled.data addr.user = user Session.add(addr) Session.commit() except (IntegrityError, NoResultFound): pass
def delete(self, orgid): "Delete an organization" org = self._get_org(orgid) if not org: abort(404) c.form = DelOrgForm(request.POST, org, csrf_context=session) c.form.domains.query = Session.query(Domain) c.form.admins.query = Session.query(User).filter( User.account_type == 2) c.id = org.id if request.POST and c.form.validate(): org_name = org.name if c.form.delete_domains.data: for domain in org.domains: Session.delete(domain) Session.delete(org) Session.commit() info = DELETEORG_MSG % dict(o=org_name) audit_log(c.user.username, 4, info, request.host, request.remote_addr, datetime.now()) flash(_('The organization has been deleted')) redirect(url(controller='organizations')) else: flash(_('The organization: %(s)s will be deleted,' ' This action is not reversible') % dict(s=org.name)) return render('/organizations/delete.html')
def audit(self, page=1, format=None): "Audit log" total_found = 0 search_time = 0 num_items = session.get('auditlog_num_items', 50) qry = request.GET.get('q', None) kwds = {} if qry: conn = SphinxClient() sphinxopts = extract_sphinx_opts(config['sphinx.url']) conn.SetServer(sphinxopts.get('host', '127.0.0.1')) conn.SetMatchMode(SPH_MATCH_EXTENDED2) if page == 1: conn.SetLimits(0, num_items, 500) else: page = int(page) offset = (page - 1) * num_items conn.SetLimits(offset, num_items, 500) qry = clean_sphinx_q(qry) results = conn.Query(qry, 'auditlog, auditlog_rt') qry = restore_sphinx_q(qry) if results and results['matches']: ids = [hit['id'] for hit in results['matches']] query = Session.query(AuditLog)\ .filter(AuditLog.id.in_(ids))\ .order_by(desc('timestamp'))\ .all() total_found = results['total_found'] search_time = results['time'] logcount = total_found kwds['presliced_list'] = True else: query = [] lcount = 0 logcount = 0 else: query = Session.query(AuditLog)\ .order_by(desc('timestamp')) lcount = Session.query(AuditLog)\ .order_by(desc('timestamp')) if 'logcount' not in locals(): logcount = lcount.count() items = paginate.Page(query, page=int(page), items_per_page=num_items, item_count=logcount, **kwds) if format == 'json': response.headers['Content-Type'] = 'application/json' jdict = convert_settings_to_json(items) if qry: encoded = json.loads(jdict) encoded['q'] = qry jdict = json.dumps(encoded) return jdict c.page = items c.q = qry c.total_found = total_found c.search_time = search_time return self.render('/status/audit.html')
def audit(self, page=1, format=None): "Audit log" total_found = 0 search_time = 0 num_items = session.get('auditlog_num_items', 50) q = request.GET.get('q', None) kwds = {} if q: conn = SphinxClient() conn.SetMatchMode(SPH_MATCH_EXTENDED2) if page == 1: conn.SetLimits(0, num_items, 500) else: page = int(page) offset = (page - 1) * num_items conn.SetLimits(offset, num_items, 500) q = clean_sphinx_q(q) results = conn.Query(q, 'auditlog, auditlog_rt') q = restore_sphinx_q(q) if results and results['matches']: ids = [hit['id'] for hit in results['matches']] query = Session.query(AuditLog)\ .filter(AuditLog.id.in_(ids))\ .order_by(desc('timestamp'))\ .all() total_found = results['total_found'] search_time = results['time'] logcount = total_found kwds['presliced_list'] = True else: query = [] lcount = 0 logcount = 0 else: query = Session.query(AuditLog)\ .order_by(desc('timestamp')) lcount = Session.query(AuditLog)\ .order_by(desc('timestamp')) if not 'logcount' in locals(): logcount = lcount.count() items = paginate.Page(query, page=int(page), items_per_page=num_items, item_count=logcount, **kwds) if format == 'json': response.headers['Content-Type'] = 'application/json' jdict = convert_settings_to_json(items) if q: encoded = json.loads(jdict) encoded['q'] = q jdict = json.dumps(encoded) return jdict c.page = items c.q = q c.total_found = total_found c.search_time = search_time return render('/status/audit.html')
def check_domain(form, field): "check domain" try: is_freemail(field.data) Session.query(Domain.name).filter(Domain.name == field.data).one() raise ValidationError(_('The domain already exists on the system')) except NoResultFound: pass
def check_domain(form, field): "check domain" domain = field.data.split('@')[1] try: Session.query(Domain).filter(Domain.name == domain).one() except NoResultFound: raise validators.ValidationError( _('The domain: %(dom)s is not local') % dict(dom=domain))
def _user_addresses(self): "Return user addresses" userid = self.identity['user'].id query1 = Session.query(User.email.label('email'))\ .filter_by(active=True, account_type=3, id=userid) query2 = Session.query(Address.address.label('email'))\ .filter_by(enabled=True, user_id=userid) return query1.union(query2)
def create_spam_checks(): "Generate file based spam checks ruleset" users_q = Session.query(User).filter(User.spam_checks == false()) users = windowed_query(users_q, User.id, 100) domains = Session.query(Domain).filter(Domain.spam_checks == false()).all() kwargs = dict(users=users, domains=domains) write_ruleset('spam.checks.rules', kwargs) Session.close()
def confirm_delete(self): "Confirm bulk delete of domains" domainids = session.get('bulk_domain_delete', []) if not domainids: redirect(url(controller='domains', action='index')) num_items = 10 if len(domainids) > num_items and len(domainids) <= 20: num_items = 20 if len(domainids) > num_items and len(domainids) <= 50: num_items = 50 if len(domainids) > num_items and len(domainids) <= 100: num_items = 100 domains = Session.query(Domain).filter(Domain.id.in_(domainids))\ .options(joinedload('organizations')) domcount = Session.query(Domain.id).filter(Domain.id.in_(domainids)) if c.user.is_domain_admin: domains = domains.join(domain_owners, (oa, domain_owners.c.organization_id == oa.c.organization_id))\ .filter(oa.c.user_id == c.user.id) domcount = domcount.join(domain_owners, (oa, domain_owners.c.organization_id == oa.c.organization_id))\ .filter(oa.c.user_id == c.user.id) if request.POST: tasks = [] for domain in domains.all(): info = DELETEDOMAIN_MSG % dict(d=domain.name) tasks.append((c.user.username, 4, unicode(info), request.host, request.remote_addr, now())) Session.delete(domain) Session.commit() del session['bulk_domain_delete'] session.save() for task in tasks: audit_log(*task) flash(_('The domains have been deleted')) redirect(url(controller='domains')) else: flash(_('The following domains are about to be deleted,' ' this action is not reversible, Do you wish to' ' continue ?')) try: c.page = paginate.Page(domains, page=1, items_per_page=num_items, item_count=domcount.count()) except DataError: flash_alert(_('An error occured try again')) redirect(url(controller='domains', action='index')) return render('/domains/confirmbulkdel.html')
def confirm_delete(self): "Confirm bulk delete of domains" domainids = session.get('bulk_domain_delete', []) if not domainids: redirect(url(controller='domains', action='index')) num_items = 10 if len(domainids) > num_items and len(domainids) <= 20: num_items = 20 if len(domainids) > num_items and len(domainids) <= 50: num_items = 50 if len(domainids) > num_items and len(domainids) <= 100: num_items = 100 domains = Session.query(Domain).filter(Domain.id.in_(domainids))\ .options(joinedload('organizations')) domcount = Session.query(Domain.id).filter(Domain.id.in_(domainids)) if c.user.is_domain_admin: domains = domains.join(domain_owners, (oa, domain_owners.c.organization_id == oa.c.organization_id))\ .filter(oa.c.user_id == c.user.id) domcount = domcount.join(domain_owners, (oa, domain_owners.c.organization_id == oa.c.organization_id))\ .filter(oa.c.user_id == c.user.id) if request.POST: tasks = [] for domain in domains.all(): info = DELETEDOMAIN_MSG % dict(d=domain.name) tasks.append((c.user.username, 4, info, request.host, request.remote_addr, now())) Session.delete(domain) Session.commit() del session['bulk_domain_delete'] session.save() for task in tasks: audit_log(*task) flash(_('The domains have been deleted')) redirect(url(controller='domains')) else: flash( _('The following domains are about to be deleted,' ' this action is not reversible, Do you wish to' ' continue ?')) try: c.page = paginate.Page(domains, page=1, items_per_page=num_items, item_count=domcount.count()) except DataError: flash_alert(_('An error occured try again')) redirect(url(controller='domains', action='index')) return render('/domains/confirmbulkdel.html')
def search(self, format=None): "Search for accounts" total_found = 0 search_time = 0 num_items = session.get('accounts_num_items', 10) q = request.GET.get('q', '') d = request.GET.get('d', None) kwds = {'presliced_list': True} page = int(request.GET.get('p', 1)) conn = SphinxClient() conn.SetMatchMode(SPH_MATCH_EXTENDED2) conn.SetFieldWeights(dict(username=50, email=30, firstname=10, lastname=10)) if page == 1: conn.SetLimits(0, num_items, 500) else: page = int(page) offset = (page - 1) * num_items conn.SetLimits(offset, num_items, 500) if d: conn.SetFilter('domains', [int(d),]) if c.user.is_domain_admin: #crcs = get_dom_crcs(Session, c.user) domains = Session.query(Domain.id).join(dom_owns, (oas, dom_owns.c.organization_id == oas.c.organization_id))\ .filter(oas.c.user_id == c.user.id) conn.SetFilter('domains', [domain[0] for domain in domains]) q = clean_sphinx_q(q) results = conn.Query(q, 'accounts, accounts_rt') q = restore_sphinx_q(q) if results and results['matches']: ids = [hit['id'] for hit in results['matches']] total_found = results['total_found'] search_time = results['time'] users = Session.query(User.id, User.username, User.firstname, User.lastname, User.email, User.active, User.local, User.account_type)\ .filter(User.id.in_(ids))\ .order_by(User.id)\ .all() usercount = total_found else: users = [] usercount = 0 c.q = q c.d = d c.total_found = total_found c.search_time = search_time c.page = paginate.Page(users, page=int(page), items_per_page=num_items, item_count=usercount, **kwds) return render('/accounts/searchresults.html')
def check_domain_alias(form, field): "check domain alias" try: Session.query(DomainAlias.name)\ .filter(DomainAlias.name == field.data)\ .one() raise ValidationError(_('The domain already exists on the system')) except NoResultFound: pass
def _get_data(self, format=None, success=None, errors=None): "Get report data" filters = session.get('filter_by', []) query = Session.query(func.max(Message.timestamp).label('oldest'), func.min(Message.timestamp).label('newest')) uquery = UserFilter(Session, c.user, query) query = uquery.filter() # count = self._get_count() countq = MsgCount(Session, c.user) count = countq() cachekey = u'savedfilters-%s' % c.user.username sfq = Session.query(SavedFilter)\ .filter(SavedFilter.user == c.user)\ .options(FromCache('sql_cache_short', cachekey)) if self.invalidate: sfq.invalidate() savedfilters = sfq.all() if filters: dynq = DynaQuery(Message, query, filters) query = dynq.generate() dcountq = Session.query(func.count(Message.id).label('count')) dcountqi = UserFilter(Session, c.user, dcountq) dcountq = dcountqi.filter() dyncq = DynaQuery(Message, dcountq, filters) dcountq = dyncq.generate() dcount = dcountq.one() count = dcount.count cachekey = u'report-aggregates-%s' % c.user.username query = query.options(FromCache('sql_cache_short', cachekey)) if self.invalidate: query.invalidate() data = query.all() saved_filters = [processfilters(filt, filters) for filt in savedfilters] if format is None: return data, count, filters, saved_filters else: if format == 'json': data = data[0] filterdict = dict(FILTER_ITEMS) filterbydict = dict(FILTER_BY) active_filters = [dict( filter_field=unicode(filterdict[filt['field']]), filter_by=unicode(filterbydict[filt['filter']]), filter_value=unicode(filt['value'])) for filt in filters] try: newest = data.newest.strftime("%Y-%m-%d %H:%M") oldest = data.oldest.strftime("%Y-%m-%d %H:%M") except AttributeError: newest = '' oldest = '' datadict = dict(count=count, newest=newest, oldest=oldest) jsondata = dict(success=success, data=datadict, errors=errors, active_filters=active_filters, saved_filters=saved_filters) return jsondata
def index(self, page=1, format=None): "index page" total_found = 0 search_time = 0 num_items = session.get('organizations_num_items', 10) qry = request.GET.get('q', None) kwds = {} if qry: kwds['presliced_list'] = True conn = SphinxClient() sphinxopts = extract_sphinx_opts(config['sphinx.url']) conn.SetServer(sphinxopts.get('host', '127.0.0.1')) conn.SetMatchMode(SPH_MATCH_EXTENDED2) if page == 1: conn.SetLimits(0, num_items, 500) else: page = int(page) offset = (page - 1) * num_items conn.SetLimits(offset, num_items, 500) qry = clean_sphinx_q(qry) try: results = conn.Query(qry, 'organizations, organizations_rt') except (socket.timeout, struct.error): redirect(request.path_qs) qry = restore_sphinx_q(qry) if results and results['matches']: ids = [hit['id'] for hit in results['matches']] orgs = Session.query(Group)\ .filter(Group.id.in_(ids))\ .all() total_found = results['total_found'] search_time = results['time'] orgcount = total_found else: orgs = [] ocount = 0 orgcount = 0 else: orgs = Session.query(Group) ocount = Session.query(Group.id) if 'orgcount' not in locals(): orgcount = ocount.count() items = paginate.Page(orgs, page=int(page), items_per_page=num_items, item_count=orgcount, **kwds) if format == 'json': response.headers['Content-Type'] = 'application/json' data = convert_org_to_json(items) return data c.page = items c.q = qry c.total_found = total_found c.search_time = search_time return self.render('/organizations/index.html')
def save_dom_sig(sigid): "Save domain signature" logger = save_dom_sig.get_logger() try: logger.info('Processing domain signature: %s' % sigid) sign = Session.query(DomSignature)\ .filter(DomSignature.id == sigid).one() domain = Session.query(Domain)\ .filter(Domain.id == sign.domain_id).one() basedir = config.get('ms.signatures.base', '/etc/MailScanner/baruwa/signatures') def mksigdir(sigfile): "create directory" logger.info('Creating signature directory for: %s' % domain.name) os.mkdir(os.path.dirname(sigfile)) logger.info('Created: %s' % os.path.dirname(sigfile)) def mksymlinks(domname): "Create symlinked directories" domdr = os.path.join(basedir, 'domains', domname.name) for alias in domname.aliases: linkdr = os.path.join(basedir, 'domains', alias.name) if not os.path.exists(linkdr): os.symlink(domdr, linkdr) logger.info('Created symlink: %s' % linkdr) if not sign.enabled: logger.info('Signature disabled, notifying scanner') update_ms_serial(logger) return if sign.signature_type == 1: # text sigfile = os.path.join(basedir, 'domains', domain.name, 'sig.txt') print sigfile print os.path.dirname(sigfile) if not os.path.exists(os.path.dirname(sigfile)): mksigdir(sigfile) with open(sigfile, 'w') as handle: if not sign.signature_content.startswith('--'): handle.write("\n--\n") handle.write(sign.signature_content) logger.info('Signature written to file: %s' % sigfile) mksymlinks(domain) else: # html sigfile = os.path.join(basedir, 'domains', domain.name, 'sig.html') if not os.path.exists(os.path.dirname(sigfile)): mksigdir(sigfile) write_html_sig(sigfile, sign, True, logger) mksymlinks(domain) update_ms_serial(logger) except NoResultFound: pass finally: Session.close()
def evaluate(self, environ, credentials): "Evaluate" identity = environ.get('repoze.who.identity') user = identity['user'] if not user or not user.active: self.unmet() if not user.is_superadmin: try: varbs = self.parse_variables(environ) accountid = varbs['named_args'].get('userid') if accountid is None and varbs['named_args'].get('addressid'): addressid = varbs['named_args'].get('addressid') acct = Session.query(Address.user_id)\ .filter(Address.id == addressid).one() accountid = acct.user_id if accountid is None and varbs['named_args'].get('sigid'): sigid = varbs['named_args'].get('sigid') sig = Session.query(UserSignature.user_id)\ .filter(UserSignature.id == sigid).one() accountid = sig.user_id if accountid is None: self.unmet() requested_account = Session.query(User).get(accountid) if not requested_account: abort(404) self.unmet() if requested_account.is_superadmin: self.unmet() if user.is_peleb: if requested_account.id != user.id: self.unmet() if user.is_domain_admin: if accountid and user.id == int(accountid): return orgs = [org.id for org in user.organizations] if not orgs: self.unmet() doms = Session.query(Domain.name)\ .join(domain_owners)\ .filter(domain_owners.c.organization_id.in_(orgs))\ .all() domains = [dom.name for dom in doms] if not domains: self.unmet() addrs = [requested_account.email.split('@')[1]] if '@' in requested_account.username: addrs.append(requested_account.username.split('@')[1]) [ addrs.append(addr.address.split('@')[1]) for addr in requested_account.addresses ] for addr in addrs: if addr in domains: return self.unmet() except NoResultFound: self.unmet()
def save_user_sig(sigid): "Save a user signature and associated images to filesystem" logger = save_user_sig.get_logger() try: logger.info('Processing user signature: %s' % sigid) sign = Session.query(UserSignature)\ .filter(UserSignature.id == sigid).one() user = Session.query(User)\ .filter(User.id == sign.user_id).one() basedir = config.get('ms.signatures.base', '/etc/MailScanner/baruwa/signatures') def mksigdir(sigfile): "make directory" logger.info('Creating signature directory for: %s' % user.username) os.mkdir(os.path.dirname(sigfile)) logger.info('Created: %s' % os.path.dirname(sigfile)) def mksymlinks(usrname): "Create symlinked directories" usrdir = os.path.join(basedir, 'users', usrname.username) for addr in usrname.addresses: linkdr = os.path.join(basedir, 'users', addr.address) if not os.path.exists(linkdr): os.symlink(usrdir, linkdr) logger.info('Created symlink: %s' % linkdr) if not sign.enabled: logger.info('Signature disabled, notifying scanner') update_ms_serial(logger) return if sign.signature_type == 1: # text sigfile = os.path.join(basedir, 'users', user.username, 'sig.txt') if not os.path.exists(os.path.dirname(sigfile)): mksigdir(sigfile) with open(sigfile, 'w') as handle: if not sign.signature_content.startswith('--'): handle.write("\n--\n") handle.write(sign.signature_content) # os.write(handle, os.linesep) logger.info('Signature written to file: %s' % sigfile) mksymlinks(user) else: # html sigfile = os.path.join(basedir, 'users', user.username, 'sig.html') if not os.path.exists(os.path.dirname(sigfile)): mksigdir(sigfile) write_html_sig(sigfile, sign, False, logger) mksymlinks(user) update_ms_serial(logger) except NoResultFound: pass finally: Session.close()
def create_highspam_actions(): "Generate file based highspam actions ruleset" hosts = Session.query(Relay.address, Relay.highspam_actions)\ .filter(Relay.address != u'')\ .filter(Relay.highspam_actions != 2)\ .distinct(Relay.address).all() domains = Session.query(Domain).filter(Domain.highspam_actions != 2).all() kwargs = dict(domains=domains, hosts=hosts) write_ruleset('highspam.actions.rules', kwargs) Session.close()
def evaluate(self, environ, credentials): "Evaluate" identity = environ.get('repoze.who.identity') user = identity['user'] if not user or not user.active: self.unmet() if not user.is_superadmin: try: varbs = self.parse_variables(environ) accountid = varbs['named_args'].get('userid') if accountid is None and varbs['named_args'].get('addressid'): addressid = varbs['named_args'].get('addressid') acct = Session.query(Address.user_id)\ .filter(Address.id == addressid).one() accountid = acct.user_id if accountid is None and varbs['named_args'].get('sigid'): sigid = varbs['named_args'].get('sigid') sig = Session.query(UserSignature.user_id)\ .filter(UserSignature.id == sigid).one() accountid = sig.user_id if accountid is None: self.unmet() requested_account = Session.query(User).get(accountid) if not requested_account: abort(404) self.unmet() if requested_account.is_superadmin: self.unmet() if user.is_peleb: if requested_account.id != user.id: self.unmet() if user.is_domain_admin: if accountid and user.id == int(accountid): return orgs = [org.id for org in user.organizations] if not orgs: self.unmet() doms = Session.query(Domain.name)\ .join(domain_owners)\ .filter(domain_owners.c.organization_id.in_(orgs))\ .all() domains = [dom.name for dom in doms] if not domains: self.unmet() addrs = [requested_account.email.split('@')[1]] if '@' in requested_account.username: addrs.append(requested_account.username.split('@')[1]) [addrs.append(addr.address.split('@')[1]) for addr in requested_account.addresses] for addr in addrs: if addr in domains: return self.unmet() except NoResultFound: self.unmet()
def edit(self, userid): """GET /accounts/edit/id: Form to edit an existing item""" user = self._get_user(userid) if not user: abort(404) c.form = EditUserForm(request.POST, user, csrf_context=session) c.form.domains.query = Session.query(Domain) if c.user.is_domain_admin: c.form.domains.query = Session.query(Domain).join(dom_owns, (oas, dom_owns.c.organization_id == oas.c.organization_id))\ .filter(oas.c.user_id == c.user.id) if user.account_type != 3 or c.user.is_peleb: del c.form.domains if c.user.is_peleb: del c.form.username del c.form.email del c.form.active if request.POST and c.form.validate(): update = False kwd = dict(userid=userid) for attr in FORM_FIELDS: field = getattr(c.form, attr) if field and field.data != getattr(user, attr): setattr(user, attr, field.data) update = True if update: try: Session.add(user) Session.commit() update_serial.delay() flash(_('The account has been updated')) kwd['uc'] = 1 info = UPDATEACCOUNT_MSG % dict(u=user.username) audit_log(c.user.username, 2, unicode(info), request.host, request.remote_addr, now()) except IntegrityError: Session.rollback() flash_alert( _('The account: %(acc)s could not be updated') % dict(acc=user.username)) if (user.id == c.user.id and c.form.active and c.form.active.data == False): redirect(url('/logout')) else: flash_info(_('No changes made to the account')) redirect(url(controller='accounts', action='detail', **kwd)) c.fields = FORM_FIELDS c.id = userid return render('/accounts/edit.html')
def check_username(form, field): "check the username" try: if '@' in field.data: raise ValidationError(_('The username cannot be an email address')) if field.data in DISALLOWED_USERNAMES: raise ValidationError(_('The username is not available')) Session.query(User).filter(User.username == field.data).one() raise ValidationError(_('The username is not available')) except NoResultFound: pass
def exportaccounts(domainid, userid, orgid): "Export Accounts" logger = exportaccounts.get_logger() results = dict(f=None, global_error='') try: logger.info('Starting export of accounts for userid: %s' % userid) user = Session.query(User).get(userid) if user.is_peleb: results['global_error'] = \ 'You are not authorized to export accounts' return results if user.is_domain_admin and orgid: results['global_error'] = \ 'You are not authorized to export organization accounts' return results users = Session.query(User)\ .options(joinedload('addresses'))\ .order_by(User.id) if user.is_domain_admin: users = users.join(domain_users, (domain_owners, domain_users.c.domain_id == domain_owners.c.domain_id), (oa, domain_owners.c.organization_id == oa.c.organization_id))\ .filter(oa.c.user_id == user.id) if domainid: users = users.filter( and_(domain_users.c.domain_id == domainid, domain_users.c.user_id == User.id)) if orgid: users = users.filter( and_(domain_users.c.user_id == User.id, domain_users.c.domain_id == domain_owners.c.domain_id, domain_owners.c.organization_id == orgid)) rows = [] for account in users.all(): row = account.to_csv() if account.addresses: row.update(account.addresses[0].to_csv()) rows.append(row) if rows: keys = tuple(ACCOUNTFIELDS + ADDRESSFIELDS) results['f'] = build_csv(rows, keys) logger.info('Export complete, returning csv file') else: results['global_error'] = 'No accounts found' logger.info('Export failed: %s' % results['global_error']) except (NoResultFound, ProgrammingError): results['global_error'] = 'User account does not exist' logger.info('Export failed: %s' % results['global_error']) finally: Session.close() return results
def create_highspam_scores():
    """Generate file based highspam scores ruleset."""
    # Domains that define a custom high-spam threshold.
    flagged_domains = Session.query(Domain)\
        .filter(Domain.high_score > 0).all()
    # Relay hosts with a custom high-spam threshold; skip blank addresses.
    flagged_hosts = Session.query(Relay.address, Relay.high_score)\
        .filter(Relay.high_score > 0)\
        .filter(Relay.address != u'')\
        .distinct(Relay.address).all()
    # Users are fetched via a windowed query (batches of 100 keyed on
    # User.id) to bound memory use on large installations.
    user_query = Session.query(User).filter(User.high_score > 0)
    windowed_users = windowed_query(user_query, User.id, 100)
    template_args = dict(domains=flagged_domains,
                         users=windowed_users,
                         hosts=flagged_hosts)
    write_ruleset('highspam.score.rules', template_args)
    Session.close()
def exportaccounts(domainid, userid, orgid):
    """Export user accounts to CSV.

    Builds a CSV of the accounts visible to ``userid``, optionally
    restricted to a single domain (``domainid``) or organization
    (``orgid``).  Returns a dict with 'f' (CSV data or None) and
    'global_error' (message on failure, empty string on success).
    """
    logger = exportaccounts.get_logger()
    results = dict(f=None, global_error='')
    try:
        logger.info('Starting export of accounts for userid: %s' % userid)
        user = Session.query(User).get(userid)
        # Ordinary (non-admin) users may not export accounts at all.
        if user.is_peleb:
            results['global_error'] = \
                'You are not authorized to export accounts'
            return results
        # Domain admins may not export a whole organization.
        if user.is_domain_admin and orgid:
            results['global_error'] = \
                'You are not authorized to export organization accounts'
            return results
        # Eager-load addresses so the per-account loop below does not
        # issue one query per row.
        users = Session.query(User)\
            .options(joinedload('addresses'))\
            .order_by(User.id)
        if user.is_domain_admin:
            # Restrict to accounts in domains owned by an organization
            # this admin belongs to (user -> domain -> owner -> org).
            users = users.join(domain_users,
                (domain_owners,
                 domain_users.c.domain_id == domain_owners.c.domain_id),
                (oa,
                 domain_owners.c.organization_id == oa.c.organization_id))\
                .filter(oa.c.user_id == user.id)
        if domainid:
            users = users.filter(
                and_(domain_users.c.domain_id == domainid,
                     domain_users.c.user_id == User.id))
        if orgid:
            users = users.filter(
                and_(domain_users.c.user_id == User.id,
                     domain_users.c.domain_id == domain_owners.c.domain_id,
                     domain_owners.c.organization_id == orgid))
        rows = []
        for account in users.all():
            row = account.to_csv()
            # Merge in the first associated address record, if any.
            if account.addresses:
                row.update(account.addresses[0].to_csv())
            rows.append(row)
        if rows:
            keys = tuple(ACCOUNTFIELDS + ADDRESSFIELDS)
            results['f'] = build_csv(rows, keys)
            logger.info('Export complete, returning csv file')
        else:
            results['global_error'] = 'No accounts found'
            logger.info('Export failed: %s' % results['global_error'])
    except (NoResultFound, ProgrammingError):
        # userid did not resolve to an account (or the query itself failed).
        results['global_error'] = 'User account does not exist'
        logger.info('Export failed: %s' % results['global_error'])
    finally:
        # Fix: this cleanup was commented out, leaking the DB session on
        # every export; the sibling implementation closes it here too.
        Session.close()
    return results
def check_email(form, field):
    """Validate a signup email address.

    Rejects the address when its domain is already registered locally,
    or when another account already uses the address.

    Fixes two defects in the original: both lookups shared one
    ``try`` block, so a ``NoResultFound`` from the domain lookup
    skipped the account-uniqueness check entirely (dead code), and the
    account lookup filtered on ``User.username`` instead of
    ``User.email`` (inconsistent with the sibling validator).
    """
    domain = field.data.split('@')[-1]
    try:
        Session.query(Domain).filter(Domain.name == domain).one()
    except NoResultFound:
        pass
    else:
        # .one() succeeded, so the domain exists locally.
        raise ValidationError(
            _('Email from a domain that is already registered'))
    try:
        Session.query(User).filter(User.email == field.data).one()
    except NoResultFound:
        pass
    else:
        raise ValidationError(_('The email address is already in use'))
def index(self, page=1, format=None):
    """Organizations index page, with optional Sphinx full-text search.

    Renders an HTML listing, or JSON when ``format == 'json'``.
    """
    total_found = 0
    search_time = 0
    # Per-session page size, defaulting to 10.
    num_items = session.get('organizations_num_items', 10)
    q = request.GET.get('q', None)
    kwds = {}
    if q:
        # Search path: results come back already sliced by Sphinx, so
        # tell the paginator not to slice again.
        kwds['presliced_list'] = True
        conn = SphinxClient()
        conn.SetMatchMode(SPH_MATCH_EXTENDED2)
        if page == 1:
            conn.SetLimits(0, num_items, 500)
        else:
            page = int(page)
            offset = (page - 1) * num_items
            conn.SetLimits(offset, num_items, 500)
        # Escape the query for Sphinx, then restore it for display.
        q = clean_sphinx_q(q)
        results = conn.Query(q, 'organizations, organizations_rt')
        q = restore_sphinx_q(q)
        if results and results['matches']:
            # Fetch the matching Group rows by the ids Sphinx returned.
            ids = [hit['id'] for hit in results['matches']]
            orgs = Session.query(Group)\
                .filter(Group.id.in_(ids))\
                .all()
            total_found = results['total_found']
            search_time = results['time']
            orgcount = total_found
        else:
            orgs = []
            ocount = 0
            orgcount = 0
    else:
        # Browse path: hand the paginator a query it can count/slice.
        orgs = Session.query(Group)
        ocount = Session.query(Group.id)
    # NOTE(review): locals() probing is fragile — orgcount is only bound
    # on the paths above that set it; otherwise derive it from ocount.
    if not 'orgcount' in locals():
        orgcount = ocount.count()
    items = paginate.Page(orgs, page=int(page),
                          items_per_page=num_items,
                          item_count=orgcount, **kwds)
    if format == 'json':
        response.headers['Content-Type'] = 'application/json'
        data = convert_org_to_json(items)
        return data
    c.page = items
    c.q = q
    c.total_found = total_found
    c.search_time = search_time
    return render('/organizations/index.html')
def exportdomains(userid, orgid=None):
    """Export domains to CSV.

    Builds a CSV of the domains visible to ``userid``, optionally
    restricted to organization ``orgid``.  Returns a dict with 'f'
    (CSV data or None) and 'global_error' (message on failure).
    """
    logger = exportdomains.get_logger()
    results = dict(f=None, global_error='')
    try:
        logger.info('Starting export of domains for userid: %s' % userid)
        user = Session.query(User).get(userid)
        # Ordinary (non-admin) users may not export domains at all.
        if user.is_peleb:
            results['global_error'] = \
                'You are not authorized to export domains'
            return results
        # Domain admins may not export a whole organization.
        if user.is_domain_admin and orgid:
            results['global_error'] = \
                'You are not authorized to export organization domains'
            return results
        domains = Session.query(Domain)
        if orgid:
            domains = domains.join(domain_owners).filter(
                domain_owners.c.organization_id == orgid)
        if user.is_domain_admin:
            # Restrict to domains owned by an organization this admin
            # belongs to (domain -> owner -> org membership).
            domains = domains.join(domain_owners,
                (oa,
                 domain_owners.c.organization_id == oa.c.organization_id))\
                .filter(oa.c.user_id == user.id)
        rows = []
        for domain in domains.all():
            row = domain.to_csv()
            # Merge in the first delivery server, auth server and alias
            # records, when present.
            if domain.servers:
                row.update(domain.servers[0].to_csv())
            if domain.authservers:
                row.update(domain.authservers[0].to_csv())
            if domain.aliases:
                row.update(domain.aliases[0].to_csv())
            rows.append(row)
        if rows:
            keys = tuple(DOMAINFIELDS + DAFIELDS + DSFIELDS + ASFIELDS)
            results['f'] = build_csv(rows, keys)
            logger.info('Export complete, returning csv file')
        else:
            results['global_error'] = 'No domains found'
            logger.info('Export failed: %s' % results['global_error'])
    except NoResultFound:
        results['global_error'] = 'User account does not exist'
        logger.info('Export failed: %s' % results['global_error'])
    except TypeError:
        results['global_error'] = 'Internal error occured'
        logger.info('Export failed: %s' % results['global_error'])
    finally:
        # Always release the DB session, success or failure.
        Session.close()
    return results
def check_domain(form, field):
    """Validate that the email address belongs to a local domain or alias."""
    addr_parts = field.data.split('@')
    # No '@' at all: the original IndexError path.
    if len(addr_parts) < 2:
        raise validators.ValidationError(
            _(u'Please provide a valid email address'))
    domain = addr_parts[1]
    try:
        Session.query(Domain).filter(Domain.name == domain).one()
        return
    except NoResultFound:
        # Not a primary domain; fall through to the alias lookup.
        pass
    try:
        Session.query(DomainAlias).filter(DomainAlias.name == domain).one()
    except NoResultFound:
        raise validators.ValidationError(
            _(u'The domain: %(dom)s is not local') % dict(dom=domain))
def check_email(form, field):
    """Validate that an email address is neither locally hosted nor in use."""
    mail_domain = field.data.split('@')[-1]
    try:
        Session.query(Domain).filter(Domain.name == mail_domain).one()
    except NoResultFound:
        # Domain not hosted here: acceptable.
        pass
    else:
        raise ValidationError(
            _('Email from a domain that is already registered'))
    try:
        Session.query(User).filter(User.email == field.data).one()
    except NoResultFound:
        # No account uses this address: acceptable.
        pass
    else:
        raise ValidationError(_('The email address is already in use'))
def _get_address(self, addressid):
    """Return the Address with the given id, or None when not found."""
    try:
        return Session.query(Address).get(addressid)
    except NoResultFound:
        return None
def deletealias(self, aliasid):
    """Delete a domain alias: confirm via form on GET, delete on valid POST."""
    alias = self._get_alias(aliasid)
    if not alias:
        abort(404)
    # The form doubles as the CSRF-protected confirmation step.
    c.form = AddDomainAlias(request.POST, alias, csrf_context=session)
    # Limit the selectable domain to the alias's own parent domain.
    c.form.domain.query = Session.query(Domain)\
        .filter(Domain.id==alias.domain_id)
    if request.POST and c.form.validate():
        # Capture these before the row is deleted.
        domainid = alias.domain_id
        aliasname = alias.name
        Session.delete(alias)
        Session.commit()
        # Trigger async regeneration of dependent configuration.
        update_serial.delay()
        info = DELETEDOMALIAS_MSG % dict(d=aliasname)
        audit_log(c.user.username, 4, info, request.host,
                  request.remote_addr, now())
        flash(_('The domain alias: %s has been deleted') % aliasname)
        redirect(url('domain-detail', domainid=domainid))
    # GET (or invalid POST): render the confirmation page.
    c.aliasid = aliasid
    c.domainid = alias.domain_id
    c.domainname = alias.domain.name
    return render('/domains/deletealias.html')
def _get_server(self, serverid):
    """Return the Server with the given id, or None when not found."""
    try:
        return Session.query(Server).get(serverid)
    except NoResultFound:
        return None
def _get_usrsign(self, sigid):
    """Return the UserSignature with the given id, or None when not found."""
    try:
        return Session.query(UserSignature).get(sigid)
    except NoResultFound:
        return None
def _get_domsign(self, sigid):
    """Return the DomSignature with the given id, or None when not found."""
    try:
        return Session.query(DomSignature).get(sigid)
    except NoResultFound:
        return None