def update_invitations(invitations, crypto=True):
    """Import invitation rows from the ID server into the Invitation model.

    invitations: serialized DataTable (json format) with uuid/status/code rows.
    crypto: True = use the configured gpg backend for 'invitations',
        a gpg object = debug override, False = no encryption/signing.
    Returns 'ok' on success.
    Raises KeyError on duplicate uuids in the input.
    """
    from ekklesia.data import DataTable
    from idapi.mails import gnupg_import_init
    from accounts.models import Account, Invitation
    import six
    # NOTE(review): 'IMPLICT' looks like a typo for 'IMPLICIT' — confirm the
    # actual settings key before renaming; deployments may use the misspelling.
    delete_implicit = getattr(settings, 'INVITATIONS_DELETE_IMPLICT', False)
    if crypto:
        gpg, verify, decrypt, sign, encrypt = api_crypto('invitations')
        # identity check: crypto may also be a debug gpg object, which must
        # not be replaced by the configured backend
        if crypto is True:
            crypto = gpg  # not debug
    else:
        verify = decrypt = sign = encrypt = False
    if delete_implicit:
        # invitations not mentioned in the input are deleted at the end
        delinvs = set(Invitation.objects.values_list('uuid', flat=True))
    else:
        delinvs = []
    reader = DataTable(['uuid', 'status', 'code'], gpg=crypto,
                       dataformat='invitation', fileformat='json', version=[1, 0])
    reader.open(invitations, 'r', encrypt=decrypt, sign=verify)
    newinvs = set()  # check duplicate
    data = []
    stati = {
        'new': Invitation.NEW,
        'deleted': Invitation.DELETED,
        'failed': Invitation.FAILED,
        'registered': Invitation.REGISTERED
    }
    # first pass: validate and decode all rows before touching the database
    for inv in reader:
        uuid = inv['uuid']
        if uuid in newinvs:
            raise KeyError('duplicate invitation uuid %s' % uuid)
        newinvs.add(uuid)
        inv['status'] = stati[inv['status']]
        data.append(inv)
    # second pass: apply creates/updates/deletes
    for inv in data:
        uuid = inv['uuid']
        if delete_implicit:
            delinvs.discard(uuid)  # mentioned in input: keep
        try:
            obj = Invitation.objects.get(uuid=uuid)
        except Invitation.DoesNotExist:
            # only NEW invitations are created; other stati without a local
            # object are silently ignored
            if inv['status'] == Invitation.NEW:
                Invitation.objects.create(**inv)
            continue
        if inv['status'] != Invitation.NEW:
            obj.delete()
            continue
        for k, v in six.iteritems(inv):
            setattr(obj, k, v)
        if obj.has_changed:
            obj.save()
    # delete invitations absent from the input (only when delete_implicit)
    for uuid in delinvs:
        Invitation.objects.get(uuid=uuid).delete()
    return 'ok'
def export_invitations(self, output, allfields=False, encrypt=None, sign=False, format='csv'):
    """Write all invitations, ordered by id (joint schema) or uuid, to output.

    allfields selects the full backup column set; otherwise only a
    restricted subset (uuid/code, plus email when standalone) is exported.
    encrypt is a single receiver key (wrapped into a list for the writer),
    sign is passed through to the writer.
    """
    from ekklesia.data import DataTable
    membercls = self.member_class
    if allfields:
        cols = list(self.invite_columns)
        if membercls:
            cols.insert(0, 'uuid')  # member uuid prefixes the joint schema
    else:  # restricted
        cols = ['uuid', 'code'] if membercls else ['uuid', 'code', 'email']
    recipients = [encrypt] if encrypt else encrypt
    table = DataTable(cols, coltypes=self.invite_types, gpg=self.gpg,
                      dataformat='invitation', fileformat=format, version=self.version)
    table.open(output, 'w', encrypt=recipients, sign=sign)
    Invitation = self.Invitation
    order = Invitation.id if membercls else Invitation.uuid
    n = 0
    for inv in self.session.query(Invitation).order_by(order):
        # joint schema: the uuid comes from the linked member row
        extra = {'uuid': inv.member.uuid} if membercls else {}
        table.write(inv, extra)
        n += 1
    table.close()
    self.info('%i exported invitations', n)
def export_invitations(self,output,allfields=False,encrypt=None,sign=False,format='csv'):
    """export invitations, sorted by primary (id, or uuid if joint), to output.
    allfields is used for backup and writes all columns.
    encrypt=to io_key, sign=with default key.
    """
    from ekklesia.data import DataTable
    session = self.session
    Invitation = self.Invitation
    membercls = self.member_class
    if allfields:
        if membercls:
            # joint schema: the member uuid replaces the internal id column
            columns = ['uuid']+list(self.invite_columns)
            columns.remove('id')
        else:
            columns = self.invite_columns
    else: # restricted
        columns = ['uuid','code']
        if not membercls:
            columns += ['email']
    if encrypt:
        # the encrypt argument acts only as a flag; the configured io_key
        # is always the actual recipient
        encrypt = [self.io_key]
    writer = DataTable(columns,coltypes=self.invite_types,gpg=self.gpg,
        dataformat='invitation',fileformat=format,version=self.version)
    writer.open(output,'w',encrypt=encrypt,sign=sign)
    count = 0
    # yield_per keeps memory bounded for large invitation tables
    for inv in session.query(Invitation).order_by(
            Invitation.id if membercls else Invitation.uuid).yield_per(1000):
        extra = {}
        if membercls:
            extra['uuid'] = inv.member.uuid  # uuid comes from the linked member
        writer.write(inv,extra)
        count += 1
    writer.close()
    self.info('%i exported invitations', count)
def update_invitations(invitations, crypto=True):
    """Import invitation rows (uuid/status/code) into the Invitation model.

    invitations: serialized DataTable (json format).
    crypto: True = configured gpg backend for 'invitations', a gpg object =
        debug override, False = disable encryption/signing.
    Returns 'ok'; raises KeyError on duplicate uuids in the input.
    """
    from ekklesia.data import DataTable
    from idapi.mails import gnupg_import_init
    from accounts.models import Account, Invitation
    import six
    if crypto:
        gpg, verify, decrypt, sign, encrypt = api_crypto('invitations')
        if crypto is True:
            crypto = gpg # not debug
    else:
        verify = decrypt = sign = encrypt = False
    reader = DataTable(['uuid', 'status', 'code'], gpg=crypto,
                       dataformat='invitation', fileformat='json', version=[1, 0])
    reader.open(invitations, 'r', encrypt=decrypt, sign=verify)
    newinvs = set() # check duplicate
    data = []
    # wire status names -> Invitation status constants
    stati = {
        'new': Invitation.NEW,
        'deleted': Invitation.DELETED,
        'failed': Invitation.FAILED,
        'registered': Invitation.REGISTERED,
        'verify': Invitation.VERIFY,
        'verified': Invitation.VERIFIED
    }
    # first pass: validate/decode everything before writing to the database
    for inv in reader:
        uuid = inv['uuid']
        if uuid in newinvs:
            raise KeyError
        newinvs.add(uuid)
        inv['status'] = stati[inv['status']]
        data.append(inv)
    # second pass: create/update/delete
    for inv in data:
        uuid = inv['uuid']
        try:
            obj = Invitation.objects.get(uuid=uuid)
        except Invitation.DoesNotExist:
            # only fresh invitations (NEW/VERIFY) are created locally
            if inv['status'] in (Invitation.NEW, Invitation.VERIFY):
                Invitation.objects.create(**inv)
            continue
        # existing invitations in a final state are removed
        if inv['status'] not in (Invitation.NEW, Invitation.VERIFY):
            obj.delete()
            continue
        for k, v in six.iteritems(inv):
            setattr(obj, k, v)
        obj.save()
    return 'ok'
def get_invitations(onlychanged=False, crypto=True):
    """Export invitation states as a serialized DataTable (json format).

    onlychanged restricts the output to registered/failed invitations;
    otherwise new/registering ones are included under the wire name 'new'.
    crypto: True = configured gpg backend, a gpg object = debug override,
    False = no encryption/signing.
    Returns the writer's closed output.
    """
    from ekklesia.data import DataTable
    from idapi.mails import gnupg_init
    from accounts.models import Invitation
    if not crypto:
        verify = decrypt = sign = encrypt = False
    else:
        gpg, verify, decrypt, sign, encrypt = api_crypto('invitations')
        if crypto is True:
            crypto = gpg  # not debug
    # map internal status codes to their wire names; unmapped stati are skipped
    wire_status = {Invitation.REGISTERED: 'registered', Invitation.FAILED: 'failed'}
    if not onlychanged:
        wire_status[Invitation.REGISTERING] = 'new'
        wire_status[Invitation.NEW] = 'new'
    writer = DataTable(['uuid', 'status'], gpg=crypto, dataformat='invitation',
                       fileformat='json', version=[1, 0])
    writer.open(mode='w', encrypt=encrypt, sign=sign)
    count = 0
    rows = Invitation.objects.exclude(status=Invitation.DELETED)
    for row in rows.values('uuid', 'status'):
        name = wire_status.get(row['status'])
        if name is None:
            continue  # ignore status
        row['status'] = name
        writer.write(row)
        count += 1
    return writer.close()
def get_members(onlynew=False, crypto=True):
    """Export member uuids (and activation secrets with two-factor signup).

    onlynew restricts the output to new members; otherwise members and
    eligibles are included as well. Only accounts with a confirmed email
    and a uuid are exported.
    crypto: True = configured gpg backend for 'members', object = debug
    override, False = off. Returns the writer's closed output.
    """
    from ekklesia.data import DataTable
    from accounts.models import Account, Invitation
    if crypto:
        gpg, verify, decrypt, sign, encrypt = api_crypto('members')
        if crypto is True:
            crypto = gpg # not debug
    else:
        verify = decrypt = sign = encrypt = False
    columns = ['uuid']
    twofactor = settings.TWO_FACTOR_SIGNUP
    if twofactor:
        # with two-factor signup each new member carries an activation secret
        columns.append('activate')
    writer = DataTable(columns, gpg=crypto, dataformat='member',
                       fileformat='json', version=[1, 0])
    writer.open(mode='w', encrypt=encrypt, sign=sign)
    members = Account.objects.exclude(uuid=None)
    stati = (Account.NEWMEMBER, )
    if not onlynew:
        stati += (Account.MEMBER, Account.ELIGIBLE)
    members = members.filter(
        email_unconfirmed=None,  # only confirmed emails
        status__in=stati)
    for member in members.values('uuid', 'status'):
        if twofactor:
            if member['status'] == Account.NEWMEMBER:
                # the secret comes from the invitation currently registering;
                # members without one are skipped entirely
                try:
                    inv = Invitation.objects.get(uuid=member['uuid'],
                                                 status=Invitation.REGISTERING)
                except Invitation.DoesNotExist:
                    continue
                member['activate'] = inv.secret
            else:
                member['activate'] = ''
        writer.write(member)
    return writer.close()
def export_members(self, output, encrypt=None, sign=False, allfields=False, format='csv'):
    """Export members (and, with allfields, departments) to output.

    output: sequence of writable targets — output[0] receives members,
        output[1] the departments (allfields only).
    encrypt: single receiver key, wrapped into a list for the writer.
    sign: signing flag passed through to the writer.
    allfields: export all member columns plus department (backup);
        otherwise only uuid/email of members that have an email.
    """
    from ekklesia.data import DataTable
    session = self.session
    Department, Member = self.Department, self.Member
    if allfields:
        columns = self.member_columns + ['department']
    else:
        columns = ('uuid', 'email')
    writer = DataTable(columns, coltypes=self.member_types, gpg=self.gpg,
                       dataformat='member', fileformat=format, version=self.version)
    if encrypt:
        encrypt = [encrypt]
    writer.open(output[0], 'w', encrypt=encrypt, sign=sign)
    count = 0
    if allfields:
        query = session.query(Member).order_by(
            Member.memberno if 'memberno' in columns else Member.uuid)
    else:
        query = session.query(Member.uuid, Member.email).filter(
            Member.email != None).order_by(Member.uuid)
    for member in query:
        # bug fix: extra must be reset per row — previously it was set once
        # before the loop, so a member without a department inherited the
        # department of the preceding member
        extra = {}
        if allfields and member.department:
            depid = member.department.id if self.department_spec == 'number' else member.department.name
            extra = dict(department=depid)
        writer.write(member, extra)
        count += 1
    writer.close()
    self.info('%i exported members', count)
    if not allfields:
        return
    # departments, ordered parents-first (by depth) so they can be re-imported
    dwriter = DataTable(['id', 'name', 'parent', 'depth'], gpg=self.gpg,
                        dataformat='department', fileformat=format, version=self.version)
    dwriter.open(output[1], 'w', encrypt=encrypt, sign=sign)
    count = 0
    for dep in session.query(Department).order_by(Department.depth, Department.id):
        if dep.parent:
            extra = dict(parent=dep.parent.id if self.department_spec == 'number' else dep.parent.name)
        else:
            extra = {}
        dwriter.write(dep, extra)
        count += 1
    dwriter.close()
    self.info('%i exported departments', count)
def table_io(self, fmt, obj=False, missing=False, ignore=True, required=False, extra=False):
    """Round-trip helper: write rows in the given format and read them back.

    fmt: file format ('csv', 'json', 'jsondict', ...).
    obj: write attribute objects instead of dicts.
    missing/extra: write a row with a missing/extra column; with
        ignore=False the write must raise, with ignore=True it must succeed
        and the row must round-trip.
    """
    from ekklesia.data import objects_equal
    columns = ('a', 'b', 'c')
    coltypes = {'a': int, 'b': int, 'c': (int, )}
    t = DataTable(columns, coltypes=coltypes, fileformat=fmt, ignore=ignore, required=required)
    if fmt in ('json', 'jsondict'):
        f = {}
    else:
        f = StringIO()
    t.open(f, 'w')
    if obj:
        t.write(Obj(a=0))
        t.write(Obj(a=1))
    elif missing:
        # bug fix: the old bare 'except:' also swallowed the AssertionError
        # raised by 'assert ignore' inside the try, masking real failures;
        # the success assertion now lives outside the try, and only genuine
        # write errors reach the except branch
        try:
            t.write({'a': 0, 'b': 2})
        except Exception:
            assert not ignore
            return
        assert ignore
    elif extra:
        try:
            t.write({'a': 0, 'b': 2, 'c': [3, 4], 'd': 4})
        except Exception:
            assert not ignore
            return
        assert ignore
    else:
        for i in range(3):
            t.write({'a': i, 'b': 2, 'c': [3, 4]})
    if fmt in ('json', 'jsondict'):
        f2 = t.close()
        assert f is f2  # dict output is returned in place
    else:
        t.close()
        f.seek(0)
    # read back and verify every row survived the round trip
    t = DataTable(columns, coltypes=coltypes, fileformat=fmt)
    t.open(f, 'r')
    i = 0
    for row in t:
        if obj:
            assert objects_equal(Obj(**row), Obj(a=i))
        elif missing:
            assert row == {'a': 0, 'b': 2, 'c': []}
        else:
            assert row == {'a': i, 'b': 2, 'c': [3, 4]}
        i += 1
    t.close()
def import_members(self, memberfile=None, depfile=None, decrypt=False, verify=False, dryrun=False, allfields=False, format='csv'):
    """Import members (and optionally departments) from serialized tables.

    memberfile/depfile: readable inputs for the DataTable reader.
    decrypt/verify: forwarded to the reader (decryption key / signature check).
    dryrun: validate everything but write nothing and do not commit.
    allfields: restore mode — all columns are required.
    Duplicate identifiers raise AssertionError; rows with missing ids are
    skipped with a warning. Missing department depths are completed at the end.
    """
    from ekklesia.data import DataTable
    from sqlalchemy.orm import aliased
    session = self.session
    import_dep = self.department_spec != 'implicit'
    Department = self.Department
    # which column identifies a department
    depprimary = 'id' if self.department_spec == 'number' else 'name'
    dquery = session.query(Department)

    def get_department(id, create=False):
        # root dep: name,parent=None,depth=0
        # fwd-ref: name,parent=None,depth=None
        dep = dquery.filter_by(**id).first()
        if not dep and create and not dryrun:
            dep = Department(parent=None, depth=None, **id)
            session.add(dep)
        return dep

    if depfile and import_dep and self.Department:
        # separate department table supplied: import it first
        columns = self.department_columns + ['parent']
        if allfields:
            reqcolumns = columns
        else:
            reqcolumns = ('name', 'parent')
        reader = DataTable(columns, coltypes=self.department_types, required=reqcolumns,
                           dataformat='department', fileformat=format,
                           version=self.version, gpg=self.gpg)
        count = 0
        reader.open(depfile, 'r', encrypt=decrypt, sign=verify)
        columns, tmp = reader.get_columns()
        seen = set()  # detect duplicate department ids
        for data in reader:
            if not 'depth' in columns:
                data['depth'] = None
            # find existing parent by id, if not exist, create fwd-ref
            if not data['parent'] is None:
                data['parent'] = get_department(
                    {depprimary: data['parent']}, create=True)
            # find existing dep by id
            id = data[depprimary]
            if not id:
                self.warn("identifier missing")
                continue
            assert not id in seen, "department %s is duplicate" % id
            seen.add(id)
            dep = get_department({depprimary: id})
            if dep:
                # walk the parent chain to reject cycles
                parent = data['parent']
                while parent:
                    assert parent != dep, "cycle detected for department %s" % id
                    parent = parent.parent
            # if not exist, create, set depth if parent.depth
            depth = data['parent'].depth if data['parent'] else None
            if not depth is None:
                depth += 1
            if data['depth'] is None:
                data['depth'] = depth
            elif not depth is None:
                # an explicit depth may not contradict the parent's depth
                assert data['depth'] >= depth, "invalid depth"
            count += 1
            if dryrun:
                continue
            if not dep:
                session.add(Department(**data))
            else:
                # re-initialize the existing row with the imported values
                dep.__init__(**data)
        self.info('%i imported departments', count)
    elif not import_dep:
        dquery.update(dict(depth=None))  # reset all depths
    Member = self.Member
    columns = self.member_columns + ['department']
    if allfields:
        reqcolumns = columns
    else:
        reqcolumns = ['uuid', 'email']
        if not import_dep and 'department' in self.member_import and not 'parent' in columns:
            # implicit departments: member rows carry the parent as well
            columns.append('parent')
            reqcolumns.append('parent')
    reader = DataTable(columns, coltypes=self.member_types, required=reqcolumns,
                       dataformat='member', fileformat=format,
                       version=self.version, gpg=self.gpg)
    count = 0
    reader.open(memberfile, 'r', encrypt=decrypt, sign=verify)
    columns, tmp = reader.get_columns()
    #primary either memberid or uuid
    #primary must exist for every member and be unique
    #primary+email must be unique
    if 'memberno' in columns:
        primary, primarykey = 'memberno', Member.memberno
    else:
        primary, primarykey = 'uuid', Member.uuid
    mquery = session.query(primarykey)
    seen, depseen = set(), set()
    for data in reader:
        id = data[primary]
        if not id:
            self.warn("uuid missing")
            continue
        assert not id in seen, "member %s is duplicate" % id
        seen.add(id)
        if 'department' in data:
            depid = data['department']
            if import_dep:
                # all departments must exist
                dep = get_department({depprimary: depid})
                assert dep, "unknown department %s" % depid
            else:
                # create departments from department and parent
                # find existing parent by id, if not exist, create fwd-ref
                parent = data['parent']
                del data['parent']
                if not parent is None:
                    parent = get_department({'name': parent}, create=True)
                # find existing dep by id
                dep = get_department({'name': depid})
                # if not exist, create, set depth if parent.depth
                if dep:
                    sup = parent
                    while sup:
                        assert sup != dep, "cycle detected for department %s" % depid
                        sup = sup.parent
                depseen.add(depid)
                if not dryrun:
                    if not dep:
                        dep = Department(name=depid, parent=parent, depth=None)
                        session.add(dep)
                    elif dep.parent != parent:
                        # if parent changed and not seen, update
                        # NOTE(review): depid was added to depseen just above,
                        # so this assert appears to always fail when the parent
                        # changed — confirm the intended statement order
                        assert not depid in depseen, "department %s is duplicate" % depid
                        dep.__init__(parent=parent)
            data['department'] = dep
        if 'email' in columns and not data['email']:
            data['email'] = None  # normalize empty emails
        member = mquery.filter_by(**{primary: id}).first()
        if data['email']:
            email = session.query(primarykey,Member.email).\
                filter_by(email=data['email']).first()
        else:
            email = None
        # reject an email already owned by a different member
        if email and (not member or getattr(email, primary) != getattr(member, primary)):
            self.error("ignoring: duplicate email %s" % data['email'])
            continue
        count += 1
        if dryrun:
            continue
        if member:
            member.__init__(**data)
        else:
            session.add(Member(**data))  #new
    self.info('%i imported members', count)
    # complete missing depths
    depalias = aliased(Department)
    fixdeps = session.query(Department.depth).filter_by(depth=None)
    if fixdeps.first():
        fixdeps.filter_by(parent=None).update(dict(depth=0))  # set roots
        # propagate depths downwards until no department is left without one
        while fixdeps.first():
            for sub in session.query(Department).join(depalias,Department.parent).\
                    filter(depalias.depth!=None):
                sub.depth = sub.parent.depth + 1
    if not dryrun:
        session.commit()
def test_table_bad_write(self):
    """Writing to a table that was opened for reading must raise."""
    buf = StringIO()
    table = DataTable(('a', ))
    table.open(buf, 'w')
    table.write({'a': 0})
    table.close()
    buf.seek(0)
    reader = DataTable(('a', ))
    reader.open(buf, 'r')
    with raises(AssertionError):
        reader.write({'a': 1})
def table_io(self, ids, fmt, encrypt=False, sign=False):
    """Round-trip a small table through fmt, optionally encrypted/signed.

    Rows are written with the id1 keyring and read back with id2; every
    row must survive the round trip unchanged.
    """
    from ekklesia.data import objects_equal
    cols = ('a', 'b', 'c')
    types = {'a': int, 'b': int, 'c': int}
    writer = DataTable(cols, coltypes=types, gpg=ids['id1'], fileformat=fmt, required=False)
    sink = {} if fmt in ('json', 'jsondict') else StringIO()
    writer.open(sink, 'w', [receiver] if encrypt else False, sign)
    for n in range(3):
        writer.write({'a': n, 'b': 2, 'c': 3})
    if fmt in ('json', 'jsondict'):
        # dict-based formats hand the filled dict back on close
        assert writer.close() is sink
    else:
        writer.close()
        sink.seek(0)
    reader = DataTable(cols, coltypes=types, gpg=ids['id2'], fileformat=fmt)
    reader.open(sink, 'r', encrypt, sender if sign else False)
    for n, row in enumerate(reader):
        assert row == {'a': n, 'b': 2, 'c': 3}
    reader.close()
def sync_members(self,download=True,upload=True,dryrun=False,quick=False,
                 input=None,output=None,invitations=None,format='csv'):
    """Sync members with the ID server.

    download: read registered uuids (from input or the member API) and
        reply with the current member data. upload: also push member and
        department data (to output or the API). quick: new members only.
    invitations: optional writable target for uuid/email invitation export.
    dryrun: process everything but write/commit nothing.
    """
    from ekklesia.data import DataTable
    from ekklesia.backends import api_init
    from six.moves import cStringIO as StringIO
    import requests, json
    session = self.session
    Department, Member = self.Department, self.Member
    check_email = self.check_email
    check_member = self.check_member
    coltypes = self.member_types.copy()
    api = api_init(self.member_api._asdict())
    if download:
        # download registered uuids
        if input:
            input = json.load(input)
        else: # pragma: no cover
            url = self.member_api.url
            if quick: url+='?new=1'
            resp = api.get(url)
            if resp.status_code != requests.codes.ok:
                if self.debugging: open('memberdown.html','w').write(resp.content)
                assert False, 'cannot download used uuids'
            input = resp.json()
        if not input:
            self.warn("input is empty")
            return
        columns = ['uuid','echo']
        if check_member: columns.append(check_member)
        if check_email: columns.append(check_email)
        reader = DataTable(columns,required=['uuid'],coltypes=coltypes,gpg=self.gpg,
            dataformat='member',fileformat=self.member_api.format,version=self.version)
        sign = self.member_api.receiver if self.member_api.sign else False
        reader.open(input,'r',encrypt=self.member_api.encrypt,sign=sign)
    # columns for the reply/upload writer
    columns = list(self.member_sync)
    wcoltypes = dict(coltypes)
    if check_member: wcoltypes[check_member] = bool
    if check_email: wcoltypes[check_email] = bool
    wcoltypes['departments'] = (int,) # replace by list of ids
    if 'location' in self.member_sync:
        wcoltypes['gpslat'] = wcoltypes['gpslng'] = float
        columns += ['gpslat','gpslng']
    if download:
        rcolumns, unknown = reader.get_columns()
        if unknown: self.warn(UnknownFieldsWarning('ignoring unknown fields %s',unknown))
        # echo the check columns only if the server sent them
        if check_member and check_member in rcolumns: columns.append(check_member)
        if check_email and check_email in rcolumns: columns.append(check_email)
        if 'echo' in rcolumns: columns += ['echo']
    encrypt = [self.member_api.receiver] if self.member_api.encrypt else False
    encryptmail = [self.email_receiver] if self.member_api.encrypt else False
    writer = DataTable(columns,coltypes=wcoltypes,gpg=self.gpg,
        dataformat='member',fileformat=self.member_api.format,version=self.version)
    out = {}
    writer.open(out,'w',encrypt=encrypt,sign=self.member_api.sign)
    if not quick and self.export_emails:
        # separate uuid/email table, encrypted for the email receiver
        ewriter = DataTable(('uuid','email'),coltypes=coltypes,gpg=self.gpg,
            dataformat='member',fileformat=format,version=self.version)
        eout = output[2] if output else StringIO()
        ewriter.open(eout,'w',encrypt=encryptmail,sign=self.member_api.sign)
    def export(member,extra=None):
        # write one member (plus location/departments) to the main writer
        # and, when enabled, to the email writer.
        # bug fix: extra was a mutable default ({}), so successive
        # export(member) calls shared one dict and leaked gpslat/gpslng/
        # departments from previous members into later rows
        if extra is None:
            extra = {}
        if 'location' in self.member_import:
            gps = self.gps_coord(self.get_location(member))
            if gps: extra['gpslat'],extra['gpslng'] = gps
        deps = self.get_departments(member)
        if self.department_spec=='number':
            deps = [dep.id for dep in deps]
        else:
            deps = [dep.name for dep in deps]
        if deps: extra['departments'] = deps
        writer.write(member,extra)
        if not quick and self.export_emails: ewriter.write(member)
    if invitations:
        registered = {} # dict of registered uuids
    mquery = session.query(Member)
    count = 0
    if download:
        from datetime import datetime
        seen = set()
        for data in reader:
            uuid = data['uuid']
            if not uuid:
                self.warn("uuid missing")
                continue
            if uuid in seen:
                self.warn("member %s is duplicate" % uuid)
                continue
            seen.add(uuid)
            member = mquery.filter_by(uuid=uuid).first()
            extra = {}
            if not member or (self.member_no and member.status == MStatusType.deleted):
                # unknown/deleted member: tell the server to delete it
                self.warn("member %s is unknown" % uuid)
                if check_member in columns and data[check_member]: extra[check_member] = False
                if check_email in columns and data[check_email]: extra[check_email] = False
                extra['departments'] = []
                writer.write(Member(uuid=uuid,status=MStatusType.deleted),extra)
                continue
            if check_email in columns and data[check_email]:
                extra[check_email] = member.email == data[check_email]
            if check_member in columns:
                if data[check_member]:
                    result = self.check_member_func(member,data[check_member])
                    if 'registered' in self.member_import:
                        if result and not member.registered:
                            member.registered = datetime.utcnow()
                else:
                    if 'registered' in self.member_import and member.registered:
                        self.warn("member %s is already registered" % uuid)
                    result = None
                extra[check_member] = result
            elif 'registered' in self.member_import and not member.registered:
                member.registered = datetime.utcnow()
            if 'echo' in columns: extra['echo'] = data['echo']
            if not dryrun: export(member,extra)
            if invitations: registered[member.uuid] = True
            count += 1
    else:
        # full export without a server download
        for member in mquery.yield_per(1000):
            if self.member_no and not member.memberno: continue # deleted
            if not dryrun: export(member)
            count += 1
    self.info('%i members exported', count)
    if not dryrun and 'registered' in self.member_import: session.commit()
    writer.close()
    if not quick and self.export_emails: ewriter.close()
    if not quick and invitations:
        # export uuid/email of members that are not yet registered
        iwriter = DataTable(('uuid','email'),coltypes=coltypes,gpg=self.gpg,
            dataformat='member',fileformat=format,version=self.version)
        # extra encrypt,sign
        iwriter.open(invitations,'w',encrypt=encryptmail,sign=self.member_api.sign)
        if self.member_no:
            query = session.query(Member.uuid,Member.email,Member.memberno).\
                filter(Member.email!=None,Member.memberno!=0)
        else:
            query = session.query(Member.uuid,Member.email).filter(Member.email!=None)
        count = 0
        for member in query.yield_per(1000):
            if member.uuid in registered: continue # skip registered
            iwriter.write(member)
            count += 1
        iwriter.close()
        self.info('%i invitations exported', count)
    if not upload: return
    # departments are uploaded parents-first (ordered by depth)
    dwriter = DataTable(('id','name','parent','depth'),gpg=self.gpg,
        dataformat='department',fileformat=self.member_api.format,version=self.version)
    dout = {}
    dwriter.open(dout,'w',encrypt=encrypt,sign=self.member_api.sign)
    for dep in session.query(Department).order_by(Department.depth,Department.id).yield_per(1000):
        if dep.parent:
            extra = dict(parent=dep.parent.id) # if self.department_spec=='number' else dep.parent.name
        else:
            extra = {}
        dwriter.write(dep,extra)
    dwriter.close()
    if output:
        json.dump(out,output[0])
        json.dump(dout,output[1])
    elif not dryrun: # pragma: no cover
        resp = api.post(self.member_api.url,json=dict(members=out,departments=dout))
        if resp.status_code != requests.codes.ok:
            if self.debugging: open('memberup.html','w').write(resp.content)
            assert False, 'cannot upload data'
    if not self.export_emails or output or quick: return # pragma: no cover
    # mail the encrypted email table to the configured receiver
    from ekklesia.mail import create_mail, smtp_init
    smtp = smtp_init(self.smtpconfig)
    smtp.open()
    self.info('sending email data')
    msg = create_mail(self.gpgconfig['sender'],self.email_receiver,'Email data',eout.getvalue())
    eout.close()
    msg, results = self.gpg.encrypt(msg,default_key=True,verify=True, inline=True)
    assert msg and results, 'error encrypting message'
    if not dryrun: smtp.send(msg)
    smtp.close()
def update_members(members, departments, crypto=True):
    """Import member and department (NestedGroup) data from the backend.

    members/departments: serialized DataTables (json format).
    crypto: True = configured gpg backend for 'members', a gpg object =
        debug override, False = no encryption/signing.
    Departments are created/updated topologically (parents first); groups
    with a syncid that are absent from the input are deleted at the end.
    Returns 'ok'; raises KeyError on duplicates or missing parents.
    """
    from ekklesia.data import DataTable
    from idapi.mails import gnupg_import_init
    from accounts.models import Account, NestedGroup, Invitation
    import six
    if crypto:
        gpg, verify, decrypt, sign, encrypt = api_crypto('members')
        # identity check (was '== True'): crypto may be a debug gpg object
        if crypto is True:
            crypto = gpg # not debug
    else:
        verify = decrypt = sign = encrypt = False
    # synced groups not present in the input are deleted afterwards
    deldeps = set(
        NestedGroup.objects.exclude(syncid=None).values_list('syncid', flat=True))
    reader = DataTable(['id', 'parent', 'name', 'depth'], gpg=crypto,
                       remap={'id': 'syncid'}, dataformat='department',
                       fileformat='json', version=[1, 0])
    reader.open(departments, 'r', encrypt=decrypt, sign=verify)
    newdeps = set() # check duplicate
    deps = []
    # FIXME also work with name ids
    for dep in reader:
        syncid = dep['syncid']
        if syncid in newdeps:
            raise KeyError('duplicate department %s' % syncid)
        newdeps.add(syncid)
        deps.append(dep)
    # check whether all parents exist
    for dep in deps:
        parent = dep['parent']
        if parent and not parent in newdeps:
            #print parent,'missing'
            raise KeyError('missing parent %s' % parent) # parent missing
    columns = ['uuid', 'status', 'verified', 'department']
    required = ['uuid', 'status']
    twofactor = getattr(settings, 'TWO_FACTOR_SIGNUP')
    if twofactor:
        columns.append('activate')
        required.append('activate')
    reader = DataTable(columns, required=required, gpg=crypto,
                       dataformat='member', fileformat='json', version=[1, 0])
    reader.open(members, 'r', encrypt=decrypt, sign=verify)
    newmembers = set() # check duplicate
    stati = {
        'deleted': Account.DELETED,
        'member': Account.MEMBER,
        'eligible': Account.ELIGIBLE
    }
    data = []
    for member in reader:
        uuid = member['uuid']
        if uuid in newmembers:
            raise KeyError('duplicate member %s' % uuid)
        newmembers.add(uuid)
        status = stati.get(member['status'])
        assert not status is None, "invalid status"
        member['status'] = status
        if status == Account.DELETED:
            member['is_active'] = False
        data.append(member)
    # create/update departments, parents before children
    done = set()
    while len(deps):
        todo = []
        for dep in deps:
            parent = dep['parent']
            if parent:
                if not parent in done:
                    todo.append(dep)  # parent not processed yet: retry next round
                    continue
                parent = NestedGroup.objects.get(syncid=parent)
                dep['parent'] = parent
            done.add(dep['syncid'])
            try:
                obj = NestedGroup.objects.get(syncid=dep['syncid'])
            except NestedGroup.DoesNotExist:
                NestedGroup.objects.create(**dep)
                continue
            deldeps.discard(obj.syncid) # keep later
            if obj._dict == dep:
                continue  # unchanged
            for k, v in six.iteritems(dep):
                setattr(obj, k, v)
            obj.save()
        deps = todo
    for member in data:
        uuid = member['uuid']
        try:
            obj = Account.objects.get(uuid=uuid)
        except Account.DoesNotExist:
            continue
        try:
            if obj.email_unconfirmed:
                continue # ignore unconfirmed email
        except AttributeError:
            pass
        if obj.status == Account.NEWMEMBER:
            # new members must have a registering invitation
            try:
                inv = Invitation.objects.get(uuid=uuid)
            except Invitation.DoesNotExist:
                inv = None
            assert inv and inv.status == Invitation.REGISTERING, "invalid state for newmember"
            if member['status'] == Account.DELETED:
                obj.delete()
                inv.delete()
                continue
            # check whether activation failed or to be deleted
            if twofactor and member['activate'] != True:
                inv.status = Invitation.FAILED
                inv.save()
                obj.delete()
                continue
            inv.status = Invitation.REGISTERED
            inv.save()
        if 'department' in member:
            dep = member['department']
            del member['department']
            if not dep:
                # keep only the unsynced groups of this account
                obj.nested_groups = NestedGroup.objects.filter(syncid=None, account=obj)
            elif not obj.nested_groups.filter(
                    syncid=dep).exists(): # already in dep?
                ngroups = NestedGroup.objects.filter(syncid=None,account=obj) | \
                    NestedGroup.objects.filter(syncid=dep) # union existing ngroups + dep
                obj.nested_groups = ngroups
        for k, v in six.iteritems(member):
            setattr(obj, k, v)
        if obj.has_changed:
            obj.save()
    # drop synced groups the backend no longer knows
    for syncid in deldeps:
        NestedGroup.objects.get(syncid=syncid).delete()
    return 'ok'
def table_io(self,ids,fmt,encrypt=False,sign=False):
    """Write three rows with one identity and read them back with another.

    encrypt addresses the output to `receiver`; sign makes the reader
    verify `sender`'s signature.
    """
    from ekklesia.data import objects_equal
    col_names = ('a','b','c')
    col_types = dict(a=int,b=int,c=int)
    dict_based = fmt in ('json','jsondict')
    out = {} if dict_based else StringIO()
    table = DataTable(col_names,coltypes=col_types,gpg=ids['id1'],fileformat=fmt,required=False)
    table.open(out,'w',[receiver] if encrypt else False,sign)
    for idx in (0,1,2):
        table.write(dict(a=idx,b=2,c=3))
    if dict_based:
        # dict-based formats return the filled dict on close
        assert table.close() is out
    else:
        table.close()
        out.seek(0)
    table = DataTable(col_names,coltypes=col_types,gpg=ids['id2'],fileformat=fmt)
    table.open(out,'r',encrypt,sender if sign else False)
    expected = 0
    for row in table:
        assert row == dict(a=expected,b=2,c=3)
        expected += 1
    table.close()
def sync_invitations(self, download=True, upload=True, dryrun=False, quick=False, input=None, output=None):
    """Sync invitations with the ID server.

    download: read used/changed codes (from input or the invite API), apply
        the state machine below and prepare replies. upload: push replies,
        resets and (unless quick) fresh codes to output or the API.
    dryrun: process everything but commit/upload nothing.
    """
    from ekklesia.backends import api_init
    from ekklesia.data import DataTable
    from six.moves import cStringIO as StringIO
    import requests, json
    session = self.session
    Invitation = self.Invitation
    membercls = self.member_class
    check_email = self.invite_check_email
    api = api_init(self.invite_api._asdict())
    reply = False # whether server requested reply
    # bug fix: 'seen' was only bound inside the download branch but is read
    # in the failed-reset loop below, raising NameError for upload-only runs
    seen = set()
    if download:
        # download registered/failed/verified uuids(used codes), mark used
        if input:
            input = json.load(input)
        if not input: # pragma: no cover
            url = self.invite_api.url
            if quick: url += '?changed=1'
            resp = api.get(url)
            if resp.status_code != requests.codes.ok:
                if self.debugging:
                    open('invdown.html', 'w').write(resp.content)
                assert False, 'cannot download used invite codes'
            input = resp.json() # only json?
        if not input:
            self.warn("input is empty")
            return
        columns = ['uuid', 'status', 'code', 'echo']
        if check_email: columns.append(check_email)
        reader = DataTable(columns, coltypes=self.invite_types,
                           required=('uuid', 'status', 'code'), gpg=self.gpg,
                           dataformat='invitation',
                           fileformat=self.invite_api.format, version=self.version)
        sign = self.invite_api.receiver if self.invite_api.sign else False
        reader.open(input, 'r', encrypt=self.invite_api.encrypt, sign=sign)
        rcolumns, unknown = reader.get_columns()
        if unknown: self.warn('ignoring unknown fields', unknown)
        reply = 'echo' in rcolumns # reply?
        if check_email: reply = reply or check_email in rcolumns
    if upload:
        # upload responses and non-uploaded,unused uuid&code
        columns = ['uuid', 'code', 'status']
        coltypes = self.invite_types.copy()
        if check_email: coltypes[check_email] = bool
        if download and reply:
            if check_email and check_email in rcolumns: columns.append(check_email)
            if 'echo' in rcolumns: columns.append('echo')
        writer = DataTable(columns, coltypes=coltypes, gpg=self.gpg,
                           dataformat='invitation',
                           fileformat=self.invite_api.format, version=self.version)
        encrypt = [self.invite_api.receiver] if self.invite_api.encrypt else False
        out = {}
        writer.open(out, 'w', encrypt=encrypt, sign=self.invite_api.sign)
    if download:
        # process download and generate reply
        if membercls:
            query = session.query(membercls)
        else:
            query = session.query(Invitation)
        count = 0
        for data in reader: # only uploaded codes, reply optional
            uuid = data['uuid']
            if not uuid:
                self.warn("uuid missing")
                continue
            if uuid in seen:
                self.warn("member %s is duplicate" % uuid)
                continue
            seen.add(uuid)
            valid = ('registered', 'failed', 'verified', 'reset')
            if not quick: valid += ('new', 'verify')
            status = data['status']
            if not status in valid:
                self.warn("invalid status %s for %s" % (status, uuid))
                continue
            inv = query.filter_by(uuid=uuid).first()
            extra = {}
            if membercls and inv: inv = inv.invitation
            if not inv:
                # unknown on our side: tell the server to delete the code
                self.error("member %s is unknown" % data['uuid'])
                if check_email in columns and data[check_email]:
                    extra[check_email] = False
                extra['uuid'] = uuid # works also for membercls
                writer.write(Invitation(status=IStatusType.deleted, code=''), extra)
                continue
            """compare status and inv.status
            sync state transitions: backend,idserver -> target
            new,- -> idserver:new
            new,new -> idserver:new, backend:uploaded
            verify,registered/- -> idserver:verify
            -,* -> backend:deleted,error
            uploaded,new/registering -> idserver:no response, backend:uploaded/ignore
            uploaded,registered/failed -> backend:registered/failed
            uploaded_verify,verify -> idserver:no response, backend:uploaded_verify/ignore
            uploaded_verify,verified -> backend:verified
            registered,registered -> backend:registered, idserver:delete
            failed,failed -> backend:failed, idserver:delete (or new,new)
            new/uploaded,reset -> backend:new, idserver:new
            verify/uploaded_verify,reset -> backend:verify, idserver:verify
            failed,- -> backend:new (check if fail not downloaded)
            """
            if status == IStatusType.new:
                if inv.status == IStatusType.new:
                    if inv.code == data['code']:
                        # code upload confirmed, prepare for sending
                        inv.status = IStatusType.uploaded
                        inv.sent = ISentStatusType.unsent
                    else:
                        # mismatch, new code needs to be uploaded
                        self.info("updating old code %s for uuid %s, new %s",
                                  data['code'], data['uuid'], inv.code)
                        # write
                elif inv.status != IStatusType.uploaded: # ignore with uploaded
                    self.error("bad status %s for uuid %s, current %s",
                               status, data['uuid'], inv.status)
                    continue
            elif status == IStatusType.verify:
                if inv.status == IStatusType.verify:
                    if inv.code == data['code']:
                        # code upload confirmed, prepare for sending
                        inv.status = IStatusType.uploaded_verify
                        inv.sent = ISentStatusType.unsent
                    else:
                        # mismatch, new code needs to be uploaded
                        self.info("updating old verify code %s for uuid %s, new %s",
                                  data['code'], data['uuid'], inv.code)
                        # write
                elif inv.status != IStatusType.uploaded_verify: # ignore with uploaded_verify
                    self.error("bad status %s for uuid %s, current %s",
                               status, data['uuid'], inv.status)
                    continue
            elif status == IStatusType.reset:
                if inv.status in FinalStates:
                    self.warn("ignoring reset for uuid %s, status %s",
                              data['uuid'], inv.status)
                    continue
                inv.reset()
            elif status in FinalStates:
                if inv.status == IStatusType.uploaded_verify and status==IStatusType.verified or \
                        inv.status == IStatusType.uploaded and status!=IStatusType.verified:
                    inv.status = status # upload confirmed or failed registration/verification
                    inv.sent = ISentStatusType.unsent
                    inv.change()
                elif status != inv.status:
                    self.error("bad status %s for uuid %s, current %s",
                               status, data['uuid'], inv.status)
                    continue
            else:
                self.error("bad status %s for uuid %s, current %s",
                           status, data['uuid'], inv.status)
                continue
            if upload and not inv.status in (IStatusType.uploaded, IStatusType.uploaded_verify):
                # write response for uploaded
                if check_email and check_email in columns:
                    # bug fix: this branch tested the undefined name
                    # 'member_class' (NameError); the local is 'membercls'
                    if membercls:
                        extra[check_email] = inv.member.email == data[check_email]
                    else:
                        extra[check_email] = inv.email == data[check_email]
                if 'echo' in columns: extra['echo'] = data['echo']
                if membercls: extra['uuid'] = data['uuid']
                writer.write(inv, extra)
            count += 1
        self.info('%i codes used', count)
        if not dryrun: session.commit()
    if not upload: return
    # process failed, which have already been deleted on the server and are ready for reset
    count = 0
    query = session.query(Invitation).filter_by(status=IStatusType.failed,
                                                sent=ISentStatusType.sent)
    for inv in query.yield_per(1000):
        extra = {}
        if membercls:
            uuid = inv.member.uuid
        else:
            uuid = inv.uuid
        if uuid in seen: continue # already replied
        inv.reset()
        count += 1
    self.info('%i codes resetted', count)
    if not dryrun: session.commit()
    if not quick:
        # append new invitations
        count = 0
        fresh = (IStatusType.new, IStatusType.verify)
        query = session.query(Invitation).filter(Invitation.status.in_(fresh))
        for inv in query.yield_per(1000):
            extra = {}
            if membercls:
                uuid = inv.member.uuid
                extra['uuid'] = uuid
            else:
                uuid = inv.uuid
            writer.write(inv, extra)
            count += 1
        self.info('%i new codes uploaded', count)
    writer.close()
    if output:
        json.dump(out, output)
    elif not dryrun: # pragma: no cover
        resp = api.post(self.invite_api.url, json=out)
        if resp.status_code != requests.codes.ok:
            if self.debugging:
                open('invup.html', 'w').write(resp.content)
            assert False, 'cannot upload data'
def import_members(self,memberfile=None,depfile=None,decrypt=False,verify=False,
     dryrun=False,sync=False,allfields=False,format='csv'):
    """import data from memberfile, optionally depfile (if not implicit).
    allfields is used for restore and requires all columns.
    if sync, uuids not seen in input are set to status deleted.
    decrypt=with the default key, verify=check whether its signed with io_key.
    rows with missing uuids or duplicate uuids and emails are ignored.
    """
    from ekklesia.data import DataTable, init_object
    from sqlalchemy.orm import aliased
    session = self.session
    import_dep = self.department_spec != 'implicit' # extra Department data?
    Department = self.Department
    # departments are keyed either by numeric id or by name
    depprimary = 'id' if self.department_spec=='number' else 'name'
    dquery = session.query(Department)
    if verify: verify = self.io_key

    def get_department(id,create=False):
        # Look up a department by its primary key dict; optionally create a
        # forward reference (parent/depth unresolved) when it does not exist.
        # root dep: name,parent=None,depth=0
        # fwd-ref: name,parent=None,depth=None
        dep = dquery.filter_by(**id).first()
        if not dep and create and not dryrun:
            # create forward reference w/o depth, calculate it later
            dep = Department(parent=None,depth=None,**id)
            session.add(dep)
        return dep

    if depfile and import_dep and self.Department:
        # import separate department data
        columns = list(self.department_columns)+['parent']
        if allfields: reqcolumns = columns
        else: reqcolumns = ('name','parent')
        reader = DataTable(columns,coltypes=self.department_types,required=reqcolumns,
            dataformat='department',fileformat=format,version=self.version,gpg=self.gpg)
        count = 0
        reader.open(depfile,'r',encrypt=decrypt,sign=verify)
        columns = reader.get_columns()[0]
        seen = set() # detect duplicates
        for data in reader:
            if not 'depth' in columns: data['depth']=None
            # find existing parent by id, if not exist, create fwd-ref
            if not data['parent'] is None:
                data['parent'] = get_department({depprimary:data['parent']},create=True)
            # find existing dep by id
            id = data[depprimary]
            if not id:
                self.warn("identifier missing")
                continue
            assert not id in seen, "department %s is duplicate" % id
            seen.add(id)
            dep = get_department({depprimary:id})
            if dep:
                # walk up the ancestor chain to reject cycles
                parent = data['parent']
                while parent:
                    assert parent!=dep, "cycle detected for department %s" % id
                    parent = parent.parent
            # if not exist, create, set depth if parent.depth
            depth = data['parent'].depth if data['parent'] else None
            if not depth is None: depth+=1
            if data['depth'] is None: data['depth'] = depth
            elif not depth is None: assert data['depth']>=depth, "invalid depth"
            count += 1
            if dryrun: continue
            if not dep: session.add(Department(**data))
            else: dep.update(**data)
        self.info('%i imported departments', count)
    elif not import_dep: # implicit
        dquery.update(dict(depth=None)) # reset all depths
    Member = self.Member
    columns = list(self.member_columns)+['departments']
    if allfields: reqcolumns = columns
    else:
        reqcolumns = ['uuid','email']
        if self.member_no: reqcolumns.append('id')
    if not import_dep and 'departments' in self.member_import and not 'parent' in columns:
        # implicit requires parent
        columns.append('parent')
        reqcolumns.append('parent')
    reader = DataTable(columns,coltypes=self.member_types,required=reqcolumns,
        dataformat='member',fileformat=format,version=self.version,gpg=self.gpg)
    count = 0
    reader.open(memberfile,'r',encrypt=decrypt,sign=verify)
    columns = reader.get_columns()[0]
    #primary either memberid or uuid
    #primary must exist for every member and be unique
    #primary+email must be unique
    if self.member_no: primary, primarykey = 'id', Member.id
    else: primary, primarykey = 'uuid', Member.uuid
    mquery = session.query(Member)
    seen, depseen = set(), set() # detect duplicates
    for data in reader:
        id = data[primary]
        if not id:
            self.warn("uuid missing")
            continue
        assert not id in seen, "member %s is duplicate" % id
        seen.add(id)
        if 'departments' in data:
            depids = data['departments']
            if import_dep:
                # all departments must exist
                deps=set()
                for depid in depids:
                    dep = get_department({depprimary:depid})
                    assert dep, "unknown department %s" % depid
                    deps.add(dep)
                deps = list(deps)
            else: # implicit departments
                assert len(depids)==1, "only single implicit department supported"
                depid = depids[0]
                # create departments from department and parent
                # find existing parent by id, if not exist, create fwd-ref
                parent = data['parent']
                del data['parent']
                if not parent is None:
                    parent = get_department({'name':parent},create=True)
                # find existing dep by id
                dep = get_department({'name':depid})
                # if not exist, create, set depth if parent.depth
                if dep:
                    sup = parent
                    while sup:
                        assert sup!=dep, "cycle detected for department %s" % depid
                        sup = sup.parent
                # NOTE(review): depid is added to depseen *before* the duplicate
                # assert below, so the assert can fire even on the first
                # occurrence of a changed parent — confirm intended semantics.
                depseen.add(depid)
                if not dryrun:
                    if not dep:
                        dep = Department(name=depid,parent=parent,depth=None)
                        session.add(dep)
                    elif dep.parent != parent:
                        # if parent changed and not seen, update
                        assert not depid in depseen, "department %s is duplicate" % depid
                        dep.update(parent=parent)
                deps = [dep]
            data['departments'] = deps
        if 'email' in columns and not data['email']:
            data['email'] = None # make sure it's None
        # find existing member
        member = mquery.filter(primarykey==id).first()
        if data['email'] and not (member and member.email==data['email']):
            # email already used by other member?
            if session.query(primarykey,Member.email).filter_by(email=data['email']).first():
                self.error("ignoring: duplicate email %s" % data['email'])
                continue
        count += 1
        if dryrun: continue
        if member:
            if member.email != data['email']: self.email_change(member,data)
            if data.get('status') == 'deleted': self.delete_member(member)
            member.update(**data)
        else:
            session.add(Member(**data)) #new
    self.info('%i imported members', count)
    if sync: # deleted unseen members
        count = 0
        for member in session.query(Member).yield_per(1000):
            if member.status==MStatusType.deleted or member.uuid in seen: continue
            self.delete_member(member)
            member.status = MStatusType.deleted
            self.info("member %s deleted" % member.uuid)
            count += 1
        self.info('%i deleted members', count)
    # complete missing depths
    depalias = aliased(Department)
    fixdeps = session.query(Department.depth).filter_by(depth=None)
    if fixdeps.first(): # any departments with missing depths?
        fixdeps.filter_by(parent=None).update(dict(depth=0)) # set roots
        while fixdeps.first(): # fill from roots to leaves
            # each pass resolves departments whose parent depth is now known
            for sub in session.query(Department).join(depalias,Department.parent).\
                filter(depalias.depth!=None).yield_per(1000):
                sub.depth = sub.parent.depth+1
    if not dryrun: session.commit()
def test_table_bad_read(self):
    """Iterating a table that was opened for writing must raise AssertionError."""
    table = DataTable(('a',))
    table.open(StringIO(), 'w')
    with raises(AssertionError):
        for _row in table:
            pass
def export_members(self, output, encrypt=None, sign=False, allfields=False, format='csv'):
    """export data, sorted by primary (uuid, unless memberno exists), to output.
    allfields is used for backup and writes all columns. without allfields,
    data for the invitation DB is generated.
    output is [members,departments] if allfields, else just [members].
    encrypt=to io_key, sign=with default key.
    """
    from ekklesia.data import DataTable
    session = self.session
    Department, Member = self.Department, self.Member
    if allfields:
        # full backup: all member columns plus the department reference
        columns = list(self.member_columns) + ['department']
        dataformat = 'member'
    else:
        # restricted export for the invitation DB
        columns = ('uuid', 'email')
        dataformat = 'invitation'
    writer = DataTable(columns, coltypes=self.member_types, gpg=self.gpg,
                       dataformat=dataformat, fileformat=format, version=self.version)
    if encrypt:
        encrypt = [self.io_key]  # encrypt to the I/O exchange key
    writer.open(output[0], 'w', encrypt=encrypt, sign=sign)
    count = 0
    if allfields:
        query = session.query(Member).order_by(
            Member.memberno if 'memberno' in columns else Member.uuid)
    else:
        query = session.query(Member.uuid, Member.email,
                              Member.registered).order_by(Member.uuid)
        if 'registered' in self.member_import:
            query = query.filter_by(registered=None)  # don't export registered
    for member in query.yield_per(1000):
        # BUGFIX: reset extra per row — previously it was initialized once
        # before the loop, so a member without a department inherited the
        # stale department of the preceding row.
        extra = {}
        if allfields and member.department:
            # FIXME: use get_department?
            depid = member.department.id if self.department_spec == 'number' else member.department.name
            extra = dict(department=depid)
        writer.write(member, extra)
        count += 1
    writer.close()
    self.info('%i exported members', count)
    if not allfields:
        return  # invitation export carries no department table
    # second output file: department tree, ordered parents before children
    dwriter = DataTable(['id', 'name', 'parent', 'depth'], gpg=self.gpg,
                        dataformat='department', fileformat=format, version=self.version)
    dwriter.open(output[1], 'w', encrypt=encrypt, sign=sign)
    count = 0
    for dep in session.query(Department).order_by(
            Department.depth, Department.id).yield_per(1000):
        if dep.parent:
            extra = dict(parent=dep.parent.id if self.department_spec == 'number' else dep.parent.name)
        else:
            extra = {}
        dwriter.write(dep, extra)
        count += 1
    dwriter.close()
    self.info('%i exported departments', count)
def import_members(self, memberfile=None, depfile=None, decrypt=False,
                   verify=False, dryrun=False, sync=False, allfields=False, format='csv'):
    """import data from memberfile, optionally depfile (if not implicit).
    allfields is used for restore and requires all columns.
    if sync, uuids not seen in input are set to status deleted.
    decrypt=with the default key, verify=check whether its signed with io_key.
    rows with missing uuids or duplicate uuids and emails are ignored.
    """
    from ekklesia.data import DataTable, init_object
    from sqlalchemy.orm import aliased
    session = self.session
    import_dep = self.department_spec != 'implicit'  # extra Department data?
    Department = self.Department
    # departments are keyed either by numeric id or by name
    depprimary = 'id' if self.department_spec == 'number' else 'name'
    dquery = session.query(Department)
    if verify: verify = self.io_key

    def get_department(id, create=False):
        # Look up a department by its primary key dict; optionally create a
        # forward reference (parent/depth unresolved) when missing.
        # root dep: name,parent=None,depth=0
        # fwd-ref: name,parent=None,depth=None
        dep = dquery.filter_by(**id).first()
        if not dep and create and not dryrun:
            # create forward reference w/o depth, calculate it later
            dep = Department(parent=None, depth=None, **id)
            session.add(dep)
        return dep

    if depfile and import_dep and self.Department:
        # import separate department data
        columns = list(self.department_columns) + ['parent']
        if allfields: reqcolumns = columns
        else: reqcolumns = ('name', 'parent')
        reader = DataTable(columns, coltypes=self.department_types, required=reqcolumns,
                           dataformat='department', fileformat=format,
                           version=self.version, gpg=self.gpg)
        count = 0
        reader.open(depfile, 'r', encrypt=decrypt, sign=verify)
        columns = reader.get_columns()[0]
        seen = set()  # detect duplicates
        for data in reader:
            if not 'depth' in columns: data['depth'] = None
            # find existing parent by id, if not exist, create fwd-ref
            if not data['parent'] is None:
                data['parent'] = get_department(
                    {depprimary: data['parent']}, create=True)
            # find existing dep by id
            id = data[depprimary]
            if not id:
                self.warn("identifier missing")
                continue
            assert not id in seen, "department %s is duplicate" % id
            seen.add(id)
            dep = get_department({depprimary: id})
            if dep:
                # walk up the ancestor chain to reject cycles
                parent = data['parent']
                while parent:
                    assert parent != dep, "cycle detected for department %s" % id
                    parent = parent.parent
            # if not exist, create, set depth if parent.depth
            depth = data['parent'].depth if data['parent'] else None
            if not depth is None: depth += 1
            if data['depth'] is None: data['depth'] = depth
            elif not depth is None: assert data['depth'] >= depth, "invalid depth"
            count += 1
            if dryrun: continue
            if not dep: session.add(Department(**data))
            else: dep.update(**data)
        self.info('%i imported departments', count)
    elif not import_dep:  # implicit
        dquery.update(dict(depth=None))  # reset all depths
    Member = self.Member
    columns = list(self.member_columns) + ['department']
    if allfields: reqcolumns = columns
    else: reqcolumns = ['uuid', 'email']
    if not import_dep and 'department' in self.member_import and not 'parent' in columns:
        # implicit requires parent
        columns.append('parent')
        reqcolumns.append('parent')
    reader = DataTable(columns, coltypes=self.member_types, required=reqcolumns,
                       dataformat='member', fileformat=format,
                       version=self.version, gpg=self.gpg)
    count = 0
    reader.open(memberfile, 'r', encrypt=decrypt, sign=verify)
    columns = reader.get_columns()[0]
    #primary either memberid or uuid
    #primary must exist for every member and be unique
    #primary+email must be unique
    if 'memberno' in columns: primary, primarykey = 'memberno', Member.memberno
    else: primary, primarykey = 'uuid', Member.uuid
    mquery = session.query(Member)
    seen, depseen = set(), set()  # detect duplicates
    for data in reader:
        id = data[primary]
        if not id:
            self.warn("uuid missing")
            continue
        assert not id in seen, "member %s is duplicate" % id
        seen.add(id)
        if 'department' in data:
            depid = data['department']
            if import_dep:
                # all departments must exist
                dep = get_department({depprimary: depid})
                assert dep, "unknown department %s" % depid
            else:  # implicit departments
                # create departments from department and parent
                # find existing parent by id, if not exist, create fwd-ref
                parent = data['parent']
                del data['parent']
                if not parent is None:
                    parent = get_department({'name': parent}, create=True)
                # find existing dep by id
                dep = get_department({'name': depid})
                # if not exist, create, set depth if parent.depth
                if dep:
                    sup = parent
                    while sup:
                        assert sup != dep, "cycle detected for department %s" % depid
                        sup = sup.parent
                # NOTE(review): depid is added to depseen *before* the
                # duplicate assert below, so the assert can fire even on the
                # first occurrence of a changed parent — confirm intended.
                depseen.add(depid)
                if not dryrun:
                    if not dep:
                        dep = Department(name=depid, parent=parent, depth=None)
                        session.add(dep)
                    elif dep.parent != parent:
                        # if parent changed and not seen, update
                        assert not depid in depseen, "department %s is duplicate" % depid
                        dep.update(parent=parent)
            data['department'] = dep
        if 'email' in columns and not data['email']:
            data['email'] = None  # make sure it's None
        # find existing member
        member = mquery.filter_by(**{primary: id}).first()
        if data['email'] and not (member and member.email == data['email']):
            # email already used by other member?
            if session.query(
                    primarykey, Member.email).filter_by(email=data['email']).first():
                self.error("ignoring: duplicate email %s" % data['email'])
                continue
        count += 1
        if dryrun: continue
        if member:
            if member.email != data['email']: self.email_change(member, data)
            if data.get('status') == 'deleted': self.delete_member(member)
            member.update(**data)
        else:
            session.add(Member(**data))  #new
    self.info('%i imported members', count)
    if sync:  # deleted unseen members
        count = 0
        for member in session.query(Member).yield_per(1000):
            if member.status == MStatusType.deleted or member.uuid in seen: continue
            self.delete_member(member)
            member.status = MStatusType.deleted
            self.info("member %s deleted" % member.uuid)
            count += 1
        self.info('%i deleted members', count)
    # complete missing depths
    depalias = aliased(Department)
    fixdeps = session.query(Department.depth).filter_by(depth=None)
    if fixdeps.first():  # any departments with missing depths?
        fixdeps.filter_by(parent=None).update(dict(depth=0))  # set roots
        while fixdeps.first():  # fill from roots to leaves
            # each pass resolves departments whose parent depth is now known
            for sub in session.query(Department).join(depalias,Department.parent).\
                    filter(depalias.depth!=None).yield_per(1000):
                sub.depth = sub.parent.depth + 1
    if not dryrun: session.commit()
def table_io(self, ids, fmt, encrypt=False, sign=False, obj=False, missing=False, ignore=True, required=False, extra=False):
    # Round-trip helper: write a small table with id1's gpg context, then read
    # it back with id2's, asserting the rows survive unchanged.
    # obj: write/read Obj instances; missing/extra: feed rows with a missing or
    # surplus column and check that `ignore` controls acceptance.
    from ekklesia.data import objects_equal
    columns = ('a', 'b', 'c')
    coltypes = {'a': int, 'b': int, 'c': int}
    t = DataTable(columns, coltypes=coltypes, gpg=ids['id1'], fileformat=fmt, ignore=ignore, required=required)
    # json formats write into a dict, the rest into a text buffer
    if fmt in ('json', 'jsondict'):
        f = {}
    else:
        f = StringIO()
    t.open(f, 'w', receiver if encrypt else False, sign)
    if obj:
        t.write(Obj(a=0))
        t.write(Obj(a=1))
    elif missing:
        # a row lacking column 'c' must be accepted iff ignore is set
        try:
            t.write({'a': 0, 'b': 2})
            assert ignore
        except:
            assert not ignore
            return
    elif extra:
        # a row with surplus column 'd' must be accepted iff ignore is set
        try:
            t.write({'a': 0, 'b': 2, 'c': 3, 'd': 4})
            assert ignore
        except:
            assert not ignore
            return
    else:
        for i in range(3):
            t.write({'a': i, 'b': 2, 'c': 3})
    if fmt in ('json', 'jsondict'):
        # json close() returns the dict that was written into
        f2 = t.close()
        assert f is f2
    else:
        t.close()
        f.seek(0)  # rewind the buffer for reading back
    # read back with the second identity
    t = DataTable(columns, coltypes=coltypes, gpg=ids['id2'], fileformat=fmt)
    t.open(f, 'r', encrypt, sender if sign else False)
    i = 0
    for row in t:
        if obj:
            assert objects_equal(Obj(**row), Obj(a=i))
        else:
            if missing:
                # the absent column is filled with None
                assert row == {'a': 0, 'b': 2, 'c': None}
            else:
                assert row == {'a': i, 'b': 2, 'c': 3}
        i += 1
    t.close()
def sync_members(self, download=True, upload=True, dryrun=False, input=None,
                 output=None, invitations=None, format='csv'):
    """Synchronize members with the ID server.

    If download, read registered uuids/check fields (from input or the API)
    and write per-member responses; otherwise export all members. Then
    optionally export invitations and emails, upload the member and
    department data (or dump to output), and mail the email table.
    format applies to the emails and invitations export only.
    """
    from ekklesia.data import DataTable
    from ekklesia.backends import api_init
    from six.moves import cStringIO as StringIO
    import requests, datetime, json
    session = self.session
    Department, Member = self.Department, self.Member
    check_email = self.check_email
    check_member = self.check_member
    coltypes = self.member_types.copy()
    api = api_init(self.member_api._asdict())
    if download:
        # download registered uuids
        if input:
            input = json.load(input)
        else:
            resp = api.get(self.member_api.url)
            assert resp.status_code == requests.codes.ok, 'cannot download used uuids'
            input = resp.json()
        columns = ['uuid', 'echo']
        if check_member: columns.append(check_member)
        if check_email: columns.append(check_email)
        reader = DataTable(columns, required=['uuid'], coltypes=coltypes, gpg=self.gpg,
                           dataformat='member', fileformat=self.member_api.format,
                           version=self.version)
        reader.open(input, 'r', encrypt=self.member_api.encrypt,
                    sign=self.member_api.receiver)
    columns = list(self.member_sync)
    wcoltypes = dict(coltypes)
    if check_member: wcoltypes[check_member] = bool
    if check_email: wcoltypes[check_email] = bool
    wcoltypes['department'] = int  # replace by ids
    if 'location' in self.member_sync:
        wcoltypes['gpslat'] = wcoltypes['gpslng'] = float
        columns += ['gpslat', 'gpslng']
    if download:
        rcolumns, unknown = reader.get_columns()
        if unknown:
            self.warn(UnknownFieldsWarning('ignoring unknown fields %s', unknown))
        if check_member and check_member in rcolumns: columns.append(check_member)
        if check_email and check_email in rcolumns: columns.append(check_email)
        if 'echo' in rcolumns: columns += ['echo']
    encrypt = [self.member_api.receiver] if self.member_api.encrypt else False
    encryptmail = [self.email_receiver] if self.member_api.encrypt else False
    writer = DataTable(columns, coltypes=wcoltypes, gpg=self.gpg, dataformat='member',
                       fileformat=self.member_api.format, version=self.version)
    out = {}
    writer.open(out, 'w', encrypt=encrypt, sign=self.member_api.sign)
    if self.export_emails:
        ewriter = DataTable(('uuid', 'email'), coltypes=coltypes, gpg=self.gpg,
                            dataformat='member', fileformat=format, version=self.version)
        eout = output[2] if output else StringIO()
        ewriter.open(eout, 'w', encrypt=encryptmail, sign=self.member_api.sign)

    def export(member, extra=None):
        # BUGFIX: the default was a mutable dict (extra={}) which this
        # function mutates, so stale gps/department data leaked between
        # members in the no-download path. Use the None sentinel instead.
        if extra is None:
            extra = {}
        if 'location' in self.member_import:
            gps = self.gps_coord(self.get_location(member))
            if gps: extra['gpslat'], extra['gpslng'] = gps
        dep = self.get_department(member)
        if dep:
            extra['department'] = dep.id if self.department_spec == 'number' else dep.name
        writer.write(member, extra)
        if self.export_emails: ewriter.write(member)

    if invitations: registered = {}  # dict of registered uuids
    mquery = session.query(Member)
    count = 0
    check_memberno = 'memberno' in self.member_columns
    if download:
        seen = set()
        for data in reader:
            uuid = data['uuid']
            if not uuid:
                self.warn("uuid missing")
                continue
            if uuid in seen:
                self.warn("member %s is duplicate" % uuid)
                continue
            seen.add(uuid)
            member = mquery.filter_by(uuid=uuid).first()
            extra = {}
            if not member or (check_memberno and not member.memberno):  # deleted
                self.warn("member %s is unknown" % uuid)
                if check_member in columns and data[check_member]:
                    extra[check_member] = False
                if check_email in columns and data[check_email]:
                    extra[check_email] = False
                writer.write(Member(uuid=uuid, status=StatusType.deleted), extra)
                continue
            if check_email in columns and data[check_email]:
                extra[check_email] = member.email == data[check_email]
            if check_member in columns:
                if data[check_member]:
                    result = self.check_member_func(member, data[check_member])
                    if 'registered' in self.member_import:
                        if result and not member.registered:
                            member.registered = datetime.datetime.utcnow()
                else:
                    result = None
                extra[check_member] = result
            if 'echo' in columns: extra['echo'] = data['echo']
            if not dryrun: export(member, extra)
            if invitations: registered[member.uuid] = True
            count += 1
    else:
        for member in mquery:
            if check_memberno and not member.memberno: continue  # deleted
            if not dryrun: export(member)
            count += 1
    self.info('%i members exported', count)
    if not dryrun and 'registered' in self.member_import: session.commit()
    writer.close()
    if self.export_emails: ewriter.close()
    if invitations:
        iwriter = DataTable(('uuid', 'email'), coltypes=coltypes, gpg=self.gpg,
                            dataformat='member', fileformat=format, version=self.version)
        # extra encrypt,sign
        iwriter.open(invitations, 'w', encrypt=encryptmail, sign=self.member_api.sign)
        if check_memberno:
            query = session.query(Member.uuid,Member.email,Member.memberno).\
                filter(Member.email!=None,Member.memberno!=0)
        else:
            query = session.query(Member.uuid, Member.email).filter(Member.email != None)
        count = 0
        for member in query:
            if member.uuid in registered: continue  # skip registered
            iwriter.write(member)
            count += 1
        iwriter.close()
        self.info('%i invitations exported', count)
    if not upload: return
    # upload department tree alongside the members
    dwriter = DataTable(('id', 'name', 'parent', 'depth'), gpg=self.gpg,
                        dataformat='department', fileformat=self.member_api.format,
                        version=self.version)
    dout = {}
    dwriter.open(dout, 'w', encrypt=encrypt, sign=self.member_api.sign)
    for dep in session.query(Department).order_by(Department.depth, Department.id):
        if dep.parent:
            extra = dict(parent=dep.parent.id if self.department_spec == 'number' else dep.parent.name)
        else:
            extra = {}
        dwriter.write(dep, extra)
    dwriter.close()
    if output:
        json.dump(out, output[0])
        json.dump(dout, output[1])
    elif not dryrun:
        r = api.post(self.member_api.url, json=dict(members=out, departments=dout))
        assert r.status_code == requests.codes.ok, 'cannot upload data'
    if not self.export_emails or output: return
    # finally mail the encrypted email table to the receiver
    from ekklesia.mail import create_mail, smtp_init
    smtp = smtp_init(self.smtpconfig)
    smtp.open()
    self.info('sending email data')
    msg = create_mail(self.gpgconfig['sender'], self.email_receiver,
                      'Email data', eout.getvalue())
    eout.close()
    msg, results = self.gpg.encrypt(msg, default_key=True, verify=True, inline=True)
    assert msg and results, 'error encrypting message'
    if not dryrun: smtp.send(msg)
    smtp.close()
def import_invitations(self, input, decrypt=False, verify=False, allfields=False, dryrun=False, format='csv'):
    """Import invitation data from input.

    allfields requires all invite columns; otherwise only uuid (+email
    without a member class) are required. Rows with missing/duplicate uuids
    are skipped; an empty email schedules the invitation for deletion.
    decrypt/verify are passed through to the reader.
    """
    from ekklesia.data import DataTable
    from datetime import datetime
    session = self.session
    Invitation = self.Invitation
    membercls = self.member_class
    # BUGFIX: copy before appending — the original assigned
    # self.invite_columns and appended 'uuid' to it, mutating the shared
    # attribute on every call (cf. the list(columns) copy in the sibling
    # import_invitations implementation).
    columns = list(self.invite_columns)
    if membercls:
        columns.append('uuid')
    if allfields:
        reqcolumns = columns
    elif membercls:
        # BUGFIX: reqcolumns was referenced before assignment here
        # (reqcolumns.append('uuid') -> NameError); build the list instead.
        reqcolumns = ['uuid']
    else:
        reqcolumns = ['uuid', 'email']
    reader = DataTable(columns, coltypes=self.invite_types, required=reqcolumns,
                       dataformat='invitation', fileformat=format,
                       version=self.version, gpg=self.gpg)
    reader.open(input, 'r', encrypt=decrypt, sign=verify)
    columns, tmp = reader.get_columns()
    # track lastchange ourselves only if status is imported without it
    recordchange = 'status' in columns and not 'lastchange' in columns
    iquery = session.query(Invitation)
    count = 0
    seen = set()
    for data in reader:
        uuid = data['uuid']
        if not uuid:
            self.warn("uuid missing")
            continue
        assert not uuid in seen, "member %s is duplicate" % uuid
        seen.add(uuid)
        if membercls:
            member = session.query(membercls).filter_by(uuid=uuid).first()
            if member is None:
                self.warn("uuid %s not found" % uuid)
                continue
            if not member.email:
                # email removed, disable invitation
                if member.invitation:
                    self.info("scheduling invitation for uuid '%s' for deletion",
                              member.uuid)
                    if not dryrun:
                        member.invitation.status = StatusType.deleted
                        member.invitation.lastchange = datetime.utcnow()
                continue
            count += 1
            if dryrun: continue
            if member.invitation is None:
                # create a new invitation
                session.add(Invitation(member=member, **data))  #new
            else:
                if recordchange and data['status'] != member.invitation.status:
                    data['lastchange'] = datetime.utcnow()
                member.invitation.__init__(**data)  #update inv
        else:
            inv = iquery.filter_by(uuid=uuid).first()
            if not data['email']:
                # email removed, disable invitation
                if inv is None:
                    self.warn("uuid %s not found" % uuid)
                    continue
                if inv.status == StatusType.deleted: continue
                self.info("scheduling invitation for uuid '%s' for deletion", inv.uuid)
                if not dryrun:
                    inv.status = StatusType.deleted
                    inv.lastchange = datetime.utcnow()
                continue
            # check whether email already used
            if not inv or inv.email != data['email']:
                # fixme: what if emails swapped?
                email = iquery.filter_by(email=data['email']).first()
                if email and (not inv or inv.uuid != email.uuid):
                    self.error("ignoring: duplicate email %s" % data['email'])
                    continue
            count += 1
            if dryrun: continue
            if inv:
                # if email changed and code has been sent, reset invcode and
                # lastchange, unless allfields is set
                needreset = not allfields and inv.status == StatusType.uploaded and \
                    inv.sent == SentStatusType.sent and 'email' in data and \
                    data['email'] != inv.email and \
                    (not 'code' in data or data['code'] == inv.code)
                if not needreset and recordchange and data['status'] != inv.status:
                    data['lastchange'] = datetime.utcnow()
                inv.__init__(**data)  #update inv
                if needreset: inv.reset()
            else:
                session.add(Invitation(**data))  #new
    self.info('%i imported invitations', count)
    if not dryrun: session.commit()
def test_table_bad_open(self):
    """Opening a table with an unknown mode must raise AssertionError."""
    table = DataTable(('a',))
    with raises(AssertionError):
        table.open(StringIO(), 'x')
def export_members(self,output,encrypt=None,sign=False,allfields=False,format='csv'):
    """export data, sorted by primary (uuid, or id with member_no), to output.
    allfields is used for backup and writes all columns.
    without allfields, data for the invitation DB is generated.
    output is [members,departments] if allfields, else just [members].
    encrypt=to io_key, sign=with default key.
    """
    from ekklesia.data import DataTable
    session = self.session
    Department, Member = self.Department, self.Member
    if allfields:
        # full backup: all member columns plus the departments list
        columns = list(self.member_columns)+['departments']
        if not self.member_no: columns.remove('id')
        dataformat = 'member'
    else:
        # restricted export for the invitation DB
        columns = ('uuid','email')
        dataformat = 'invitation'
    writer = DataTable(columns,coltypes=self.member_types,gpg=self.gpg,
        dataformat=dataformat,fileformat=format,version=self.version)
    if encrypt: encrypt = [self.io_key] # encrypt to the I/O exchange key
    writer.open(output[0],'w',encrypt=encrypt,sign=sign)
    count = 0
    if allfields:
        query = session.query(Member).order_by(Member.id if self.member_no else Member.uuid)
    else:
        query = session.query(Member.uuid,Member.email,Member.registered).order_by(Member.uuid)
        if 'registered' in self.member_import:
            query = query.filter_by(registered=None) # don't export registered
    for member in query.yield_per(1000):
        # BUGFIX: reset extra per row — previously it was initialized once
        # before the loop, so a member without departments inherited the
        # stale departments of the preceding row.
        extra = {}
        if allfields and member.departments:
            # FIXME: use get_department?
            if self.department_spec=='number':
                depids = [department.id for department in member.departments]
            else:
                depids = [department.name for department in member.departments]
            extra = dict(departments=depids)
        writer.write(member,extra)
        count += 1
    writer.close()
    self.info('%i exported members', count)
    if not allfields:
        return # invitation export carries no department table
    # second output file: department tree, ordered parents before children
    dwriter = DataTable(['id','name','parent','depth'],gpg=self.gpg,
        dataformat='department',fileformat=format,version=self.version)
    dwriter.open(output[1],'w',encrypt=encrypt,sign=sign)
    count = 0
    for dep in session.query(Department).order_by(Department.depth,Department.id).yield_per(1000):
        if dep.parent:
            extra = dict(parent=dep.parent.id if self.department_spec=='number' else dep.parent.name)
        else:
            extra = {}
        dwriter.write(dep,extra)
        count += 1
    dwriter.close()
    self.info('%i exported departments', count)
def test_table_bad_write(self):
    """Writing to a table opened for reading must raise AssertionError."""
    buf = StringIO()
    writer = DataTable(('a',))
    writer.open(buf, 'w')
    writer.write({'a': 0})
    writer.close()
    buf.seek(0)
    reader = DataTable(('a',))
    reader.open(buf, 'r')
    with raises(AssertionError):
        reader.write({'a': 1})
def update_members(members, departments, crypto=True):
    """Apply the backend's member and department sync data to the Django DB.

    departments are mirrored into NestedGroup (keyed by syncid), members
    into Account; newmember registrations are confirmed or failed, and
    departments no longer present are deleted. Returns 'ok'.
    """
    from ekklesia.data import DataTable
    from idapi.mails import gnupg_import_init
    from accounts.models import Account, NestedGroup, Invitation
    import six
    if crypto:
        gpg, verify, decrypt, sign, encrypt = api_crypto('members')
        if crypto is True: crypto = gpg  # not debug
    else:
        verify = decrypt = sign = encrypt = False
    # candidates for deletion: all currently synced groups; survivors are
    # discarded from this set as they are seen in the input
    deldeps = set(
        NestedGroup.objects.exclude(syncid=None).values_list('syncid', flat=True))
    reader = DataTable(['id', 'parent', 'name', 'depth'], gpg=crypto,
                       remap={'id': 'syncid'}, dataformat='department',
                       fileformat='json', version=[1, 0])
    reader.open(departments, 'r', encrypt=decrypt, sign=verify)
    newdeps = set()  # check duplicate
    deps = []
    # FIXME also work with name ids
    for dep in reader:
        syncid = dep['syncid']
        if syncid in newdeps: raise KeyError
        newdeps.add(syncid)
        deps.append(dep)
    # check whether all parents exist
    for dep in deps:
        parent = dep['parent']
        if parent and not parent in newdeps:
            #print parent,'missing'
            raise KeyError  # parent missing
    required = ['uuid', 'status']
    coltypes = dict(uuid=str, status=str, verified=bool, departments=(int, ))
    twofactor = settings.TWO_FACTOR_SIGNUP
    if twofactor:
        # two-factor signups additionally carry an activation result
        coltypes['activate'] = str
        required.append('activate')
    reader = DataTable(coltypes=coltypes, required=required, gpg=crypto,
                       dataformat='member', fileformat='json', version=[1, 0])
    reader.open(members, 'r', encrypt=decrypt, sign=verify)
    newmembers = set()  # check duplicate
    stati = {
        'deleted': Account.DELETED,
        'member': Account.MEMBER,
        'eligible': Account.ELIGIBLE
    }
    data = []
    for member in reader:
        uuid = member['uuid']
        if uuid in newmembers: raise KeyError
        newmembers.add(uuid)
        status = stati.get(member['status'])
        assert not status is None, "invalid status"
        member['status'] = status
        member['is_active'] = status != Account.DELETED
        data.append(member)
    done = set()
    # process departments in topological order: defer children whose parent
    # has not been created/updated yet
    while len(deps):
        todo = []
        # update from root to leaves
        for dep in deps:
            dep['level'] = dep.pop('depth')
            parent = dep.pop('parent')
            if parent:
                if not parent in done:
                    todo.append(dep)  # parent not processed yet, retry later
                    continue
                parent = NestedGroup.objects.get(syncid=parent)
            done.add(dep['syncid'])
            try:
                obj = NestedGroup.objects.get(syncid=dep['syncid'])
            except NestedGroup.DoesNotExist:
                if parent: parent.add_child(**dep)
                else: NestedGroup.add_root(**dep)
                continue
            deldeps.discard(obj.syncid)  # keep later
            changed = False
            for k, v in six.iteritems(dep):
                if not changed and getattr(obj, k) == v: continue
                setattr(obj, k, v)
                changed = True
            if changed: obj.save()
            # reposition the node in the tree if its parent changed
            move = False
            if obj.is_root(): move = parent
            elif not parent:
                # child becomes root
                obj.move(obj.get_root(), 'last-sibling')
            else: move = obj.parent != parent
            if move:
                obj.move(parent, 'last-child')
                obj = NestedGroup.objects.get(pk=obj.pk)
        deps = todo
    reg_uuids, fail_uuids = [], []
    for member in data:
        uuid = member['uuid']
        try: obj = Account.objects.get(uuid=uuid)
        except: continue  # Account.DoesNotExist
        try:
            if obj.email_unconfirmed: continue  # ignore unconfirmed email
        except AttributeError: pass
        if obj.status == Account.NEWMEMBER:
            # newmember: resolve the pending registration
            try: inv = Invitation.objects.get(uuid=uuid)
            except Invitation.DoesNotExist: inv = None
            assert inv and inv.status == Invitation.REGISTERING, "invalid state for newmember"
            if member['status'] == Account.DELETED:
                obj.delete()
                inv.delete()
                continue
            # check whether activation failed or to be deleted
            if twofactor and member['activate'] != True:
                inv.status = Invitation.FAILED
                inv.save()
                obj.delete()
                fail_uuids.append(uuid)
                continue
            inv.status = Invitation.REGISTERED
            inv.save()
            member['is_active'] = True
            reg_uuids.append(uuid)
        if twofactor: del member['activate']
        changed = False
        if 'departments' in member:
            deps = member['departments']
            del member['departments']
            # server-only groups are not changed by the sync
            indep_groups = obj.nested_groups.filter(syncid=None)
            # union existing ngroups + dep
            obj.nested_groups = indep_groups | NestedGroup.objects.filter(
                syncid__in=deps)
            changed = True
        for k, v in six.iteritems(member):
            if not changed and getattr(obj, k) == v: continue
            setattr(obj, k, v)
            changed = True
        if changed: obj.save()
    # groups never seen in this sync are removed
    NestedGroup.objects.filter(syncid__in=deldeps).delete()
    from accounts.models import notify_registration
    if reg_uuids: notify_registration(status='registered', uuid=reg_uuids)
    if fail_uuids: notify_registration(status='failed', uuid=fail_uuids)
    return 'ok'
def table_io(self,fmt,obj=False, missing=False,ignore=True,required=False,extra=False):
    """Round-trip helper: write a small table in the given format and read it
    back, asserting the rows survive unchanged.

    obj writes Obj instances; missing/extra feed rows with a missing or
    surplus column and check that `ignore` controls acceptance.
    """
    from ekklesia.data import objects_equal
    names = ('a','b','c')
    types = {'a':int,'b':int,'c':(int,)}
    table = DataTable(names,coltypes=types,fileformat=fmt,ignore=ignore,required=required)
    json_like = fmt in ('json','jsondict')
    sink = {} if json_like else StringIO()
    table.open(sink,'w')
    if obj:
        for n in (0,1):
            table.write(Obj(a=n))
    elif missing or extra:
        bad_row = {'a':0,'b':2} if missing else {'a':0,'b':2,'c':[3,4],'d':4}
        try:
            table.write(bad_row)
            assert ignore
        except:
            assert not ignore
            return
    else:
        for n in range(3):
            table.write({'a':n,'b':2,'c':[3,4]})
    if json_like:
        # close() hands back the dict that was written into
        assert table.close() is sink
    else:
        table.close()
        sink.seek(0)
    table = DataTable(names,coltypes=types,fileformat=fmt)
    table.open(sink,'r')
    for idx, row in enumerate(table):
        if obj:
            assert objects_equal(Obj(**row),Obj(a=idx))
        elif missing:
            # the absent list column comes back empty
            assert row == {'a':0,'b':2,'c':[]}
        else:
            assert row == {'a':idx,'b':2,'c':[3,4]}
    table.close()
def test_table_bad_init(self):
    """An unknown fileformat must be rejected at construction time."""
    with raises(AssertionError):
        DataTable(('a',), fileformat='bad')
def import_invitations(self,input,decrypt=False,verify=False,
        allfields=False,sync=False,dryrun=False,format='csv'):
    """import data from input.
    allfields is used for restore and requires all columns.
    if sync, uuids not seen in input are set to status deleted.
    decrypt=with the default key, verify=check whether its signed with io_key.

    Each input row is keyed by uuid; duplicates abort via assert.
    With a member backend (self.member_class set), rows are matched to
    members and their attached invitations; without one, rows are matched
    directly against Invitation by uuid/email.  dryrun counts but does not
    modify; nothing is committed when dryrun is set.
    """
    from ekklesia.data import DataTable
    session = self.session
    Invitation = self.Invitation
    membercls = self.member_class
    columns = self.invite_columns
    if membercls:
        # member-backed: rows carry the member uuid, not the invitation id
        columns = list(columns)+['uuid']
        columns.remove('id')
    if allfields: reqcolumns = columns  # restore: every column required
    elif membercls: reqcolumns = ['uuid']
    else: reqcolumns = ['uuid','email']
    reader = DataTable(columns,coltypes=self.invite_types,required=reqcolumns,
        dataformat='invitation',fileformat=format,version=self.version,gpg=self.gpg)
    if not allfields and verify: verify = self.io_key
    reader.open(input,'r',encrypt=decrypt,sign=verify)
    columns = reader.get_columns()[0]  # actual columns present in the input
    iquery = session.query(Invitation)
    count = 0
    seen = set()  # uuids present in the input, used by the sync pass below
    for data in reader:
        uuid = data['uuid']
        if not uuid:
            self.warn("uuid missing")
            continue
        assert not uuid in seen, "member %s is duplicate" % uuid
        seen.add(uuid)
        if membercls:
            member = session.query(membercls).filter_by(uuid=uuid).first()
            if member is None:
                self.warn("uuid %s not found" % uuid)
                continue
            if not member.email: # email removed, disable invitation
                inv = member.invitation
                if inv and not inv.status in (IStatusType.deleted,IStatusType.registered):
                    self.info("scheduling invitation for uuid '%s' for deletion", member.uuid)
                    if not dryrun: inv.delete()
                continue
            count += 1
            if dryrun: continue
            if member.invitation is None:
                # create a new invitation
                session.add(Invitation(member=member,**data)) #new
            else:
                # input lacks 'sent': new/uploaded invitations revert to unsent
                if not 'sent' in columns and data['status'] in (IStatusType.new,IStatusType.uploaded):
                    data['sent'] = ISentStatusType.unsent
                member.invitation.update(**data) #update inv
        else:
            inv = iquery.filter_by(uuid=uuid).first()
            if not data['email']: # email removed, disable invitation
                if inv is None:
                    self.warn("uuid %s not found" % uuid)
                    continue
                if inv.status in (IStatusType.deleted,IStatusType.registered): continue
                self.info("scheduling invitation for uuid '%s' for deletion", inv.uuid)
                if not dryrun: inv.delete()
                continue
            # check whether email already used
            if not inv or inv.email != data['email']: # fixme: what if emails swapped?
                email = iquery.filter_by(email=data['email']).first()
                if email and (not inv or inv.uuid != email.uuid):
                    self.error("ignoring: duplicate email %s" % data['email'])
                    continue
            count += 1
            if dryrun: continue
            if inv:
                # if email changed and code has been sent, reset invcode and lastchange, unless allfields is set
                needreset = not allfields and inv.status==IStatusType.uploaded and \
                    inv.sent==ISentStatusType.sent and 'email' in data and data['email']!=inv.email and \
                    (not 'code' in data or data['code']==inv.code)
                if not needreset: data['code'] = inv.code # preserve
                inv.update(**data)
                if needreset: inv.reset()
            else:
                session.add(Invitation(**data)) #new
    self.info('%i imported invitations', count)
    if sync: # deleted unseen invitations
        count = 0
        for inv in session.query(membercls if membercls else Invitation).yield_per(1000):
            uuid = inv.uuid
            if uuid in seen: continue
            if membercls:
                inv = inv.invitation
                if not inv: continue
            if inv.status==IStatusType.deleted: continue
            inv.status = IStatusType.deleted
            self.info("invitation %s deleted" % uuid)
            count += 1
        self.info('%i deleted invitations', count)
    if not dryrun: session.commit()
def test_table_bad_open(self):
    """Opening a table with an unsupported mode must raise."""
    table = DataTable(('a',))
    with raises(AssertionError):
        table.open(StringIO(), 'x')
def sync_invitations(self,download=True,upload=True,dryrun=False,quick=False,input=None,output=None):
    """Sync invitations with the ID server.

    download -- fetch used/registered invite codes from the server (or read
        them from `input`, an open JSON file) and apply the status changes
        locally, writing confirmations into the reply.
    upload   -- send replies, resettable failed codes and (unless quick)
        new invitations back to the server (or dump them to `output`).
    dryrun   -- process everything but neither commit nor post.
    quick    -- only consider changed entries; skips uploading new codes.
    """
    from ekklesia.backends import api_init
    from ekklesia.data import DataTable
    import requests, json
    session = self.session
    Invitation = self.Invitation
    membercls = self.member_class
    check_email = self.invite_check_email
    api = api_init(self.invite_api._asdict())
    reply = False # whether server requested reply
    # fix: initialize here (not only in the download branch) so the
    # failed-codes reset loop below works with download=False
    seen = set()
    if download:
        # download registered uuids(used codes), mark used
        if input: input = json.load(input)
        if not input: # pragma: no cover
            url = self.invite_api.url
            if quick: url+='?changed=1'
            resp = api.get(url)
            if resp.status_code != requests.codes.ok:
                if self.debugging: open('invdown.html','w').write(resp.content)
                assert False, 'cannot download used invite codes'
            input = resp.json()
            if not input:
                self.warn("input is empty")
                return
        columns = ['uuid','status','echo']
        if check_email: columns.append(check_email)
        reader = DataTable(columns,coltypes=self.invite_types,required=('uuid','status'),gpg=self.gpg,
            dataformat='invitation',fileformat=self.invite_api.format,version=self.version)
        sign = self.invite_api.receiver if self.invite_api.sign else False
        reader.open(input,'r',encrypt=self.invite_api.encrypt,sign=sign)
        rcolumns, unknown = reader.get_columns()
        if unknown: self.warn('ignoring unknown fields',unknown)
        # the server requests a reply if it sent an echo or check-email column
        reply = 'echo' in rcolumns # reply?
        if check_email: reply = reply or check_email in rcolumns
    if upload:
        # upload responses and non-uploaded,unused uuid&code
        columns = ['uuid','code','status']
        coltypes = self.invite_types.copy()
        if check_email: coltypes[check_email] = bool
        if download and reply:
            if check_email and check_email in rcolumns: columns.append(check_email)
            if 'echo' in rcolumns: columns.append('echo')
        writer = DataTable(columns,coltypes=coltypes,gpg=self.gpg,
            dataformat='invitation',fileformat=self.invite_api.format,version=self.version)
        encrypt = [self.invite_api.receiver] if self.invite_api.encrypt else False
        out = {}
        writer.open(out,'w',encrypt=encrypt,sign=self.invite_api.sign)
    if download:
        # process download and generate reply
        if membercls: query = session.query(membercls)
        else: query = session.query(Invitation)
        count = 0
        for data in reader: # only uploaded codes, reply optional
            uuid = data['uuid']
            if not uuid:
                self.warn("uuid missing")
                continue
            if uuid in seen:
                self.warn("member %s is duplicate" % uuid)
                continue
            seen.add(uuid)
            status = data['status']
            if not status in ('registered','failed','new') or (quick and status=='new'):
                self.warn("invalid status %s for %s" % (status,uuid))
                continue
            inv = query.filter_by(uuid=uuid).first()
            extra = {}
            if membercls and inv: inv = inv.invitation
            if not inv:
                # unknown locally: tell the server to delete its copy
                self.error("member %s is unknown" % data['uuid'])
                if check_email in columns and data[check_email]:
                    extra[check_email] = False
                extra['uuid'] = uuid
                writer.write(Invitation(status=IStatusType.deleted,code=''),extra)
                continue
            status = data['status']
            # compare status
            # new on new -> uploaded
            # new on uploaded -> ignore
            # registered/failed on uploaded -> registered/failed
            # registered/failed on same -> ignore
            # deleted on failed -> new
            if status == IStatusType.new:
                if inv.status == IStatusType.new:
                    inv.status = IStatusType.uploaded
                    inv.sent = ISentStatusType.unsent
                elif inv.status != IStatusType.uploaded:
                    self.error("bad status %s for uuid %s, current %s",
                        status,data['uuid'],inv.status)
                    continue
            elif inv.status == IStatusType.uploaded: # status in registered/failed
                if inv.status != status: inv.change()
                inv.status = status # upload confirmed or failed registration
                inv.sent = ISentStatusType.unsent
            elif status != inv.status:
                self.error("bad status %s for uuid %s, current %s",
                    status, data['uuid'],inv.status)
                continue
            if upload and (status != IStatusType.new or reply):
                # write response for uploaded
                if check_email and check_email in columns:
                    # fix: was 'member_class' (NameError); the local is membercls
                    if membercls: extra[check_email] = inv.member.email == data[check_email]
                    else: extra[check_email] = inv.email == data[check_email]
                if 'echo' in columns: extra['echo'] = data['echo']
                if membercls: extra['uuid'] = data['uuid']
                writer.write(inv,extra)
            count += 1
        self.info('%i codes used', count)
        if not dryrun: session.commit()
    if not upload: return
    # process failed, which have already been deleted on the server and are ready for reset
    count = 0
    query = session.query(Invitation).filter_by(status=IStatusType.failed,
        sent=ISentStatusType.sent)
    for inv in query.yield_per(1000):
        extra = {}
        if membercls: uuid = inv.member.uuid
        else: uuid = inv.uuid
        if uuid in seen: continue # already replied
        inv.reset()
        count += 1
    self.info('%i codes resetted', count)
    if not dryrun: session.commit()
    if not quick: # append new invitations
        count = 0
        query = session.query(Invitation).filter_by(status=IStatusType.new)
        for inv in query.yield_per(1000):
            extra = {}
            if membercls:
                uuid = inv.member.uuid
                extra['uuid'] = uuid
            else: uuid = inv.uuid
            writer.write(inv,extra)
            count += 1
        self.info('%i new codes uploaded', count)
    writer.close()
    if output: json.dump(out,output)
    elif not dryrun: # pragma: no cover
        resp = api.post(self.invite_api.url,json=out)
        if resp.status_code != requests.codes.ok:
            if self.debugging: open('invup.html','w').write(resp.content)
            assert False, 'cannot upload data'
def test_table_bad_read(self):
    """Iterating a table opened for writing must raise."""
    table = DataTable(('a',))
    table.open(StringIO(), 'w')
    with raises(AssertionError):
        for _ in table:
            pass