Example No. 1
 def import_invitations(self,input,decrypt=False,verify=False,
         allfields=False,sync=False,dryrun=False,format='csv'):
     """import data from input.
     allfields is used for restore and requires all columns.
     if sync, uuids not seen in input are set to status deleted.
     decrypt=decrypt with the default key, verify=check whether the input is signed with io_key.
     """
     from ekklesia.data import DataTable
     session = self.session
     Invitation = self.Invitation
     membercls = self.member_class
     columns = self.invite_columns
     if membercls:
         columns = list(columns)+['uuid']
         columns.remove('id')
     if allfields: reqcolumns = columns
     elif membercls: reqcolumns = ['uuid']
     else: reqcolumns = ['uuid','email']
     reader = DataTable(columns,coltypes=self.invite_types,required=reqcolumns,
         dataformat='invitation',fileformat=format,version=self.version,gpg=self.gpg)
     if not allfields and verify: verify = self.io_key
     reader.open(input,'r',encrypt=decrypt,sign=verify)
     columns = reader.get_columns()[0]
     iquery = session.query(Invitation)
     count = 0
     seen = set()
     for data in reader:
         uuid = data['uuid']
         if not uuid:
             self.warn("uuid missing")
             continue
         assert not uuid in seen, "member %s is duplicate" % uuid
         seen.add(uuid)
         if membercls:
             member = session.query(membercls).filter_by(uuid=uuid).first()
             if member is None:
                 self.warn("uuid %s not found" % uuid)
                 continue
             if not member.email: # email removed, disable invitation
                 inv = member.invitation
                 if inv and not inv.status in (IStatusType.deleted,IStatusType.registered):
                     self.info("scheduling invitation for uuid '%s' for deletion", member.uuid)
                     if not dryrun: inv.delete()
                 continue
             count += 1
             if dryrun: continue
             if member.invitation is None: # create a new invitation
                 session.add(Invitation(member=member,**data)) #new
             else:
                 if not 'sent' in columns and data['status'] in (IStatusType.new,IStatusType.uploaded):
                     data['sent'] = ISentStatusType.unsent
                 member.invitation.update(**data) #update inv
         else:
             inv = iquery.filter_by(uuid=uuid).first()
             if not data['email']: # email removed, disable invitation
                 if inv is None:
                     self.warn("uuid %s not found" % uuid)
                     continue
                 if inv.status in (IStatusType.deleted,IStatusType.registered): continue
                 self.info("scheduling invitation for uuid '%s' for deletion", inv.uuid)
                 if not dryrun: inv.delete()
                 continue
             # check whether email already used
             if not inv or inv.email != data['email']:
                 # fixme: what if emails swapped?
                 email = iquery.filter_by(email=data['email']).first()
                 if email and (not inv or inv.uuid != email.uuid):
                     self.error("ignoring: duplicate email %s" % data['email'])
                     continue
             count += 1
             if dryrun: continue
             if inv:
                 # if email changed and code has been sent, reset invcode and lastchange, unless allfields is set
                 needreset = not allfields and inv.status==IStatusType.uploaded and \
                      inv.sent==ISentStatusType.sent and 'email' in data and data['email']!=inv.email and \
                      (not 'code' in data or data['code']==inv.code)
                 if not needreset: data['code'] = inv.code # preserve
                 inv.update(**data)
                 if needreset: inv.reset()
             else:
                 session.add(Invitation(**data)) #new
     self.info('%i imported invitations', count)
     if sync: # delete unseen invitations
         count = 0
         for inv in session.query(membercls if membercls else Invitation).yield_per(1000):
             uuid = inv.uuid
             if uuid in seen: continue
             if membercls:
                 inv = inv.invitation
                 if not inv: continue
             if inv.status==IStatusType.deleted: continue
             inv.status = IStatusType.deleted
             self.info("invitation %s deleted" % uuid)
             count += 1
         self.info('%i deleted invitations', count)
     if not dryrun: session.commit()
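
A minimal usage sketch for the import above. All names here are assumptions rather than part of the source: `backend` stands for an already-configured invitation backend instance, the file name is illustrative, and whether DataTable.open expects a path or an open file object is not shown in this excerpt.

    # first pass: dry run, validates the data without touching the database
    with open('invitations.csv') as infile:
        backend.import_invitations(infile, dryrun=True, format='csv')
    # second pass: apply the import; sync=True additionally marks uuids missing from the file as deleted
    with open('invitations.csv') as infile:
        backend.import_invitations(infile, sync=True, format='csv')
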
Example No. 2
 def sync_invitations(self,download=True,upload=True,dryrun=False,quick=False,input=None,output=None):
     "sync invitations with ID server"
     from ekklesia.backends import api_init
     from ekklesia.data import DataTable
     from six.moves import cStringIO as StringIO
     import requests, json
     session = self.session
     Invitation = self.Invitation
     membercls = self.member_class
     check_email = self.invite_check_email
     api = api_init(self.invite_api._asdict())
     reply = False # whether server requested reply
     if download: # download registered uuids(used codes), mark used
         if input: input = json.load(input)
         if not input: # pragma: no cover
             url = self.invite_api.url
             if quick: url+='?changed=1'
             resp = api.get(url)
             if resp.status_code != requests.codes.ok:
                 if self.debugging: open('invdown.html','w').write(resp.content)
                 assert False, 'cannot download used invite codes'
             input = resp.json()
         if not input:
             self.warn("input is empty")
             return
         columns = ['uuid','status','echo']
         if check_email: columns.append(check_email)
         reader = DataTable(columns,coltypes=self.invite_types,required=('uuid','status'),gpg=self.gpg,
             dataformat='invitation',fileformat=self.invite_api.format,version=self.version)
         sign = self.invite_api.receiver if self.invite_api.sign else False
         reader.open(input,'r',encrypt=self.invite_api.encrypt,sign=sign)
         rcolumns, unknown = reader.get_columns()
         if unknown: self.warn('ignoring unknown fields',unknown)
         reply = 'echo' in rcolumns # reply?
         if check_email: reply = reply or check_email in rcolumns
     if upload:
         # upload responses and not-yet-uploaded, unused uuid & code pairs
         columns = ['uuid','code','status']
         coltypes = self.invite_types.copy()
         if check_email: coltypes[check_email] = bool
         if download and reply:
             if check_email and check_email in rcolumns: columns.append(check_email)
             if 'echo' in rcolumns: columns.append('echo')
         writer = DataTable(columns,coltypes=coltypes,gpg=self.gpg,
                 dataformat='invitation',fileformat=self.invite_api.format,version=self.version)
         encrypt = [self.invite_api.receiver] if self.invite_api.encrypt else False
         out = {}
         writer.open(out,'w',encrypt=encrypt,sign=self.invite_api.sign)
     if download: # process download and generate reply
         if membercls: query = session.query(membercls)
         else: query = session.query(Invitation)
         count = 0
         seen = set()
         for data in reader: # only uploaded codes, reply optional
             uuid = data['uuid']
             if not uuid:
                 self.warn("uuid missing")
                 continue
             if uuid in seen:
                 self.warn("member %s is duplicate" % uuid)
                 continue
             seen.add(uuid)
             status = data['status']
             if not status in ('registered','failed','new') or (quick and status=='new'):
                 self.warn("invalid status %s for %s" % (status,uuid))
                 continue
             inv = query.filter_by(uuid=uuid).first()
             extra = {}
             if membercls and inv:
                 inv = inv.invitation
             if not inv:
                 self.error("member %s is unknown" % data['uuid'])
                 if check_email in columns and data[check_email]:
                     extra[check_email] = False
                 extra['uuid'] = uuid
                 writer.write(Invitation(status=IStatusType.deleted,code=''),extra)
                 continue
             status = data['status'] # compare status
             # new on new -> uploaded
             # new on uploaded -> ignore
             # registered/failed on uploaded -> registered/failed
             # registered/failed on same -> ignore
             # deleted on failed -> new
             if status == IStatusType.new:
                 if inv.status == IStatusType.new:
                     inv.status = IStatusType.uploaded
                     inv.sent = ISentStatusType.unsent
                 elif inv.status != IStatusType.uploaded:
                     self.error("bad status %s for uuid %s, current %s",
                          status,data['uuid'],inv.status)
                     continue
             elif inv.status == IStatusType.uploaded: # status in registered/failed
                 if inv.status != status: inv.change()
                 inv.status = status # upload confirmed or failed registration
                 inv.sent = ISentStatusType.unsent
             elif status != inv.status:
                 self.error("bad status %s for uuid %s, current %s",
                     status, data['uuid'],inv.status)
                 continue
             if upload and (status != IStatusType.new or reply): # write response for uploaded
                 if check_email and check_email in columns:
                     if membercls: extra[check_email] = inv.member.email == data[check_email]
                     else: extra[check_email] = inv.email == data[check_email]
                 if 'echo' in columns: extra['echo'] = data['echo']
                 if membercls: extra['uuid'] = data['uuid']
                 writer.write(inv,extra)
             count += 1
         self.info('%i codes used', count)
         if not dryrun: session.commit()
     if not upload: return
     # process failed invitations, which have already been deleted on the server and are ready for reset
     count = 0
     query = session.query(Invitation).filter_by(status=IStatusType.failed,
         sent=ISentStatusType.sent)
     for inv in query.yield_per(1000):
         extra = {}
         if membercls: uuid = inv.member.uuid
         else: uuid = inv.uuid
         if download and uuid in seen: continue # already replied (seen is only set after a download)
         inv.reset()
         count += 1
     self.info('%i codes reset', count)
     if not dryrun: session.commit()
     if not quick:
         # append new invitations
         count = 0
         query = session.query(Invitation).filter_by(status=IStatusType.new)
         for inv in query.yield_per(1000):
             extra = {}
             if membercls:
                 uuid = inv.member.uuid
                 extra['uuid'] = uuid
             else: uuid = inv.uuid
             writer.write(inv,extra)
             count += 1
         self.info('%i new codes uploaded', count)
     writer.close()
     if output:
         json.dump(out,output)
     elif not dryrun: # pragma: no cover
         resp = api.post(self.invite_api.url,json=out)
         if resp.status_code != requests.codes.ok:
             if self.debugging: open('invup.html','w').write(resp.content)
             assert False, 'cannot upload data'
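
Since `input` is fed through json.load and `output` receives json.dump, this sync can be exercised offline against saved files instead of the live API. A sketch under assumed names (`backend` and the file names are hypothetical):

    # replay a previously downloaded server response and capture the generated reply locally
    with open('invite_download.json') as inp, open('invite_upload.json', 'w') as outp:
        backend.sync_invitations(download=True, upload=True, dryrun=True,
                                 input=inp, output=outp)
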
Example No. 3
    def sync_members(self,
                     download=True,
                     upload=True,
                     dryrun=False,
                     input=None,
                     output=None,
                     invitations=None,
                     format='csv'):
        # format for emails and invitations export
        from ekklesia.data import DataTable
        from ekklesia.backends import api_init
        from six.moves import cStringIO as StringIO
        import requests, datetime, json
        session = self.session
        Department, Member = self.Department, self.Member
        check_email = self.check_email
        check_member = self.check_member
        coltypes = self.member_types.copy()
        api = api_init(self.member_api._asdict())
        if download:  # download registered uuids
            if input: input = json.load(input)
            else:
                resp = api.get(self.member_api.url)
                assert resp.status_code == requests.codes.ok, 'cannot download used uuids'
                input = resp.json()
            columns = ['uuid', 'echo']
            if check_member: columns.append(check_member)
            if check_email: columns.append(check_email)
            reader = DataTable(columns,
                               required=['uuid'],
                               coltypes=coltypes,
                               gpg=self.gpg,
                               dataformat='member',
                               fileformat=self.member_api.format,
                               version=self.version)
            reader.open(input,
                        'r',
                        encrypt=self.member_api.encrypt,
                        sign=self.member_api.receiver)
        columns = list(self.member_sync)
        wcoltypes = dict(coltypes)
        if check_member: wcoltypes[check_member] = bool
        if check_email: wcoltypes[check_email] = bool
        wcoltypes['department'] = int  # replace by ids
        if 'location' in self.member_sync:
            wcoltypes['gpslat'] = wcoltypes['gpslng'] = float
            columns += ['gpslat', 'gpslng']
        if download:
            rcolumns, unknown = reader.get_columns()
            if unknown:
                self.warn(
                    UnknownFieldsWarning('ignoring unknown fields %s',
                                         unknown))
            if check_member and check_member in rcolumns:
                columns.append(check_member)
            if check_email and check_email in rcolumns:
                columns.append(check_email)
            if 'echo' in rcolumns: columns += ['echo']
        encrypt = [self.member_api.receiver] if self.member_api.encrypt else False
        encryptmail = [self.email_receiver] if self.member_api.encrypt else False
        writer = DataTable(columns,
                           coltypes=wcoltypes,
                           gpg=self.gpg,
                           dataformat='member',
                           fileformat=self.member_api.format,
                           version=self.version)
        out = {}
        writer.open(out, 'w', encrypt=encrypt, sign=self.member_api.sign)

        if self.export_emails:
            ewriter = DataTable(('uuid', 'email'),
                                coltypes=coltypes,
                                gpg=self.gpg,
                                dataformat='member',
                                fileformat=format,
                                version=self.version)
            eout = output[2] if output else StringIO()
            ewriter.open(eout,
                         'w',
                         encrypt=encryptmail,
                         sign=self.member_api.sign)

        def export(member, extra=None):
            if extra is None: extra = {}  # avoid reusing a shared mutable default dict
            if 'location' in self.member_import:
                gps = self.gps_coord(self.get_location(member))
                if gps: extra['gpslat'], extra['gpslng'] = gps
            dep = self.get_department(member)
            if dep:
                extra['department'] = (
                    dep.id if self.department_spec == 'number' else dep.name)
            writer.write(member, extra)
            if self.export_emails: ewriter.write(member)

        if invitations: registered = {}  # dict of registered uuids

        mquery = session.query(Member)
        count = 0
        check_memberno = 'memberno' in self.member_columns
        if download:
            seen = set()
            for data in reader:
                uuid = data['uuid']
                if not uuid:
                    self.warn("uuid missing")
                    continue
                if uuid in seen:
                    self.warn("member %s is duplicate" % uuid)
                    continue
                seen.add(uuid)
                member = mquery.filter_by(uuid=uuid).first()
                extra = {}
                if not member or (check_memberno
                                  and not member.memberno):  # deleted
                    self.warn("member %s is unknown" % uuid)
                    if check_member in columns and data[check_member]:
                        extra[check_member] = False
                    if check_email in columns and data[check_email]:
                        extra[check_email] = False
                    writer.write(Member(uuid=uuid, status=StatusType.deleted),
                                 extra)
                    continue
                if check_email in columns and data[check_email]:
                    extra[check_email] = member.email == data[check_email]
                if check_member in columns:
                    if data[check_member]:
                        result = self.check_member_func(
                            member, data[check_member])
                        if 'registered' in self.member_import:
                            if result and not member.registered:
                                member.registered = datetime.datetime.utcnow()
                    else:
                        result = None
                    extra[check_member] = result
                if 'echo' in columns: extra['echo'] = data['echo']
                if not dryrun: export(member, extra)
                if invitations: registered[member.uuid] = True
                count += 1
        else:
            for member in mquery:
                if check_memberno and not member.memberno: continue  # deleted
                if not dryrun: export(member)
                count += 1
        self.info('%i members exported', count)
        if not dryrun and 'registered' in self.member_import: session.commit()

        writer.close()
        if self.export_emails: ewriter.close()

        if invitations:
            iwriter = DataTable(('uuid', 'email'),
                                coltypes=coltypes,
                                gpg=self.gpg,
                                dataformat='member',
                                fileformat=format,
                                version=self.version)
            # extra encrypt,sign
            iwriter.open(invitations,
                         'w',
                         encrypt=encryptmail,
                         sign=self.member_api.sign)
            if check_memberno:
                query = session.query(Member.uuid,Member.email,Member.memberno).\
                    filter(Member.email!=None,Member.memberno!=0)
            else:
                query = session.query(
                    Member.uuid, Member.email).filter(Member.email != None)
            count = 0
            for member in query:
                if member.uuid in registered: continue  # skip registered
                iwriter.write(member)
                count += 1
            iwriter.close()
            self.info('%i invitations exported', count)

        if not upload: return

        dwriter = DataTable(('id', 'name', 'parent', 'depth'),
                            gpg=self.gpg,
                            dataformat='department',
                            fileformat=self.member_api.format,
                            version=self.version)
        dout = {}
        dwriter.open(dout, 'w', encrypt=encrypt, sign=self.member_api.sign)
        for dep in session.query(Department).order_by(Department.depth,
                                                      Department.id):
            if dep.parent:
                extra = dict(parent=dep.parent.id if self.department_spec ==
                             'number' else dep.parent.name)
            else:
                extra = {}
            dwriter.write(dep, extra)
        dwriter.close()

        if output:
            json.dump(out, output[0])
            json.dump(dout, output[1])
        elif not dryrun:
            r = api.post(self.member_api.url,
                         json=dict(members=out, departments=dout))
            assert r.status_code == requests.codes.ok, 'cannot upload data'

        if not self.export_emails or output: return

        from ekklesia.mail import create_mail, smtp_init
        smtp = smtp_init(self.smtpconfig)
        smtp.open()
        self.info('sending email data')
        msg = create_mail(self.gpgconfig['sender'], self.email_receiver,
                          'Email data', eout.getvalue())
        eout.close()
        msg, results = self.gpg.encrypt(msg,
                                        default_key=True,
                                        verify=True,
                                        inline=True)
        assert msg and results, 'error encrypting message'
        if not dryrun: smtp.send(msg)
        smtp.close()
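
The `output` argument is indexed three times above: members JSON, departments JSON and, when export_emails is set, the e-mail table. An upload-only run can therefore be captured to local files; a sketch with hypothetical names and a hypothetical `backend` instance:

    # export to local files instead of posting to the ID server
    with open('members.json', 'w') as mout, \
         open('departments.json', 'w') as dout, \
         open('emails.csv', 'w') as eout:      # eout is only read when export_emails is enabled
        backend.sync_members(download=False, upload=True, output=(mout, dout, eout))
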
Example No. 4
 def sync_invitations(self,
                      download=True,
                      upload=True,
                      dryrun=False,
                      quick=False,
                      input=None,
                      output=None):
     """sync invitations with ID server"""
     from ekklesia.backends import api_init
     from ekklesia.data import DataTable
     from six.moves import cStringIO as StringIO
     import requests, json
     session = self.session
     Invitation = self.Invitation
     membercls = self.member_class
     check_email = self.invite_check_email
     api = api_init(self.invite_api._asdict())
     reply = False  # whether server requested reply
     if download:  # download registered/failed/verified uuids(used codes), mark used
         if input: input = json.load(input)
         if not input:  # pragma: no cover
             url = self.invite_api.url
             if quick: url += '?changed=1'
             resp = api.get(url)
             if resp.status_code != requests.codes.ok:
                 if self.debugging:
                     open('invdown.html', 'w').write(resp.content)
                 assert False, 'cannot download used invite codes'
             input = resp.json()  # only json?
         if not input:
             self.warn("input is empty")
             return
         columns = ['uuid', 'status', 'code', 'echo']
         if check_email: columns.append(check_email)
         reader = DataTable(columns,
                            coltypes=self.invite_types,
                            required=('uuid', 'status', 'code'),
                            gpg=self.gpg,
                            dataformat='invitation',
                            fileformat=self.invite_api.format,
                            version=self.version)
         sign = self.invite_api.receiver if self.invite_api.sign else False
         reader.open(input, 'r', encrypt=self.invite_api.encrypt, sign=sign)
         rcolumns, unknown = reader.get_columns()
         if unknown: self.warn('ignoring unknown fields', unknown)
         reply = 'echo' in rcolumns  # reply?
         if check_email: reply = reply or check_email in rcolumns
     if upload:
         # upload responses and not-yet-uploaded, unused uuid & code pairs
         columns = ['uuid', 'code', 'status']
         coltypes = self.invite_types.copy()
         if check_email: coltypes[check_email] = bool
         if download and reply:
             if check_email and check_email in rcolumns:
                 columns.append(check_email)
             if 'echo' in rcolumns: columns.append('echo')
         writer = DataTable(columns,
                            coltypes=coltypes,
                            gpg=self.gpg,
                            dataformat='invitation',
                            fileformat=self.invite_api.format,
                            version=self.version)
         encrypt = [self.invite_api.receiver
                    ] if self.invite_api.encrypt else False
         out = {}
         writer.open(out, 'w', encrypt=encrypt, sign=self.invite_api.sign)
     if download:  # process download and generate reply
         if membercls: query = session.query(membercls)
         else: query = session.query(Invitation)
         count = 0
         seen = set()
         for data in reader:  # only uploaded codes, reply optional
             uuid = data['uuid']
             if not uuid:
                 self.warn("uuid missing")
                 continue
             if uuid in seen:
                 self.warn("member %s is duplicate" % uuid)
                 continue
             seen.add(uuid)
             valid = ('registered', 'failed', 'verified', 'reset')
             if not quick: valid += ('new', 'verify')
             status = data['status']
             if not status in valid:
                 self.warn("invalid status %s for %s" % (status, uuid))
                 continue
             inv = query.filter_by(uuid=uuid).first()
             extra = {}
             if membercls and inv:
                 inv = inv.invitation
             if not inv:
                 self.error("member %s is unknown" % data['uuid'])
                 if check_email in columns and data[check_email]:
                     extra[check_email] = False
                 extra['uuid'] = uuid  # works also for membercls
                 writer.write(
                     Invitation(status=IStatusType.deleted, code=''), extra)
                 continue
             """compare status and inv.status
             sync state transitions:
             backend,idserver -> target
             new,-         -> idserver:new
             new,new       -> idserver:new, backend:uploaded
             verify,registered/- -> idserver:verify
             -,*           -> backend:deleted,error
             uploaded,new/registering -> idserver:no response, backend:uploaded/ignore
             uploaded,registered/failed -> backend:registered/failed
             uploaded_verify,verify -> idserver:no response, backend:uploaded_verify/ignore
             uploaded_verify,verified -> backend:verified
             registered,registered -> backend:registered, idserver:delete
             failed,failed -> backend:failed, idserver:delete (or new,new)
             new/uploaded,reset -> backend:new, idserver:new
             verify/uploaded_verify,reset -> backend:verify, idserver:verify
             failed,-      -> backend:new (check if fail not downloaded)
             """
             if status == IStatusType.new:
                 if inv.status == IStatusType.new:
                     if inv.code == data['code']:
                         # code upload confirmed, prepare for sending
                         inv.status = IStatusType.uploaded
                         inv.sent = ISentStatusType.unsent
                     else:
                         # mismatch, new code needs to be uploaded
                         self.info(
                             "updating old code %s for uuid %s, new %s",
                             data['code'], data['uuid'], inv.code)
                         # write
                 elif inv.status != IStatusType.uploaded:  # ignore with uploaded
                     self.error("bad status %s for uuid %s, current %s",
                                status, data['uuid'], inv.status)
                     continue
             elif status == IStatusType.verify:
                 if inv.status == IStatusType.verify:
                     if inv.code == data['code']:
                         # code upload confirmed, prepare for sending
                         inv.status = IStatusType.uploaded_verify
                         inv.sent = ISentStatusType.unsent
                     else:
                         # mismatch, new code needs to be uploaded
                         self.info(
                             "updating old verify code %s for uuid %s, new %s",
                             data['code'], data['uuid'], inv.code)
                         # write
                 elif inv.status != IStatusType.uploaded_verify:  # ignore with uploaded_verify
                     self.error("bad status %s for uuid %s, current %s",
                                status, data['uuid'], inv.status)
                     continue
             elif status == IStatusType.reset:
                 if inv.status in FinalStates:
                     self.warn("ignoring reset for uuid %s, status %s",
                               data['uuid'], inv.status)
                     continue
                 inv.reset()
             elif status in FinalStates:
                 if inv.status == IStatusType.uploaded_verify and status==IStatusType.verified or \
                     inv.status == IStatusType.uploaded and status!=IStatusType.verified:
                     inv.status = status  # upload confirmed or failed registration/verification
                     inv.sent = ISentStatusType.unsent
                     inv.change()
                 elif status != inv.status:
                     self.error("bad status %s for uuid %s, current %s",
                                status, data['uuid'], inv.status)
                     continue
             else:
                 self.error("bad status %s for uuid %s, current %s", status,
                            data['uuid'], inv.status)
                 continue
             if upload and not inv.status in (IStatusType.uploaded,
                                              IStatusType.uploaded_verify):
                 # write response for uploaded
                 if check_email and check_email in columns:
                     if membercls:
                         extra[check_email] = inv.member.email == data[check_email]
                     else:
                         extra[check_email] = inv.email == data[check_email]
                 if 'echo' in columns: extra['echo'] = data['echo']
                 if membercls: extra['uuid'] = data['uuid']
                 writer.write(inv, extra)
             count += 1
         self.info('%i codes used', count)
         if not dryrun: session.commit()
     if not upload: return
     # process failed invitations, which have already been deleted on the server and are ready for reset
     count = 0
     query = session.query(Invitation).filter_by(status=IStatusType.failed,
                                                 sent=ISentStatusType.sent)
     for inv in query.yield_per(1000):
         extra = {}
         if membercls: uuid = inv.member.uuid
         else: uuid = inv.uuid
         if download and uuid in seen: continue  # already replied (seen is only set after a download)
         inv.reset()
         count += 1
     self.info('%i codes reset', count)
     if not dryrun: session.commit()
     if not quick:
         # append new invitations
         count = 0
         fresh = (IStatusType.new, IStatusType.verify)
         query = session.query(Invitation).filter(
             Invitation.status.in_(fresh))
         for inv in query.yield_per(1000):
             extra = {}
             if membercls:
                 uuid = inv.member.uuid
                 extra['uuid'] = uuid
             else:
                 uuid = inv.uuid
             writer.write(inv, extra)
             count += 1
         self.info('%i new codes uploaded', count)
     writer.close()
     if output:
         json.dump(out, output)
     elif not dryrun:  # pragma: no cover
         resp = api.post(self.invite_api.url, json=out)
         if resp.status_code != requests.codes.ok:
             if self.debugging: open('invup.html', 'w').write(resp.content)
             assert False, 'cannot upload data'
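
This second variant adds the verify/verified and reset transitions plus a quick mode: quick=True asks the server only for changed codes (?changed=1) and skips re-uploading invitations that are still in status new/verify. A sketch, again with a hypothetical `backend`:

    # frequent, cheap sync: only changed codes, no fresh invitations uploaded
    backend.sync_invitations(quick=True)
    # full sync (e.g. nightly): also pushes invitations still in status new/verify
    backend.sync_invitations()
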
Example No. 5
    def import_members(self,
                       memberfile=None,
                       depfile=None,
                       decrypt=False,
                       verify=False,
                       dryrun=False,
                       allfields=False,
                       format='csv'):
        from ekklesia.data import DataTable
        from sqlalchemy.orm import aliased
        session = self.session
        import_dep = self.department_spec != 'implicit'
        Department = self.Department
        depprimary = 'id' if self.department_spec == 'number' else 'name'
        dquery = session.query(Department)

        def get_department(id, create=False):
            # root dep: name,parent=None,depth=0
            # fwd-ref: name,parent=None,depth=None
            dep = dquery.filter_by(**id).first()
            if not dep and create and not dryrun:
                dep = Department(parent=None, depth=None, **id)
                session.add(dep)
            return dep

        if depfile and import_dep and self.Department:
            columns = self.department_columns + ['parent']
            if allfields: reqcolumns = columns
            else: reqcolumns = ('name', 'parent')
            reader = DataTable(columns,
                               coltypes=self.department_types,
                               required=reqcolumns,
                               dataformat='department',
                               fileformat=format,
                               version=self.version,
                               gpg=self.gpg)
            count = 0
            reader.open(depfile, 'r', encrypt=decrypt, sign=verify)
            columns, tmp = reader.get_columns()
            seen = set()
            for data in reader:
                if not 'depth' in columns: data['depth'] = None
                # find existing parent by id, if not exist, create fwd-ref
                if not data['parent'] is None:
                    data['parent'] = get_department(
                        {depprimary: data['parent']}, create=True)
                # find existing dep by id
                id = data[depprimary]
                if not id:
                    self.warn("identifier missing")
                    continue
                assert not id in seen, "department %s is duplicate" % id
                seen.add(id)
                dep = get_department({depprimary: id})
                if dep:
                    parent = data['parent']
                    while parent:
                        assert parent != dep, "cycle detected for department %s" % id
                        parent = parent.parent
                # if not exist, create, set depth if parent.depth
                depth = data['parent'].depth if data['parent'] else None
                if not depth is None: depth += 1
                if data['depth'] is None: data['depth'] = depth
                elif not depth is None:
                    assert data['depth'] >= depth, "invalid depth"
                count += 1
                if dryrun: continue
                if not dep: session.add(Department(**data))
                else: dep.__init__(**data)
            self.info('%i imported departments', count)
        elif not import_dep:
            dquery.update(dict(depth=None))  # reset all depths

        Member = self.Member
        columns = self.member_columns + ['department']
        if allfields: reqcolumns = columns
        else: reqcolumns = ['uuid', 'email']
        if not import_dep and 'department' in self.member_import and not 'parent' in columns:
            columns.append('parent')
            reqcolumns.append('parent')
        reader = DataTable(columns,
                           coltypes=self.member_types,
                           required=reqcolumns,
                           dataformat='member',
                           fileformat=format,
                           version=self.version,
                           gpg=self.gpg)
        count = 0
        reader.open(memberfile, 'r', encrypt=decrypt, sign=verify)
        columns, tmp = reader.get_columns()
        #primary either memberid or uuid
        #primary must exist for every member and be unique
        #primary+email must be unique
        if 'memberno' in columns:
            primary, primarykey = 'memberno', Member.memberno
        else:
            primary, primarykey = 'uuid', Member.uuid
        mquery = session.query(Member)  # query mapped Member objects so the update below works
        seen, depseen = set(), set()
        for data in reader:
            id = data[primary]
            if not id:
                self.warn("uuid missing")
                continue
            assert not id in seen, "member %s is duplicate" % id
            seen.add(id)
            if 'department' in data:
                depid = data['department']
                if import_dep:  # all departments must exist
                    dep = get_department({depprimary: depid})
                    assert dep, "unknown department %s" % depid
                else:  # create departments from department and parent
                    # find existing parent by id, if not exist, create fwd-ref
                    parent = data['parent']
                    del data['parent']
                    if not parent is None:
                        parent = get_department({'name': parent}, create=True)
                    # find existing dep by id
                    dep = get_department({'name': depid})
                    # if not exist, create, set depth if parent.depth
                    if dep:
                        sup = parent
                        while sup:
                            assert sup != dep, "cycle detected for department %s" % depid
                            sup = sup.parent
                    depseen.add(depid)
                    if not dryrun:
                        if not dep:
                            dep = Department(name=depid,
                                             parent=parent,
                                             depth=None)
                            session.add(dep)
                        elif dep.parent != parent:  # if parent changed and not seen, update
                            assert not depid in depseen, "department %s is duplicate" % depid
                            dep.__init__(parent=parent)
                data['department'] = dep
            if 'email' in columns and not data['email']: data['email'] = None
            member = mquery.filter_by(**{primary: id}).first()
            if data['email']:
                email = session.query(primarykey,Member.email).\
                    filter_by(email=data['email']).first()
            else:
                email = None
            if email and (not member or
                          getattr(email, primary) != getattr(member, primary)):
                self.error("ignoring: duplicate email %s" % data['email'])
                continue
            count += 1
            if dryrun: continue
            if member: member.__init__(**data)
            else: session.add(Member(**data))  #new
        self.info('%i imported members', count)
        # complete missing depths
        depalias = aliased(Department)
        fixdeps = session.query(Department.depth).filter_by(depth=None)
        if fixdeps.first():
            fixdeps.filter_by(parent=None).update(dict(depth=0))  # set roots
            while fixdeps.first():
                for sub in session.query(Department).join(depalias,Department.parent).\
                    filter(depalias.depth!=None):
                    sub.depth = sub.parent.depth + 1
        if not dryrun: session.commit()
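
A sketch of a combined department and member import with this version, assuming `backend` is a configured member backend and the CSV files are local exports (hypothetical names):

    # dry run first to validate both files, then apply
    with open('departments.csv') as deps, open('members.csv') as members:
        backend.import_members(memberfile=members, depfile=deps, dryrun=True, format='csv')
    with open('departments.csv') as deps, open('members.csv') as members:
        backend.import_members(memberfile=members, depfile=deps, format='csv')
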
Example No. 6
    def import_members(self,
                       memberfile=None,
                       depfile=None,
                       decrypt=False,
                       verify=False,
                       dryrun=False,
                       sync=False,
                       allfields=False,
                       format='csv'):
        """import data from memberfile, optionally depfile (if not implicit).
        allfields is used for restore and requires all columns.
        if sync, uuids not seen in input are set to status deleted.
        decrypt=decrypt with the default key, verify=check whether the input is signed with io_key.
        rows with missing uuids or duplicate emails are ignored; duplicate uuids raise an error.
        """
        from ekklesia.data import DataTable, init_object
        from sqlalchemy.orm import aliased
        session = self.session
        import_dep = self.department_spec != 'implicit'  # extra Department data?
        Department = self.Department
        depprimary = 'id' if self.department_spec == 'number' else 'name'
        dquery = session.query(Department)
        if verify: verify = self.io_key

        def get_department(id, create=False):
            # root dep: name,parent=None,depth=0
            # fwd-ref: name,parent=None,depth=None
            dep = dquery.filter_by(**id).first()
            if not dep and create and not dryrun:
                # create forward reference w/o depth, calculate it later
                dep = Department(parent=None, depth=None, **id)
                session.add(dep)
            return dep

        if depfile and import_dep and self.Department:  # import separate department data
            columns = list(self.department_columns) + ['parent']
            if allfields: reqcolumns = columns
            else: reqcolumns = ('name', 'parent')
            reader = DataTable(columns,
                               coltypes=self.department_types,
                               required=reqcolumns,
                               dataformat='department',
                               fileformat=format,
                               version=self.version,
                               gpg=self.gpg)
            count = 0
            reader.open(depfile, 'r', encrypt=decrypt, sign=verify)
            columns = reader.get_columns()[0]
            seen = set()  # detect duplicates
            for data in reader:
                if not 'depth' in columns: data['depth'] = None
                # find existing parent by id, if not exist, create fwd-ref
                if not data['parent'] is None:
                    data['parent'] = get_department(
                        {depprimary: data['parent']}, create=True)
                # find existing dep by id
                id = data[depprimary]
                if not id:
                    self.warn("identifier missing")
                    continue
                assert not id in seen, "department %s is duplicate" % id
                seen.add(id)
                dep = get_department({depprimary: id})
                if dep:
                    parent = data['parent']
                    while parent:
                        assert parent != dep, "cycle detected for department %s" % id
                        parent = parent.parent
                # if not exist, create, set depth if parent.depth
                depth = data['parent'].depth if data['parent'] else None
                if not depth is None: depth += 1
                if data['depth'] is None: data['depth'] = depth
                elif not depth is None:
                    assert data['depth'] >= depth, "invalid depth"
                count += 1
                if dryrun: continue
                if not dep: session.add(Department(**data))
                else: dep.update(**data)
            self.info('%i imported departments', count)
        elif not import_dep:  # implicit
            dquery.update(dict(depth=None))  # reset all depths

        Member = self.Member
        columns = list(self.member_columns) + ['department']
        if allfields: reqcolumns = columns
        else: reqcolumns = ['uuid', 'email']
        if not import_dep and 'department' in self.member_import and not 'parent' in columns:
            # implicit requires parent
            columns.append('parent')
            reqcolumns.append('parent')
        reader = DataTable(columns,
                           coltypes=self.member_types,
                           required=reqcolumns,
                           dataformat='member',
                           fileformat=format,
                           version=self.version,
                           gpg=self.gpg)
        count = 0
        reader.open(memberfile, 'r', encrypt=decrypt, sign=verify)
        columns = reader.get_columns()[0]
        #primary either memberid or uuid
        #primary must exist for every member and be unique
        #primary+email must be unique
        if 'memberno' in columns:
            primary, primarykey = 'memberno', Member.memberno
        else:
            primary, primarykey = 'uuid', Member.uuid
        mquery = session.query(Member)
        seen, depseen = set(), set()  # detect duplicates
        for data in reader:
            id = data[primary]
            if not id:
                self.warn("uuid missing")
                continue
            assert not id in seen, "member %s is duplicate" % id
            seen.add(id)
            if 'department' in data:
                depid = data['department']
                if import_dep:  # all departments must exist
                    dep = get_department({depprimary: depid})
                    assert dep, "unknown department %s" % depid
                else:  # implicit departments
                    # create departments from department and parent
                    # find existing parent by id, if not exist, create fwd-ref
                    parent = data['parent']
                    del data['parent']
                    if not parent is None:
                        parent = get_department({'name': parent}, create=True)
                    # find existing dep by id
                    dep = get_department({'name': depid})
                    # if not exist, create, set depth if parent.depth
                    if dep:
                        sup = parent
                        while sup:
                            assert sup != dep, "cycle detected for department %s" % depid
                            sup = sup.parent
                    depseen.add(depid)
                    if not dryrun:
                        if not dep:
                            dep = Department(name=depid,
                                             parent=parent,
                                             depth=None)
                            session.add(dep)
                        elif dep.parent != parent:  # if parent changed and not seen, update
                            assert not depid in depseen, "department %s is duplicate" % depid
                            dep.update(parent=parent)
                data['department'] = dep
            if 'email' in columns and not data['email']:
                data['email'] = None  # make sure it's None
            # find existing member
            member = mquery.filter_by(**{primary: id}).first()
            if data['email'] and not (member
                                      and member.email == data['email']):
                # email already used by other member?
                if session.query(
                        primarykey,
                        Member.email).filter_by(email=data['email']).first():
                    self.error("ignoring: duplicate email %s" % data['email'])
                    continue
            count += 1
            if dryrun: continue
            if member:
                if member.email != data['email']:
                    self.email_change(member, data)
                if data.get('status') == 'deleted':
                    self.delete_member(member)
                member.update(**data)
            else:
                session.add(Member(**data))  #new
        self.info('%i imported members', count)
        if sync:  # delete unseen members
            count = 0
            for member in session.query(Member).yield_per(1000):
                if member.status == MStatusType.deleted or member.uuid in seen:
                    continue
                self.delete_member(member)
                member.status = MStatusType.deleted
                self.info("member %s deleted" % member.uuid)
                count += 1
            self.info('%i deleted members', count)
        # complete missing depths
        depalias = aliased(Department)
        fixdeps = session.query(Department.depth).filter_by(depth=None)
        if fixdeps.first():  # any departments with missing depths?
            fixdeps.filter_by(parent=None).update(dict(depth=0))  # set roots
            while fixdeps.first():  # fill from roots to leaves
                for sub in session.query(Department).join(depalias,Department.parent).\
                    filter(depalias.depth!=None).yield_per(1000):
                    sub.depth = sub.parent.depth + 1
        if not dryrun: session.commit()
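
For restoring an encrypted, signed backup, this later variant would be called roughly as follows. Assumptions: `backend` is a configured member backend, departments are kept in a separate file (department_spec is not 'implicit'), the backup was written with all columns, and the GPG configuration provides the io_key used for verification; the file names are illustrative.

    with open('departments-backup.csv') as deps, open('members-backup.csv') as members:
        backend.import_members(memberfile=members, depfile=deps, decrypt=True, verify=True,
                               allfields=True, sync=True, format='csv')
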
Example No. 7
    def sync_members(self,download=True,upload=True,dryrun=False,quick=False,
            input=None,output=None,invitations=None,format='csv'):
        "sync members with ID server"
        from ekklesia.data import DataTable
        from ekklesia.backends import api_init
        from six.moves import cStringIO as StringIO
        import requests, json
        session = self.session
        Department, Member = self.Department, self.Member
        check_email = self.check_email
        check_member = self.check_member
        coltypes = self.member_types.copy()
        api = api_init(self.member_api._asdict())
        if download: # download registered uuids
            if input: input = json.load(input)
            else: # pragma: no cover
                url = self.member_api.url
                if quick: url+='?new=1'
                resp = api.get(url)
                if resp.status_code != requests.codes.ok:
                    if self.debugging: open('memberdown.html','w').write(resp.content)
                    assert False, 'cannot download used uuids'
                input = resp.json()
            if not input:
                self.warn("input is empty")
                return
            columns = ['uuid','echo']
            if check_member: columns.append(check_member)
            if check_email: columns.append(check_email)
            reader = DataTable(columns,required=['uuid'],coltypes=coltypes,gpg=self.gpg,
                dataformat='member',fileformat=self.member_api.format,version=self.version)
            sign = self.member_api.receiver if self.member_api.sign else False
            reader.open(input,'r',encrypt=self.member_api.encrypt,sign=sign)
        columns = list(self.member_sync)
        wcoltypes = dict(coltypes)
        if check_member: wcoltypes[check_member] = bool
        if check_email: wcoltypes[check_email] = bool
        wcoltypes['departments'] = (int,) # replace by list of ids
        if 'location' in self.member_sync:
            wcoltypes['gpslat'] = wcoltypes['gpslng'] = float
            columns += ['gpslat','gpslng']
        if download:
            rcolumns, unknown = reader.get_columns()
            if unknown: self.warn(UnknownFieldsWarning('ignoring unknown fields %s',unknown))
            if check_member and check_member in rcolumns: columns.append(check_member)
            if check_email and check_email in rcolumns: columns.append(check_email)
            if 'echo' in rcolumns: columns += ['echo']
        encrypt = [self.member_api.receiver] if self.member_api.encrypt else False
        encryptmail = [self.email_receiver] if self.member_api.encrypt else False
        writer = DataTable(columns,coltypes=wcoltypes,gpg=self.gpg,
            dataformat='member',fileformat=self.member_api.format,version=self.version)
        out = {}
        writer.open(out,'w',encrypt=encrypt,sign=self.member_api.sign)

        if not quick and self.export_emails:
            ewriter = DataTable(('uuid','email'),coltypes=coltypes,gpg=self.gpg,
                dataformat='member',fileformat=format,version=self.version)
            eout = output[2] if output else StringIO()
            ewriter.open(eout,'w',encrypt=encryptmail,sign=self.member_api.sign)

        def export(member,extra=None):
            if extra is None: extra = {} # avoid reusing a shared mutable default dict
            if 'location' in self.member_import:
                gps = self.gps_coord(self.get_location(member))
                if gps: extra['gpslat'],extra['gpslng'] = gps
            deps = self.get_departments(member)
            if self.department_spec=='number':
                deps = [dep.id for dep in deps]
            else:
                deps = [dep.name for dep in deps]
            if deps:
                extra['departments'] = deps
            writer.write(member,extra)
            if not quick and self.export_emails: ewriter.write(member)

        if invitations: registered = {} # dict of registered uuids

        mquery = session.query(Member)
        count = 0
        if download:
            from datetime import datetime
            seen = set()
            for data in reader:
                uuid = data['uuid']
                if not uuid:
                    self.warn("uuid missing")
                    continue
                if uuid in seen:
                    self.warn("member %s is duplicate" % uuid)
                    continue
                seen.add(uuid)
                member = mquery.filter_by(uuid=uuid).first()
                extra = {}
                if not member or (self.member_no and member.status == MStatusType.deleted):
                    self.warn("member %s is unknown" % uuid)
                    if check_member in columns and data[check_member]:
                        extra[check_member] = False
                    if check_email in columns and data[check_email]:
                        extra[check_email] = False
                    extra['departments'] = []
                    writer.write(Member(uuid=uuid,status=MStatusType.deleted),extra)
                    continue
                if check_email in columns and data[check_email]:
                    extra[check_email] = member.email == data[check_email]
                if check_member in columns:
                    if data[check_member]:
                        result = self.check_member_func(member,data[check_member])
                        if 'registered' in self.member_import:
                            if result and not member.registered:
                                member.registered = datetime.utcnow()
                    else:
                        if 'registered' in self.member_import and member.registered:
                            self.warn("member %s is already registered" % uuid)
                        result = None
                    extra[check_member] = result
                elif 'registered' in self.member_import and not member.registered:
                    member.registered = datetime.utcnow()
                if 'echo' in columns: extra['echo'] = data['echo']
                if not dryrun: export(member,extra)
                if invitations: registered[member.uuid] = True
                count += 1
        else:
            for member in mquery.yield_per(1000):
                if self.member_no and not member.memberno: continue # deleted
                if not dryrun: export(member)
                count += 1
        self.info('%i members exported', count)
        if not dryrun and 'registered' in self.member_import: session.commit()

        writer.close()
        if not quick and self.export_emails: ewriter.close()

        if not quick and invitations:
            iwriter = DataTable(('uuid','email'),coltypes=coltypes,gpg=self.gpg,
                dataformat='member',fileformat=format,version=self.version)
            # extra encrypt,sign
            iwriter.open(invitations,'w',encrypt=encryptmail,sign=self.member_api.sign)
            if self.member_no:
                query = session.query(Member.uuid,Member.email,Member.memberno).\
                    filter(Member.email!=None,Member.memberno!=0)
            else:
                query = session.query(Member.uuid,Member.email).filter(Member.email!=None)
            count = 0
            for member in query.yield_per(1000):
                if member.uuid in registered: continue # skip registered
                iwriter.write(member)
                count += 1
            iwriter.close()
            self.info('%i invitations exported', count)

        if not upload: return

        dwriter = DataTable(('id','name','parent','depth'),gpg=self.gpg,
            dataformat='department',fileformat=self.member_api.format,version=self.version)
        dout = {}
        dwriter.open(dout,'w',encrypt=encrypt,sign=self.member_api.sign)
        for dep in session.query(Department).order_by(Department.depth,Department.id).yield_per(1000):
            if dep.parent:
                extra = dict(parent=dep.parent.id)
                # if self.department_spec=='number' else dep.parent.name
            else: extra = {}
            dwriter.write(dep,extra)
        dwriter.close()

        if output:
            json.dump(out,output[0])
            json.dump(dout,output[1])
        elif not dryrun: # pragma: no cover
            resp = api.post(self.member_api.url,json=dict(members=out,departments=dout))
            if resp.status_code != requests.codes.ok:
                if self.debugging: open('memberup.html','w').write(resp.content)
                assert False, 'cannot upload data'

        if not self.export_emails or output or quick: return

        # pragma: no cover
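        # mail the collected email data to the configured receiver as an encrypted message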
        from ekklesia.mail import create_mail, smtp_init
        smtp = smtp_init(self.smtpconfig)
        smtp.open()
        self.info('sending email data')
        msg = create_mail(self.gpgconfig['sender'],self.email_receiver,'Email data',eout.getvalue())
        eout.close()
        msg, results = self.gpg.encrypt(msg,default_key=True,verify=True, inline=True)
        assert msg and results, 'error encrypting message'
        if not dryrun: smtp.send(msg)
        smtp.close()
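
The upload step earlier in this example either dumps the two serialized tables to local JSON files or POSTs them to the member API in a single request. Below is a minimal sketch of that dispatch only, assuming a requests-style HTTP client; the function name, the payload variables and the URL are illustrative and not part of the original code.

import json
import requests

def upload_export(members_payload, departments_payload, output=None,
                  url='https://example.invalid/members/upload'):
    # with an output pair, just write both payloads to the given file objects
    if output:
        json.dump(members_payload, output[0])
        json.dump(departments_payload, output[1])
        return True
    # otherwise send everything in one request, mirroring the api.post() call above
    resp = requests.post(url, json=dict(members=members_payload, departments=departments_payload))
    return resp.status_code == requests.codes.ok

Passing output=(open('members.json','w'), open('departments.json','w')) keeps everything local; without it the POST is attempted against the placeholder URL.
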
Example No. 8
    def import_members(self,memberfile=None,depfile=None,decrypt=False,verify=False,
        dryrun=False,sync=False,allfields=False,format='csv'):
        """import data from memberfile, optionally depfile (if not implicit).
        allfields is used for restore and requires all columns.
        if sync, uuids not seen in input are set to status deleted.
        decrypt=decrypt with the default key, verify=check whether it's signed with io_key.
        rows with missing uuids or duplicate uuids and emails are ignored.
        """
        from ekklesia.data import DataTable, init_object
        from sqlalchemy.orm import aliased
        session = self.session
        import_dep = self.department_spec != 'implicit' # extra Department data?
        Department = self.Department
        depprimary = 'id' if self.department_spec=='number' else 'name'
        dquery = session.query(Department)
        if verify: verify = self.io_key

        def get_department(id,create=False):
            # root dep: name,parent=None,depth=0
            # fwd-ref: name,parent=None,depth=None
            dep = dquery.filter_by(**id).first()
            if not dep and create and not dryrun:
                # create forward reference w/o depth, calculate it later
                dep = Department(parent=None,depth=None,**id)
                session.add(dep)
            return dep

        if depfile and import_dep and self.Department: # import separate department data
            columns = list(self.department_columns)+['parent']
            if allfields: reqcolumns = columns
            else: reqcolumns = ('name','parent')
            reader = DataTable(columns,coltypes=self.department_types,required=reqcolumns,
                dataformat='department',fileformat=format,version=self.version,gpg=self.gpg)
            count = 0
            reader.open(depfile,'r',encrypt=decrypt,sign=verify)
            columns = reader.get_columns()[0]
            seen = set() # detect duplicates
            for data in reader:
                if not 'depth' in columns: data['depth']=None
                # find the existing parent by its identifier; if it does not exist, create a forward reference
                if not data['parent'] is None:
                    data['parent'] = get_department({depprimary:data['parent']},create=True)
                # find existing dep by id
                id = data[depprimary]
                if not id:
                    self.warn("identifier missing")
                    continue
                assert not id in seen, "department %s is duplicate" % id
                seen.add(id)
                dep = get_department({depprimary:id})
                if dep:
                    parent = data['parent']
                    while parent:
                        assert parent!=dep, "cycle detected for department %s" % id
                        parent = parent.parent
                # derive the depth from the parent's depth when known; create the department below if it does not exist yet
                depth = data['parent'].depth if data['parent'] else None
                if not depth is None: depth+=1
                if data['depth'] is None: data['depth'] = depth
                elif not depth is None: assert data['depth']>=depth, "invalid depth"
                count += 1
                if dryrun: continue
                if not dep: session.add(Department(**data))
                else: dep.update(**data)
            self.info('%i imported departments', count)
        elif not import_dep: # implicit
            dquery.update(dict(depth=None)) # reset all depths

        Member = self.Member
        columns = list(self.member_columns)+['departments']
        if allfields: reqcolumns = columns
        else:
            reqcolumns = ['uuid','email']
            if self.member_no: reqcolumns.append('id')
        if not import_dep and 'departments' in self.member_import and not 'parent' in columns:
            # implicit requires parent
            columns.append('parent')
            reqcolumns.append('parent')
        reader = DataTable(columns,coltypes=self.member_types,required=reqcolumns,
            dataformat='member',fileformat=format,version=self.version,gpg=self.gpg)
        count = 0
        reader.open(memberfile,'r',encrypt=decrypt,sign=verify)
        columns = reader.get_columns()[0]
        # the primary key is either the member number ('id') or the uuid
        # it must exist for every member and be unique
        # the combination of primary key and email must also be unique
        if self.member_no:
            primary, primarykey = 'id', Member.id
        else:
            primary, primarykey = 'uuid', Member.uuid
        mquery = session.query(Member)
        seen, depseen = set(), set() # detect duplicates
        for data in reader:
            id = data[primary]
            if not id:
                self.warn("uuid missing")
                continue
            assert not id in seen, "member %s is duplicate" % id
            seen.add(id)
            if 'departments' in data:
                depids = data['departments']
                if import_dep: # all departments must exist
                    deps=set()
                    for depid in depids:
                        dep = get_department({depprimary:depid})
                        assert dep, "unknown department %s" % depid
                        deps.add(dep)
                    deps = list(deps)
                else: # implicit departments
                    assert len(depids)==1, "only single implicit department supported"
                    depid = depids[0]
                    # build the department tree from the department name and its parent
                    # find the existing parent by name; if it does not exist, create a forward reference
                    parent = data['parent']
                    del data['parent']
                    if not parent is None:
                        parent = get_department({'name':parent},create=True)
                    # find existing dep by id
                    dep = get_department({'name':depid})
                    # if the department does not exist it is created below; its depth is completed later
                    if dep:
                        sup = parent
                        while sup:
                            assert sup!=dep, "cycle detected for department %s" % depid
                            sup = sup.parent
                    depseen.add(depid)
                    if not dryrun:
                        if not dep:
                            dep = Department(name=depid,parent=parent,depth=None)
                            session.add(dep)
                        elif dep.parent != parent: # if parent changed and not seen, update
                            assert not depid in depseen, "department %s is duplicate" % depid
                            dep.update(parent=parent)
                    deps = [dep]
                data['departments'] = deps
            if 'email' in columns and not data['email']:
                data['email'] = None # make sure it's None
            # find existing member
            member = mquery.filter(primarykey==id).first()
            if data['email'] and not (member and member.email==data['email']):
                # email already used by other member?
                if session.query(primarykey,Member.email).filter_by(email=data['email']).first():
                    self.error("ignoring: duplicate email %s" % data['email'])
                    continue
            count += 1
            if dryrun: continue
            if member:
                if member.email != data['email']:
                    self.email_change(member,data)
                if data.get('status') == 'deleted':
                    self.delete_member(member)
                member.update(**data)
            else: session.add(Member(**data)) #new
        self.info('%i imported members', count)
        if sync: # delete unseen members
            count = 0
            for member in session.query(Member).yield_per(1000):
                if member.status==MStatusType.deleted or member.uuid in seen: continue
                self.delete_member(member)
                member.status = MStatusType.deleted
                self.info("member %s deleted" % member.uuid)
                count += 1
            self.info('%i deleted members', count)
        # complete missing depths
        depalias = aliased(Department)
        fixdeps = session.query(Department.depth).filter_by(depth=None)
        if fixdeps.first(): # any departments with missing depths?
            fixdeps.filter_by(parent=None).update(dict(depth=0)) # set roots
            while fixdeps.first(): # fill from roots to leaves
                for sub in session.query(Department).join(depalias,Department.parent).\
                    filter(depalias.depth!=None).yield_per(1000):
                    sub.depth = sub.parent.depth+1
        if not dryrun: session.commit()
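
The depth-completion loop at the end of this importer is effectively a breadth-first fill: roots get depth 0, and any department whose parent already has a depth receives the parent's depth plus one, until no depth is missing. The following is a minimal sketch of the same idea over a plain parent map; the data structure and function name are illustrative and not taken from the original code.

def complete_depths(parents):
    """parents maps a department id to its parent id (None for roots); returns id -> depth."""
    depths = {dep: 0 for dep, parent in parents.items() if parent is None}
    while len(depths) < len(parents):
        progressed = False
        for dep, parent in parents.items():
            # assign a depth as soon as the parent's depth is known
            if dep not in depths and parent in depths:
                depths[dep] = depths[parent] + 1
                progressed = True
        if not progressed:
            break  # cycle or dangling parent reference: stop instead of looping forever
    return depths

# example: complete_depths({'root': None, 'europe': 'root', 'berlin': 'europe'})
# -> {'root': 0, 'europe': 1, 'berlin': 2}
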
Example No. 9
 def import_invitations(self,
                        input,
                        decrypt=False,
                        verify=False,
                        allfields=False,
                        dryrun=False,
                        format='csv'):
     from ekklesia.data import DataTable
     from datetime import datetime
     session = self.session
     Invitation = self.Invitation
     membercls = self.member_class
     columns = self.invite_columns
     if membercls: columns = list(columns)+['uuid']
     if allfields: reqcolumns = columns
     elif membercls: reqcolumns = ['uuid']
     else: reqcolumns = ['uuid', 'email']
     reader = DataTable(columns,
                        coltypes=self.invite_types,
                        required=reqcolumns,
                        dataformat='invitation',
                        fileformat=format,
                        version=self.version,
                        gpg=self.gpg)
     reader.open(input, 'r', encrypt=decrypt, sign=verify)
     columns, tmp = reader.get_columns()
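     # stamp lastchange ourselves only when the input has a status column but no lastchange column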
     recordchange = 'status' in columns and not 'lastchange' in columns
     iquery = session.query(Invitation)
     count = 0
     seen = set()
     for data in reader:
         uuid = data['uuid']
         if not uuid:
             self.warn("uuid missing")
             continue
         assert not uuid in seen, "member %s is duplicate" % uuid
         seen.add(uuid)
         if membercls:
             member = session.query(membercls).filter_by(uuid=uuid).first()
             if member is None:
                 self.warn("uuid %s not found" % uuid)
                 continue
             if not member.email:  # email removed, disable invitation
                 if member.invitation:
                     self.info(
                         "scheduling invitation for uuid '%s' for deletion",
                         member.uuid)
                     if not dryrun:
                         member.invitation.status = StatusType.deleted
                         member.invitation.lastchange = datetime.utcnow()
                 continue
             count += 1
             if dryrun: continue
             if member.invitation is None:  # create a new invitation
                 session.add(Invitation(member=member, **data))  #new
             else:
                 if recordchange and data['status'] != member.invitation.status:
                     data['lastchange'] = datetime.utcnow()
                 member.invitation.__init__(**data)  #update inv
         else:
             inv = iquery.filter_by(uuid=uuid).first()
             if not data['email']:  # email removed, disable invitation
                 if inv is None:
                     self.warn("uuid %s not found" % uuid)
                     continue
                 if inv.status == StatusType.deleted: continue
                 self.info(
                     "scheduling invitation for uuid '%s' for deletion",
                     inv.uuid)
                 if not dryrun:
                     inv.status = StatusType.deleted
                     inv.lastchange = datetime.utcnow()
                 continue
             # check whether email already used
             if not inv or inv.email != data['email']:
                 # fixme: what if emails swapped?
                 email = iquery.filter_by(email=data['email']).first()
                 if email and (not inv or inv.uuid != email.uuid):
                     self.error("ignoring: duplicate email %s" %
                                data['email'])
                     continue
             count += 1
             if dryrun: continue
             if inv:
                 # if email changed and code has been sent, reset invcode and lastchange, unless allfields is set
                 needreset = not allfields and inv.status==StatusType.uploaded and \
                      inv.sent==SentStatusType.sent and 'email' in data and data['email']!=inv.email and \
                      (not 'code' in data or data['code']==inv.code)
                 if not needreset and recordchange and data['status'] != inv.status:
                     data['lastchange'] = datetime.utcnow()
                 inv.__init__(**data)  #update inv
                 if needreset: inv.reset()
             else:
                 session.add(Invitation(**data))  #new
     self.info('%i imported invitations', count)
     if not dryrun: session.commit()
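
The needreset condition above decides when an invitation whose code has already been mailed out must be reset because the member's email address changed in the import. Below is a standalone restatement of that rule as a sketch; the two constants stand in for the StatusType/SentStatusType values used above and the function itself is not part of the original code.

# placeholder values standing in for StatusType.uploaded and SentStatusType.sent
UPLOADED, SENT = 'uploaded', 'sent'

def needs_reset(inv, data, allfields=False):
    """True when a sent invitation should get a fresh code after an email change."""
    return (not allfields                      # a full restore never triggers a reset
            and inv.status == UPLOADED         # the code has been uploaded ...
            and inv.sent == SENT               # ... and actually sent by mail
            and 'email' in data and data['email'] != inv.email      # the email changed
            and ('code' not in data or data['code'] == inv.code))   # and the import brings no new code of its own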