Example #1
 def do(self):
     # load the previously exported archive Bag; its keys are table paths with '/' in place of '.'
     path = self.page.site.getStaticPath('page:archive_to_import',
                                         'last_archive.pik')
     archive = Bag(path)
     tables = archive.keys()
     for tbl in self.btc.thermo_wrapper(
             tables,
             maximum=len(tables),
             message=lambda item, k, m, **kwargs: '%s %i/%i' % (item, k, m),
             line_code='tables'):
         records = archive[tbl]
         if not records:
             continue
         tblobj = self.db.table(tbl.replace('/', '.'))
         # collect candidate primary keys, then drop those already present in the table
         pkeysToAdd = [r[tblobj.pkey] for r in records]
         f = tblobj.query(where='$%s IN :pkeys' % tblobj.pkey,
                          pkeys=pkeysToAdd,
                          addPkeyColumns=False,
                          excludeLogicalDeleted=False,
                          excludeDraft=False,
                          columns='$%s' % tblobj.pkey).fetch()
         pkeysToAdd = set(pkeysToAdd) - set([r[tblobj.pkey] for r in f])
         rlist = [dict(r) for r in records if r[tblobj.pkey] in pkeysToAdd]
         if rlist:
             # defer constraint checks so rows can be inserted regardless of reference order
             self.db.setConstraintsDeferred()
             tblobj.insertMany(rlist)
     self.db.commit()
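
A minimal sketch of the export side that the importer above expects: a Bag whose keys are table paths written with '/' instead of '.', each holding a list of record dicts. The helper name build_archive and the bare query() call are assumptions, not part of the original code.

 from gnr.core.gnrbag import Bag

 def build_archive(db, table_names):
     # assemble the archive Bag that the importer reads back with Bag(path)
     archive = Bag()
     for tname in table_names:
         tblobj = db.table(tname)
         # plain dicts so the importer can pass them straight to insertMany
         records = [dict(r) for r in tblobj.query().fetch()]
         archive[tname.replace('.', '/')] = records
     return archive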
Example #2
 def diskDirectory(self):
     # build a Bag of the 'showcase' sitemap entries, sorting every branch node
     pages = Bag(self.site.sitemap['showcase'])
     for k in pages.keys():
         if hasattr(pages[k], '_htraverse'):
             pages[k].sort()
     return pages
Example #3
 def cb(row):
     # rebuild the record as it was at this audit step from the stored data
     b = Bag(row['data'])
     result = dict()
     if row['version'] > 0:
         result['changed_fields'] = '<br/>'.join(b.keys())
     # drop bookkeeping columns before merging into the running snapshot
     b.pop('__ins_ts')
     b.pop('__version')
     b.pop('__mod_ts')
     self._curr_audit_record.update(b)
     result['__value__'] = self._curr_audit_record.deepcopy()
     return result
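
A hedged sketch of driving a callback like cb from the same method that defines it: it assumes the audit rows come as a list of dicts named audit_rows carrying 'data' and 'version', processed in version order, and that _curr_audit_record starts as an empty Bag; the variable name and the ordering are assumptions.

 # running snapshot that cb updates at every audit step
 self._curr_audit_record = Bag()
 # one entry per version: the changed field names plus a deep copy of the rebuilt record
 timeline = [cb(row) for row in sorted(audit_rows, key=lambda r: r['version'])]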
Example #4
    def writeTransaction(self, mode, action, maintable, data,
                         request_id=None, request_ts=None, user_id=None, session_id=None,
                         user_ip=None, queue_id=None, file_name=None):
        kwargs = {}
        trtable = self.db.table(self.transaction_tname)
        kwargs['id'] = trtable.newPkeyValue()
        kwargs['request'] = datetime.now()
        kwargs['mode'] = mode
        kwargs['action'] = action
        kwargs['maintable'] = maintable
        kwargs['request_id'] = request_id
        kwargs['request_ts'] = request_ts
        kwargs['user_id'] = user_id
        kwargs['session_id'] = session_id
        kwargs['user_ip'] = user_ip
        kwargs['queue_id'] = queue_id
        kwargs['file_name'] = file_name

        if not isinstance(data, Bag):
            data = Bag(data)

        # drop related-record ('@'-prefixed) nodes before serializing to XML
        for k in data.keys():
            if k.startswith('@'):
                data.pop(k)
        kwargs['data'] = data.toXml()

        if not request_id: # import
            trtable.insert(kwargs)
        else: # sync triggers
            prevTransactions = trtable.query(columns="$id, $error_id",
                                             where="$request_id=:request_id AND $queue_id = :queue_id",
                                             request_id=request_id, queue_id=queue_id).fetch()
            if len(prevTransactions) == 0: # normal case: this is a new transaction
                trtable.insert(kwargs)
            elif len(prevTransactions) == 1: # the transaction already exists
                if prevTransactions[0]['error_id']:
                    kwargs.pop('request') # keep the old request timestamp so the execution order is not altered
                    trtable.update(kwargs)
                    gnrlogger.warning(
                            "Replacing old wrong transaction %s with new from file %s" % (request_id, file_name))
                else:
                    gnrlogger.error("Skipping duplicated transaction %s from file %s" % (request_id, file_name))
            else:
                gnrlogger.critical("More than one old transaction with id %s from file %s" % (request_id, file_name))
                raise RuntimeError("More than one transaction with request_id %s from file %s" % (request_id, file_name))

        self.db.notify("gnr_transaction_new")
        self.db.commit()
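
A hedged example of calling writeTransaction from another method of the same object; the mode, action, table name, ids and file name below are made-up illustration values, not taken from the original code.

        record = Bag(dict(id='A001', description='updated description'))
        self.writeTransaction(mode='sync', action='U', maintable='pkg.mytable',
                              data=record,
                              request_id='req-0001', request_ts=datetime.now(),
                              user_id='admin', session_id='sess-42',
                              user_ip='127.0.0.1', queue_id='client_one',
                              file_name='sync_0001.xml')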
Example #5
 def writeSync(self, sync_out, maintable, action, record_data,
               transaction_id=None, transaction_request=None, queue_id=None):
     syncdata = {}
     syncdata['transaction_id'] = transaction_id
     syncdata['request'] = transaction_request or datetime.datetime.now()
     syncdata['action'] = action
     syncdata['maintable'] = maintable
     
     if not isinstance(record_data, Bag):
         record_data = Bag(record_data)
     # drop related-record ('@'-prefixed) nodes before serializing to XML
     for k in record_data.keys():
         if k.startswith('@'):
             record_data.pop(k)
     syncdata['data'] = record_data.toXml()
     
     # write one sync row per client queue, skipping the client the change originated from
     for sync_client in sync_out:
         if sync_client != queue_id:
             syncdata['client'] = sync_client
             self.insert(syncdata)
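
The '@'-prefix filtering used in both writeTransaction and writeSync, shown in isolation on a plain Bag; the labels and values are made up for illustration.

 from gnr.core.gnrbag import Bag

 record_data = Bag(dict(id='A001', description='first row'))
 record_data['@invoices'] = Bag(dict(count=3))    # hypothetical related-record node
 for k in record_data.keys():
     if k.startswith('@'):
         record_data.pop(k)
 print(record_data.toXml())    # only 'id' and 'description' are serialized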