Example 1
def create_file_versions(previous_node, node, current_version_node, transaction):
    """Create file and node_to_file version changes from diff between files of `previous_node` and `node`.
    `node` must be a the version node that follows `previous_node`.
    `current_version_node` is the newest version node following `previous_node` and `node`
    """
    
    NodeToFileVersion = version_class(NodeToFile)
    FileVersion = version_class(File)
    s = db.session
    
    new_files = set(node.files) - set(previous_node.files)
    removed_files = set(previous_node.files) - set(node.files)
    
    for fi in new_files:
        fv = FileVersion(id=fi.id,
                         path=fi.path,
                         mimetype=fi.mimetype,
                         filetype=fi.filetype,
                         operation_type=Operation.INSERT,
                         transaction=transaction)

        ntfv = NodeToFileVersion(nid=current_version_node.id, 
                                 file_id=fi.id,
                                 operation_type=Operation.INSERT,
                                 transaction=transaction)
        s.add(fv)
        s.add(ntfv)

    for fi in removed_files:
        fv = FileVersion(id=fi.id,
                         path=fi.path,
                         mimetype=fi.mimetype,
                         filetype=fi.filetype,
                         operation_type=Operation.DELETE,
                         transaction=transaction)
        
        # previous_file_version must be the file version with the highest transaction id
        previous_file_version = q(FileVersion).filter_by(id=fi.id).order_by(FileVersion.transaction_id.desc()).limit(1).scalar()
        if previous_file_version is not None:
            previous_file_version.end_transaction_id = transaction.id

        ntfv = NodeToFileVersion(nid=current_version_node.id, 
                                 file_id=fi.id,
                                 operation_type=Operation.DELETE,
                                 transaction=transaction)

        # previous_node_to_file_version must be the node_to_file version with the highest transaction id
        previous_node_to_file_version = (q(NodeToFileVersion).filter_by(nid=current_version_node.id, file_id=fi.id)
                                                             .order_by(NodeToFileVersion.transaction_id.desc()).limit(1).scalar())
        if previous_node_to_file_version is not None:
            previous_node_to_file_version.end_transaction_id = transaction.id
        s.add(fv)
        s.add(ntfv)
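
A hedged usage sketch for `create_file_versions` above, showing how the three node arguments described in its docstring might relate: version nodes are walked from oldest to newest and the file changes of each consecutive pair are recorded against the newest version node. The driver loop and the `ordered_version_nodes` name are assumptions for illustration, not part of the original code.

# Hypothetical driver loop; `ordered_version_nodes` is assumed to hold the
# node's version nodes sorted from oldest to newest.
Transaction = versioning_manager.transaction_cls
current_version_node = ordered_version_nodes[-1]
for previous_node, node in zip(ordered_version_nodes, ordered_version_nodes[1:]):
    tx = Transaction()  # one versioning transaction per migrated version step
    db.session.add(tx)
    create_file_versions(previous_node, node, current_version_node, tx)
db.session.flush()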
Example 2
def iter_attr_edits(cls, attrs, query=None):
    pk_cols = [k for k in inspect(cls).primary_key]

    cls_version = version_class(cls)
    pk_cols_version = [getattr(cls_version, k.name) for k in pk_cols]
    attrs_version = [getattr(cls_version, a) for a in attrs]
    cls_transaction = transaction_class(cls)

    if query is None:
        query = cls_version.query

    all_versions = (query.join(cls_version.transaction).with_entities(
        *pk_cols_version + attrs_version +
        [cls_transaction.issued_at]).order_by(*pk_cols_version +
                                              [cls_version.transaction_id]))

    def get_pk(row):
        return [getattr(row, k.name) for k in pk_cols_version]

    for pk, versions in groupby(all_versions, get_pk):
        # We don't yet process inserts/deletes, but should
        first = next(versions)
        attr_vals = {a: getattr(first, a) for a in attrs}
        attr_times = {a: [first.issued_at] for a in attrs}
        for version in versions:
            for attr in attrs:
                val = getattr(version, attr)
                if val != attr_vals[attr]:
                    attr_times[attr].append(version.issued_at)
                    attr_vals[attr] = val

        yield (pk, attr_times)
Example 3
def iter_attr_edits(cls, attrs, query=None):
    pk_cols = [k for k in inspect(cls).primary_key]

    cls_version = version_class(cls)
    pk_cols_version = [getattr(cls_version, k.name) for k in pk_cols]
    attrs_version = [getattr(cls_version, a) for a in attrs]
    cls_transaction = transaction_class(cls)

    if query is None:
        query = cls_version.query

    all_versions = query.join(cls_version.transaction) \
                        .with_entities(*pk_cols_version + attrs_version + [cls_transaction.issued_at]) \
                        .order_by(*pk_cols_version + [cls_version.transaction_id])

    def get_pk(row):
        return [getattr(row, k.name) for k in pk_cols_version]

    for pk, versions in groupby(all_versions, get_pk):
        # We don't yet process inserts/deletes, but should
        first = next(versions)
        attr_vals = {a: getattr(first, a) for a in attrs}
        attr_times = {a: [first.issued_at] for a in attrs}
        for version in versions:
            for attr in attrs:
                val = getattr(version, attr)
                if val != attr_vals[attr]:
                    attr_times[attr].append(version.issued_at)
                    attr_vals[attr] = val

        yield (pk, attr_times)
Example 4
 def get_published_version(self):
     Transaction = versioning_manager.transaction_cls
     TransactionMeta = versioning_manager.transaction_meta_cls
     version_cls = version_class(self.__class__)
     published_versions = self.versions.join(Transaction, version_cls.transaction_id == Transaction.id).\
             join(Transaction.meta_relation).filter(TransactionMeta.key == u"publish")
     return published_versions.scalar()
Example 5
            def __enter__(self):
                self.session = s = object_session(node)
                if s.new or s.dirty:
                    raise Exception("Refusing to create a new tagged node version. Session must be clean!")

                uow = versioning_manager.unit_of_work(s)
                tx = uow.create_transaction(s)

                if user is not None:
                    tx.user = user

                if tag:
                    if node.get_tagged_version(tag):
                        raise ValueError("tag already exists")
                    tx.meta[u"tag"] = tag
                elif publish:
                    if node.get_published_version():
                        raise ValueError("publish version already exists")
                    tx.meta[u"publish"] = publish
                else:
                    NodeVersion = version_class(node.__class__)
                    # in case you were wondering: order_by(None) resets the default order_by
                    last_tagged_version = node.tagged_versions.order_by(None).order_by(NodeVersion.transaction_id.desc()).first()
                    if last_tagged_version is not None:
                        next_version = int(last_tagged_version.tag) + 1
                    else:
                        node.versions[-1].tag = u"1"
                        next_version = 2

                    tx.meta[u"tag"] = unicode(next_version)

                if comment:
                    tx.meta[u"comment"] = comment

                return tx
Example 6
def create_current_version(current_version_node):
    s = db.session
    Transaction = versioning_manager.transaction_cls
    tx = Transaction()
    version_id = current_version_node.system_attrs[u"version.id"]
    operation_type = Operation.UPDATE
    TransactionMeta = versioning_manager.transaction_meta_cls
    tx.meta_relation[u"tag"] = TransactionMeta(key=u"tag", value=unicode(version_id))
    tx.meta_relation[u"mysql_migration"] = TransactionMeta(key=u"mysql_migration", value=u"migrated current node version")

    if u"version.comment" in current_version_node.system_attrs:
        tx.meta_relation[u"comment"] = TransactionMeta(key=u"comment", value=current_version_node.system_attrs[u"version.comment"])

    s.add(tx)
    NodeVersion = version_class(Node)
    nv = NodeVersion(id=current_version_node.id,
                     name=current_version_node.name,
                     type=current_version_node.type,
                     schema=current_version_node.schema,
                     attrs=current_version_node.attrs,
                     orderpos=current_version_node.orderpos,
                     transaction=tx,
                     operation_type=operation_type)

    s.add(nv)
    s.flush()
    return nv
Example 7
def _send_thumbnail(thumb_type, req):
    try:
        nid = node_id_from_req_path(req)
    except ValueError:
        return 400

    version_id = version_id_from_req(req)

    node_or_version = get_node_or_version(nid, version_id, Data)

    if not node_or_version.has_read_access():
        return 404

    FileVersion = version_class(File)
    if version_id:
        version = node_or_version
        files = version.files.filter_by(
            filetype=thumb_type, transaction_id=version.transaction_id).all()
        if not files:
            # files may be empty if only metadata changed in this version;
            # then try earlier transaction_ids
            files = version.files.filter(FileVersion.filetype==thumb_type, FileVersion.transaction_id<=version.transaction_id). \
                order_by(FileVersion.transaction_id.desc())
        for f in files:
            if f.exists:
                return _request_handler.sendFile(req, f.abspath, f.mimetype)

        ntype, schema = version.type, version.schema
    else:
        # no version id given
        # XXX: better to use scalar(), but we must ensure that we have no dupes first
        node = node_or_version
        for f in node.files.filter_by(filetype=thumb_type):
            if f.exists:
                return _request_handler.sendFile(req, f.abspath, f.mimetype)

        try:
            ntype, schema = node.type, node.schema
        except NoResultFound:
            return 404

    for p in _request_handler.getFileStorePaths("/img/"):
        for test in [
                "default_thumb_%s_%s.*" % (ntype, schema),
                "default_thumb_%s.*" % schema,
                "default_thumb_%s.*" % ntype
        ]:
            fps = glob.glob(os.path.join(p, test))
            if fps:
                thumb_mimetype, thumb_type = utils.utils.getMimeType(fps[0])
                return _request_handler.sendFile(req,
                                                 fps[0],
                                                 thumb_mimetype,
                                                 force=1)

    return _request_handler.sendFile(req,
                                     config.basedir +
                                     "/web/img/questionmark.png",
                                     "image/png",
                                     force=1)
Example 8
def get_table_version(db: Session) -> int:
    version = version_class(AssociationTypePhPWDB)
    last_changed = db.query(version).order_by(desc(version.transaction_id)).limit(1)
    if last_changed.count() > 0:
        return last_changed[0].transaction_id
    else:
        return -1
Example 9
def test_old_node_version_support(content_node_versioned_with_alias_id):
    """Tests the q(Node).get hack (in MtQuery)"""
    node = content_node_versioned_with_alias_id
    q = db.query
    version = q(Node).get(23)
    version_cls = version_class(node.__class__)
    assert isinstance(version, version_cls)
    assert version.orderpos == 23
Example 10
 def tagged_versions(self):
     Transaction = versioning_manager.transaction_cls
     TransactionMeta = versioning_manager.transaction_meta_cls
     version_cls = version_class(self.__class__)
     return (self.versions.join(
         Transaction, version_cls.transaction_id == Transaction.id).join(
             Transaction.meta_relation).filter(
                 TransactionMeta.key == u"tag"))
Example 11
 def refresh(self, node):
     """Return a refreshed copy of `node`.
     Workaround for Node objects which are kept between requests.
     XXX: must be removed later
     """
     from .node import Node
     NodeVersion = version_class(Node)
     if isinstance(node, NodeVersion):
         return self.session.query(NodeVersion).get((node.id, node.transaction.id))
     else:
         return self.session.query(Node).get(node.id)
Example 12
            def _calculate_tx_id(self, obj):
                session = sa.orm.object_session(self)
                if obj:
                    object_version = version_obj(session, obj)
                    if object_version:
                        return object_version.transaction_id

                    version_cls = version_class(obj.__class__)
                    return session.query(
                        sa.func.max(version_cls.transaction_id)).filter(
                            version_cls.id == obj.id).scalar()
Example 13
    def get_export_data(cls):
        if cls.__name__ == 'Payment':
            # Export stats for each payment type separately
            return {}

        purchase_counts = cls.query.outerjoin(cls.purchases).group_by(cls.id).with_entities(func.count(models.Ticket.id))
        refund_counts = cls.query.outerjoin(cls.refunds).group_by(cls.id).with_entities(func.count(Refund.id))

        cls_version = version_class(cls)
        cls_transaction = transaction_class(cls)
        changes = cls.query.join(cls.versions).group_by(cls.id)
        change_counts = changes.with_entities(func.count(cls_version.id))
        first_changes = changes.join(cls_version.transaction) \
                               .with_entities(func.min(cls_transaction.issued_at).label('created')) \
                               .from_self()

        cls_ver_new = aliased(cls.versions)
        cls_ver_paid = aliased(cls.versions)
        cls_txn_new = aliased(cls_version.transaction)
        cls_txn_paid = aliased(cls_version.transaction)
        active_time = func.max(cls_txn_paid.issued_at) - func.max(cls_txn_new.issued_at)
        active_times = cls.query \
            .join(cls_ver_new, cls_ver_new.id == cls.id) \
            .join(cls_ver_paid, cls_ver_paid.id == cls.id) \
            .join(cls_txn_new, cls_txn_new.id == cls_ver_new.transaction_id) \
            .join(cls_txn_paid, cls_txn_paid.id == cls_ver_paid.transaction_id) \
            .filter(cls_ver_new.state == 'new') \
            .filter(cls_ver_paid.state == 'paid') \
            .with_entities(active_time.label('active_time')) \
            .group_by(cls.id)

        time_buckets = [timedelta(0), timedelta(minutes=1), timedelta(hours=1)] + \
                       [timedelta(d) for d in [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 28, 60]]

        data = {
            'public': {
                'payments': {
                    'counts': {
                        'purchases': bucketise(purchase_counts, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 20]),
                        'refunds': bucketise(refund_counts, [0, 1, 2, 3, 4]),
                        'changes': bucketise(change_counts, range(10)),
                        'created_week': export_intervals(first_changes, column('created'), 'week', 'YYYY-MM-DD'),
                        'active_time': bucketise([r.active_time for r in active_times], time_buckets),
                        'amounts': bucketise(cls.query.with_entities(cls.amount_int / 100), [0, 10, 20, 30, 40, 50, 100, 150, 200]),
                    },
                },
            },
            'tables': ['payment', 'payment_version'],
        }

        count_attrs = ['state', 'reminder_sent', 'currency']
        data['public']['payments']['counts'].update(export_attr_counts(cls, count_attrs))

        return data
Example 14
    def get(self, ident):
        nodeclass = self._find_nodeclass()
        if not nodeclass:
            return Query.get(self, ident)
        active_version = Query.get(self, ident)
        Transaction = versioning_manager.transaction_cls
        if active_version is None:
            ver_cls = version_class(nodeclass)
            return (self.session.query(ver_cls).join(Transaction, ver_cls.transaction_id == Transaction.id)
                    .join(Transaction.meta_relation)
                    .filter_by(key=u'alias_id', value=unicode(ident)).scalar())

        return active_version
Example 15
def _send_thumbnail(thumb_type, req):
    try:
        nid = node_id_from_req_path(req)
    except ValueError:
        return 400


    version_id = version_id_from_req(req)

    node_or_version = get_node_or_version(nid, version_id, Data)

    if not node_or_version.has_read_access():
        return 404

    FileVersion = version_class(File)
    if version_id:
        version = node_or_version
        files = version.files.filter_by(filetype=thumb_type, transaction_id=version.transaction_id).all()
        if not files:
            # files may be empty if only metadata changed in this version;
            # then try earlier transaction_ids
            files = version.files.filter(FileVersion.filetype==thumb_type, FileVersion.transaction_id<=version.transaction_id). \
                order_by(FileVersion.transaction_id.desc())
        for f in files:
            if f.exists:
                return req.sendFile(f.abspath, f.mimetype)

        ntype, schema = version.type, version.schema
    else:
        # no version id given
        # XXX: better to use scalar(), but we must ensure that we have no dupes first
        node = node_or_version
        for f in node.files.filter_by(filetype=thumb_type):
            if f.exists:
                return req.sendFile(f.abspath, f.mimetype)

        try:
            ntype, schema = node.type, node.schema
        except NoResultFound:
            return 404

    for p in athana.getFileStorePaths("/img/"):
        for test in ["default_thumb_%s_%s.*" % (ntype, schema),
                     "default_thumb_%s.*" % schema,
                     "default_thumb_%s.*" % ntype]:
            fps = glob.glob(os.path.join(p, test))
            if fps:
                thumb_mimetype, thumb_type = utils.utils.getMimeType(fps[0])
                return req.sendFile(fps[0], thumb_mimetype, force=1)

    return req.sendFile(config.basedir + "/web/img/questionmark.png", "image/png", force=1)
Example 16
    def get(self, ident):
        nodeclass = self._find_nodeclass()
        if not nodeclass:
            return Query.get(self, ident)
        else:
            nodeclass = nodeclass[0]
        active_version = Query.get(self, ident)
        Transaction = versioning_manager.transaction_cls
        if active_version is None:
            ver_cls = version_class(nodeclass)
            return (self.session.query(ver_cls).join(Transaction, ver_cls.transaction_id == Transaction.id)
                    .join(Transaction.meta_relation)
                    .filter_by(key=u'alias_id', value=unicode(ident)).scalar())

        return active_version
Example 17
            def __enter__(self):
                self.session = s = object_session(node)
                if s.new or s.dirty:
                    raise Exception(
                        "Refusing to create a new tagged node version. Session must be clean!"
                    )

                uow = versioning_manager.unit_of_work(s)
                tx = uow.create_transaction(s)

                if user is not None:
                    tx.user = user

                if tag:
                    if node.get_tagged_version(tag):
                        raise ValueError("tag already exists")
                    tx.meta[u"tag"] = tag
                elif publish:
                    if node.get_published_version():
                        raise ValueError("publish version already exists")
                    tx.meta[u"publish"] = publish
                else:
                    NodeVersion = version_class(node.__class__)
                    # in case you were wondering: order_by(None) resets the default order_by
                    last_tagged_version = node.tagged_versions.order_by(
                        None).order_by(
                            NodeVersion.transaction_id.desc()).first()
                    if last_tagged_version is not None:
                        next_version = int(last_tagged_version.tag) + 1
                    else:
                        node.versions[-1].tag = u"1"
                        next_version = 2

                    tx.meta[u"tag"] = unicode(next_version)

                if comment:
                    tx.meta[u"comment"] = comment

                return tx
Example 18
def _send_file_with_type(filetype, mimetype, req, checkonly=False):
    try:
        nid = node_id_from_req_path(req)
    except ValueError:
        return 400

    version_id = version_id_from_req(req)
    node = get_node_or_version(nid, version_id, Content)

    if node is None or not node.has_data_access():
        return 404

    fileobj = None
    file_query = node.files.filter_by(filetype=filetype)
    # if version_id == u"published":
    if version_id:
        file_query = file_query.filter_by(transaction_id=node.transaction_id)
        fileobj = file_query.scalar()
        # fileobj may be None if in this version only metadata changed
        # then try previous transaction_ids
        if not fileobj:
            FileVersion = version_class(File)
            # this can be a long-running query
            file_query = node.files.filter_by(filetype=filetype)
            fileobj = file_query.filter(FileVersion.transaction_id <= node.transaction_id).\
                order_by(FileVersion.transaction_id.desc()).first()
    if mimetype:
        file_query = file_query.filter_by(mimetype=mimetype)

    if not fileobj:
        fileobj = file_query.scalar()
    if fileobj is not None:
        if checkonly:
            return 200
        return _request_handler.sendFile(req, fileobj.abspath,
                                         fileobj.mimetype)

    return 404
Example 19
def _send_file_with_type(filetype, mimetype, req, checkonly=False):
    try:
        nid = node_id_from_req_path(req)
    except ValueError:
        return 400

    version_id = version_id_from_req(req)
    node = get_node_or_version(nid, version_id, Content)

    if node is None or not node.has_data_access():
        return 404

    fileobj = None
    file_query = node.files.filter_by(filetype=filetype)
    # if version_id == u"published":
    if version_id:
        file_query = file_query.filter_by(transaction_id=node.transaction_id)
        fileobj = file_query.scalar()
        # fileobj may be None if in this version only metadata changed
        # then try previous transaction_ids
        if not fileobj:
            FileVersion = version_class(File)
            # this can be a long-running query
            file_query = node.files.filter_by(filetype=filetype)
            fileobj = file_query.filter(FileVersion.transaction_id <= node.transaction_id).\
                order_by(FileVersion.transaction_id.desc()).first()
    if mimetype:
        file_query = file_query.filter_by(mimetype=mimetype)

    if not fileobj:
        fileobj = file_query.scalar()
    if fileobj is not None:
        if checkonly:
            return 200
        return req.sendFile(fileobj.abspath, fileobj.mimetype)

    return 404
Example 20
    def run(self):
        # As we go, we check against the list of all tables, in case we forget about some
        # new object type (e.g. association table).

        # Exclude tables we know will never be exported
        ignore = ['alembic_version', 'transaction']

        all_model_classes = {cls for cls in db.Model._decl_class_registry.values()
                             if isinstance(cls, type) and issubclass(cls, db.Model)}

        all_version_classes = {version_class(c) for c in all_model_classes if is_versioned(c)}

        seen_model_classes = set()
        remaining_tables = set(db.metadata.tables)

        year = datetime.utcnow().year
        path = os.path.join('exports', str(year))
        for dirname in ['public', 'private']:
            os.makedirs(os.path.join(path, dirname), exist_ok=True)

        for model_class in all_model_classes:
            if model_class in seen_model_classes:
                continue

            seen_model_classes.add(model_class)

            table = model_class.__table__.name
            model = model_class.__name__

            if table in ignore:
                app.logger.debug('Ignoring %s', model)
                remaining_tables.remove(table)
                continue

            if not getattr(model_class, '__export_data__', True):
                # We don't remove the version table, as we want
                # to be explicit about chucking away edit stats
                app.logger.debug('Skipping %s', model)
                remaining_tables.remove(table)
                continue

            if model_class in all_version_classes:
                # Version tables are explicitly dumped by their parents,
                # as they don't make sense to be exported on their own
                app.logger.debug('Ignoring version model %s', model)
                continue

            if hasattr(model_class, 'get_export_data'):
                try:
                    export = model_class.get_export_data()
                    for dirname in ['public', 'private']:
                        if dirname in export:
                            filename = os.path.join(path, dirname, '{}.json'.format(model))
                            simplejson.dump(export[dirname], open(filename, 'w'), indent=4, cls=ExportEncoder)
                            app.logger.info('Exported data from %s to %s', model, filename)

                except Exception as e:
                    app.logger.error('Error exporting %s', model)
                    raise

                exported_tables = export.get('tables', [table])
                remaining_tables -= set(exported_tables)

        if remaining_tables:
            app.logger.warning('Remaining tables: %s', ', '.join(remaining_tables))

        data = {
            'timestamp': datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'),
            'remaining_tables': sorted(list(remaining_tables))
        }
        filename = os.path.join(path, 'export.json')
        simplejson.dump(data, open(filename, 'w'), indent=4, cls=ExportEncoder)

        with app.test_client() as client:
            for schedule in ['schedule.frab', 'schedule.json', 'schedule.ics']:
                resp = client.get('/{}'.format(schedule))
                with open(os.path.join(path, 'public', schedule), 'wb') as f:
                    f.write(resp.data)

        app.logger.info('Export complete, summary written to %s', filename)
Example 21
 def tagged_versions(self):
     Transaction = versioning_manager.transaction_cls
     TransactionMeta = versioning_manager.transaction_meta_cls
     version_cls = version_class(self.__class__)
     return (self.versions.join(Transaction, version_cls.transaction_id == Transaction.id).join(Transaction.meta_relation).
             filter(TransactionMeta.key == u"tag"))
Example 22
 def select_style_link(self, style):
     version = self._node.tag if isinstance(self._node, version_class(Node)) else None
     return node_url(self.id, version=version, style=style)
Example 23
def filebrowser(node, req):
    filesize = 0
    ret = list()
    if isinstance(node, Node):
        file_entity = File
    else:
        file_entity = version_class(File)

    paths = [
        t[0] for t in node.files.with_entities(file_entity.path).filter_by(
            filetype=u"attachment")
    ]

    if len(paths) == 1 and os.path.isdir(
            config.get("paths.datadir") + paths[0]):
        # single file with no path
        path = paths[0]
    elif len(paths) > 0:
        # some single files
        files = []
        for path in paths:
            file = {}
            if not os.path.isdir(config.get("paths.datadir") + path):  # file
                file["mimetype"], file["type"] = getMimeType(
                    config.get("paths.datadir") + path)
                icon = fileicons.get(file["mimetype"])
                if not icon:
                    icon = fileicons["other"]

                file["icon"] = icon
                file["path"] = path
                file["name"] = os.path.basename(path)
                if os.path.exists(config.get("paths.datadir") + path):
                    size = os.path.getsize(config.get("paths.datadir") + path)
                else:
                    size = 0
                file["size"] = format_filesize(size)
                filesize += int(size)
                files.append(file)

        return files, filesize
    else:
        path = ""

    if path == "":
        # no attachment directory -> test for single file

        for f in node.files.filter(
                ~file_entity.filetype.in_(node.get_sys_filetypes())):
            file = {}
            file["mimetype"], file["type"] = getMimeType(f.getName())
            file["icon"] = fileicons[file["mimetype"]]
            file["path"] = f.path
            file["name"] = f.base_name
            file["size"] = format_filesize(f.size)
            filesize += f.size
            ret.append(file)
        return ret, filesize

    if not path.endswith("/") and not req.params.get("path",
                                                     "").startswith("/"):
        path += "/"
    path += req.params.get("path", "")

    if req.params.get("path", "") != "":
        file = {}
        file["type"] = "back"
        file["mimetype"] = "back"
        file["icon"] = fileicons[file["mimetype"]]
        file["name"] = ".."
        file["path"] = req.params.get("path", "")
        file["req_path"] = req.params.get(
            "path", "")[:req.params.get("path", "").rfind("/")]
        ret.append(file)

    for name in os.listdir(config.settings["paths.datadir"] + path + "/"):

        if name.endswith(".thumb") or name.endswith(".thumb2"):
            continue
        file = {}

        file_path = os.path.join(config.settings["paths.datadir"] + path, name)
        if os.path.isdir(file_path):
            # directory
            file["type"] = "dir"
            file["mimetype"] = "directory"
        else:
            # file
            file["mimetype"], file["type"] = getMimeType(name)
            file["size"] = format_filesize(os.path.getsize(file_path))
            filesize += os.path.getsize(file_path)

        file["icon"] = fileicons[file["mimetype"]]
        file["path"] = os.path.join(path, name)
        file["name"] = name
        file["req_path"] = req.params.get("path", "") + "/" + file["name"]
        ret.append(file)

    return ret, format_filesize(filesize)
Example 24
def export_db():
    """ Export data from the DB to disk.

    This command is run as a last step before wiping the DB after an event, to export
    all the data we want to save. It saves a private and a public export to the
    exports directory.

    Model classes should implement get_export_data, which returns a dict with keys:
        public   Public data to save in git
        private  Private data that should be stored for a limited amount of time
        tables   Tables this method exported, used to sanity check the export process

    Alternatively, add __export_data__ = False to a class to state that get_export_data
    shouldn't be called, and that its associated table doesn't need to be checked.
    """

    # As we go, we check against the list of all tables, in case we forget about some
    # new object type (e.g. association table).

    # Exclude tables we know will never be exported
    ignore = ["alembic_version", "transaction"]

    all_model_classes = {
        cls
        for cls in db.Model._decl_class_registry.values()
        if isinstance(cls, type) and issubclass(cls, db.Model)
    }

    all_version_classes = {
        version_class(c)
        for c in all_model_classes if is_versioned(c)
    }

    seen_model_classes = set()
    remaining_tables = set(db.metadata.tables)

    year = datetime.utcnow().year
    path = os.path.join("exports", str(year))
    for dirname in ["public", "private"]:
        os.makedirs(os.path.join(path, dirname), exist_ok=True)

    for model_class in all_model_classes:
        if model_class in seen_model_classes:
            continue

        seen_model_classes.add(model_class)

        table = model_class.__table__.name
        model = model_class.__name__

        if table in ignore:
            app.logger.debug("Ignoring %s", model)
            remaining_tables.remove(table)
            continue

        if not getattr(model_class, "__export_data__", True):
            # We don't remove the version table, as we want
            # to be explicit about chucking away edit stats
            app.logger.debug("Skipping %s", model)
            remaining_tables.remove(table)
            continue

        if model_class in all_version_classes:
            # Version tables are explicitly dumped by their parents,
            # as they don't make sense to be exported on their own
            app.logger.debug("Ignoring version model %s", model)
            continue

        if hasattr(model_class, "get_export_data"):
            try:
                export = model_class.get_export_data()
                for dirname in ["public", "private"]:
                    if dirname in export:
                        filename = os.path.join(path, dirname,
                                                "{}.json".format(model))
                        simplejson.dump(
                            export[dirname],
                            open(filename, "w"),
                            indent=4,
                            cls=ExportEncoder,
                        )
                        app.logger.info("Exported data from %s to %s", model,
                                        filename)

            except Exception as e:
                app.logger.error("Error exporting %s", model)
                raise

            exported_tables = export.get("tables", [table])
            remaining_tables -= set(exported_tables)

    if remaining_tables:
        app.logger.warning("Remaining tables: %s", ", ".join(remaining_tables))

    data = {
        "timestamp": datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
        "remaining_tables": sorted(list(remaining_tables)),
    }
    filename = os.path.join(path, "export.json")
    simplejson.dump(data, open(filename, "w"), indent=4, cls=ExportEncoder)

    with app.test_client() as client:
        for schedule in ["schedule.frab", "schedule.json", "schedule.ics"]:
            resp = client.get("/{}".format(schedule))
            with open(os.path.join(path, "public", schedule), "wb") as f:
                f.write(resp.data)

    app.logger.info("Export complete, summary written to %s", filename)
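
The export_db docstring above describes the get_export_data contract that model classes are expected to follow. A minimal sketch of a conforming class and of the opt-out flag, assuming hypothetical ExampleModel / ScratchData models (names and columns are illustrative only, not from the original project):

# Hypothetical model implementing the get_export_data contract described above.
class ExampleModel(db.Model):
    __tablename__ = "example_model"
    id = db.Column(db.Integer, primary_key=True)
    note = db.Column(db.String)

    @classmethod
    def get_export_data(cls):
        return {
            "public": {"row_count": cls.query.count()},          # safe to publish in git
            "private": {"notes": [m.note for m in cls.query]},   # retained only for a limited time
            "tables": ["example_model"],                         # tables covered by this export
        }

# Hypothetical model that opts out of the export and of the table sanity check.
class ScratchData(db.Model):
    __tablename__ = "scratch_data"
    __export_data__ = False
    id = db.Column(db.Integer, primary_key=True)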
Example 25
    def get_export_data(cls):
        if cls.__name__ == "Payment":
            # Export stats for each payment type separately
            return {}

        purchase_counts = (
            cls.query.outerjoin(cls.purchases)
            .group_by(cls.id)
            .with_entities(func.count(Ticket.id))
        )
        refund_counts = (
            cls.query.outerjoin(cls.refunds)
            .group_by(cls.id)
            .with_entities(func.count(Refund.id))
        )

        cls_version = version_class(cls)
        cls_transaction = transaction_class(cls)
        changes = cls.query.join(cls.versions).group_by(cls.id)
        change_counts = changes.with_entities(func.count(cls_version.id))
        first_changes = (
            changes.join(cls_version.transaction)
            .with_entities(func.min(cls_transaction.issued_at).label("created"))
            .from_self()
        )

        cls_ver_new = aliased(cls.versions)
        cls_ver_paid = aliased(cls.versions)
        cls_txn_new = aliased(cls_version.transaction)
        cls_txn_paid = aliased(cls_version.transaction)
        active_time = func.max(cls_txn_paid.issued_at) - func.max(cls_txn_new.issued_at)
        active_times = (
            cls.query.join(cls_ver_new, cls_ver_new.id == cls.id)
            .join(cls_ver_paid, cls_ver_paid.id == cls.id)
            .join(cls_txn_new, cls_txn_new.id == cls_ver_new.transaction_id)
            .join(cls_txn_paid, cls_txn_paid.id == cls_ver_paid.transaction_id)
            .filter(cls_ver_new.state == "new")
            .filter(cls_ver_paid.state == "paid")
            .with_entities(active_time.label("active_time"))
            .group_by(cls.id)
        )

        time_buckets = [timedelta(0), timedelta(minutes=1), timedelta(hours=1)] + [
            timedelta(d)
            for d in [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 28, 60]
        ]

        data = {
            "public": {
                "payments": {
                    "counts": {
                        "purchases": bucketise(
                            purchase_counts, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 20]
                        ),
                        "refunds": bucketise(refund_counts, [0, 1, 2, 3, 4]),
                        "changes": bucketise(change_counts, range(10)),
                        "created_week": export_intervals(
                            first_changes, column("created"), "week", "YYYY-MM-DD"
                        ),
                        "active_time": bucketise(
                            [r.active_time for r in active_times], time_buckets
                        ),
                        "amounts": bucketise(
                            cls.query.with_entities(cls.amount_int / 100),
                            [0, 10, 20, 30, 40, 50, 100, 150, 200],
                        ),
                    }
                }
            },
            "tables": ["payment", "payment_version"],
        }

        count_attrs = ["state", "reminder_sent", "currency"]
        data["public"]["payments"]["counts"].update(
            export_attr_counts(cls, count_attrs)
        )

        return data