Example #1
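# The pattern below recurs throughout these examples: find_objectmap(root) locates the
# Substance D object map for the resource tree, get_extent() enumerates the oids of all
# content of a given type, and object_for(oid) resolves each oid back to its persistent
# resource so it can be fixed up in place.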
def evolve(root):
    logger.info(
        'Running substanced evolve step 7: reset all blob mimetypes '
        'to nominal USE_MAGIC value'
        )
    if magic:
        objectmap = find_objectmap(root)
        if objectmap is not None:
            oids = objectmap.get_extent(get_dotted_name(File))
            if oids is not None:
                for oid in oids:
                    f = objectmap.object_for(oid)
                    try:
                        if f.get_size():
                            blob = f.blob
                            fp = blob.open('r')
                            for chunk in chunks(fp):
                                m = magic.Magic(mime=True)
                                mimetype = m.from_buffer(chunk)
                                f.mimetype = mimetype
                                break
                    except POSKeyError:
                        logger.error(
                            'Missing blob for file %s, overwriting with '
                            'empty blob' % resource_path(f)
                            )
                        f.blob = Blob()
                        f.mimetype = 'application/octet-stream'
Example #2
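# Unlike the evolve step above, this one keys get_extent() by the registry's factory type
# for the 'Catalog' content type rather than by a dotted class name, then updates and
# reindexes the indexes of every catalog it resolves.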
def update_catalogs_evolve(root, registry):
    # commit first to be sure deferred indexing actions coming from previous
    # evolve steps are executed before we modify catalogs' indexes
    transaction.commit()
    # code taken from substanced/catalog/subscribers.py:on_startup
    request = get_current_request()
    request.root = root  # needed when executing the step via sd_evolve script
    objectmap = find_objectmap(root)
    if objectmap is not None:
        content = registry.content
        factory_type = content.factory_type_for_content_type('Catalog')
        oids = objectmap.get_extent(factory_type)
        for oid in oids:
            catalog = objectmap.object_for(oid)
            if catalog is not None:
                try:
                    catalog.update_indexes(
                        registry=registry,
                        reindex=True
                        )
                except ComponentLookupError:
                    # could not find a catalog factory
                    pass

    log.info('catalogs updated and new indexes reindexed')
Example #3
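    # SDI folder-contents "move" handler: the oids of the items selected for the move are
    # read back from the session and resolved to resources with objectmap.object_for()
    # before move_here_if_addable() relocates each one.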
    def move_finish(self):
        request = self.request
        context = self.context
        objectmap = find_objectmap(context)
        tomove = request.session['tomove']
        del request.session['tomove']

        num_moved = 0

        try:
            for oid in tomove:
                obj = objectmap.object_for(oid)
                moved = self.move_here_if_addable(obj)
                if moved:
                    num_moved += 1
        except FolderKeyError as e:
            self.request.sdiapi.flash(e.args[0], 'danger')
            raise self.get_redirect_response()

        ungettext = request.localizer.pluralize
        msg = ungettext('Moved ${num} item',
                        'Moved ${num} items',
                        num_moved,
                        domain="substanced",
                        mapping=dict(num=num_moved))
        request.sdiapi.flash_with_undo(msg, 'success')
        return self.get_redirect_response()
Example #4
def evolve(root, registry):
    logger.info('Running substanced evolve step 7: reset all blob mimetypes '
                'to nominal USE_MAGIC value')
    if magic:
        objectmap = find_objectmap(root)
        if objectmap is not None:
            oids = objectmap.get_extent(get_dotted_name(File))
            if oids is not None:
                for oid in oids:
                    f = objectmap.object_for(oid)
                    try:
                        if f.get_size():
                            blob = f.blob
                            fp = blob.open('r')
                            for chunk in chunks(fp):
                                m = magic.Magic(mime=True)
                                mimetype = m.from_buffer(chunk)
                                f.mimetype = mimetype
                                break
                    except POSKeyError:
                        logger.error(
                            'Missing blob for file %s, overwriting with '
                            'empty blob' % resource_path(f))
                        f.blob = Blob()
                        f.mimetype = 'application/octet-stream'
Example #5
    def copy_finish(self):
        request = self.request
        context = self.context
        objectmap = find_objectmap(context)
        tocopy = request.session['tocopy']
        del request.session['tocopy']

        num_copied = 0

        try:
            for oid in tocopy:
                obj = objectmap.object_for(oid)
                copied = self.move_here_if_addable(obj, copy=True)
                if copied:
                    num_copied += 1
        except FolderKeyError as e:
            self.request.sdiapi.flash(e.args[0], 'danger')
            raise self.get_redirect_response()

        if num_copied == 0:
            msg = 'No items copied'
            request.sdiapi.flash(msg, 'warning')
        elif num_copied == 1:
            msg = 'Copied 1 item'
            request.sdiapi.flash_with_undo(msg, 'success')
        else:
            msg = 'Copied %s items' % num_copied
            request.sdiapi.flash_with_undo(msg, 'success')

        return self.get_redirect_response()
Example #6
    def move_finish(self):
        request = self.request
        context = self.context
        objectmap = find_objectmap(context)
        tomove = request.session['tomove']
        del request.session['tomove']

        num_moved = 0

        try:
            for oid in tomove:
                obj = objectmap.object_for(oid)
                moved = self.move_here_if_addable(obj)
                if moved:
                    num_moved += 1
        except FolderKeyError as e:
            self.request.session.flash(e.args[0], 'error')
            raise self.get_redirect_response()

        if num_moved == 0:
            msg = 'No items moved'
            request.session.flash(msg)
        elif num_moved == 1:
            msg = 'Moved 1 item'
            request.sdiapi.flash_with_undo(msg)
        else:
            msg = 'Moved %s items' % num_moved
            request.sdiapi.flash_with_undo(msg)

        return self.get_redirect_response()
Example #7
    def move_finish(self):
        request = self.request
        context = self.context
        objectmap = find_objectmap(context)
        tomove = request.session['tomove']
        del request.session['tomove']

        num_moved = 0

        try:
            for oid in tomove:
                obj = objectmap.object_for(oid)
                moved = self.move_here_if_addable(obj)
                if moved:
                    num_moved += 1
        except FolderKeyError as e:
            self.request.sdiapi.flash(e.args[0], 'danger')
            raise self.get_redirect_response()

        ungettext = request.localizer.pluralize
        msg = ungettext('Moved ${num} item',
                        'Moved ${num} items',
                        num_moved,
                        domain="substanced",
                        mapping=dict(num=num_moved))
        request.sdiapi.flash_with_undo(msg, 'success')
        return self.get_redirect_response()
Example #8
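# Console script that reindexes catalogs: it parses its options, bootstraps the Pyramid
# environment from a config URI, walks every Catalog oid in the objectmap extent, and
# calls reindex() on each catalog, optionally filtered by path regex, index names, or
# catalog paths/names.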
def main():
    parser = OptionParser(description=__doc__)
    parser.add_option('-d', '--dry-run', dest='dry_run',
        action="store_true", default=False,
        help="Don't commit the transactions")
    parser.add_option('-i', '--interval', dest='commit_interval',
        action="store", default=3000,
        help="Commit every N transactions")
    parser.add_option('-p', '--path', dest='path',
        action="store", default=None, metavar='EXPR',
        help="Reindex only objects whose path matches a regular expression")
    parser.add_option('-n', '--index', dest='indexes',
        action="append", help="Reindex only the given index (can be repeated)")
    parser.add_option('-c', '--catalog', dest='catalog_specs', action="append",
        help=("Reindex only the catalog provided (may be a path or a name "
              "and may be specified multiple times)"))

    options, args = parser.parse_args()

    if args:
        config_uri = args[0]
    else:
        parser.error("Requires a config_uri as an argument")

    commit_interval = int(options.commit_interval)
    if options.path:
        path_re = re.compile(options.path)
    else:
        path_re = None

    kw = {}
    if options.indexes:
        kw['indexes'] = options.indexes

    setup_logging(config_uri)
    env = bootstrap(config_uri)
    site = env['root']
    registry = env['registry']

    kw['registry'] = registry

    objectmap = find_objectmap(site)

    catalog_oids = objectmap.get_extent(get_dotted_name(Catalog))

    for oid in catalog_oids:

        catalog = objectmap.object_for(oid)

        path = resource_path(catalog)

        if options.catalog_specs:

            if (path not in options.catalog_specs and
                    catalog.__name__ not in options.catalog_specs):
                continue

        catalog.reindex(path_re=path_re, commit_interval=commit_interval,
                        dry_run=options.dry_run, **kw)
Example #9
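    # Lazy oid resolver: the object map of the current request's root is captured once
    # and its object_for method is stored, so oids from the origin sequence can be
    # resolved to resources on demand.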
    def __init__(self, origin, object_type, state=None):
        self._origin = origin
        self._state = state or []
        self._origin_iter = None
        self._finished = False
        objectmap = find_objectmap(get_current_request().root)
        self.resolver = objectmap.object_for
        self.object_type = object_type
Example #10
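# Long-running worker: it blocks on a Redis list for newly retimed songs, aborts the
# current transaction to get a fresh view of the ZODB, resolves the queued oid to a song
# via objectmap.object_for(), and requeues the work item if the blob is not committed yet
# or if retiming fails.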
def main(argv=sys.argv):
    def usage(msg):
        print(msg)
        sys.exit(2)
    description = "Handle new lyric retimings as they are made."
    parser = optparse.OptionParser(
        "usage: %prog config_uri",
        description=description
    )
    opts, args = parser.parse_args(argv[1:])
    try:
        config_uri = args[0]
    except IndexError:
        usage('Requires a config_uri as an argument')

    setup_logging(config_uri)
    env = bootstrap(config_uri)
    root = env['root']
    redis = get_redis(env['request'])
    objectmap = find_objectmap(root)
    while True:
        logger.info('Waiting for another retiming')
        oidandtime = redis.blpop('yss.new-retimings', 0)[1] # blocking pop
        oidandtime = oidandtime.decode('utf-8')
        try:
            oid, enqueued = oidandtime.rsplit('|', 1)
        except ValueError:
            oid = int(oidandtime)
            enqueued = time.time()
        else:
            oid = int(oid)
            enqueued = float(enqueued)
        time.sleep(1)
        transaction.abort()
        song = objectmap.object_for(oid)
        if song is None:
            logger.warning(f'Could not find {oid}')

        else:
            progress_key = f'retimeprogress-{oid}'
            try:
                if not bool(song.retiming_blob):
                    # not committed yet
                    redis.rpush('yss.new-retimings', oidandtime)
                else:
                    retime(song, redis, env)
            except SystemExit:
                redis.persist(progress_key) # clear only on good
                redis.rpush('yss.new-retimings', oidandtime)
                raise
            except:
                redis.hmset(
                    progress_key,
                    {'pct':-1, 'status':'Retiming failed; unexpected error'}
                )
                redis.persist(progress_key) # clear only on good
                redis.rpush('yss.new-retimings', oidandtime)
                raise
Example #11
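# Converts each PathIndex's _not_indexed set into an IF.TreeSet from the object map's
# BTree family, which keeps the index's pickle small (see the inline comment).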
def treesetify_catalog_pathindexes(root):  # pragma: no cover
    # to avoid having huge pickles
    objectmap = find_objectmap(root)

    index_oids = objectmap.get_extent(get_dotted_name(PathIndex))

    for oid in index_oids:
        pathindex = objectmap.object_for(oid)
        pathindex._not_indexed = objectmap.family.IF.TreeSet(pathindex._not_indexed)
Example #12
def treesetify_catalog_pathindexes(root):  # pragma: no cover
    # to avoid having huge pickles
    objectmap = find_objectmap(root)

    index_oids = objectmap.get_extent(get_dotted_name(PathIndex))

    for oid in index_oids:
        pathindex = objectmap.object_for(oid)
        pathindex._not_indexed = objectmap.family.IF.TreeSet(
            pathindex._not_indexed)
Example #13
def evolve(root):
    logger.info(
        'Running substanced evolve step 4: add an extentmap to the objectmap')
    objectmap = find_objectmap(root)
    if objectmap is not None:
        objectmap.extentmap = ExtentMap()
        for oid in objectmap.objectid_to_path:
            obj = objectmap.object_for(oid, root)
            logger.info('Adding oid %s to extentmap' % oid)
            if obj is not None:
                objectmap.extentmap.add(obj, oid)
Example #14
def evolve(root, registry):
    logger.info(
        'Running substanced evolve step 6: files should not have USE_MAGIC '
        'as a mimetype or any other non-string value')
    objectmap = find_objectmap(root)
    if objectmap is not None:
        oids = objectmap.get_extent(get_dotted_name(File))
        if oids is not None:
            for oid in oids:
                f = objectmap.object_for(oid)
                if type(f.mimetype) not in string_types:
                    f.mimetype = 'application/octet-stream'
Example #15
def evolve(root):
    logger.info("Running substanced evolve step 2: add PRINCIPAL_TO_ACL_BEARING " "relationships")
    objectmap = find_objectmap(root)
    if objectmap is None:
        return
    for obj in postorder(root):
        logger.info("Substanced evolve step 2: trying %s" % (obj,))
        acl = getattr(obj, "__acl__", _marker)
        if acl is _marker:
            continue
        for princid in _referenceable_principals(acl):
            objectmap.connect(princid, obj, PrincipalToACLBearing)
Example #16
def evolve(root):
    logger.info(
        'Running substanced evolve step 4: add an extentmap to the objectmap'
        )
    objectmap = find_objectmap(root)
    if objectmap is not None:
        objectmap.extentmap = ExtentMap()
        for oid in objectmap.objectid_to_path:
            obj = objectmap.object_for(oid, root)
            logger.info('Adding oid %s to extentmap' % oid)
            if obj is not None:
                objectmap.extentmap.add(obj, oid)
Example #17
def evolve(root):
    logger.info(
        'Running substanced evolve step 6: files should not have USE_MAGIC '
        'as a mimetype or any other non-string value'
        )
    objectmap = find_objectmap(root)
    if objectmap is not None:
        oids = objectmap.get_extent(get_dotted_name(File))
        if oids is not None:
            for oid in oids:
                f = objectmap.object_for(oid)
                if type(f.mimetype) not in string_types:
                    f.mimetype = 'application/octet-stream'
Example #18
def evolve(root):
    logger.info(
        'Running substanced evolve step 9: reindex all allowed indices '
        'due to change in discriminator principal repr')

    site = root

    objectmap = find_objectmap(site)

    index_oids = objectmap.get_extent(get_dotted_name(AllowedIndex))

    for oid in index_oids:
        index = objectmap.object_for(oid)
        catalog = find_interface(index, Catalog)
        catalog.reindex(indexes=(index.__name__, ))
Example #19
def evolve(root):
    logger.info(
        'Running substanced evolve step 5: remove None as default for index '
        'action mode (MODE_ATCOMMIT should be implicit default)')
    objectmap = find_objectmap(root)
    if objectmap is not None:
        oids = objectmap.get_extent(get_dotted_name(Catalog))
        for oid in oids:
            catalog = objectmap.object_for(oid)
            if catalog is not None:
                for index in catalog.values():
                    # wake up index via getattr first
                    if (index.action_mode is None
                            and 'action_mode' in index.__dict__):
                        del index.action_mode
Example #20
def evolve(root):
    logger.info(
        'Running substanced evolve step 9: reindex all allowed indices '
        'due to change in discriminator principal repr'
        )

    site = root

    objectmap = find_objectmap(site)

    index_oids = objectmap.get_extent(get_dotted_name(AllowedIndex))

    for oid in index_oids:
        index = objectmap.object_for(oid)
        catalog = find_interface(index, Catalog)
        catalog.reindex(indexes=(index.__name__,))
Example #21
def evolve(root):
    logger.info(
        'Running substanced evolve step 5: remove None as default for index '
        'action mode (MODE_ATCOMMIT should be implicit default)'
        )
    objectmap = find_objectmap(root)
    if objectmap is not None:
        oids = objectmap.get_extent(get_dotted_name(Catalog))
        for oid in oids:
            catalog = objectmap.object_for(oid)
            if catalog is not None:
                for index in catalog.values():
                    # wake up index via getattr first
                    if (index.action_mode is None and
                        'action_mode' in index.__dict__):
                        del index.action_mode
Example #22
def evolve(root):
    logger.info(
        'Running substanced evolve step 2: add PRINCIPAL_TO_ACL_BEARING '
        'relationships')
    objectmap = find_objectmap(root)
    if objectmap is None:
        return
    for obj in postorder(root):
        logger.info('Substanced evolve step 2: trying %s' % (obj, ))
        acl = getattr(obj, '__acl__', _marker)
        if acl is _marker:
            continue
        for princid in _referenceable_principals(acl):
            objectmap.connect(
                princid,
                obj,
                PrincipalToACLBearing,
            )
Example #23
def ideas_folder_columns(folder, subobject, request, default_columnspec):
    subobject_name = getattr(subobject, '__name__', str(subobject))
    objectmap = find_objectmap(folder)

    #user_oid = getattr(subobject, '__creator__', None)
    created = getattr(subobject, '__created__', None)
    modified = getattr(subobject, '__modified__', None)
    #if user_oid is not None:
    #    user = objectmap.object_for(user_oid)
    #    user_name = getattr(user, '__name__', 'anonymous')
    #else:
    #    user_name = 'anonymous'
    if created is not None:
        created = created.isoformat()
    if modified is not None:
        modified = modified.isoformat()
    return default_columnspec + [
        {
            'name': 'Title',
            'field': 'title',
            'value': getattr(subobject, 'title', ''),
            'sortable': True,
            'formatter': 'icon_label_url',
        },
        {
            'name': 'Created',
            'field': 'created',
            'value': created,
            'sortable': True,
            'formatter': 'date',
        },
        #         {'name': 'Last edited',
        #         'field': 'modified',
        #         'value': modified,
        #         'sortable': True,
        #         'formatter': 'date',
        #         },
        {
            'name': 'Author',
            'field': 'author',
            'value': getattr(subobject, 'author', ''),
            'sortable': True,
        }
    ]
Example #24
def ideas_folder_columns(folder, subobject, request, default_columnspec):
    subobject_name = getattr(subobject, '__name__', str(subobject))
    objectmap = find_objectmap(folder)
    
    #user_oid = getattr(subobject, '__creator__', None)
    created = getattr(subobject, '__created__', None)
    modified = getattr(subobject, '__modified__', None)
    #if user_oid is not None:
    #    user = objectmap.object_for(user_oid)
    #    user_name = getattr(user, '__name__', 'anonymous')
    #else:
    #    user_name = 'anonymous'
    if created is not None:
        created = created.isoformat()
    if modified is not None:
        modified = modified.isoformat()
    return default_columnspec + [
        {'name': 'Title',
        'field': 'title',
        'value': getattr(subobject, 'title', ''),
        'sortable': True,
        'formatter': 'icon_label_url',
        },
        {'name': 'Created',
        'field': 'created',
        'value': created,
        'sortable': True,
        'formatter': 'date',
        },
#         {'name': 'Last edited',
#         'field': 'modified',
#         'value': modified,
#         'sortable': True,
#         'formatter': 'date',
#         },
        {'name': 'Author',
        'field': 'author',
        'value': getattr(subobject, 'author', ''),
        'sortable': True,
        }
        ]
Example #25
def binder_columns(folder, subobject, request, default_columnspec):
    subobject_name = getattr(subobject, '__name__', str(subobject))
    objectmap = find_objectmap(folder)
    user_oid = getattr(subobject, '__creator__', None)
    created = getattr(subobject, '__created__', None)
    modified = getattr(subobject, '__modified__', None)
    if user_oid is not None:
        user = objectmap.object_for(user_oid)
        user_name = getattr(user, '__name__', 'anonymous')
    else:
        user_name = 'anonymous'
    if created is not None:
        created = created.isoformat()
    if modified is not None:
        modified = modified.isoformat()

    def make_sorter(index_name):
        def sorter(folder, resultset, limit=None, reverse=False):
            index = find_index(folder, 'sdidemo', index_name)
            if index is None:
                return resultset
            return resultset.sort(index, limit=limit, reverse=reverse)

        return sorter

    return default_columnspec + [
        {
            'name': 'Title',
            'value': getattr(subobject, 'title', subobject_name),
            'sorter': make_sorter('title'),
        }, {
            'name': 'Modified Date',
            'value': modified,
            'sorter': make_sorter('modified'),
            'formatter': 'date',
        }, {
            'name': 'Creator',
            'value': user_name,
        }
    ]
Example #26
def update_catalogs_evolve(root, registry):
    # commit first to be sure deferred indexing actions coming from previous
    # evolve steps are executed before we modify catalogs' indexes
    transaction.commit()
    # code taken from substanced/catalog/subscribers.py:on_startup
    request = get_current_request()
    request.root = root  # needed when executing the step via sd_evolve script
    objectmap = find_objectmap(root)
    if objectmap is not None:
        content = registry.content
        factory_type = content.factory_type_for_content_type('Catalog')
        oids = objectmap.get_extent(factory_type)
        for oid in oids:
            catalog = objectmap.object_for(oid)
            if catalog is not None:
                try:
                    catalog.update_indexes(registry=registry, reindex=True)
                except ComponentLookupError:
                    # could not find a catalog factory
                    pass

    log.info('catalogs updated and new indexes reindexed')
Example #27
def binder_columns(folder, subobject, request, default_columnspec):
    subobject_name = getattr(subobject, '__name__', str(subobject))
    objectmap = find_objectmap(folder)
    user_oid = getattr(subobject, '__creator__', None)
    created = getattr(subobject, '__created__', None)
    modified = getattr(subobject, '__modified__', None)
    if user_oid is not None:
        user = objectmap.object_for(user_oid)
        user_name = getattr(user, '__name__', 'anonymous')
    else:
        user_name = 'anonymous'
    if created is not None:
        created = created.isoformat()
    if modified is not None:
        modified = modified.isoformat()

    def make_sorter(index_name):
        def sorter(folder, resultset, limit=None, reverse=False):
            index = find_index(folder, 'sdidemo', index_name)
            if index is None:
                return resultset
            return resultset.sort(index, limit=limit, reverse=reverse)
        return sorter

    return default_columnspec + [
        {'name': 'Title',
        'value': getattr(subobject, 'title', subobject_name),
        'sorter': make_sorter('title'),
        },
        {'name': 'Modified Date',
        'value': modified,
        'sorter':make_sorter('modified'),
        'formatter': 'date',
        },
        {'name': 'Creator',
        'value': user_name,
        }
        ]
Example #28
    def _folder_contents(
            self,
            start=None,
            end=None,
            reverse=None,
            sort_column_name=None,
            filter_values=(),
    ):
        """
        Returns a dictionary containing:

        ``length``

          The folder's length (ie. `len(folder)`)
          
        ``records``

          A sequence of dictionaries that represent the folder's subobjects.
          The sequence is implemented as a generator.  Each dictionary in the
          ``records`` sequence reflects information about a single subobject in
          the folder, and will have the following keys:

          ``name``

            The name of the subobject.

          ``url``

            The URL to the subobject.  This will be
            ``/path/to/subob/@@manage_main``.

          ``columns``

            The column values obtained from this subobject's attributes, as
            defined by the ``columns`` content-type hook (or the default
            columns, if no hook was supplied).
          
        ``sort_column_name``

          The current sort_column_name

        ``sort_reverse``

          True if the current sort should be reversed.

        ``columns``

          A sequence of column header values.
        
        XXX TODO Document ``sort_column_name``, ``reverse``, and
        ``filter_values`` arguments.  Document ``columns`` return value.
        """
        folder = self.context
        request = self.request
        objectmap = find_objectmap(folder)

        if start is None:
            start = 0

        if end is None:
            end = start + self.minimum_load

        q = self.get_query()

        columns = self.get_columns(None)

        for name, value in filter_values:
            if name:
                for col in columns:
                    if col['name'] == name:
                        filt = col.get('filter')
                        if filt is not None:
                            q = filt(folder, value, q)
            else:
                q = self._global_text_filter(folder, value, q)

        resultset = q.execute()
        # NB: must take snapshot of folder_length before limiting the length
        # of the resultset via any sort
        folder_length = len(resultset)

        sort_info = self._sort_info(
            columns,
            sort_column_name=sort_column_name,
        )

        sorter = sort_info['sorter']
        sort_column_name = sort_info['column_name']
        if reverse is None:
            reverse = False
            column = sort_info['column']
            if column:
                reverse = column.get('initial_sort_reverse', False)

        if sorter is not None:
            resultset = sorter(folder, resultset, reverse=reverse, limit=end)

        ids = resultset.ids

        buttons = self.get_buttons()
        show_checkbox_column = self.show_checkbox_column(
            buttons, columns, resultset)

        records = []

        for oid in itertools.islice(ids, start, end):
            resource = objectmap.object_for(oid)
            name = getattr(resource, '__name__', '')
            record = dict(
                # Use the unique name as an id.  (A unique row id is needed
                # for slickgrid.  In addition, we will pass back this same id
                # from the client, when a row is selected for an operation.)
                id=name,
                name=name,
            )
            cols = self.get_columns(resource)
            for col in cols:
                # XXX CM: adding arbitrary keys to the record based on
                # configuration input is a bad idea here because we can't
                # guarantee a column name won't override the "reserved" names
                # (name, id) added to the record above.  Ree?
                cname = col['name']
                record[cname] = col['value']
            disable = []
            for button_group in buttons:
                for button in button_group['buttons']:
                    if 'enabled_for' not in button:
                        continue
                    condition = button['enabled_for']
                    if not callable(condition):
                        continue
                    if not condition(folder, resource, request):
                        disable.append(button['id'])
            record['disable'] = disable
            records.append(record)

        return {
            'length': folder_length,
            'records': records,
            'sort_column_name': sort_column_name,
            'sort_reverse': reverse,
            'columns': columns,
            'show_checkbox_column': show_checkbox_column,
        }
Example #29
    def __init__(self, origin, object_type):
        super(ResolverLazyList, self).__init__(origin, state=None)
        objectmap = find_objectmap(get_current_request().root)
        self.resolver = objectmap.object_for
        self.object_type = object_type
Example #30
    def _folder_contents(
        self,
        start=None,
        end=None,
        reverse=None,
        sort_column_name=None,
        filter_values=(),
        ):

        """
        Returns a dictionary containing:

        ``length``

          The folder's length (ie. `len(folder)`)
          
        ``records``

          A sequence of dictionaries that represent the folder's subobjects.
          The sequence is implemented as a generator.  Each dictionary in the
          ``records`` sequence reflects information about a single subobject in
          the folder, and will have the following keys:

          ``name``

            The name of the subobject.

          ``url``

            The URL to the subobject.  This will be
            ``/path/to/subob/@@manage_main``.

          ``columns``

            The column values obtained from this subobject's attributes, as
            defined by the ``columns`` content-type hook (or the default
            columns, if no hook was supplied).
          
        ``sort_column_name``

          The current sort_column_name

        ``sort_reverse``

          True if the current sort should be reversed.

        ``columns``

          A sequence of column header values.
        
        XXX TODO Document ``sort_column_name``, ``reverse``, and
        ``filter_values`` arguments.  Document ``columns`` return value.
        """
        folder = self.context
        request = self.request
        objectmap = find_objectmap(folder)

        if start is None:
            start = 0

        if end is None:
            end = start + 40

        q = self.get_query()

        columns = self.get_columns(None)
        
        for name, value in filter_values:
            if name:
                for col in columns:
                    if col['name'] == name:
                        filt = col.get('filter')
                        if filt is not None:
                            q = filt(folder, value, q)
            else:
                q = self._global_text_filter(folder, value, q)

        resultset = q.execute()
        # NB: must take snapshot of folder_length before limiting the length
        # of the resultset via any sort
        folder_length = len(resultset)

        sort_info = self._sort_info(
            columns,
            sort_column_name=sort_column_name,
            )

        sorter = sort_info['sorter']
        sort_column_name = sort_info['column_name']
        if reverse is None:
            reverse = False
            column = sort_info['column']
            if column:
                reverse = column.get('initial_sort_reverse', False)

        if sorter is not None:
            resultset = sorter(
                folder, resultset, reverse=reverse, limit=end
                )

        ids = resultset.ids

        buttons = self.get_buttons()
        show_checkbox_column = self.show_checkbox_column(
            buttons, columns, resultset)

        records = []

        for oid in itertools.islice(ids, start, end):
            resource = objectmap.object_for(oid)
            name = getattr(resource, '__name__', '')
            record = dict(
                # Use the unique name as an id.  (A unique row id is needed
                # for slickgrid.  In addition, we will pass back this same id
                # from the client, when a row is selected for an operation.)
                id=name,
                name=name,
                )
            cols = self.get_columns(resource)
            for col in cols:
                # XXX CM: adding arbitrary keys to the record based on
                # configuration input is a bad idea here because we can't
                # guarantee a column name won't override the "reserved" names
                # (name, id) added to the record above.  Ree?
                cname = col['name']
                record[cname] = col['value']
            disable = []
            for button_group in buttons:
                for button in button_group['buttons']:
                    if 'enabled_for' not in button:
                        continue
                    condition = button['enabled_for']
                    if not callable(condition):
                        continue
                    if not condition(folder, resource, request):
                        disable.append(button['id'])
            record['disable'] = disable
            records.append(record)

        return {
            'length':folder_length,
            'records':records,
            'sort_column_name':sort_column_name,
            'sort_reverse':reverse,
            'columns':columns,
            'show_checkbox_column':show_checkbox_column,
            }
Example #31
def main(argv=sys.argv):
    def usage(msg):
        print(msg)
        sys.exit(2)
    description = "Mix new recordings as they are made."
    parser = optparse.OptionParser(
        "usage: %prog config_uri",
        description=description
    )
    opts, args = parser.parse_args(argv[1:])
    try:
        config_uri = args[0]
    except IndexError:
        usage('Requires a config_uri as an argument')

    setup_logging(config_uri)
    env = bootstrap(config_uri)
    root = env['root']
    redis = get_redis(env['request'])
    objectmap = find_objectmap(root)
    while True:
        logger.info('Waiting for another recording')
        oidandtime = redis.blpop('yss.new-recordings', 0)[1] # blocking pop
        oidandtime = oidandtime.decode('utf-8')
        try:
            oid, enqueued = oidandtime.rsplit('|', 1)
        except ValueError:
            oid = int(oidandtime)
            enqueued = time.time()
        else:
            oid = int(oid)
            enqueued = float(enqueued)
        logger.info(f'Received request for {oid}')
        time.sleep(0.25)
        transaction.abort()
        recording = objectmap.object_for(oid)
        if recording is None:
            logger.warning(f'Could not find {oid}')
        else:
            try:
                if not bool(recording.dry_blob):
                    logger.warning(f'not committed yet: {recording.__name__}')
                    redis.rpush('yss.new-recordings', oidandtime)
                else:
                    logger.info(f'Processing {oid} enqueued at {enqueued}')
                    postprocess(recording, redis, env)
                    end = time.time()
                    logger.info(
                        f'Time from enqeue-to-done for {oid}: {end-enqueued}')
            except UnrecoverableError:
                logger.warning(
                    f'Unrecoverable error when processing {oid}',
                    exc_info=True
                )
                redis.hmset(
                    recording.mixprogress_key,
                    {'pct':-1, 'status':'Unrecoverable error'}
                )
            except:
                logger.warning(
                    f'Unexpected error when processing {oid}',
                    exc_info=True
                )
                redis.hmset(
                    recording.mixprogress_key,
                    {'pct':-1, 'status':'Mix failed; unexpected error'}
                )
                redis.persist(recording.mixprogress_key) # clear only on good
                redis.rpush('yss.new-recordings', oidandtime)
                raise
Example #32
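    # Home-page view update: gathers published SmartFolder blocks, queries recent content
    # per folder through the 'lac' catalog's release_date index, and resolves the
    # resulting oids to resources with objectmap.object_for before rendering.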
    def update(self):
        if self.request.POST and 'login_form.submitted' in self.request.POST:
            log_result = self.login()
            if not isinstance(log_result, dict):
                return log_result

        self.execute(None)
        site = get_site_folder(True)
        self.title = site.title
        site_id = get_oid(site)
        user = get_current()
        folders = find_entities(
            interfaces=[ISmartFolder],
            metadata_filter={'states': ['published']},
            force_local_control=True)
        my_folders = []
        if self.request.user:
            my_folders = getattr(user, 'folders', [])
            my_folders = [folder for folder in my_folders
                          if isinstance(folder, SmartFolder) and
                          not folder.parents and
                          'private' in folder.state]

        folders = [folder for folder in folders
                   if not folder.parents and
                   getattr(folder, 'add_as_a_block', False)]
        folders.extend(my_folders)
        foldersdata = []
        old_date = datetime.datetime.now(tz=pytz.UTC) - datetime.timedelta(
            days=getattr(site, 'days_visibility', DEFAULT_DAYS_VISIBILITY))
        old_date = old_date.replace(tzinfo=pytz.UTC)
        lac_catalog = find_catalog('lac')
        release_date_index = lac_catalog['release_date']
        query = release_date_index.ge(old_date)
        content_types = getattr(site, 'home_content_types',
                                ['review', 'cinema_review',
                                 'brief', 'interview'])
        for folder in folders:
            all_folders = [folder]
            all_folders.extend(folder.all_sub_folders('published'))
            contents_oids = set()
            for sub_folder in all_folders:
                result_set = get_folder_content(
                    sub_folder, user,
                    sort_on='release_date',
                    reverse=True,
                    limit=MORE_NB,
                    add_query=query,
                    metadata_filter={'content_types': content_types,
                                     'states': ['published']}
                    )
                contents_oids |= set(result_set.ids)

            if contents_oids:
                contents_oids = release_date_index.sort(
                    contents_oids, reverse=True, limit=MORE_NB)
                objectmap = find_objectmap(get_current_request().root)
                resolver = objectmap.object_for
                contents = [resolver(oid) for oid in contents_oids]
                foldersdata.append({'folder': folder,
                                    'contents': contents,
                                    'order': folder.get_order(site_id)})

        foldersdata = sorted(foldersdata, key=lambda e: e['order'])
        result = {}
        values = {'folders': foldersdata,
                  'content_types': content_types,
                  'row_len': math.ceil(len(foldersdata)/2)}
        body = self.content(args=values, template=self.template)['body']
        item = self.adapt_item(body, self.viewid)
        result['coordinates'] = {self.coordinates: [item]}
        result = merge_dicts(self.requirements_copy, result)
        return result
Example #33
    def update(self):
        if self.request.POST and 'login_form.submitted' in self.request.POST:
            log_result = self.login()
            if not isinstance(log_result, dict):
                return log_result

        self.execute(None)
        site = get_site_folder(True)
        self.title = site.title
        site_id = get_oid(site)
        user = get_current()
        folders = find_entities(interfaces=[ISmartFolder],
                                metadata_filter={'states': ['published']},
                                force_local_control=True)
        my_folders = []
        if self.request.user:
            my_folders = getattr(user, 'folders', [])
            my_folders = [
                folder for folder in my_folders
                if isinstance(folder, SmartFolder) and not folder.parents
                and 'private' in folder.state
            ]

        folders = [
            folder for folder in folders
            if not folder.parents and getattr(folder, 'add_as_a_block', False)
        ]
        folders.extend(my_folders)
        foldersdata = []
        old_date = datetime.datetime.now(tz=pytz.UTC) - datetime.timedelta(
            days=getattr(site, 'days_visibility', DEFAULT_DAYS_VISIBILITY))
        old_date = old_date.replace(tzinfo=pytz.UTC)
        lac_catalog = find_catalog('lac')
        release_date_index = lac_catalog['release_date']
        query = release_date_index.ge(old_date)
        content_types = getattr(
            site, 'home_content_types',
            ['review', 'cinema_review', 'brief', 'interview'])
        for folder in folders:
            all_folders = [folder]
            all_folders.extend(folder.all_sub_folders('published'))
            contents_oids = set()
            for sub_folder in all_folders:
                result_set = get_folder_content(sub_folder,
                                                user,
                                                sort_on='release_date',
                                                reverse=True,
                                                limit=MORE_NB,
                                                add_query=query,
                                                metadata_filter={
                                                    'content_types':
                                                    content_types,
                                                    'states': ['published']
                                                })
                contents_oids |= set(result_set.ids)

            if contents_oids:
                contents_oids = release_date_index.sort(contents_oids,
                                                        reverse=True,
                                                        limit=MORE_NB)
                objectmap = find_objectmap(get_current_request().root)
                resolver = objectmap.object_for
                contents = [resolver(oid) for oid in contents_oids]
                foldersdata.append({
                    'folder': folder,
                    'contents': contents,
                    'order': folder.get_order(site_id)
                })

        foldersdata = sorted(foldersdata, key=lambda e: e['order'])
        result = {}
        values = {
            'folders': foldersdata,
            'content_types': content_types,
            'row_len': math.ceil(len(foldersdata) / 2)
        }
        body = self.content(args=values, template=self.template)['body']
        item = self.adapt_item(body, self.viewid)
        result['coordinates'] = {self.coordinates: [item]}
        result = merge_dicts(self.requirements_copy, result)
        return result
Example #34
    def objectmap(self):
        return find_objectmap(self)