Example #1
0
 def update(self):
     """Populate self.users: an email substring search when the 'search'
     request param is set, else the full user list when 'showall' is set."""
     term = self.request.params.get('search', '')
     base = Session.query(User).order_by(asc('id'))
     if term:
         pattern = '%%%s%%' % term
         self.users = list(base.filter(User.email.contains(pattern)).all())
     elif self.request.params.get('showall', ''):
         self.users = list(base.all())
Example #2
0
 def get_accounts_and_loans(self):
     """Return (and lazily cache) the accounts/loans query for this user:
     non-zero balances, outer-joined to the owning user's name and slug,
     ordered by account type then id."""
     if not self.accounts_and_loans:
         query = (AccountsTable.query
                  .filter(AccountsTable.user == self.user_id)
                  .filter(AccountsTable.balance != 0)
                  .outerjoin((UsersTable, AccountsTable.name == UsersTable.id))
                  .add_columns(UsersTable.name, UsersTable.slug)
                  .order_by(asc(AccountsTable.type))
                  .order_by(asc(AccountsTable.id)))
         self.accounts_and_loans = query
     return self.accounts_and_loans
Example #3
0
 def _get_by_disabled_from_db(context, disabled):
     """Return CellMapping rows with the given disabled state, id ascending."""
     # true()/false() are SQL boolean literal expressions, not Python bools.
     flag = true() if disabled else false()
     return (context.session.query(api_models.CellMapping)
             .filter_by(disabled=flag)
             .order_by(asc(api_models.CellMapping.id))
             .all())
Example #4
0
def get_stat(context, request):
    """Return a jtable-style JSON page of UIK records.

    Applies optional POST filters, 'jtSorting' ordering and
    'jtStartIndex'/'jtPageSize' paging, and responds with
    {'Result', 'Records', 'TotalRecordCount'}.
    """
    # Logged-in check: the session key must match the 'sk' cookie and a
    # user name must be present in the session.
    user_name = None
    if hasattr(request, 'cookies') and 'sk' in request.cookies.keys() and 'sk' in request.session and \
                    request.session['sk'] == request.cookies['sk'] and 'u_name' in request.session:
        user_name = request.session['u_name']
    # NOTE(review): user_name is never read below — looks like dead code;
    # confirm before removing.

    session = DBSession()
    # Base query: each UIK plus its point coordinates, joined through its
    # geocoding_precision/tik/region relationships.
    uiks_from_db = session.query(Uik, Uik.point.x, Uik.point.y) \
        .join('geocoding_precision') \
        .join('tik') \
        .join('region')

    # Collect optional filter clauses from the POST body.
    clauses = []
    if request.POST:
        if exist_filter_parameter('geocoding_precision', request):
            clauses.append(Uik.geocoding_precision_id == request.POST['geocoding_precision'])
        if exist_filter_parameter('is_applied', request):
            clauses.append(Uik.is_applied == (request.POST['is_applied'] == 'True'))
        if exist_filter_parameter('number_official', request):
            clauses.append(Uik.number_official == request.POST['number_official'])
        if exist_filter_parameter('region', request):
            clauses.append(Uik.region_id == int(request.POST['region']))
        if exist_filter_parameter('place_voting', request):
            clauses.append(Uik.place_voting.ilike('%' + request.POST['place_voting'].encode('UTF-8').strip() + '%'))
        if exist_filter_parameter('tik', request):
            clauses.append(Uik.tik_id == int(request.POST['tik']))
        if exist_filter_parameter('user_id', request):
            # Restrict to UIKs the given user has edited: inner join on a
            # subquery of distinct uik_ids from the versions table.
            user_uiks_subq = (session.query(distinct(UikVersions.uik_id).label("uik_id"))
                              .filter(UikVersions.user_id == int(request.POST['user_id']))) \
                              .subquery()
            uiks_from_db = uiks_from_db.join(user_uiks_subq, and_(Uik.id == user_uiks_subq.c.uik_id))

    uiks_from_db = uiks_from_db.filter(*clauses)

    # jtSorting arrives as '<field> <ASC|DESC>'.
    # NOTE(review): sort[1] raises IndexError if the value has no space —
    # confirm the client always sends a direction.
    if 'jtSorting' in request.params:
        sort = request.params['jtSorting']
        sort = sort.split(' ')
        if sort[1] == 'ASC':
            uiks_from_db = uiks_from_db.order_by(asc(get_sort_param(sort[0])))
        if sort[1] == 'DESC':
            uiks_from_db = uiks_from_db.order_by(desc(get_sort_param(sort[0])))
    else:
        uiks_from_db = uiks_from_db.order_by(asc(Uik.number_official))

    # Total row count is taken before paging is applied.
    count = uiks_from_db.count()

    uiks_from_db = uiks_from_db.offset(request.params['jtStartIndex']) \
        .limit(request.params['jtPageSize']) \
        .all()

    records = [create_uik_stat(uik) for uik in uiks_from_db]
    session.close()

    return Response(json.dumps({
        'Result': 'OK',
        'Records': records,
        'TotalRecordCount': count
    }), content_type='application/json')
def flavor_get_by_flavor_id(context, flavor_id, read_deleted):
    """Returns a dict describing specific flavor_id."""
    query = _flavor_get_query(context, read_deleted=read_deleted)
    query = query.filter_by(flavorid=flavor_id)
    # Prefer non-deleted rows, then the lowest id, when duplicates exist.
    result = query.order_by(asc("deleted"), asc("id")).first()
    if not result:
        raise Exception("plop")
    return _dict_with_extra_specs(result)
Example #6
0
    def jobs(self, _id=None, category_id=None, page=0, per_page=None, sort='category_id.asc', include_runs=False):
        """Yield job dicts, optionally filtered by id/category, sorted and
        paginated.

        :param sort: '<field>.<asc|desc>'; an ascending id sort is appended
            as a stable tie-breaker when the field is not 'id'.
        :param include_runs: when True, attach recent run data per job.
        """
        session = self.session
        categories = list(self.categories())

        query = session.query(Job)

        if _id is not None:
            query = query.filter_by(id=_id)

        if category_id is not None:
            query = query.filter_by(category_id=category_id)

        field, order = sort.split('.', 1)
        if order == 'asc':
            query = query.order_by(asc(getattr(Job, field)))
        elif order == 'desc':
            query = query.order_by(desc(getattr(Job, field)))
        if order in ('asc', 'desc') and field != 'id':
            # Stable tie-breaker for equal sort keys.
            query = query.order_by(asc(Job.id))

        if per_page is not None:
            query = query.limit(per_page)

        if page > 0:
            query = query.offset(page * per_page)

        for job in query:
            data = {
                'id': job.id,
                'category_id': job.category_id,
                'name': job.name,
                'interval': job.interval,
                'next_run': job.next_run,
                'last_run_start': job.last_run_start,
                'last_run_stop': job.last_run_stop,
                'last_run_status': job.last_run_status,
                'last_run_text': job.last_run_text(),
                'current_status': job.current_status or 'none',
                'routing_key': job.routing_key,
                'command': job.command,
                'locked_by': job.locked_by,
            }
            if include_runs:
                data['runs'] = chunks_to_runs(self.last_event_chunks_for_job(job.id, 20))
            # List comprehension instead of filter(): on Python 3 filter()
            # returns an iterator, so len() on it raises TypeError.
            category = [c for c in categories if c['id'] == job.category_id]
            if len(category) == 1:
                data['category'] = category[0]
            yield data
Example #7
0
    def list_episode(self, page, count, sort_field, sort_order, status):
        """Return a JSON page of non-deleted episodes.

        Filters by *status* when given, sorts on *sort_field* in
        *sort_order* ('asc'/'desc'), pages by (page, count), and responds
        with {'data': [...], 'total': n}.
        """
        try:
            session = SessionManager.Session()
            episodes_q = session.query(Episode).filter(Episode.delete_mark == None)

            if status is not None:
                episodes_q = episodes_q.filter(Episode.status == status)
                # Total counts rows matching the status filter only.
                total = session.query(func.count(Episode.id)).filter(Episode.status == status).scalar()
            else:
                total = session.query(func.count(Episode.id)).scalar()

            direction = desc if sort_order == 'desc' else asc
            episode_list = (episodes_q
                            .order_by(direction(getattr(Episode, sort_field)))
                            .offset((page - 1) * count)
                            .limit(count)
                            .all())

            episode_dict_list = [row2dict(episode, Episode) for episode in episode_list]

            return json_resp({'data': episode_dict_list, 'total': total})
        finally:
            SessionManager.Session.remove()
Example #8
0
    def set_ip(self, I):
        """Assign the first free pool IP to instance *I* and push its NIC
        config; silently returns when the pool has no free address.
        (Python 2 code: note the `except Exception, e` syntax.)"""

        # Lowest-id address not yet bound to any instance.
        ok_ip = self.db2.query(IPPool).filter_by(
            instance_id = None ).order_by(asc(IPPool.id)).first()

        if not ok_ip: return

        nic_config = {
            'type': 'networkpool', # TODO: show use global flag
            'mac': I.mac,
            'ip': ok_ip.ip,
            'netmask': ok_ip.network.netmask,
            'gateway': ok_ip.network.gateway
            }

        try:
            # Configure the instance NIC first, then mark the address taken.
            I.set_network( nic_config, 1 )
            ok_ip.instance_id = I.id
            ok_ip.updated = datetime.now()

            # Audit-trail record for the allocation.
            T = self.lytrace(
                ttype = LY_TARGET['IP'], tid = ok_ip.id,
                do = self.trans(_('get ip %(ip)s for instance %(instance_id)s(%(instance_name)s)')) % {
                    'ip': ok_ip.ip, 'instance_id': I.id, 'instance_name': I.name } )

            self.db2.commit()
        except Exception, e:
            # Best-effort: log and leave the session uncommitted on failure.
            logging.error('set_ip(): %s' % e)
Example #9
0
    def page_view_my_appliances(self):
        """List the current user's appliances in one catalog, paginated.

        Returns (apps_query_slice, pagination_html).
        """
        catalog_id = int(self.get_argument('c', 1))
        page_size = int(self.get_argument('sepa', 10))
        cur_page = int(self.get_argument('p', 1))
        sort_col = self.get_argument('by', 'id')
        sort_dir = self.get_argument('sort', 'ASC')

        order_exp = desc(sort_col) if sort_dir == 'DESC' else asc(sort_col)
        start = (cur_page - 1) * page_size

        apps = (self.db2.query(Appliance)
                .filter_by(catalog_id=catalog_id)
                .filter_by(user_id=self.current_user.id)
                .order_by(order_exp))
        total = apps.count()
        apps = apps.slice(start, start + page_size)

        # Per-catalog appliance counts (used by the sidebar template).
        catalogs = self.db2.query(ApplianceCatalog).all()
        for catalog in catalogs:
            catalog.total = self.db2.query(Appliance.id).filter_by(
                catalog_id = catalog.id ).count()

        pager = Pagination(
            total = total, page_size = page_size, cur_page = cur_page )
        page_html = pager.html( self.get_page_url )

        return apps, page_html
Example #10
0
    def get(self, id):
        """Render a public profile page for user *id* with up to 20 of
        their non-private instances."""
        user = self.db2.query(User).get(id)
        if not user:
            return self.write('Have not found user by id %s' % id)

        by = self.get_argument('by', 'updated')
        sort = self.get_argument('sort', 'desc')

        # Sortable columns; anything unrecognized falls back to id.
        columns = {'created': Instance.created, 'updated': Instance.updated}
        by_obj = columns.get(by, Instance.id)
        sort_by_obj = desc(by_obj) if sort == 'desc' else asc(by_obj)

        instances = (self.db2.query(Instance)
                     .filter_by(user_id=id)
                     .filter(Instance.isprivate != True)
                     .order_by(sort_by_obj))
        total = instances.count()
        instances = instances.slice(0, 20).all() # TODO: show only

        d = { 'title': self.trans(_('View User %s')) % user.username,
              'USER': user, 'INSTANCE_LIST': instances,
              'TOTAL_INSTANCE': total }

        self.render( 'account/view_user.html', **d )
Example #11
0
def setup_orm():
    """Map GroupMailingListMessage onto the group_mailing_list_messages
    table as a polymorphic subclass of ContentItem."""
    tables = meta.metadata.tables
    columns = tables['group_mailing_list_messages'].c
    orm.mapper(GroupMailingListMessage,
               tables['group_mailing_list_messages'],
               inherits=ContentItem,
               polymorphic_identity='mailing_list_message',
               polymorphic_on=tables['content_items'].c.content_type,
               properties = {
                             # Self-referential link to the message being
                             # replied to; 'replies' is the reverse side.
                             'reply_to': relation(GroupMailingListMessage,
                                                  backref=backref('replies'),
                                                  foreign_keys=(columns.reply_to_message_machine_id),
                                                  primaryjoin=columns.id == columns.reply_to_message_machine_id,
                                                  remote_side=(columns.id)),
                             # Thread-root link; 'posts' come back ordered
                             # by sent time ascending. post_update breaks
                             # the circular dependency on flush.
                             'thread': relation(GroupMailingListMessage,
                                                post_update=True,
                                                order_by=[asc(columns.sent)],
                                                backref=backref('posts'),
                                                foreign_keys=(columns.thread_message_machine_id),
                                                primaryjoin=columns.id == columns.thread_message_machine_id,
                                                remote_side=(columns.id)),
                             'author': relation(User,
                                                backref=backref('messages')),
                             'group': relation(Group,
                                               primaryjoin=(columns.group_id == tables['groups'].c.id)),
                             # Domain-specific alias for the inherited
                             # 'files' property.
                             'attachments': synonym("files")
                             })
Example #12
0
 def categories(self):
     """Yield {'id', 'name'} dicts for every Category, id ascending."""
     query = self.session.query(Category).order_by(asc(Category.id))
     for category in query:
         yield {'id': category.id, 'name': category.name}
    def get_entity_events(self, stored_entity_id, after=None, until=None, limit=None, query_ascending=True,
                          results_ascending=True):
        """Load stored events for one entity from the database.

        :param after: lower timestamp bound (derived from a UUID);
            exclusive when querying ascending, inclusive when descending.
        :param until: upper timestamp bound (derived from a UUID);
            inclusive when querying ascending, exclusive when descending.
        :param limit: maximum rows fetched, applied in query order.
        :param query_ascending: sort direction used for the SQL query.
        :param results_ascending: direction of the returned list; rows are
            reversed in memory when it differs from the query direction.

        NOTE(review): the inclusive/exclusive flip between directions
        appears intentional (so a limit trims the far end of the window)
        — confirm against the event-store contract.
        """
        try:
            query = self.db_session.query(SqlStoredEvent)
            query = query.filter_by(stored_entity_id=stored_entity_id)
            if query_ascending:
                query = query.order_by(asc(SqlStoredEvent.timestamp_long))
            else:
                query = query.order_by(desc(SqlStoredEvent.timestamp_long))

            if after is not None:
                if query_ascending:
                    query = query.filter(SqlStoredEvent.timestamp_long > timestamp_long_from_uuid(after))
                else:
                    query = query.filter(SqlStoredEvent.timestamp_long >= timestamp_long_from_uuid(after))

            if until is not None:
                if query_ascending:
                    query = query.filter(SqlStoredEvent.timestamp_long <= timestamp_long_from_uuid(until))
                else:
                    query = query.filter(SqlStoredEvent.timestamp_long < timestamp_long_from_uuid(until))

            if limit is not None:
                query = query.limit(limit)
            # Materialize fully before the session is closed.
            events = self.map(from_sql, query)
            events = list(events)
        finally:
            self.db_session.close()
        if results_ascending != query_ascending:
            events.reverse()
        return events
Example #14
0
    def get(self):
        """Render the public appliance index for one catalog, paginated."""
        catalog_id = self.get_argument_int('c', 1)
        page_size = self.get_argument_int('sepa', 20)
        cur_page = self.get_argument_int('p', 1)
        sort_col = self.get_argument('by', 'updated')
        sort_dir = self.get_argument('sort', 'DESC')

        order_exp = desc(sort_col) if sort_dir == 'DESC' else asc(sort_col)
        start = (cur_page - 1) * page_size

        apps = (self.db2.query(Appliance)
                .filter_by(catalog_id=catalog_id)
                .filter_by(isprivate=False)
                .order_by(order_exp))
        total = apps.count()
        apps = apps.slice(start, start + page_size)

        # Per-catalog public appliance counts for the sidebar.
        catalogs = self.db2.query(ApplianceCatalog).all()
        for catalog in catalogs:
            catalog.total = self.db2.query(Appliance.id).filter_by(
                catalog_id = catalog.id ).filter_by(
                isprivate=False).count()

        page_html = pagination(self.request.uri, total,  page_size, cur_page)

        d = { 'title': self.title,
              'appliances': apps,
              'catalogs': catalogs,
              'cur_catalog': catalog_id,
              'page_html': page_html }

        self.render("appliance/index.html", **d)
Example #15
0
    def get(self):
        """Render the appliance home page: one catalog page of appliances
        plus per-catalog totals."""
        catalog_id = int(self.get_argument('c', 1))
        page_size = int(self.get_argument('sepa', 10))
        cur_page = int(self.get_argument('p', 1))
        sort_col = self.get_argument('by', 'updated')
        sort_dir = self.get_argument('sort', 'DESC')

        order_exp = desc(sort_col) if sort_dir == 'DESC' else asc(sort_col)
        start = (cur_page - 1) * page_size

        apps = (self.db2.query(Appliance)
                .filter_by(catalog_id=catalog_id)
                .order_by(order_exp)
                .slice(start, start + page_size))

        catalogs = self.db2.query(ApplianceCatalog).all()
        for catalog in catalogs:
            catalog.total = self.db2.query(Appliance.id).filter_by( catalog_id = catalog.id ).count()

        # NOTE(review): the pagination total counts *all* appliances, not
        # just this catalog's — preserved as-is.
        pager = Pagination(
            total = self.db2.query(Appliance.id).count(),
            page_size = page_size, cur_page = cur_page )
        page_html = pager.html( self.get_page_url )

        d = { 'title': "Appliance Home",
              'appliances': apps,
              'catalogs': catalogs,
              'cur_catalog': catalog_id,
              'page_html': page_html }

        self.render("appliance/index.html", **d)
Example #16
0
    def _addSortingToMoleculesQuery(self,
                                    sorts,
                                    scanid,
                                    q,
                                    fragal,
                                    assign_q):
        """Append ORDER BY clauses to the molecules query *q* for each sort
        spec ({'property', 'direction'}) in *sorts*.

        Raises ScanRequiredError for fragment-level properties ('score',
        'deltappm', 'mz') when no scanid is given.
        """
        for spec in sorts:
            prop = spec['property']
            if prop == 'assigned':
                column = assign_q.c.assigned
            elif prop in ('score', 'deltappm', 'mz'):
                if scanid is None:
                    raise ScanRequiredError()
                column = getattr(fragal, prop)
            else:
                column = Molecule.__dict__[prop]  # @UndefinedVariable
            direction = spec['direction']
            if direction == 'DESC':
                q = q.order_by(desc(column))
            elif direction == 'ASC':
                q = q.order_by(asc(column))

        return q
Example #17
0
 def order_query(self, query, domain_class, kw, reverse):
     """Apply ORDER BY on kw['sort_on'] when it names a real attribute of
     domain_class; descending when *reverse* is true, else ascending."""
     sort_on = kw.get("sort_on", None)
     if sort_on and hasattr(domain_class, str(sort_on)):
         column = getattr(domain_class, str(sort_on))
         direction = expression.desc if reverse else expression.asc
         return query.order_by(direction(column))
     return query
Example #18
0
    def sorting(self):
        """Construct the query, by adding sorting(ORDER BY) on the columns
        needed to be applied on.
        """
        sorting = []

        Order = namedtuple('order', ['name', 'dir'])

        # iSortingCols arrives as a string; compare numerically — a raw
        # 'str > int' comparison is always True on Python 2 and raises
        # TypeError on Python 3.
        if self.request_values.get('iSortCol_0') \
            and int(self.request_values.get('iSortingCols') or 0) > 0:

            for i in range(int(self.request_values['iSortingCols'])):
                sorting.append(Order( self.columns[int(self.request_values['iSortCol_'+str(i)])].column_name,
                                      self.request_values['sSortDir_'+str(i)]))

        for sort in sorting:
            tmp_sort_name = sort.name.split('.')
            obj = getattr(self.sqla_object, tmp_sort_name[0])
            if isinstance(obj.property, RelationshipProperty): # Ex: ForeignKey
            # Ex: address.description => description => addresses.description
                sort_name = "".join(tmp_sort_name[1:])
                if not sort_name:
                    # Fall back to the related table's first primary key
                    sort_name = obj.property.table.primary_key.columns \
                        .values()[0].name
                tablename = obj.property.table.name
            else: #-> ColumnProperty
                sort_name = sort.name
                tablename = self.sqla_object.__tablename__
            sort_name = "%s.%s" % (tablename, sort_name)
            self.query = self.query.order_by(
                asc(sort_name) if sort.dir == 'asc' else desc(sort_name))
Example #19
0
def tile_exists(target_url, bg_color, title, type, image_uri, enhanced_image_uri, locale, conn=None, *args, **kwargs):
    """
    Return the id of a tile having the data provided
    """
    from splice.environment import Environment
    env = Environment.instance()

    # Use a session bound to the supplied connection if one was given,
    # otherwise the environment's shared session.
    if conn is not None:
        session = sessionmaker(bind=conn)()
    else:
        session = env.db.session

    # order_by guards against historical duplicate tiles: always resolve
    # to the lowest id deterministically.
    query = (session
             .query(Tile.id)
             .filter(Tile.target_url == target_url)
             .filter(Tile.bg_color == bg_color)
             .filter(Tile.title == title)
             .filter(Tile.image_uri == image_uri)
             .filter(Tile.enhanced_image_uri == enhanced_image_uri)
             .filter(Tile.locale == locale)
             .order_by(asc(Tile.id)))
    row = query.first()
    if row:
        return row[0]
    return row
Example #20
0
    def get_tasks_for_client(self, client, sort_on='modified'):
        """Return all Task rows for *client*, sorted ascending on the
        Task attribute named by *sort_on*."""
        order_column = getattr(Task, sort_on)
        query = Session().query(Task).filter(Task.client_id == client)
        return query.order_by(asc(order_column)).all()
Example #21
0
    def get_post_votes(self):
        """Render the forum post-vote history, paginated and sorted."""
        page_size = self.get_argument_int('ps', 20)
        cur_page = self.get_argument_int('p', 1)
        by = self.get_argument('by', 'created')
        order = self.get_argument_int('order', 1)

        # Whitelist the sort column; anything else falls back to id.
        if by not in ('id', 'created', 'updated'):
            by = 'id'

        votes = self.db.query(ForumPostVote)
        total = votes.count()

        start = (cur_page - 1) * page_size
        sort_exp = desc(by) if order else asc(by)
        vote_list = votes.order_by(sort_exp).slice(start, start + page_size).all()

        page_html = pagination(
            self.request.uri, total, page_size, cur_page )

        d = { 'title': _('Forum Post Vote History'),
              'VOTE_LIST': vote_list,
              'VOTE_TOTAL': total,
              'page_html': page_html }

        self.render('forum/index_post_votes.html', **d)
Example #22
0
def construct_query(table, query_tree):
    """Build a SQLAlchemy select() over *table* from a query description.

    query_tree keys (all optional except 'select'): 'select' (column
    names), 'distinct' (truthy flag), 'where' (clause tree for
    construct_clauses), 'order_by' (list of {'col', 'type'} where 'type'
    is 'asc', 'desc' or None), and 'limit' (row count).
    """
    select_list = query_tree.get("select")

    q = select(col_list(table, select_list))

    if query_tree.get("distinct"):
        q = q.distinct()

    raw_where = query_tree.get("where")

    if raw_where:
        q = q.where(construct_clauses(table, raw_where))

    order_by = query_tree.get("order_by")

    if order_by:
        for order_col in order_by:
            col_name = order_col["col"]
            # Renamed from 'type' (shadowed the builtin); missing direction
            # defaults to ascending, checked with 'is None' (not '== None').
            direction = order_col["type"]
            col = get_col(table, col_name)

            if direction == "asc" or direction is None:
                q = q.order_by(asc(col))
            elif direction == "desc":
                q = q.order_by(desc(col))

    limit = query_tree.get("limit")
    if limit:
        q = q.limit(limit)

    return q
Example #23
0
def prune_keep_record(job_id, keep, storage):
    """Delete old events for *job_id*, retaining roughly the newest *keep*.

    min_id is the oldest event's id; max_id is the oldest id among the
    *keep* newest events. Everything in [min_id, max_id] is deleted.
    NOTE(review): between() is inclusive of max_id, so the oldest of the
    'kept' events is deleted too (keep-1 remain) — confirm whether the
    upper bound should be exclusive.
    """
    # Oldest event for this job (smallest id).
    event = storage.session.query(Event).filter_by(job_id=job_id).\
        order_by(asc(Event.id)).limit(1).first()

    min_id = None
    if event is not None:
        min_id = event.id

    # The *keep* newest events; the smallest of their ids bounds the prune.
    events = storage.session.query(Event).filter_by(job_id=job_id).\
        order_by(desc(Event.id)).limit(keep)
    event_ids = [e.id for e in events]
    if len(event_ids) == 0:
        logger.info('No events for {0}'.format(job_id))
        return

    max_id = min(event_ids)
    if min_id == max_id:
        logger.info('Min and max event ids for {0} are the same: {1} - {2}'.format(  # noqa
            job_id, min_id, max_id))
        return

    if min_id > max_id:
        logger.info('Min event id for {0} is larger than max event id: {1} - {2}'.format(  # noqa
            job_id, min_id, max_id))
        return

    logger.info('Job ID {0}, Pruning events {1} - {2}'.format(
        job_id, min_id, max_id))

    # Bulk delete at the engine level, bypassing the ORM unit of work.
    stmt = Event.__table__.delete()\
                          .where(between(Event.id, min_id, max_id))\
                          .where(Event.job_id == job_id)
    storage._engine.execute(stmt)
    storage.session.commit()
def test4():
    """Manual test: build a TreeDescription over an in-memory sqlite DB
    and verify it survives a pickle round-trip (Python 2 script)."""
    url = 'sqlite://'
    dbinfo = open_db(url, 
                        object_number_in_cache = 3000,
                        #~ memmap_path = './memmap1',
                        #~ memmap_path = None,
                        )
    
    import pickle
    # Ordering expressions (asc/desc) are part of what must pickle cleanly.
    treedescription = TreeDescription(
                            dbinfo = dbinfo,
                            table_children = { 
                                                    'Block' : ['Segment' ],
                                                    'Segment' : [ 'AnalogSignal'],
                                                    },
                            columns_to_show = { },
                            table_on_top = 'Block',
                            table_order = { 'Block' : desc('name'),
                                                    'Segment' : asc('name'),
                                                    #~ 'Segment' : desc('name'),
                                                    } ,
                            )
    session = dbinfo.Session()
    session.query('Block')
    for k,v in treedescription.__dict__.items():
        print k, v
    # Round-trip and dump the restored object for visual comparison.
    treedescription2 = pickle.loads(pickle.dumps(treedescription))
    print treedescription2.name
    print treedescription2.table_order
    for k,v in treedescription2.table_order.items():
        print k, str(v)
def test5():
    """
    Context menu
    """
    # Manual GUI test: opens a QtSqlTreeView with the context menu wired
    # up over a freshly created database. Blocks in app.exec_() until the
    # window is closed (Python 2 script).
    dbinfo = create_and_open()
    session = dbinfo.Session()
    #fill_a_db(session,dbinfo.mapped_classes)
    
    
    from OpenElectrophy.gui.contextmenu import context_menu
    
    treedescription = TreeDescription(
                            dbinfo = dbinfo,
                            table_children = { 
                                                    'Block' : ['Segment' ],
                                                    'Segment' : [ 'AnalogSignal'],
                                                    },
                            columns_to_show = { },
                            table_on_top = 'Block',
                            table_order = {'Block' : desc('name'),
                                                    'Segment' : asc('name'),
                                                    #~ 'segment' : desc('name'),
                                                    } ,
                            )
    app = QApplication([ ])
    w = QtSqlTreeView(session = session, treedescription = treedescription, context_menu = context_menu)
    w.show()
    sys.exit(app.exec_())
Example #26
0
def map_captimes_data(request):
    """Return template data for a map's fastest-capture-times page,
    paginated at 20 rows per page."""
    map_id = int(request.matchdict['id'])

    current_page = request.params.get('page', 1)

    try:
        mmap = DBSession.query(Map).filter_by(map_id=map_id).one()

        # Fastest captimes on this map joined to game, server and the
        # player's in-game nick, quickest cap first.
        mct_q = DBSession.query(PlayerCaptime.fastest_cap, PlayerCaptime.create_dt,
                PlayerCaptime.player_id, PlayerCaptime.game_id,
                Game.server_id, Server.name.label('server_name'),
                PlayerGameStat.nick.label('player_nick')).\
                filter(PlayerCaptime.map_id==map_id).\
                filter(PlayerCaptime.game_id==Game.game_id).\
                filter(PlayerCaptime.map_id==Map.map_id).\
                filter(Game.server_id==Server.server_id).\
                filter(PlayerCaptime.player_id==PlayerGameStat.player_id).\
                filter(PlayerCaptime.game_id==PlayerGameStat.game_id).\
                order_by(expr.asc(PlayerCaptime.fastest_cap))

    except Exception as e:
        # Any failure (no such map, bad id, query error) surfaces as 404.
        # NOTE(review): 'e' is unused and the broad except hides real bugs.
        raise HTTPNotFound

    map_captimes = Page(mct_q, current_page, items_per_page=20, url=page_url)

    # Wrap raw rows in the view-model type the template expects.
    map_captimes.items = [MapCapTime(row) for row in map_captimes.items]

    return {
            'map_id':map_id,
            'map':mmap,
            'captimes':map_captimes,
        }
Example #27
0
    def try_to_get_phrase(template, phrase, url, portal_id=None, allow_html=''):
        """Return the TranslateTemplate row for (template, phrase,
        portal_id), inserting it when missing.

        New rows are seeded either from another portal's existing
        translation (exemplary portal first, then the oldest matching
        row) or, failing that, with every language set to the phrase
        itself.
        """

        a_filter = dict(template=template, name=phrase, portal_id=portal_id)

        # TODO: OZ by OZ: this functions exists because we sometemes inmsert recort in flashing process (see on_value_changed decorator)
        # and we can`t use ORM
        def insert_record(**values):
            # Raw SQL insert (ORM unusable here, per the TODO above); one
            # column per configured language.
            from profapp import utils
            g.db().execute(('INSERT INTO "%s" (template,   name,    portal_id, allow_html,   url,  %s) '
                            'VALUES           (:template, :name, :portal_id, :allow_html, :url,  :%s)') %
                           (TranslateTemplate.__tablename__, ', '.join(TranslateTemplate.languages),
                            ", :".join(TranslateTemplate.languages)),
                           params=utils.dict_merge(a_filter, {'allow_html': allow_html, 'url': url},
                                                   {l: phrase for l in TranslateTemplate.languages}, values))
            return db(TranslateTemplate, **a_filter).first()

        exist = db(TranslateTemplate, **a_filter).first()

        if portal_id and not exist:
            # Try to copy translations from the exemplary portal first.
            exist_for_another = db(TranslateTemplate, template=template, name=phrase,
                                   portal_id=TranslateTemplate.exemplary_portal_id).first()
            # TODO: OZ by OZ: how to select template portal? now we grab phrases from most recent portal, and there can be some unappropriate values
            if not exist_for_another:
                exist_for_another = db(TranslateTemplate, template=template, name=phrase).filter(
                    TranslateTemplate.portal != None).order_by(expression.asc(TranslateTemplate.cr_tm)).first()
            if exist_for_another:
                return insert_record(**{l: getattr(exist_for_another, l) for l in TranslateTemplate.languages})
        if not exist:
            return insert_record()

        return exist
Example #28
0
def set_from_and_to_dates():
    """Populate ``ODM_Param_FromDate`` / ``ODM_Param_ToDate`` on every GEMS
    site from the oldest and newest of its values, deleting sites that have
    no values at all.

    Commits periodically to keep the session's dirty set bounded.
    """
    # The composite index speeds up the per-site first()/last() date lookups
    # below; ignore the error raised when the index already exists.
    try:
        index_str = 'CREATE INDEX ' +\
                    '"ix_GEMSData__ODM_SQL_SiteID_ODM_Param_asc" ON ' +\
                    '"tbl_TexasHIS_Vector_TWDB_ODM_Data" ("ODM_SQL_SiteID",' +\
                    '"ODM_Param_Date" ASC);'
        gems_engine.execute(index_str)
    except OperationalError:
        pass
    sites_query = gems_session.query(GEMSSite)
    site_count = sites_query.count()
    for index, site in enumerate(page_query(sites_query)):
        if index == 1 or index % 1000 == 0:
            # FIX: use the print() call form instead of the Python 2-only
            # print statement — this single-argument form behaves identically
            # under Python 2 and Python 3.
            print("setting from and to dates for %s of %s sites" % (
                index, site_count))

        # Flush periodically so the dirty set does not grow unbounded.
        if len(gems_session.dirty) > 1500:
            gems_session.commit()

        from_value = site.values.order_by(asc('ODM_Param_Date')).first()
        to_value = site.values.order_by(desc('ODM_Param_Date')).first()
        if from_value:
            site.ODM_Param_FromDate = from_value.ODM_Param_Date
            site.ODM_Param_ToDate = to_value.ODM_Param_Date
        else:
            # if for some reason there are no values for a site
            gems_session.delete(site)
    gems_session.commit()
Example #29
0
    def get_index(self):
        """Render the admin job list, sorted and paginated per query args."""

        sort_field = self.get_argument('by', 'id')
        sort_dir = self.get_argument('order', 'DESC')
        # Descending unless the caller explicitly asked for something else.
        order_func = desc(sort_field) if sort_dir == 'DESC' else asc(sort_field)

        page_size = int(self.get_argument('sepa', 10))
        cur_page = int(self.get_argument('p', 1))

        start = (cur_page - 1) * page_size

        job_list = self.db2.query(Job).order_by(order_func)
        job_list = job_list.slice(start, start + page_size)

        job_total = self.db2.query(Job.id).count()

        page_html = Pagination(
            total = job_total, page_size = page_size,
            cur_page = cur_page ).html(self.get_page_url)

        self.render('admin/job/index.html', **{
            'title': 'Jobs',
            'JOB_TOTAL': job_total,
            'JOB_LIST': job_list,
            'page_html': page_html })
Example #30
0
    def get(self):
        """Return actions matching the optional search arguments.

        These are separate from post() so none of them are required —
        they only narrow the search.
        """
        parser = reqparse.RequestParser()
        parser.add_argument('item_id', type=int)
        parser.add_argument('status', type=int)
        parser.add_argument('ascending', type=bool, default=False)
        parser.add_argument('event_id', type=int)
        args = parser.parse_args()

        query = Action.query

        # By default newest items are returned first.
        direction = asc if args.ascending else desc
        query = query.order_by(direction(Action.date))

        # Apply each filter only when its argument was supplied.
        for column, value in ((Action.item_id, args.item_id),
                              (Action.status, args.status),
                              (Action.event_id, args.event_id)):
            if value is not None:
                query = query.filter(column == value)

        return query.all()
Example #31
0
 def __query__(self):
     """Build the ORDER BY expression for this attribute.

     Returns a descending or ascending SQL expression depending on
     ``self.order``.

     Raises:
         ValueError: if ``self.order`` is neither ``self.DESCENDANT`` nor
             ``self.ASCENDANT``.  (Previously this case fell through and
             surfaced as a confusing ``UnboundLocalError`` on ``query``.)
     """
     if self.order == self.DESCENDANT:
         return desc(self.attribute)
     if self.order == self.ASCENDANT:
         return asc(self.attribute)
     raise ValueError('unknown sort order: %r' % (self.order,))
Example #32
0
    def list(self, status = ['active'], limit_offset = None, starts_with = None, search = None):
        """Return ``(total_count, movies)`` for the library list view.

        :param status: movie status identifier(s) to include; a bare string
            is accepted and wrapped in a list.  NOTE(review): the mutable
            default is never mutated here (``status = [status]`` rebinds),
            so it is safe, if unidiomatic.
        :param limit_offset: optional ``"limit,offset"`` string for paging.
        :param starts_with: first-letter filter; anything outside a-z maps
            to the "other" bucket.
        :param search: substring match against the simple title.
        """

        db = get_session()

        # Make a list from string
        if not isinstance(status, (list, tuple)):
            status = [status]

        # Base query: default-titled movies in any of the wanted statuses.
        q = db.query(Movie) \
            .join(Movie.library, Library.titles) \
            .filter(LibraryTitle.default == True) \
            .filter(or_(*[Movie.status.has(identifier = s) for s in status])) \
            .group_by(Movie.id)

        total_count = q.count()

        filter_or = []
        if starts_with:
            starts_with = toUnicode(starts_with.lower())
            if starts_with in ascii_lowercase:
                filter_or.append(LibraryTitle.simple_title.startswith(starts_with))
            else:
                # "Other" bucket: titles that start with no a-z letter at all.
                ignore = []
                for letter in ascii_lowercase:
                    ignore.append(LibraryTitle.simple_title.startswith(toUnicode(letter)))
                filter_or.append(not_(or_(*ignore)))

        if search:
            filter_or.append(LibraryTitle.simple_title.like('%%' + search + '%%'))

        if filter_or:
            q = q.filter(or_(*filter_or))

        q = q.order_by(asc(LibraryTitle.simple_title))

        # Page over the filtered id set, then eager-load the full graph.
        q = q.subquery()
        q2 = db.query(Movie).join((q, q.c.id == Movie.id)) \
            .options(joinedload_all('releases')) \
            .options(joinedload_all('profile.types')) \
            .options(joinedload_all('library.titles')) \
            .options(joinedload_all('library.files')) \
            .options(joinedload_all('status')) \
            .options(joinedload_all('files'))

        if limit_offset:
            splt = [x.strip() for x in limit_offset.split(',')]
            limit = splt[0]
            # FIX: use == instead of `is` — identity comparison with an int
            # literal relies on CPython small-int caching and emits a
            # SyntaxWarning on Python 3.8+.
            offset = 0 if len(splt) == 1 else splt[1]
            q2 = q2.limit(limit).offset(offset)

        results = q2.all()
        movies = []
        for movie in results:
            temp = movie.to_dict({
                'profile': {'types': {}},
                'releases': {'files':{}, 'info': {}},
                'library': {'titles': {}, 'files':{}},
                'files': {},
            })
            movies.append(temp)

        #db.close()
        return (total_count, movies)
Example #33
0
 def _get_all_from_db(context):
     """Return every CellMapping row, ordered by ascending id."""
     query = context.session.query(api_db_models.CellMapping)
     query = query.order_by(expression.asc(api_db_models.CellMapping.id))
     return query.all()
Example #34
0
    def list(self,
             status=None,
             release_status=None,
             limit_offset=None,
             starts_with=None,
             search=None,
             order=None):
        """Return ``(total_count, movies)`` with release summaries merged in.

        :param status: movie status identifier(s); string or list.
        :param release_status: release status identifier(s); string or list.
        :param limit_offset: paging as a ``"limit,offset"`` string or a
            pre-split sequence.
        :param starts_with: first-letter filter (non a-z maps to "other").
        :param search: substring match against the simple title.
        :param order: ``'release_order'`` sorts by last release edit,
            anything else sorts by title.
        """

        db = get_session()

        # Make a list from string
        if status and not isinstance(status, (list, tuple)):
            status = [status]
        if release_status and not isinstance(release_status, (list, tuple)):
            release_status = [release_status]

        # query movie ids
        q = db.query(Movie) \
            .with_entities(Movie.id) \
            .group_by(Movie.id)

        # Filter on movie status
        if status and len(status) > 0:
            statuses = fireEvent('status.get', status, single=len(status) > 1)
            statuses = [s.get('id') for s in statuses]

            q = q.filter(Movie.status_id.in_(statuses))

        # Filter on release status
        if release_status and len(release_status) > 0:
            q = q.join(Movie.releases)

            statuses = fireEvent('status.get',
                                 release_status,
                                 single=len(release_status) > 1)
            statuses = [s.get('id') for s in statuses]

            q = q.filter(Release.status_id.in_(statuses))

        # Only join when searching / ordering
        if starts_with or search or order != 'release_order':
            q = q.join(Movie.library, Library.titles) \
                .filter(LibraryTitle.default == True)

        # Add search filters
        filter_or = []
        if starts_with:
            starts_with = toUnicode(starts_with.lower())
            if starts_with in ascii_lowercase:
                filter_or.append(
                    LibraryTitle.simple_title.startswith(starts_with))
            else:
                # "Other" bucket: titles starting with no a-z letter.
                ignore = []
                for letter in ascii_lowercase:
                    ignore.append(
                        LibraryTitle.simple_title.startswith(
                            toUnicode(letter)))
                filter_or.append(not_(or_(*ignore)))

        if search:
            filter_or.append(
                LibraryTitle.simple_title.like('%%' + search + '%%'))

        if len(filter_or) > 0:
            q = q.filter(or_(*filter_or))

        total_count = q.count()
        if total_count == 0:
            return 0, []

        if order == 'release_order':
            q = q.order_by(desc(Release.last_edit))
        else:
            q = q.order_by(asc(LibraryTitle.simple_title))

        if limit_offset:
            splt = splitString(limit_offset) if isinstance(
                limit_offset, (str, unicode)) else limit_offset
            limit = splt[0]
            # FIX: use == instead of `is` — identity comparison with an int
            # literal relies on CPython small-int caching and emits a
            # SyntaxWarning on Python 3.8+.
            offset = 0 if len(splt) == 1 else splt[1]
            q = q.limit(limit).offset(offset)

        # Get all movie_ids in sorted order
        movie_ids = [m.id for m in q.all()]

        # List release statuses
        releases = db.query(Release) \
            .filter(Release.movie_id.in_(movie_ids)) \
            .all()

        # Summarise releases per movie as "status_id,quality_id" strings.
        release_statuses = dict((m, set()) for m in movie_ids)
        releases_count = dict((m, 0) for m in movie_ids)
        for release in releases:
            release_statuses[release.movie_id].add(
                '%d,%d' % (release.status_id, release.quality_id))
            releases_count[release.movie_id] += 1

        # Get main movie data
        q2 = db.query(Movie) \
            .options(joinedload_all('library.titles')) \
            .options(joinedload_all('library.files')) \
            .options(joinedload_all('status')) \
            .options(joinedload_all('files'))

        q2 = q2.filter(Movie.id.in_(movie_ids))

        results = q2.all()

        # Create dict by movie id
        movie_dict = {}
        for movie in results:
            movie_dict[movie.id] = movie

        # List movies based on movie_ids order
        movies = []
        for movie_id in movie_ids:

            releases = []
            for r in release_statuses.get(movie_id):
                x = splitString(r)
                releases.append({'status_id': x[0], 'quality_id': x[1]})

            # Merge releases with movie dict
            movies.append(
                mergeDicts(
                    movie_dict[movie_id].to_dict({
                        'library': {
                            'titles': {},
                            'files': {}
                        },
                        'files': {},
                    }), {
                        'releases': releases,
                        'releases_count': releases_count.get(movie_id),
                    }))

        db.expire_all()
        return total_count, movies
Example #35
0
    def get_index(self):
        """Render the admin instance list: filter, sort and paginate
        according to the request's query arguments."""

        view = self.get_argument('view', 'all')
        by = self.get_argument('by', 'id')
        sort = self.get_argument('sort', 'desc')
        status = self.get_argument('status', 'all')
        page_size = int(self.get_argument('sepa', 30))
        cur_page = int(self.get_argument('p', 1))
        uid = int(self.get_argument('uid', 0)) # filter by user
        aid = int(self.get_argument('aid', 0)) # filter by appliance

        start = (cur_page - 1) * page_size

        instances = self.db2.query(Instance)

        # Restrict by run state unless every instance was requested.
        if status != 'all':
            if status == 'stoped':
                wanted_states = settings.INSTANCE_SLIST_STOPED
            else: # show running
                wanted_states = settings.INSTANCE_SLIST_RUNING
            instances = instances.filter(Instance.status.in_(wanted_states))

        U = self.db2.query(User).get( uid )
        if U:
            instances = instances.filter_by( user_id = uid )

        APPLIANCE = self.db2.query(Appliance).get( aid )
        if APPLIANCE:
            instances = instances.filter_by( appliance_id = aid )

        # Map the requested sort column, defaulting to id.
        by_obj = {'created': Instance.created,
                  'updated': Instance.updated}.get(by, Instance.id)
        instances = instances.order_by(
            desc(by_obj) if sort == 'desc' else asc(by_obj))

        # TODO: may have a more highly active count ( use id )
        total = instances.count()

        instances = instances.slice(start, start + page_size)

        if total > page_size:
            page_html = Pagination(
                total = total,
                page_size = page_size,
                cur_page = cur_page ).html(self.get_page_url)
        else:
            page_html = ""

        self.render( 'admin/instance/index.html', **{
            'title': _('Instance Management'),
            'INSTANCE_LIST': instances, 'TOTAL_INSTANCE': total,
            'PAGE_HTML': page_html,
            'SORT_USER': U, 'SORT_APPLIANCE': APPLIANCE } )
Example #36
0
    def getSoonView(self, limit_offset = None, random = False, late = False, **kwargs):
        """Return up to *limit* active movies expected to be released soon.

        :param limit_offset: "limit[,offset]" string or sequence; only the
            limit part is used here.
        :param random: shuffle instead of ordering by title.
        :param late: invert the recency filter — return movies whose
            expected release window has already passed.
        :returns: dict with ``success``, ``empty`` and ``movies`` keys.
        """

        db = get_session()
        now = time.time()

        # Get profiles first, determine pre or post theater
        profiles = fireEvent('profile.all', single = True)
        qualities = fireEvent('quality.all', single = True)
        pre_releases = fireEvent('quality.pre_releases', single = True)

        # quality id -> whether that quality is a pre-release ("theater").
        id_pre = {}
        for quality in qualities:
            id_pre[quality.get('id')] = quality.get('identifier') in pre_releases

        # See what the profile contain and cache it
        # profile id -> {'theater': True} and/or {'dvd': True}.
        profile_pre = {}
        for profile in profiles:
            contains = {}
            for profile_type in profile.get('types', []):
                contains['theater' if id_pre.get(profile_type.get('quality_id')) else 'dvd'] = True

            profile_pre[profile.get('id')] = contains

        # Add limit
        limit = 12
        if limit_offset:
            splt = splitString(limit_offset) if isinstance(limit_offset, (str, unicode)) else limit_offset
            limit = tryInt(splt[0])

        # Get all active movies
        # ...that have no releases yet, or only ignored ones.
        active_status, ignored_status = fireEvent('status.get', ['active', 'ignored'], single = True)
        q = db.query(Media) \
            .join(Library) \
            .outerjoin(Media.releases) \
            .filter(Media.status_id == active_status.get('id')) \
            .with_entities(Media.id, Media.profile_id, Library.info, Library.year) \
            .group_by(Media.id) \
            .filter(or_(Release.id == None, Release.status_id == ignored_status.get('id')))

        if not random:
            q = q.join(LibraryTitle) \
                .filter(LibraryTitle.default == True) \
                .order_by(asc(LibraryTitle.simple_title))

        active = q.all()
        movies = []
        now_year = date.today().year

        if len(active) > 0:

            # Do the shuffle
            if random:
                rndm.shuffle(active)

            movie_ids = []
            for movie in active:
                movie_id, profile_id, info, year = movie

                # Skip movies whose profile is unknown.
                pp = profile_pre.get(profile_id)
                if not pp: continue

                eta = info.get('release_date', {}) or {}
                coming_soon = False

                # Theater quality
                if pp.get('theater') and fireEvent('movie.searcher.could_be_released', True, eta, year, single = True):
                    coming_soon = True
                elif pp.get('dvd') and fireEvent('movie.searcher.could_be_released', False, eta, year, single = True):
                    coming_soon = True

                if coming_soon:

                    # Don't list older movies
                    # 2419200 seconds = 28 days; "late" flips the window to
                    # movies whose DVD date is already further in the past.
                    if ((not late and (year >= now_year-1) and (not eta.get('dvd') and not eta.get('theater') or eta.get('dvd') and eta.get('dvd') > (now - 2419200))) or
                            (late and ((year < now_year-1) or ((eta.get('dvd', 0) > 0 or eta.get('theater')) and eta.get('dvd') < (now - 2419200))))):
                        movie_ids.append(movie_id)

                        if len(movie_ids) >= limit:
                            break

            if len(movie_ids) > 0:

                # Get all movie information
                movies_raw = db.query(Media) \
                    .options(joinedload_all('library.titles')) \
                    .options(joinedload_all('library.files')) \
                    .options(joinedload_all('files')) \
                    .filter(Media.id.in_(movie_ids)) \
                    .all()

                # Create dict by movie id
                movie_dict = {}
                for movie in movies_raw:
                    movie_dict[movie.id] = movie

                # Emit in the original (sorted or shuffled) id order.
                for movie_id in movie_ids:
                    movies.append(movie_dict[movie_id].to_dict({
                        'library': {'titles': {}, 'files': {}},
                        'files': {},
                    }))

        return {
            'success': True,
            'empty': len(movies) == 0,
            'movies': movies,
        }
Example #37
0
def series_list():
    """Add series list to all pages under series"""
    all_series = db_session.query(Series).order_by(asc(Series.name)).all()
    return {'report': all_series}
Example #38
0
File: sql.py Project: msryu2016/py
    def find(self,
             signature=None,
             order=0,
             since=None,
             until=None,
             offset=None,
             limit=None):
        """
        Retrieve all crash dumps in the database, optionally filtering them by
        signature and timestamp, and/or sorting them by timestamp.

        Results can be paged to avoid consuming too much memory if the database
        is large.

        @see: L{find_by_example}

        @type  signature: object
        @param signature: (Optional) Return only through crashes matching
            this signature. See L{Crash.signature} for more details.

        @type  order: int
        @param order: (Optional) Sort by timestamp.
            If C{== 0}, results are not sorted.
            If C{> 0}, results are sorted from older to newer.
            If C{< 0}, results are sorted from newer to older.

        @type  since: datetime
        @param since: (Optional) Return only the crashes after and
            including this date and time.

        @type  until: datetime
        @param until: (Optional) Return only the crashes before this date
            and time, not including it.

        @type  offset: int
        @param offset: (Optional) Skip the first I{offset} results.

        @type  limit: int
        @param limit: (Optional) Return at most I{limit} results.

        @rtype:  list(L{Crash})
        @return: List of Crash objects.
        """

        # Validate the parameters.
        if since and until and since > until:
            warnings.warn("CrashDAO.find() got the 'since' and 'until'"
                          " arguments reversed, corrected automatically.")
            since, until = until, since
        if limit is not None and not limit:
            warnings.warn("CrashDAO.find() was set a limit of 0 results,"
                          " returning without executing a query.")
            return []

        # Build the SQL query.
        query = self._session.query(CrashDTO)
        if signature is not None:
            # Protocol 0 (ASCII pickle) — presumably matches how signatures
            # were serialized when stored; verify against the writer side.
            sig_pickled = pickle.dumps(signature, protocol=0)
            query = query.filter(CrashDTO.signature == sig_pickled)
        if since:
            query = query.filter(CrashDTO.timestamp >= since)
        if until:
            query = query.filter(CrashDTO.timestamp < until)
        if order:
            if order > 0:
                query = query.order_by(asc(CrashDTO.timestamp))
            else:
                query = query.order_by(desc(CrashDTO.timestamp))
        else:
            # Default ordering is by row ID, to get consistent results.
            # Also some database engines require ordering when using offsets.
            query = query.order_by(asc(CrashDTO.id))
        if offset:
            query = query.offset(offset)
        if limit:
            query = query.limit(limit)

        # Execute the SQL query and convert the results.
        # NOTE(review): Query.all() never raises NoResultFound (that is
        # raised by one()), so this except clause looks unreachable —
        # confirm before removing.
        try:
            return [dto.toCrash() for dto in query.all()]
        except NoResultFound:
            return []
Example #39
0
    def sorting(self):
        """Construct the query: sorting.

        Add sorting(ORDER BY) on the columns needed to be applied on.
        """
        sorting = []

        # Each requested sort: column name, direction ('asc'/'desc'),
        # and an optional NULLS FIRST/LAST preference.
        Order = namedtuple('order', ['name', 'dir', 'nullsOrder'])

        # Legacy DataTables (<=1.9) and current versions use different
        # request-parameter names for the ordering spec.
        if self.legacy:
            columnOrder = 'iSortCol_%s'
            dirOrder = 'sSortDir_%s'
        else:
            columnOrder = 'order[%s][column]'
            dirOrder = 'order[%s][dir]'

        # NOTE(review): i is never incremented, so only the first order
        # column (index 0) is honored — confirm multi-column sort is
        # intentionally unsupported.
        i = 0
        if self.request_values.get(columnOrder % i) is not None:
            sorting.append(
                Order(
                    self.columns[int(self.request_values[columnOrder %
                                                         i])].column_name,
                    self.request_values[dirOrder % i], self.columns[int(
                        self.request_values[columnOrder % i])].nulls_order))

        for sort in sorting:
            # Walk dotted names ("x.y.z") down through relationships to find
            # the table and column the ORDER BY should reference.
            tmp_sort_name = sort.name.split('.')
            for tmp_name in tmp_sort_name:
                # iterate over the list so we can support things like x.y.z.a
                if tmp_sort_name.index(tmp_name) == 0:
                    obj = getattr(self.sqla_object, tmp_name)
                    parent = self.sqla_object
                elif isinstance(obj.property, RelationshipProperty):
                    # otherwise try and see if we can percolate down the list
                    # for relationships of relationships.
                    parent = obj.property.mapper.class_
                    obj = getattr(parent, tmp_name)

                if not hasattr(obj, 'property'):  # hybrid_property or property
                    sort_name = tmp_name
                    if hasattr(parent, '__tablename__'):
                        tablename = parent.__tablename__
                    else:
                        tablename = parent.__table__.name
                # Ex: ForeignKey
                elif isinstance(obj.property, RelationshipProperty):
                    # Ex: address.description => description =>
                    # addresses.description
                    sort_name = tmp_name
                    if not sort_name:
                        # Find first primary key
                        sort_name = obj.property.table.primary_key.columns \
                            .values()[0].name
                    tablename = obj.property.table.name
                else:  # -> ColumnProperty
                    sort_name = tmp_name

                    if hasattr(parent, '__tablename__'):
                        tablename = parent.__tablename__
                    else:
                        tablename = parent.__table__.name

            # Fully-qualified "table.column" for the textual ORDER BY.
            sort_name = '%s.%s' % (tablename, sort_name)

            ordering = asc(text(sort_name)) if sort.dir == 'asc' else desc(
                text(sort_name))

            # Apply NULLS FIRST / NULLS LAST when requested.
            if sort.nullsOrder:
                ordering = nullsMethods[sort.nullsOrder](ordering)

            self.query = self.query.order_by(ordering)
Example #40
0
def get_processings_by_status(status,
                              period=None,
                              processing_ids=[],
                              locking=False,
                              locking_for_update=False,
                              bulk_size=None,
                              submitter=None,
                              to_json=False,
                              by_substatus=False,
                              only_return_id=False,
                              for_poller=False,
                              session=None):
    """
    Get processing or raise a NoObject exception.

    :param status: Processing status of list of processing status.
    :param period: Time period in seconds.
    :param processing_ids: Restrict to these processing ids.
        NOTE(review): mutable default, but it is only read, never mutated.
    :param locking: Whether to retrieve only unlocked items.
    :param locking_for_update: Lock the selected rows (skips locked rows).
    :param bulk_size: bulk size limitation.
    :param submitter: The submitter name.
    :param to_json: return json format.
    :param by_substatus: Match on substatus instead of status.
    :param only_return_id: Return only processing ids, not full records.
    :param for_poller: Order by poller_updated_at instead of updated_at.

    :param session: The database session in use.

    :raises NoObject: If no processing is founded.

    :returns: Processings.
    """

    try:
        if status:
            if not isinstance(status, (list, tuple)):
                status = [status]
            # Presumably duplicated so in_() always gets >1 element; confirm
            # against the DB layer before simplifying.
            if len(status) == 1:
                status = [status[0], status[0]]

        if only_return_id:
            query = session.query(models.Processing.processing_id)
        else:
            query = session.query(models.Processing)

        if status:
            if by_substatus:
                query = query.filter(models.Processing.substatus.in_(status))
            else:
                query = query.filter(models.Processing.status.in_(status))
            # Only items that are due for polling.
            query = query.filter(
                models.Processing.next_poll_at <= datetime.datetime.utcnow())

        if processing_ids:
            query = query.filter(
                models.Processing.processing_id.in_(processing_ids))
        # if period:
        #     query = query.filter(models.Processing.updated_at < datetime.datetime.utcnow() - datetime.timedelta(seconds=period))
        if locking:
            query = query.filter(
                models.Processing.locking == ProcessingLocking.Idle)
        if submitter:
            query = query.filter(models.Processing.submitter == submitter)

        if for_poller:
            query = query.order_by(asc(models.Processing.poller_updated_at))
        elif locking_for_update:
            query = query.with_for_update(skip_locked=True)
        else:
            query = query.order_by(asc(models.Processing.updated_at))

        if bulk_size:
            query = query.limit(bulk_size)

        tmp = query.all()
        rets = []
        if tmp:
            for t in tmp:
                if only_return_id:
                    # Row is a single-column tuple; unwrap the id.
                    rets.append(t[0])
                else:
                    if to_json:
                        rets.append(t.to_dict_json())
                    else:
                        rets.append(t.to_dict())
        return rets
    except sqlalchemy.orm.exc.NoResultFound as error:
        raise exceptions.NoObject(
            'No processing attached with status (%s): %s' % (status, error))
    except Exception:
        # FIX: bare `raise` re-raises with the original traceback intact;
        # `raise error` discarded the traceback under Python 2.
        raise
Example #41
0
def games_q():
    """Return the game type codes of all active game types, ascending."""
    rows = DBSession.query(GameType.game_type_cd).\
        filter(GameType.active_ind == True).\
        order_by(expr.asc(GameType.game_type_cd)).\
        all()
    return [game_type_cd for (game_type_cd,) in rows]
Example #42
0
def pull_values(lower_date, upper_date, es_user, es_pw, es_host, es_db, tq_user, tq_pw, tq_host, tq_db):
    """
    ETL job (Python 2): pull session/event data from the "lynx" event-store
    MySQL database (es_* credentials) for last_activity between lower_date
    and upper_date, derive one "arrival fact" row per session, and
    insert/update those rows in the target MySQL database (tq_* credentials).

    The date range is split into intervals; each interval is processed as
    one batch, committed together at the end of the interval.
    """
    create_logging_dirs()

    current_time = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    csv_output = './lynx_csv/' + current_time + '_lynx.csv'
    errors_output = './lynx_errors/' + current_time + '_lynx_errors.txt'
    # NOTE(review): recorded_count is never incremented below, and
    # csv_output / no_header_yet are only referenced by the commented-out
    # CSV-dump code further down.
    recorded_count = 0
    no_header_yet = True

    # divide up into intervals
    num_intervals = 100
    completed = 0.0
    range_generator = date_range(lower_date, upper_date, num_intervals)
    # remove extra elements
    intervals = list(set(list(range_generator)))
    intervals = sorted(intervals)
    # NOTE(review): percentage / completed look like progress bookkeeping
    # but are unused in the rest of the function.
    percentage = 100.0 / (len(intervals) - 1)

    # Source (event-store) engine.
    engine_url = 'mysql+pymysql://' + es_user + ':' + es_pw + '@' + es_host + '/' + es_db
    es_engine = create_engine(engine_url)

    # Target (fact-table) engine.
    engine_url = 'mysql+pymysql://' + tq_user + ':' + tq_pw + '@' + tq_host + '/' + tq_db
    tq_engine = create_engine(engine_url)

    # Reflect the source and target table schemas from the live databases.
    metadata = MetaData()
    sessions_table = Table('sessions', metadata, autoload=True, autoload_with=es_engine)
    events_table = Table('events', metadata, autoload=True, autoload_with=es_engine)
    browsers_table = Table('browsers', metadata, autoload=True, autoload_with=es_engine)
    session_attributes_table = Table('session_attributes', metadata, autoload=True, autoload_with=es_engine)
    attributes_table = Table('attributes', metadata, autoload=True, autoload_with=es_engine)
    event_types_table = Table('event_types', metadata, autoload=True, autoload_with=es_engine)
    event_attributes_table = Table('event_attributes', metadata, autoload=True, autoload_with=es_engine)
    forms_table = Table('forms', metadata, autoload=True, autoload_with=es_engine)
    revenues_table = Table('revenues', metadata, autoload=True, autoload_with=es_engine)

    arrival_facts_table = Table('arrival_facts', metadata, autoload=True, autoload_with=tq_engine)

    es_session = load_es_session(es_engine)
    tq_session = load_tq_session(tq_engine)

    # Rebind attributes_table from a Table object to an id -> name dict
    # used as a lookup cache for the rest of the function.
    attributes_select = es_session.execute(select([attributes_table.c.id, attributes_table.c.name]))
    attributes_table = {}
    for attribute in attributes_select:
        attributes_table[attribute['id']] = attribute['name']

    # Same rebinding trick for event_types_table (id -> name dict).
    event_types_select = es_session.execute(select([event_types_table.c.id, event_types_table.c.name]))
    event_types_table = {}
    for attribute in event_types_select:
        event_types_table[attribute['id']] = attribute['name']

    # Pre-built INSERT statement; executed later with a list of per-session
    # parameter dicts for a bulk insert.
    arrival_fact_insert_stmt = arrival_facts_table.insert().values(
        browser_id=bindparam('browser_id'),
        entry_page=bindparam('entry_page'),
        conversion_page=bindparam('conversion_page'),
        conversion=bindparam('conversion'),
        conversion_count=bindparam('conversion_count'),
        event_category=bindparam('event_category'),
        form_city=bindparam('form_city'),
        form_state=bindparam('form_state'),
        form_zip=bindparam('form_zip'),
        prop_own=bindparam('prop_own'),
        user_agent=bindparam('user_agent'),
        browser=bindparam('browser'),
        browser_version=bindparam('browser_version'),
        device_type=bindparam('device_type'),
        utm_source=bindparam('utm_source'),
        utm_campaign=bindparam('utm_campaign'),
        day_of_week=bindparam('day_of_week'),
        os_name=bindparam('os_name'),
        os_version=bindparam('os_version'),
        device_name=bindparam('device_name'),
        device_brand=bindparam('device_brand'),
        ip_address=bindparam('ip_address'),
        gclid=bindparam('gclid'),
        events_count=bindparam('events_count'),
        page_views=bindparam('page_views'),
        conf=bindparam('conf'),
        conu=bindparam('conu'),
        revenue=bindparam('revenue'),
        robot_id=bindparam('robot_id'),
        last_activity=bindparam('last_activity'),
        created_at=bindparam('created_at'),
        session_id=bindparam('session_id')
    )

    # Pre-built UPDATE statement, keyed by the existing fact row id ('b_id').
    arrival_fact_update_stmt = arrival_facts_table.update().where(arrival_facts_table.c.id == bindparam('b_id')).values(
        browser_id=bindparam('browser_id'),
        entry_page=bindparam('entry_page'),
        conversion_page=bindparam('conversion_page'),
        conversion=bindparam('conversion'),
        conversion_count=bindparam('conversion_count'),
        event_category=bindparam('event_category'),
        form_city=bindparam('form_city'),
        form_state=bindparam('form_state'),
        form_zip=bindparam('form_zip'),
        prop_own=bindparam('prop_own'),
        user_agent=bindparam('user_agent'),
        browser=bindparam('browser'),
        browser_version=bindparam('browser_version'),
        device_type=bindparam('device_type'),
        utm_source=bindparam('utm_source'),
        utm_campaign=bindparam('utm_campaign'),
        day_of_week=bindparam('day_of_week'),
        os_name=bindparam('os_name'),
        os_version=bindparam('os_version'),
        device_name=bindparam('device_name'),
        device_brand=bindparam('device_brand'),
        ip_address=bindparam('ip_address'),
        gclid=bindparam('gclid'),
        events_count=bindparam('events_count'),
        page_views=bindparam('page_views'),
        conf=bindparam('conf'),
        conu=bindparam('conu'),
        revenue=bindparam('revenue'),
        robot_id=bindparam('robot_id'),
        last_activity=bindparam('last_activity'),
        created_at=bindparam('created_at'),
        session_id=bindparam('session_id')
    )

    print colored('Pulling Values From Lynx', 'red')
    # Process each (lower, upper] date interval as one batch.
    for index, interval in enumerate(intervals[:-1]):
        print colored("Grabbing for interval (%s)->(%s)" % (intervals[index], intervals[index + 1]), 'blue')
        lower = intervals[index]
        upper = intervals[index + 1]

        # All sessions whose last activity falls inside this interval.
        session_select = select([sessions_table]).where(sessions_table.c.last_activity > lower).where(
            sessions_table.c.last_activity <= upper)
        session_result = es_session.execute(session_select)

        session_result = [dict(r) for r in session_result]
        session_ids = map(lambda session: session['id'], session_result)

        if len(session_ids) <= 0:
            continue

        # Fetch all events for these sessions in one query, then resolve
        # each event's type name via the event_types lookup dict.
        children_events_select = select(
            [events_table.c.id, events_table.c.session_id, events_table.c.parent_event_id, events_table.c.event_type_id,
             events_table.c.created_at]).where(
            events_table.c.session_id.in_(session_ids))
        children_events = es_session.execute(children_events_select)
        children_events = [dict(r) for r in children_events]
        for children_event in children_events:
            children_event['event_type'] = event_types_table[children_event['event_type_id']]

        # Revenue rows for these sessions, fetched once per interval.
        revenue_select = select(
            [revenues_table.c.session_id, revenues_table.c.con_f, revenues_table.c.total_revenue]).where(
            revenues_table.c.session_id.in_(session_ids))
        revenue_result = es_session.execute(revenue_select)
        revenue_result = [dict(r) for r in revenue_result]

        arrival_fact_inserts = []
        arrival_fact_updates = []
        session_count = 0

        for session in session_result:

            session_id = session['id']
            should_update = False

            # Does a fact row for this session already exist in the target?
            arrival_fact_id = -1;
            arrival_facts_select = select([arrival_facts_table.c.id, arrival_facts_table.c.last_activity]) \
                .select_from(arrival_facts_table).where(arrival_facts_table.c.session_id == session_id)
            arrival_facts_result = tq_session.execute(arrival_facts_select).fetchone()
            if arrival_facts_result:
                # Skip sessions with no new activity since the stored fact.
                if arrival_facts_result['last_activity'] >= session['last_activity']:
                    continue
                else:
                    arrival_fact_id = arrival_facts_result['id']
                    should_update = True

            session_children = filter(lambda event: event['session_id'] == session_id, children_events)
            if len(session_children) <= 0:
                continue

            # Per-session fact fields, defaulted before extraction.
            user_agent_value = None
            browser_id = None
            browser_name = None
            browser_version = None
            os_name = None
            os_version = None
            robot_id = None
            device_name = None
            device_type = None
            device_brand = None
            ip_address = None
            utm_source = None
            utm_campaign = None
            gclid = None
            form_city = None
            form_state = None
            form_zip = None
            prop_own = None
            conf = 0
            conu = False
            total_revenue = 0

            entry_page = None
            conversion_page = None
            conversion_count = 0

            # Resolve the session's browser_id via the browsers table.
            browser_select = select([browsers_table.c.browser_id]).where(browsers_table.c.id == session['browser_id'])
            browser_id = es_session.execute(browser_select).fetchone()
            if browser_id:
                browser_id = browser_id['browser_id']

            # Parse the raw user-agent string into browser/OS/device fields.
            user_agent_value = session['user_agent']
            user_agent_data = user_agent(user_agent_value)
            browser_name = user_agent_data['browser_name']
            browser_version = user_agent_data['browser_version']
            os_name = user_agent_data['os_name']
            os_version = user_agent_data['os_version']
            is_bot = user_agent_data['is_bot']
            bot_name = user_agent_data['bot_name']
            device_name = user_agent_data['device_name']
            device_type = user_agent_data['device_type']
            device_brand = user_agent_data['device_brand']

            ip_address = session['ip']

            # First matching revenue row wins; conu = "converted" flag.
            revenues = filter(lambda revenue: revenue['session_id'] == session_id, revenue_result)
            if len(revenues) > 0:
                revenue = revenues[0]
                conf = revenue['con_f']
                total_revenue = revenue['total_revenue']
                conu = (conf > 0)

            # Pull per-session attributes (utm tags, gclid, robot id, OS
            # fallback) via the attributes name lookup.
            session_attribute_select = select(
                [session_attributes_table.c.attribute_id, session_attributes_table.c.value]) \
                .where(session_attributes_table.c.session_id == session_id)
            session_attribute_result = es_session.execute(session_attribute_select)
            for session_attribute in session_attribute_result:
                attribute_name = attributes_table[session_attribute['attribute_id']]
                if attribute_name == 'os':
                    # Only used when the user-agent parse gave no OS name.
                    if not os_name:
                        os_name = session_attribute['value']
                elif attribute_name == 'utm_source':
                    utm_source = session_attribute['value']
                elif attribute_name == 'utm_campaign':
                    utm_campaign = session_attribute['value']
                elif attribute_name == 'gclid':
                    gclid = session_attribute['value']
                elif attribute_name == 'robot_id':
                    robot_id = session_attribute['value']

            # List of events that belong ot a session
            session_children_event_ids = map(lambda event: event['id'], session_children)
            page_rendered_events = filter(lambda event: event['event_type'] == "Page Rendered", session_children)
            # sort chronologically ascending
            page_rendered_events.sort(key=lambda x: x['created_at'], reverse=False)
            event_attribute_select = select([event_attributes_table.c.event_id, event_attributes_table.c.attribute_id,
                                             event_attributes_table.c.value]) \
                .where(event_attributes_table.c.event_id.in_(session_children_event_ids))
            # page serve event(first one, entry page)
            # NOTE(review): indexing page_rendered_events[0] assumes at least
            # one "Page Rendered" event exists for the session — confirm.
            event_attribute_result = es_session.execute(event_attribute_select)
            event_attribute_result = [dict(r) for r in event_attribute_result]
            for event_attribute in filter(
                    lambda event_attribute: event_attribute['event_id'] == page_rendered_events[0]['id'],
                    event_attribute_result):
                if attributes_table[event_attribute['attribute_id']] == 'page_type':
                    entry_page = event_attribute['value']
                    break

            # Form submissions: take location fields from the earliest form,
            # then walk submit events back to their parent render events to
            # find the conversion page type.
            form_count_select = select([func.count()]).select_from(forms_table).where(
                forms_table.c.session_id == session_id)
            form_count = es_session.execute(form_count_select).scalar()
            if form_count > 0:
                form_select = select(
                    [forms_table.c.event_id, forms_table.c.city, forms_table.c.state, forms_table.c.zip,
                     forms_table.c.property_ownership]).where(forms_table.c.session_id == session_id).order_by(
                    asc(forms_table.c.created_at))
                form_result = es_session.execute(form_select)
                form = form_result.fetchone()
                form_city = form['city']
                form_state = form['state']
                form_zip = form['zip']
                prop_own = form['property_ownership']
                submit_event_ids = [form['event_id']]
                submit_event_ids.extend(map(lambda form: form['event_id'], form_result))
                submit_events = filter(lambda event: event['id'] in submit_event_ids, session_children)
                submit_render_event_ids = map(lambda event: event['parent_event_id'], submit_events)
                relevant_attributes = filter(
                    lambda event_attribute: event_attribute['event_id'] in submit_render_event_ids,
                    event_attribute_result)
                for event_attribute in relevant_attributes:
                    if attributes_table[event_attribute['attribute_id']] == 'page_type':
                        conversion_count += 1
                        conversion_page = event_attribute['value']
                        break

            # Assemble the fact row parameters for the bulk insert/update.
            fact_table = {
                'browser_id': browser_id,
                'entry_page': entry_page,
                'conversion_page': conversion_page,
                'conversion': (conversion_count > 0),
                'conversion_count': conversion_count,
                'event_category': None,
                'form_city': form_city,
                'form_state': form_state,
                'form_zip': form_zip,
                'prop_own': prop_own,
                'user_agent': user_agent_value,
                'browser': browser_name,
                'browser_version': browser_version,
                'device_type': device_type,
                'utm_source': utm_source,
                'utm_campaign': utm_campaign,
                'day_of_week': week_of_day(session['created_at']),
                'os_name': os_name,
                'os_version': os_version,
                'device_name': device_name,
                'device_brand': device_brand,
                'ip_address': ip_address,
                'gclid': gclid,
                'events_count': len(session_children),
                'page_views': len(page_rendered_events),
                'conf': conf,
                'conu': conu,
                'revenue': total_revenue,
                'robot_id': robot_id,
                'last_activity': session['last_activity'],
                'created_at': session['created_at'],
                'session_id': session_id
            }

            if should_update:
                fact_table['b_id'] = arrival_fact_id
                print colored('Update: (%s)' % (fact_table.values()),'green')
                arrival_fact_updates.append(fact_table)
            else:
                print colored('Add: (%s)' % (fact_table.values()),'green')
                arrival_fact_inserts.append(fact_table)

            session_count += 1
            if session_count % 100 == 0:
                print ("Processed %s sessions" % session_count)

                # if len(fact_tables) > 0:
                # with open(csv_output, 'a') as f:  # Just use 'w' mode in 3.x
                #     w = csv.DictWriter(f, fact_tables[0].as_dict().keys())
                #     if no_header_yet :
                #         w.writeheader()
                #         no_header_yet = False
                #
                #     for fact_table in fact_tables:
                #         w.writerow(fact_table.as_dict())
        # Flush the interval's accumulated inserts/updates in one commit.
        try:
            if len(arrival_fact_inserts) > 0:
                tq_session.execute(arrival_fact_insert_stmt, arrival_fact_inserts)
                print colored('Planned fact tables added','green')

            if len(arrival_fact_updates) > 0:
                tq_session.execute(arrival_fact_update_stmt, arrival_fact_updates)
                print colored('Planned fact tables updated','green')

            tq_session.commit()
        # NOTE(review): bare except — any failure is logged to the errors
        # file and processing silently continues with the next interval.
        except:
            e = sys.exc_info()
            with open(errors_output, 'a') as f:
                f.write("%s Records inserted" % recorded_count)
                f.write(str(e))
Example #43
0
def get_requests_by_status_type(status,
                                request_type=None,
                                time_period=None,
                                request_ids=None,
                                locking=False,
                                locking_for_update=False,
                                bulk_size=None,
                                to_json=False,
                                by_substatus=False,
                                only_return_id=False,
                                session=None):
    """
    Get requests.

    :param status: status (or list of statuses) of the request data; also
                   restricts results to requests whose next_poll_at is due.
    :param request_type: The type of the request data.
    :param time_period: Currently unused (the age filter is commented out).
    :param request_ids: Optional list of request ids to restrict to.
    :param locking: Whether to lock requests to avoid others getting the same request.
    :param locking_for_update: Select rows with FOR UPDATE SKIP LOCKED.
    :param bulk_size: Size limitation per retrieve.
    :param to_json: return json format.
    :param by_substatus: Match `status` against substatus instead of status.
    :param only_return_id: Return only request ids instead of full dicts.
    :param session: The database session in use.

    :raises NoObject: If no request are founded.

    :returns: list of Request dicts (or of request ids when only_return_id).
    """
    # Avoid a mutable default argument; None means "no id filter".
    if request_ids is None:
        request_ids = []

    try:
        if status:
            if not isinstance(status, (list, tuple)):
                status = [status]
            # NOTE(review): a single-element IN list is duplicated —
            # presumably to keep the bound-parameter signature stable for
            # statement caching; confirm before removing.
            if len(status) == 1:
                status = [status[0], status[0]]

        if only_return_id:
            query = session.query(models.Request.request_id)\
                           .with_hint(models.Request, "INDEX(REQUESTS REQUESTS_SCOPE_NAME_IDX)", 'oracle')
        else:
            query = session.query(models.Request)\
                           .with_hint(models.Request, "INDEX(REQUESTS REQUESTS_SCOPE_NAME_IDX)", 'oracle')

        if status:
            if by_substatus:
                query = query.filter(models.Request.substatus.in_(status))
            else:
                query = query.filter(models.Request.status.in_(status))
            # Only pick up requests that are due to be polled.
            query = query.filter(
                models.Request.next_poll_at <= datetime.datetime.utcnow())

        if request_type is not None:
            query = query.filter(models.Request.request_type == request_type)
        # if time_period is not None:
        #     query = query.filter(models.Request.updated_at < datetime.datetime.utcnow() - datetime.timedelta(seconds=time_period))
        if request_ids:
            query = query.filter(models.Request.request_id.in_(request_ids))
        if locking:
            query = query.filter(models.Request.locking == RequestLocking.Idle)

        if locking_for_update:
            # Row-level locking; skip rows already locked by other workers.
            query = query.with_for_update(skip_locked=True)
        else:
            # Oldest first, highest priority first within the same age.
            query = query.order_by(asc(models.Request.updated_at))\
                         .order_by(desc(models.Request.priority))
        if bulk_size:
            query = query.limit(bulk_size)

        rets = []
        for req in query.all():
            if only_return_id:
                # Row is a one-column tuple when only the id was selected.
                rets.append(req[0])
            elif to_json:
                rets.append(req.to_dict_json())
            else:
                rets.append(req.to_dict())
        return rets
    except sqlalchemy.orm.exc.NoResultFound as error:
        # Defensive: .all() does not raise NoResultFound, but keep the
        # project's conventional translation to NoObject.
        raise exceptions.NoObject(
            'No requests with status: %s, request_type: %s, time_period: %s, locking: %s, %s'
            % (status, request_type, time_period, locking, error))
Example #44
0
    def get(self):
        """Render the public instance index page with sorting and paging."""

        # TODO: a temp hack for status sync
        self.db2.commit()

        # TODO:
        #self.set_secure_cookie('session_key', session_key)
        instance_sort = self.get_secure_cookie('instance_sort')
        instance_sort = '?view=all'

        view = self.get_argument('view', 'all')
        by = self.get_argument('by', 'updated')
        sort = self.get_argument('sort', 'desc')
        status = self.get_argument('status', 'running')
        page_size = int(self.get_argument(
                'sepa', settings.INSTANCE_HOME_PAGE_SIZE))
        cur_page = int(self.get_argument('p', 1))

        # Slice bounds for the requested page.
        start = (cur_page - 1) * page_size
        stop = start + page_size

        # Map the status argument onto the matching status list.
        status_lists = {
            'running': settings.INSTANCE_SLIST_RUNING,
            'stoped': settings.INSTANCE_SLIST_STOPED,
        }
        slist = status_lists.get(status, settings.INSTANCE_SLIST_ALL)

        # Public (non-private) instances in any of the wanted statuses.
        query = self.db2.query(Instance).filter(
            Instance.isprivate != True).filter(
            Instance.status.in_(slist))

        if view == 'self' and self.current_user:
            query = query.filter_by(user_id=self.current_user.id)

        # TODO: sorted by username
        by_obj = Instance.created if by == 'created' else Instance.updated
        order_clause = desc(by_obj) if sort == 'desc' else asc(by_obj)
        query = query.order_by(order_clause)

        # TODO: may have a more highly active count ( use id )
        total = query.count()

        instances = query.slice(start, stop)

        page_html = ""
        if total > page_size:
            page_html = Pagination(
                total=total,
                page_size=page_size,
                cur_page=cur_page).html(self.get_page_url)

        d = {'title': self.title,
             'INSTANCE_LIST': instances,
             'cur_page': cur_page,
             'page_html': page_html}

        # TODO: save user sort method
        self.set_secure_cookie('instance_sort', instance_sort)

        self.render("instance/index.html", **d)
Example #45
0
    def dongYuProductSysn(self):
        """
        Synchronize DongYu product data: load products from the 2Box MySQL
        database and mirror them into the YiFei MSSQL INVMB table.
        (Translated from the original docstring: "DongYu product info sync".)
        """
        self.logger.info('dongYuProductSysn begin')
        mySqlSession = MySQLManager.getInstance().getSession()
        # Products joined with their translations, excluding soft-deleted
        # rows, ordered by part number.
        productListFrom2Box = mySqlSession.query(PS, PT).outerjoin(
            PT, PS.pn == PT.pn).filter(PS.isdeleted == 0).order_by(asc(
                PS.pn)).all()
        self.logger.info("加载2Box产品数据%s条" % str(len(productListFrom2Box)))

        msSqlSession = MsSQLManager.getInstance().getSession()
        #         productListFrom2YiFei=msSqlSession.query(INVMB).filter(or_(INVMB.MB005==132, INVMB.MB005==122)).order_by(asc(INVMB.MB001)).all()
        productListFrom2YiFei = msSqlSession.query(INVMB).order_by(
            asc(INVMB.MB001)).all()
        self.logger.info("加载YiFei产品数据%s条" % str(len(productListFrom2YiFei)))
        # Index existing YiFei products by (stripped) item number for lookup.
        productMapFrom2YiFei = {}
        for row in productListFrom2YiFei:
            productMapFrom2YiFei[row.MB001.strip()] = row

        insertList = []
        updateList = []
        for row in productListFrom2Box:
            invmbItem = self.changInvmbModel(row)
            # "CA" items get an additional mirrored record with an "06"
            # prefix and purchase-specific field overrides.
            if invmbItem.MB001.find("CA") >= 0:
                self.updateCompare(msSqlSession, productMapFrom2YiFei,
                                   invmbItem, insertList, updateList)
                invmbItem06 = INVMB()
                self.copyModel(invmbItem, invmbItem06)
                invmbItem06.MB001 = "06%s" % invmbItem06.MB001
                invmbItem06.MB080 = invmbItem06.MB001  # item number; purchase-order items are looked up via this number
                invmbItem06.MB005 = "112"
                invmbItem06.MB017 = "0101"
                invmbItem06.MB025 = "P"
                self.updateCompare(msSqlSession, productMapFrom2YiFei,
                                   invmbItem06, insertList, updateList)
            else:
                self.updateCompare(msSqlSession, productMapFrom2YiFei,
                                   invmbItem, insertList, updateList)

        self.logger.info("需要新增保存%s条." % str(len(insertList)))
        # Insert rows one at a time, committing each, so a single bad row
        # does not abort the rest of the batch.
        for row in insertList:
            try:
                self.logger.info("保存:%s记录" % row.MB001)
                msSqlSession.add(row)
                msSqlSession.commit()
            except Exception as e:
                self.logger.error(traceback.format_exc())
        self.logger.info("需要更新保存%s条." % str(len(updateList)))
        # NOTE(review): updateList is counted but never written back here —
        # presumably updateCompare applies the updates itself; confirm,
        # otherwise pending updates are silently dropped.

        msSqlSession.close()
        mySqlSession.close()
        self.logger.info('dongYuProductSysn end')


# if __name__=='__main__':
#     MsSQLManager.getInstance().config(host='192.168.1.52',
#                                                             port='1433',
#                                                             user='******',
#                                                             paswd='dh+2013',
#                                                             database='DHGROUP',
#                                                             charset='utf8' ,
#                                                             encoding='utf-8',
#                                                             echo=True)
#     MySQLManager.getInstance().config(host='127.0.0.1',
#                                                         port='3306',
#                                                         user='******',
#                                                         paswd='root',
#                                                         database='bb2_default',
#                                                         charset='utf8' ,
#                                                         encoding='utf-8',
#                                                         echo=False)
#
#     productSysnService=YiFeiProductSysnService()
#     productSysnService.dongYuProductSysn()
Example #46
0
def get_collections(scope=None,
                    name=None,
                    request_id=None,
                    workload_id=None,
                    transform_id=None,
                    relation_type=None,
                    to_json=False,
                    session=None):
    """
    Get collections by request id or raise a NoObject exception.

    :param scope: collection scope.
    :param name: collection name, can be wildcard ('*' maps to SQL '%').
    :param request_id: The request id.
    :param workload_id: The workload id.
    :param transform_id: transform id (or list of ids) related to this collection.
    :param relation_type: The relation type between this collection and the transform: Input, Output and Log.
    :param to_json: return json format.
    :param session: The database session in use.

    :raises NoObject: If no collections are found.

    :returns: list of Collections.
    """
    try:
        # Normalize a scalar transform_id into a list for the IN filter.
        if transform_id and not isinstance(transform_id, (list, tuple)):
            transform_id = [transform_id]

        query = session.query(models.Collection)
        if scope:
            query = query.filter(models.Collection.scope == scope)
        if name:
            # Translate '*' wildcards into SQL LIKE '%' wildcards.
            query = query.filter(
                models.Collection.name.like(name.replace('*', '%')))
        if request_id:
            query = query.filter(models.Collection.request_id == request_id)
        if workload_id:
            query = query.filter(models.Collection.workload_id == workload_id)
        if transform_id:
            query = query.filter(
                models.Collection.transform_id.in_(transform_id))
        # relation_type may legitimately be 0 (an enum value), so compare
        # against None rather than relying on truthiness.
        if relation_type is not None:
            query = query.filter(
                models.Collection.relation_type == relation_type)

        query = query.order_by(asc(models.Collection.updated_at))

        return [t.to_dict_json() if to_json else t.to_dict()
                for t in query.all()]
    except sqlalchemy.orm.exc.NoResultFound as error:
        # Defensive: .all() does not raise NoResultFound, but keep the
        # project's conventional translation to NoObject.
        raise exceptions.NoObject(
            'No collection with  scope(%s), name(%s), transform_id(%s): %s, relation_type: %s'
            % (scope, name, transform_id, relation_type, error))
Example #47
0
def dao_fetch_all_services_created_by_user(user_id):
    """Return every Service created by *user_id*, oldest first."""
    return (Service.query
            .filter_by(created_by_id=user_id)
            .order_by(asc(Service.created_at))
            .all())
Example #48
0
def dao_fetch_live_services_data():
    """Collect reporting data for every live service.

    Combines service metadata, the owning organisation, the go-live user's
    contact details, the most recent annual billing row and this financial
    year's notification totals (email/sms/letter) into one dict per service,
    ordered by go-live date.
    """
    year_start_date, year_end_date = get_current_financial_year()

    # Latest financial year that has an AnnualBilling row, per service, so
    # the join below picks only the most recent annual billing record.
    most_recent_annual_billing = db.session.query(
        AnnualBilling.service_id,
        func.max(AnnualBilling.financial_year_start).label('year')).group_by(
            AnnualBilling.service_id).subquery()

    # Billing facts restricted to the current financial year.
    this_year_ft_billing = FactBilling.query.filter(
        FactBilling.bst_date >= year_start_date,
        FactBilling.bst_date <= year_end_date,
    ).subquery()

    # Note: grouping includes notification_type, so this yields one row per
    # (service, notification_type); the case() expressions route each type's
    # total into its own column and the loop below collapses the rows.
    data = db.session.query(
        Service.id.label('service_id'),
        Service.name.label("service_name"),
        Organisation.name.label("organisation_name"),
        Organisation.organisation_type.label('organisation_type'),
        Service.consent_to_research.label('consent_to_research'),
        User.name.label('contact_name'),
        User.email_address.label('contact_email'),
        User.mobile_number.label('contact_mobile'),
        Service.go_live_at.label("live_date"),
        Service.volume_sms.label('sms_volume_intent'),
        Service.volume_email.label('email_volume_intent'),
        Service.volume_letter.label('letter_volume_intent'),
        case([(this_year_ft_billing.c.notification_type == 'email',
               func.sum(this_year_ft_billing.c.notifications_sent))],
             else_=0).label("email_totals"),
        case([(this_year_ft_billing.c.notification_type
               == 'sms', func.sum(this_year_ft_billing.c.notifications_sent))],
             else_=0).label("sms_totals"),
        case([(this_year_ft_billing.c.notification_type == 'letter',
               func.sum(this_year_ft_billing.c.notifications_sent))],
             else_=0).label("letter_totals"),
        AnnualBilling.free_sms_fragment_limit,
    ).join(Service.annual_billing).join(
        most_recent_annual_billing,
        and_(
            Service.id == most_recent_annual_billing.c.service_id,
            AnnualBilling.financial_year_start ==
            most_recent_annual_billing.c.year)).outerjoin(
                Service.organisation).outerjoin(
                    this_year_ft_billing,
                    Service.id == this_year_ft_billing.c.service_id).outerjoin(
                        User, Service.go_live_user_id == User.id).filter(
                            Service.count_as_live.is_(True),
                            Service.active.is_(True),
                            Service.restricted.is_(False),
                        ).group_by(
                            Service.id,
                            Organisation.name,
                            Organisation.organisation_type,
                            Service.name,
                            Service.consent_to_research,
                            Service.count_as_live,
                            Service.go_live_user_id,
                            User.name,
                            User.email_address,
                            User.mobile_number,
                            Service.go_live_at,
                            Service.volume_sms,
                            Service.volume_email,
                            Service.volume_letter,
                            this_year_ft_billing.c.notification_type,
                            AnnualBilling.free_sms_fragment_limit,
                        ).order_by(asc(Service.go_live_at)).all()
    # Collapse the per-notification-type rows into one dict per service,
    # summing the three totals columns across the rows.
    results = []
    for row in data:
        existing_service = next(
            (x for x in results if x['service_id'] == row.service_id), None)

        if existing_service is not None:
            existing_service["email_totals"] += row.email_totals
            existing_service["sms_totals"] += row.sms_totals
            existing_service["letter_totals"] += row.letter_totals
        else:
            results.append(row._asdict())
    return results
Example #49
0
    def render(self, session, keyword, argument, username, command, before,
               after, return_code, limit, reverse_order, **arguments):
        """Search the transaction (audit) log and return a TransactionList.

        Builds a query over Xtn filtered by command, username, time window,
        return code and keyword/argument details, limits it to at most
        `limit` rows (selected from the newest or oldest end depending on
        `reverse_order`), then re-orders the limited set for display.
        """

        q = session.query(Xtn)

        if command is not None:
            if command == 'all':
                # No filter
                pass
            elif command == 'rw':
                # Filter our command list
                q = q.filter(~Xtn.command.in_(_IGNORED_COMMANDS))
            else:
                q = q.filter_by(command=command)
        else:
            # filter out read only
            q = q.filter_by(is_readonly=False)

        if username is not None:
            username = username.lower().strip()
            # Match either the bare username or a principal like "user@realm".
            q = q.filter(or_(Xtn.username == username,
                             Xtn.username.like(username + '@%')))

        # TODO: These should be typed in input.xml as datetime and use
        # the standard broker methods for dealing with input validation.
        if before is not None:
            try:
                end = parse(before)
            except ValueError:
                raise ArgumentError("Unable to parse date string '%s'" %
                                    before)
            if not end.tzinfo:
                # Naive timestamps are interpreted as UTC.
                end = end.replace(tzinfo=tzutc())
            q = q.filter(Xtn.start_time < end)

        if after is not None:
            try:
                start = parse(after)
            except ValueError:
                raise ArgumentError("Unable to parse date string '%s'" % after)
            if not start.tzinfo:
                start = start.replace(tzinfo=tzutc())
            q = q.filter(Xtn.start_time > start)

        if return_code is not None:
            # NOTE(review): return_code 0 selects transactions with no XtnEnd
            # row — presumably still-running commands; confirm with callers.
            if return_code == 0:
                q = q.filter(~exists().where(Xtn.xtn_id == XtnEnd.xtn_id))
            else:
                q = q.join(XtnEnd)
                q = q.filter(XtnEnd.return_code == return_code)
                q = q.reset_joinpoint()

        if keyword is not None or argument is not None:
            q = q.join(XtnDetail)
            if keyword is not None:
                q = q.filter_by(value=keyword)
            if argument is not None:
                q = q.filter_by(name=argument)
            q = q.reset_joinpoint()

        # Set an order by when searching for the records, this controls
        # which records are selected by the limit.
        if reverse_order:
            q = q.order_by(asc(Xtn.start_time))  # N oldest records
        else:
            # default: N most recent
            q = q.order_by(desc(Xtn.start_time))

        # Limit the ordered results.
        if limit is None:
            limit = self.config.getint('broker', 'default_audit_rows')
        if limit > self.config.getint('broker', 'max_audit_rows'):
            raise ArgumentError("Cannot set the limit higher than %s" %
                                self.config.get('broker', 'max_audit_rows'))
        q = q.limit(limit)

        # Now apply the user preference to the limited output after
        # the outer joins are applied to pull in details and end information.
        # NOTE(review): Query.from_self() is deprecated in SQLAlchemy 1.4+;
        # a subquery/aliased select may be needed on upgrade.
        if reverse_order:
            q = q.from_self().order_by(desc(Xtn.start_time))
        else:
            q = q.from_self().order_by(asc(Xtn.start_time))

        return TransactionList(q.all())
Example #50
0
def get_collections_by_status(status,
                              relation_type=CollectionRelationType.Input,
                              time_period=None,
                              locking=False,
                              bulk_size=None,
                              to_json=False,
                              session=None):
    """
    Get collections by status, relation_type and time_period or raise a NoObject exception.

    :param status: The collection status (single value or list/tuple of values).
    :param relation_type: The relation_type of the collection to the transform.
    :param time_period: time period in seconds since last update.
    :param locking: Whether to only retrieve unlocked (Idle) collections.
    :param bulk_size: Maximum number of collections to return.
    :param to_json: return json format.
    :param session: The database session in use.

    :raises NoObject: If no collections are founded.

    :returns: list of Collections.
    """
    try:
        if not isinstance(status, (list, tuple)):
            status = [status]
        if len(status) == 1:
            # Duplicate a single status so the IN clause always has at
            # least two bind parameters (works around backend quirks with
            # one-element IN lists).
            status = [status[0], status[0]]

        query = session.query(models.Collection)\
                       .filter(models.Collection.status.in_(status))\
                       .filter(models.Collection.next_poll_at < datetime.datetime.utcnow())

        if relation_type is not None:
            query = query.filter(
                models.Collection.relation_type == relation_type)
        if time_period:
            # Only collections that have NOT been updated within the period.
            query = query.filter(
                models.Collection.updated_at < datetime.datetime.utcnow() -
                datetime.timedelta(seconds=time_period))
        if locking:
            query = query.filter(
                models.Collection.locking == CollectionLocking.Idle)

        query = query.order_by(asc(models.Collection.updated_at))
        if bulk_size:
            query = query.limit(bulk_size)

        return [t.to_dict_json() if to_json else t.to_dict()
                for t in query.all()]
    except sqlalchemy.orm.exc.NoResultFound as error:
        # NOTE(review): Query.all() returns [] rather than raising
        # NoResultFound, so this branch looks unreachable; kept for parity
        # with the sibling helpers — confirm before removing.
        raise exceptions.NoObject(
            'No collections with  status(%s), relation_type(%s), time_period(%s): %s'
            % (status, relation_type, time_period, error))
    except Exception:
        # Re-raise unexpected errors unchanged, preserving the original
        # traceback (bare `raise` instead of `raise error`).
        raise
Example #51
0
def _build_node_order_by(query):
    """Apply the default bare-metal node ordering to *query*.

    Orders by ascending memory, then CPU count, then local disk; successive
    order_by() calls append criteria in that priority order.
    """
    for column in (models.BareMetalNode.memory_mb,
                   models.BareMetalNode.cpus,
                   models.BareMetalNode.local_gb):
        query = query.order_by(asc(column))
    return query
Example #52
0
class Result(database.BASE):
    """Result Model.

    One crawled run directory and its artifacts (logs, args, commands,
    snapshots, assets).
    """
    __tablename__ = 'result'

    id = Column(Integer, primary_key=True)
    project_id = Column(Integer, ForeignKey('project.id'))
    # Filesystem path of the run; unique so each directory maps to one row.
    path_name = Column(String(512), unique=True)
    name = Column(String(512))
    is_unregistered = Column(Boolean(), default=False)
    # Child rows cascade-delete with the result; logs kept in insert order.
    logs = relationship('Log',
                        cascade='all, delete-orphan',
                        order_by=lambda: asc(Log.id))
    args = relationship('Argument',
                        uselist=False,
                        cascade='all, delete-orphan')
    commands = relationship('Command', cascade='all, delete-orphan')
    snapshots = relationship('Snapshot', cascade='all, delete-orphan')
    assets = relationship('Asset', cascade='all, delete-orphan')
    log_modified_at = Column(DateTime, default=None)
    crawlable = Column(Boolean(), default=True)
    # Bug fix: pass the callable, not its result. `datetime.now()` was being
    # evaluated once at class-definition time, stamping every row with the
    # module import time instead of its own creation time.
    created_at = Column(DateTime, default=datetime.datetime.now)
    # NOTE(review): no `onupdate=` is set, so updated_at is only populated
    # on insert — confirm whether updates should refresh it.
    updated_at = Column(DateTime, default=datetime.datetime.now)

    def __init__(self,
                 path_name=None,
                 name=None,
                 project_id=None,
                 log_modified_at=None,
                 crawlable=True):
        self.path_name = path_name
        self.name = name
        self.project_id = project_id
        self.log_modified_at = log_modified_at
        self.crawlable = crawlable

    def __repr__(self):
        return '<Result id: %r, path_name: %r />' % (self.id, self.path_name)

    @classmethod
    def create(cls,
               path_name=None,
               name=None,
               project_id=None,
               log_modified_at=None,
               crawlable=True):
        """Initialize an instance and save it to db."""
        result = cls(path_name, name, project_id, log_modified_at, crawlable)

        db.session.add(result)
        db.session.commit()

        # Populate the new row's artifacts immediately.
        crawl_result(result, True)

        return result

    def sampled_logs(self, logs_limit=-1):
        """Return up to `logs_limit` logs.

        If `logs_limit` is -1, this function will return all logs that belong
        to the result.
        """
        logs_count = len(self.logs)
        if logs_limit == -1 or logs_count <= logs_limit:
            return self.logs
        elif logs_limit == 0:
            return []
        elif logs_limit == 1:
            # With a budget of one, prefer the most recent log.
            return [self.logs[-1]]
        else:

            def get_sampled_log(idx):
                # always include the first and last element of `self.logs`
                return self.logs[idx * (logs_count - 1) // (logs_limit - 1)]

            return [get_sampled_log(i) for i in range(logs_limit)]

    def serialize_with_sampled_logs(self, logs_limit=-1):
        """serialize a result with up to `logs_limit` logs.

        If `logs_limit` is -1, this function will return a result with all its
        logs.
        """

        # NOTE(review): assumes log_modified_at is set; a None value would
        # raise AttributeError on isoformat() — confirm callers guarantee it.
        return {
            'id': self.id,
            'pathName': self.path_name,
            'name': self.name,
            'isUnregistered': self.is_unregistered,
            'logs': [log.serialize for log in self.sampled_logs(logs_limit)],
            'args': self.args.serialize if self.args is not None else [],
            'commands': [cmd.serialize for cmd in self.commands],
            'snapshots': [cmd.serialize for cmd in self.snapshots],
            'logModifiedAt': self.log_modified_at.isoformat()
        }

    @property
    def serialize(self):
        """serialize."""

        return self.serialize_with_sampled_logs(-1)
Example #53
0
def _query_with_filters(
    response: Response,
    query: Query,
    range: Optional[List[int]] = None,
    sort: Optional[List[str]] = None,
    filters: Optional[List[str]] = None,
) -> List:
    """Apply filter, sort and range parameters to a subscriptions query.

    :param response: response object; a Content-Range header is set on it
        when `range` is applied.
    :param query: base SubscriptionTable query to refine.
    :param range: [start, end) slice of the result set.
    :param sort: flat list of alternating field/direction values.
    :param filters: flat list of alternating field/value pairs.
    :returns: list of matching rows.
    :raises: via raise_status with BAD_REQUEST on an invalid customer_id
        UUID or invalid range parameters.
    """
    if filters is not None:
        # `filters` is a flat [field, value, field, value, ...] list.
        for pair in chunked(filters, 2):
            if pair and len(pair) == 2:
                field = pair[0]
                value = pair[1]
                value_as_bool = value.lower() in ("yes", "y", "ye", "true",
                                                  "1", "ja", "insync")
                if value is not None:
                    # Suffix conventions select comparison operators on the
                    # underlying column (e.g. "start_date_gte").
                    if field.endswith("_gt"):
                        query = query.filter(
                            SubscriptionTable.__dict__[field[:-3]] > value)
                    elif field.endswith("_gte"):
                        query = query.filter(
                            SubscriptionTable.__dict__[field[:-4]] >= value)
                    elif field.endswith("_lte"):
                        query = query.filter(
                            SubscriptionTable.__dict__[field[:-4]] <= value)
                    elif field.endswith("_lt"):
                        query = query.filter(
                            SubscriptionTable.__dict__[field[:-3]] < value)
                    elif field.endswith("_ne"):
                        query = query.filter(
                            SubscriptionTable.__dict__[field[:-3]] != value)
                    elif field == "insync":
                        query = query.filter(
                            SubscriptionTable.insync.is_(value_as_bool))
                    elif field in ("tags", "tag"):
                        # "tags": node/port selector form widgets;
                        # "tag": React table 7. Identical semantics, merged.
                        sub_values = value.split("-")
                        query = query.filter(
                            func.lower(ProductTable.tag).in_(
                                [s.lower() for s in sub_values]))
                    elif field == "product":
                        sub_values = value.split("-")
                        query = query.filter(
                            func.lower(ProductTable.name).in_(
                                [s.lower() for s in sub_values]))
                    elif field in ("status", "statuses"):
                        # "status": React table 7; "statuses": port
                        # subscriptions. Identical semantics, merged.
                        sub_values = value.split("-")
                        query = query.filter(
                            SubscriptionTable.status.in_(
                                [s.lower() for s in sub_values]))
                    elif field == "organisation":
                        try:
                            value_as_uuid = UUID(value)
                        except (ValueError, AttributeError):
                            # Bug fix: message was missing the f-prefix, so
                            # the offending value was never interpolated.
                            msg = f"Not a valid customer_id, must be a UUID: '{value}'"
                            logger.exception(msg)
                            raise_status(HTTPStatus.BAD_REQUEST, msg)
                        query = query.filter(
                            SubscriptionTable.customer_id == value_as_uuid)
                    elif field == "tsv":
                        logger.debug("Running full-text search query.",
                                     value=value)
                        query = query.search(value)
                    elif field in SubscriptionTable.__dict__:
                        # Fallback: case-insensitive substring match on any
                        # SubscriptionTable column.
                        query = query.filter(
                            cast(SubscriptionTable.__dict__[field],
                                 String).ilike("%" + value + "%"))

    if sort is not None and len(sort) >= 2:
        # `sort` is a flat [field, direction, field, direction, ...] list.
        for item in chunked(sort, 2):
            if item and len(item) == 2:
                # Product name/tag sort on ProductTable; everything else
                # sorts on SubscriptionTable columns.
                if item[0] in ["product", "tag"]:
                    field = "name" if item[0] == "product" else "tag"
                    sort_column = ProductTable.__dict__[field]
                else:
                    sort_column = SubscriptionTable.__dict__[item[0]]
                if item[1].upper() == "DESC":
                    query = query.order_by(expression.desc(sort_column))
                else:
                    query = query.order_by(expression.asc(sort_column))

    if range is not None and len(range) == 2:
        try:
            range_start = int(range[0])
            range_end = int(range[1])
            if range_start >= range_end:
                raise ValueError("range start must be lower than end")
        except (ValueError, AssertionError):
            msg = "Invalid range parameters"
            logger.exception(msg)
            raise_status(HTTPStatus.BAD_REQUEST, msg)
        # Count before slicing so the header reports the full total.
        total = query.count()
        query = query.slice(range_start, range_end)

        response.headers[
            "Content-Range"] = f"subscriptions {range_start}-{range_end}/{total}"

    return query.all()
Example #54
0
    def _find_existing(
        self,
        *,
        guild_xid: int,
        channel_xid: int,
        author_xid: int,
        friends: list[int],
        seats: int,
        format: int,
    ) -> Optional[Game]:
        """Find a pending game the author and all friends can join together.

        Searches pending, non-deleted games in the given guild/channel with
        matching seats and format that still have room for the author plus
        their friends, skipping any game where an existing player and a
        joiner have blocked each other. Returns the least recently updated
        candidate, or None.
        """
        required_seats = 1 + len(friends)
        # Inner query: candidate games annotated with their current player
        # count via a window function partitioned by game.
        inner = (
            select([
                Game,
                User.xid.label("users_xid"),
                count(
                    User.xid).over(partition_by=Game.id).label("player_count"),
            ], ).join(User, isouter=True).filter(  # type: ignore
                and_(
                    Game.guild_xid == guild_xid,
                    Game.channel_xid == channel_xid,
                    Game.seats == seats,
                    Game.format == format,
                    Game.status == GameStatus.PENDING.value,
                    Game.deleted_at.is_(None),
                ),
            ).group_by(Game,
                       User.xid).order_by(asc(Game.updated_at)).alias("inner"))
        # Outer query: keep only games that are empty or have enough free
        # seats for the whole joining party.
        outer = (
            DatabaseSession.query(Game)
            # Note: select_entity_from() is deprecated and may need to be replaced
            #       with an altenative method eventually. See: https://docs.sqlalchemy.org
            #       /en/latest/orm/query.html#sqlalchemy.orm.Query.select_entity_from
            .select_entity_from(inner).filter(
                or_(
                    column("player_count") == 0,
                    and_(
                        column("player_count") > 0,
                        column("player_count") <= seats - required_seats,
                    ),
                ), ))
        joiners = [author_xid, *friends]
        # Users that any member of the joining party has blocked.
        xids_blocked_by_joiners = [
            row.blocked_user_xid
            for row in DatabaseSession.query(Block).filter(
                Block.user_xid.in_(joiners))
        ]

        game: Game
        for game in outer.all():
            players = [player.xid for player in game.players]
            if any(xid in players for xid in xids_blocked_by_joiners):
                continue  # a joiner has blocked one of the players

            xids_blocked_by_players = [
                row.blocked_user_xid
                for row in DatabaseSession.query(Block).filter(
                    Block.user_xid.in_(players), )
            ]
            if any(xid in joiners for xid in xids_blocked_by_players):
                continue  # a player has blocked one of the joiners

            return game

        return None
Example #55
0
    def get(self):
        """Render the admin user list with paging, sorting and filtering.

        Query-string arguments: sepa (page size), p (current page),
        by (sort column), order (1 = descending, 0 = ascending),
        gid (group filter; 0 = users in no group), search (username/email
        substring), ajax (render the partial template instead).
        """

        page_size = self.get_argument_int('sepa', USER_PS)
        cur_page = self.get_argument_int('p', 1)
        by = self.get_argument('by', 'id')
        order = self.get_argument_int('order', 1)
        gid = self.get_argument_int('gid', -1)
        search = self.get_argument('search', False)

        UL = self.db.query(User)

        # Map the requested sort key to a User column; unknown keys fall
        # back to sorting by id.
        if by == 'id':
            by_obj = User.id
        elif by == 'date_joined':
            by_obj = User.date_joined
        elif by == 'last_login':
            by_obj = User.last_login
        elif by == 'last_active':
            by_obj = User.last_active
        elif by == 'username':
            by_obj = User.username
        elif by == 'is_locked':
            by_obj = User.is_locked
        elif by == 'description':
            # Sorts by id but restricts the list to users that have a
            # description set.
            by_obj = User.id
            UL = UL.filter(User.description != None)
        else:
            by_obj = User.id

        by_exp = desc(by_obj) if order else asc(by_obj)

        start = (cur_page - 1) * page_size
        stop = start + page_size

        if search:
            # Substring match against username or email.
            search = '%' + search + '%'
            UL = UL.filter(
                or_(User.username.like(search), User.email.like(search)))

        GROUP = None
        if gid == 0:
            # gid == 0 lists users that belong to no group at all.
            UL = UL.filter(~User.groups.any())
        elif gid > 0:
            GROUP = self.db.query(Group).get(gid)
            if GROUP:
                UL = UL.filter(User.groups.contains(GROUP))

        UL = UL.order_by(by_exp)

        # Count before slicing so pagination reflects the full result set.
        total = UL.count()
        UL = UL.slice(start, stop)

        page_html = pagination(self.request.uri,
                               total,
                               page_size,
                               cur_page,
                               sepa_range=[20, 50, 100])

        def sort_by(by):
            # Build a URL that sorts by `by`, toggling the direction.
            return self.urlupdate({'by': by, 'order': 1 if order == 0 else 0})

        d = {
            'title': self.trans(_('Admin User Management')),
            'sort_by': sort_by,
            'SORT_BY': by,
            'ORDER': order,
            'urlupdate': self.urlupdate,
            'USER_LIST': UL,
            'PAGE_HTML': page_html,
            'TOTAL_USER': total,
            'PAGE_SIZE': page_size,
            'GROUP': GROUP,
            'GID': gid,
            'GROUP_LIST': self.db.query(Group).all()
        }

        if self.get_argument('ajax', None):
            self.render('admin/user/index.ajax.html', **d)
        else:
            self.render('admin/user/index.html', **d)
Example #56
0
def get_requests(request_id=None,
                 workload_id=None,
                 with_detail=False,
                 with_metadata=False,
                 with_request=False,
                 with_transform=False,
                 with_processing=False,
                 to_json=False,
                 session=None):
    """
    Get a request or raise a NoObject exception.

    :param workload_id: The workload id of the request.
    :param to_json: return json format.

    :param session: The database session in use.

    :raises NoObject: If no request is founded.

    :returns: Request.
    """
    try:
        if with_request or not (with_transform or with_processing
                                or with_detail or with_metadata):
            if with_metadata:
                query = session.query(models.Request)\
                               .with_hint(models.Request, "INDEX(REQUESTS REQUESTS_SCOPE_NAME_IDX)", 'oracle')

                if request_id:
                    query = query.filter(
                        models.Request.request_id == request_id)
                if workload_id:
                    query = query.filter(
                        models.Request.workload_id == workload_id)

                tmp = query.all()
                rets = []
                if tmp:
                    for t in tmp:
                        if to_json:
                            t_dict = t.to_dict_json()
                        else:
                            t_dict = t.to_dict()
                        rets.append(t_dict)
                return rets
            else:
                query = session.query(models.Request.request_id,
                                      models.Request.scope,
                                      models.Request.name,
                                      models.Request.requester,
                                      models.Request.request_type,
                                      models.Request.username,
                                      models.Request.userdn,
                                      models.Request.transform_tag,
                                      models.Request.workload_id,
                                      models.Request.priority,
                                      models.Request.status,
                                      models.Request.substatus,
                                      models.Request.locking,
                                      models.Request.created_at,
                                      models.Request.updated_at,
                                      models.Request.next_poll_at,
                                      models.Request.accessed_at,
                                      models.Request.expired_at,
                                      models.Request.errors)\
                               .with_hint(models.Request, "INDEX(REQUESTS REQUESTS_SCOPE_NAME_IDX)", 'oracle')

                if request_id:
                    query = query.filter(
                        models.Request.request_id == request_id)
                if workload_id:
                    query = query.filter(
                        models.Request.workload_id == workload_id)

                tmp = query.all()
                rets = []
                if tmp:
                    for t in tmp:
                        t2 = dict(zip(t.keys(), t))
                        rets.append(t2)
                return rets
        elif with_transform:
            subquery1 = session.query(
                models.Collection.coll_id, models.Collection.transform_id,
                models.Collection.scope.label("input_coll_scope"),
                models.Collection.name.label("input_coll_name"),
                models.Collection.status.label("input_coll_status"),
                models.Collection.bytes.label("input_coll_bytes"),
                models.Collection.total_files.label("input_total_files"),
                models.Collection.processed_files.label(
                    "input_processed_files"),
                models.Collection.processing_files.label(
                    "input_processing_files")).filter(
                        models.Collection.relation_type == 0)
            subquery1 = subquery1.subquery()

            subquery2 = session.query(
                models.Collection.coll_id, models.Collection.transform_id,
                models.Collection.scope.label("output_coll_scope"),
                models.Collection.name.label("output_coll_name"),
                models.Collection.status.label("output_coll_status"),
                models.Collection.bytes.label("output_coll_bytes"),
                models.Collection.total_files.label("output_total_files"),
                models.Collection.processed_files.label(
                    "output_processed_files"),
                models.Collection.processing_files.label(
                    "output_processing_files")).filter(
                        models.Collection.relation_type == 1)
            subquery2 = subquery2.subquery()
            if True:
                query = session.query(
                    models.Request.request_id, models.Request.scope,
                    models.Request.name, models.Request.requester,
                    models.Request.request_type, models.Request.username,
                    models.Request.userdn, models.Request.transform_tag,
                    models.Request.workload_id, models.Request.priority,
                    models.Request.status, models.Request.substatus,
                    models.Request.locking, models.Request.created_at,
                    models.Request.updated_at, models.Request.next_poll_at,
                    models.Request.accessed_at, models.Request.expired_at,
                    models.Request.errors, models.Transform.transform_id,
                    models.Transform.transform_type,
                    models.Transform.workload_id.label(
                        "transform_workload_id"),
                    models.Transform.status.label("transform_status"),
                    models.Transform.created_at.label("transform_created_at"),
                    models.Transform.updated_at.label("transform_updated_at"),
                    models.Transform.finished_at.label(
                        "transform_finished_at"), subquery1.c.input_coll_scope,
                    subquery1.c.input_coll_name, subquery1.c.input_coll_status,
                    subquery1.c.input_coll_bytes,
                    subquery1.c.input_total_files,
                    subquery1.c.input_processed_files,
                    subquery1.c.input_processing_files,
                    subquery2.c.output_coll_scope,
                    subquery2.c.output_coll_name,
                    subquery2.c.output_coll_status,
                    subquery2.c.output_coll_bytes,
                    subquery2.c.output_total_files,
                    subquery2.c.output_processed_files,
                    subquery2.c.output_processing_files)

            # Narrow the result set to one request and/or workload when given.
            # NOTE(review): a falsy id (e.g. 0) silently skips the filter —
            # presumably ids start at 1; confirm.
            if request_id:
                query = query.filter(models.Request.request_id == request_id)
            if workload_id:
                query = query.filter(models.Request.workload_id == workload_id)

            # LEFT OUTER JOIN Request -> Transform on request_id, then attach
            # the input (subquery1) and output (subquery2) collection columns
            # by transform_id, so requests without transforms/collections
            # still produce rows (with NULL detail columns).
            query = query.outerjoin(
                models.Transform,
                and_(models.Request.request_id == models.Transform.request_id))
            query = query.outerjoin(
                subquery1,
                and_(
                    subquery1.c.transform_id == models.Transform.transform_id))
            query = query.outerjoin(
                subquery2,
                and_(
                    subquery2.c.transform_id == models.Transform.transform_id))
            query = query.order_by(asc(models.Request.request_id))

            tmp = query.all()
            rets = []
            if tmp:
                for t in tmp:
                    # Convert the keyed row tuple into a plain dict
                    # (equivalent of the older `dict(t)` row conversion).
                    t2 = dict(zip(t.keys(), t))

                    # If the row carries request_metadata containing a
                    # 'workflow' object, re-attach the serialized workflow
                    # data stored in processing_metadata['workflow_data'] to
                    # that object before returning it to the caller.
                    if 'request_metadata' in t2 and t2[
                            'request_metadata'] and 'workflow' in t2[
                                'request_metadata']:
                        workflow = t2['request_metadata']['workflow']
                        workflow_data = None
                        if 'processing_metadata' in t2 and t2[
                                'processing_metadata'] and 'workflow_data' in t2[
                                    'processing_metadata']:
                            workflow_data = t2['processing_metadata'][
                                'workflow_data']
                        if workflow is not None and workflow_data is not None:
                            workflow.metadata = workflow_data
                            t2['request_metadata']['workflow'] = workflow

                    rets.append(t2)
            return rets
        elif with_processing:
            # Per-processing columns (status/timestamps, labeled to avoid
            # clashes with Request/Transform columns), wrapped in a subquery
            # so they can be outer-joined onto Request/Transform below.
            subquery = session.query(
                models.Processing.processing_id,
                models.Processing.transform_id, models.Processing.workload_id,
                models.Processing.status.label("processing_status"),
                models.Processing.created_at.label("processing_created_at"),
                models.Processing.updated_at.label("processing_updated_at"),
                models.Processing.finished_at.label("processing_finished_at"))
            subquery = subquery.subquery()

            # NOTE(review): `if True:` is dead scaffolding (likely preserves
            # the indentation of a former condition); the body always runs.
            if True:
                query = session.query(
                    models.Request.request_id, models.Request.scope,
                    models.Request.name, models.Request.requester,
                    models.Request.request_type, models.Request.username,
                    models.Request.userdn, models.Request.transform_tag,
                    models.Request.workload_id, models.Request.priority,
                    models.Request.status, models.Request.substatus,
                    models.Request.locking, models.Request.created_at,
                    models.Request.updated_at, models.Request.next_poll_at,
                    models.Request.accessed_at, models.Request.expired_at,
                    models.Request.errors, models.Transform.transform_id,
                    models.Transform.workload_id.label(
                        "transform_workload_id"),
                    models.Transform.status.label("transform_status"),
                    subquery.c.processing_id,
                    subquery.c.workload_id.label("processing_workload_id"),
                    subquery.c.processing_status,
                    subquery.c.processing_created_at,
                    subquery.c.processing_updated_at,
                    subquery.c.processing_finished_at)

            # Narrow the result set to one request and/or workload when given.
            # NOTE(review): a falsy id (e.g. 0) silently skips the filter.
            if request_id:
                query = query.filter(models.Request.request_id == request_id)
            if workload_id:
                query = query.filter(models.Request.workload_id == workload_id)

            # LEFT OUTER JOIN Request -> Transform -> Processing so requests
            # without transforms/processings still yield rows (with NULLs).
            query = query.outerjoin(
                models.Transform,
                and_(models.Request.request_id == models.Transform.request_id))
            query = query.outerjoin(
                subquery,
                and_(subquery.c.transform_id == models.Transform.transform_id))
            query = query.order_by(asc(models.Request.request_id))

            tmp = query.all()
            rets = []
            if tmp:
                for t in tmp:
                    # Convert the keyed row tuple into a plain dict
                    # (equivalent of the older `dict(t)` row conversion).
                    t2 = dict(zip(t.keys(), t))

                    # Re-attach workflow_data from processing_metadata to the
                    # workflow object in request_metadata, if present.
                    # NOTE(review): this branch does not select any
                    # request_metadata/processing_metadata columns, so the
                    # condition below can never be true here — this loop body
                    # is copy-pasted from the metadata branch; confirm.
                    if 'request_metadata' in t2 and t2[
                            'request_metadata'] and 'workflow' in t2[
                                'request_metadata']:
                        workflow = t2['request_metadata']['workflow']
                        workflow_data = None
                        if 'processing_metadata' in t2 and t2[
                                'processing_metadata'] and 'workflow_data' in t2[
                                    'processing_metadata']:
                            workflow_data = t2['processing_metadata'][
                                'workflow_data']
                        if workflow is not None and workflow_data is not None:
                            workflow.metadata = workflow_data
                            t2['request_metadata']['workflow'] = workflow

                    rets.append(t2)
            return rets
        elif with_detail or with_metadata:
            # Input collections (relation_type == 0), one row per transform,
            # with columns labeled input_* for the joined result.
            subquery1 = session.query(
                models.Collection.coll_id, models.Collection.transform_id,
                models.Collection.scope.label("input_coll_scope"),
                models.Collection.name.label("input_coll_name"),
                models.Collection.status.label("input_coll_status"),
                models.Collection.bytes.label("input_coll_bytes"),
                models.Collection.total_files.label("input_total_files"),
                models.Collection.processed_files.label(
                    "input_processed_files"),
                models.Collection.processing_files.label(
                    "input_processing_files")).filter(
                        models.Collection.relation_type == 0)
            subquery1 = subquery1.subquery()

            # Output collections (relation_type == 1), labeled output_*.
            subquery2 = session.query(
                models.Collection.coll_id, models.Collection.transform_id,
                models.Collection.scope.label("output_coll_scope"),
                models.Collection.name.label("output_coll_name"),
                models.Collection.status.label("output_coll_status"),
                models.Collection.bytes.label("output_coll_bytes"),
                models.Collection.total_files.label("output_total_files"),
                models.Collection.processed_files.label(
                    "output_processed_files"),
                models.Collection.processing_files.label(
                    "output_processing_files")).filter(
                        models.Collection.relation_type == 1)
            subquery2 = subquery2.subquery()

            # The two sub-cases differ only in whether the (potentially large)
            # request_metadata / processing_metadata columns are selected.
            if with_metadata:
                query = session.query(
                    models.Request.request_id, models.Request.scope,
                    models.Request.name, models.Request.requester,
                    models.Request.request_type, models.Request.username,
                    models.Request.userdn, models.Request.transform_tag,
                    models.Request.workload_id, models.Request.priority,
                    models.Request.status, models.Request.substatus,
                    models.Request.locking, models.Request.created_at,
                    models.Request.updated_at, models.Request.next_poll_at,
                    models.Request.accessed_at, models.Request.expired_at,
                    models.Request.errors,
                    models.Request._request_metadata.label('request_metadata'),
                    models.Request._processing_metadata.label(
                        'processing_metadata'), models.Transform.transform_id,
                    models.Transform.transform_type,
                    models.Transform.workload_id.label(
                        "transform_workload_id"),
                    models.Transform.status.label("transform_status"),
                    models.Transform.created_at.label("transform_created_at"),
                    models.Transform.updated_at.label("transform_updated_at"),
                    models.Transform.finished_at.label(
                        "transform_finished_at"), subquery1.c.input_coll_scope,
                    subquery1.c.input_coll_name, subquery1.c.input_coll_status,
                    subquery1.c.input_coll_bytes,
                    subquery1.c.input_total_files,
                    subquery1.c.input_processed_files,
                    subquery1.c.input_processing_files,
                    subquery2.c.output_coll_scope,
                    subquery2.c.output_coll_name,
                    subquery2.c.output_coll_status,
                    subquery2.c.output_coll_bytes,
                    subquery2.c.output_total_files,
                    subquery2.c.output_processed_files,
                    subquery2.c.output_processing_files)
            else:
                query = session.query(
                    models.Request.request_id, models.Request.scope,
                    models.Request.name, models.Request.requester,
                    models.Request.request_type, models.Request.username,
                    models.Request.userdn, models.Request.transform_tag,
                    models.Request.workload_id, models.Request.priority,
                    models.Request.status, models.Request.substatus,
                    models.Request.locking, models.Request.created_at,
                    models.Request.updated_at, models.Request.next_poll_at,
                    models.Request.accessed_at, models.Request.expired_at,
                    models.Request.errors, models.Transform.transform_id,
                    models.Transform.transform_type,
                    models.Transform.workload_id.label(
                        "transform_workload_id"),
                    models.Transform.status.label("transform_status"),
                    models.Transform.created_at.label("transform_created_at"),
                    models.Transform.updated_at.label("transform_updated_at"),
                    models.Transform.finished_at.label(
                        "transform_finished_at"), subquery1.c.input_coll_scope,
                    subquery1.c.input_coll_name, subquery1.c.input_coll_status,
                    subquery1.c.input_coll_bytes,
                    subquery1.c.input_total_files,
                    subquery1.c.input_processed_files,
                    subquery1.c.input_processing_files,
                    subquery2.c.output_coll_scope,
                    subquery2.c.output_coll_name,
                    subquery2.c.output_coll_status,
                    subquery2.c.output_coll_bytes,
                    subquery2.c.output_total_files,
                    subquery2.c.output_processed_files,
                    subquery2.c.output_processing_files)

            # Narrow the result set to one request and/or workload when given.
            # NOTE(review): a falsy id (e.g. 0) silently skips the filter.
            if request_id:
                query = query.filter(models.Request.request_id == request_id)
            if workload_id:
                query = query.filter(models.Request.workload_id == workload_id)

            # LEFT OUTER JOIN Request -> Transform, then the input/output
            # collection subqueries by transform_id; requests without
            # transforms/collections still yield rows (with NULLs).
            query = query.outerjoin(
                models.Transform,
                and_(models.Request.request_id == models.Transform.request_id))
            query = query.outerjoin(
                subquery1,
                and_(
                    subquery1.c.transform_id == models.Transform.transform_id))
            query = query.outerjoin(
                subquery2,
                and_(
                    subquery2.c.transform_id == models.Transform.transform_id))
            query = query.order_by(asc(models.Request.request_id))

            tmp = query.all()
            rets = []
            if tmp:
                for t in tmp:
                    # Convert the keyed row tuple into a plain dict
                    # (equivalent of the older `dict(t)` row conversion).
                    t2 = dict(zip(t.keys(), t))

                    # Re-attach the serialized workflow data stored under
                    # processing_metadata['workflow_data'] to the workflow
                    # object in request_metadata. Only effective in the
                    # with_metadata sub-case: the detail-only query above does
                    # not select the metadata columns, so the keys are absent.
                    if 'request_metadata' in t2 and t2[
                            'request_metadata'] and 'workflow' in t2[
                                'request_metadata']:
                        workflow = t2['request_metadata']['workflow']
                        workflow_data = None
                        if 'processing_metadata' in t2 and t2[
                                'processing_metadata'] and 'workflow_data' in t2[
                                    'processing_metadata']:
                            workflow_data = t2['processing_metadata'][
                                'workflow_data']
                        if workflow is not None and workflow_data is not None:
                            workflow.metadata = workflow_data
                            t2['request_metadata']['workflow'] = workflow

                    rets.append(t2)
            return rets
    except sqlalchemy.orm.exc.NoResultFound as error:
        # Translate the ORM-level "no row" error into the project's NoObject
        # exception, preserving the original error text.
        # NOTE(review): Query.all() (used in the branches above) returns []
        # rather than raising NoResultFound; this handler presumably guards a
        # .one() call earlier in the enclosing try block — confirm.
        raise exceptions.NoObject(
            'request workload_id: %s cannot be found: %s' %
            (workload_id, error))