def load_user_roles(user, permissions):
  """Load all user roles for user

  Args:
      user (Person): Person object
      permissions (dict): dict where the permissions will be stored

  Returns:
      source_contexts_to_rolenames (dict): Role names for contexts
  """
  # Add permissions from all DB-managed roles
  user_roles = db.session.query(UserRole)\
      .options(
          orm.undefer_group('UserRole_complete'),
          orm.undefer_group('Role_complete'),
          orm.joinedload('role'))\
      .filter(UserRole.person_id == user.id)\
      .order_by(UserRole.updated_at.desc())\
      .all()

  source_contexts_to_rolenames = {}
  for user_role in user_roles:
    source_contexts_to_rolenames.setdefault(
        user_role.context_id, list()).append(user_role.role.name)
    if isinstance(user_role.role.permissions, dict):
      collect_permissions(
          user_role.role.permissions, user_role.context_id, permissions)
def go():
    l = q.options(
        undefer_group('primary'),
        undefer_group('secondary')).all()
    o2 = l[2]
    eq_(o2.opened, 1)
    eq_(o2.userident, 7)
    eq_(o2.description, 'order 3')
def load_user_roles(user, permissions):
  """Load all user roles for user

  Args:
      user (Person): Person object
      permissions (dict): dict where the permissions will be stored

  Returns:
      source_contexts_to_rolenames (dict): Role names for contexts
  """
  # Add permissions from all DB-managed roles
  user_roles = db.session.query(UserRole)\
      .options(
          orm.undefer_group('UserRole_complete'),
          orm.undefer_group('Role_complete'),
          orm.joinedload('role'))\
      .filter(UserRole.person_id == user.id)\
      .order_by(UserRole.updated_at.desc())\
      .all()

  source_contexts_to_rolenames = {}
  for user_role in user_roles:
    source_contexts_to_rolenames.setdefault(
        user_role.context_id, list()).append(user_role.role.name)
    if isinstance(user_role.role.permissions, dict):
      collect_permissions(user_role.role.permissions,
                          user_role.context_id,
                          permissions)
def go():
    result = q.options(
        undefer_group('primary'),
        undefer_group('secondary')).all()
    o2 = result[2]
    eq_(o2.opened, 1)
    eq_(o2.userident, 7)
    eq_(o2.description, 'order 3')
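# The two test snippets above load the deferred column groups 'primary' and
# 'secondary'. For context, here is a minimal sketch (model, column names and
# session setup are assumptions, not taken from any example in this listing)
# of how such groups are declared with deferred(..., group=...) so that
# undefer_group() has something to undefer.
from sqlalchemy import Column, Integer, Text, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import deferred, sessionmaker, undefer_group

Base = declarative_base()


class Order(Base):
    __tablename__ = 'orders'
    id = Column(Integer, primary_key=True)
    userident = Column(Integer)
    # deferred columns in the same group are loaded together on demand
    opened = deferred(Column(Integer), group='primary')
    description = deferred(Column(Text), group='primary')


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

# undefer_group('primary') pulls opened/description in the main SELECT,
# avoiding one lazy load per deferred attribute afterwards.
orders = session.query(Order).options(undefer_group('primary')).all()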
def handle_assessment_post(sender, objects=None, sources=None):
  # pylint: disable=unused-argument
  """Apply custom attribute definitions and map people roles
  when generating Assessment with template"""
  db.session.flush()

  audit_ids = []
  template_ids = []
  snapshot_ids = []

  for src in sources:
    snapshot_ids.append(src.get('object', {}).get('id'))
    audit_ids.append(src.get('audit', {}).get('id'))
    template_ids.append(src.get('template', {}).get('id'))

  snapshot_cache = {
      s.id: s for s in Snapshot.query.options(
          orm.undefer_group('Snapshot_complete'),
          orm.Load(Snapshot).joinedload("revision").undefer_group(
              'Revision_complete')).filter(Snapshot.id.in_(snapshot_ids))
  }
  template_cache = {
      t.id: t for t in all_models.AssessmentTemplate.query.options(
          orm.undefer_group('AssessmentTemplate_complete'),
      ).filter(all_models.AssessmentTemplate.id.in_(template_ids))
  }
  audit_cache = {
      a.id: a for a in all_models.Audit.query.options(
          orm.undefer_group('Audit_complete'),
      ).filter(all_models.Audit.id.in_(audit_ids))
  }

  for assessment, src in izip(objects, sources):
    snapshot_dict = src.get("object") or {}
    common.map_objects(assessment, snapshot_dict)
    common.map_objects(assessment, src.get("audit"))
    snapshot = snapshot_cache.get(snapshot_dict.get('id'))

    if not src.get("_generated") and not snapshot:
      continue

    template = template_cache.get(src.get("template", {}).get("id"))
    audit = audit_cache[src["audit"]["id"]]
    relate_assignees(assessment, snapshot, template, audit)
    relate_ca(assessment, template)
    assessment.title = u'{} assessment for {}'.format(
        snapshot.revision.content['title'],
        audit.title,
    )

    if not template:
      continue

    if template.test_plan_procedure:
      assessment.test_plan = snapshot.revision.content['test_plan']
    else:
      assessment.test_plan = template.procedure_description

    if template.template_object_type:
      assessment.assessment_type = template.template_object_type
def person(id):
    try:
        person = Person.query\
            .filter_by(id=id)\
            .options(undefer_group('extra'), undefer_group('profile'))\
            .one()
    except NoResultFound, e:
        return render_template('not-found.html'), 404
def pubroot(request, info, session):
    date = datetime.date.today()
    # If it's the early hours of the morning, it's more useful for us
    # to consider it still to be yesterday.
    if datetime.datetime.now().hour < 4:
        date = date - datetime.timedelta(1)
    thisweek_start = date - datetime.timedelta(date.weekday())
    thisweek_end = thisweek_start + datetime.timedelta(6)
    lastweek_start = thisweek_start - datetime.timedelta(7)
    lastweek_end = thisweek_end - datetime.timedelta(7)
    weekbefore_start = lastweek_start - datetime.timedelta(7)
    weekbefore_end = lastweek_end - datetime.timedelta(7)

    weeks = [
        ("Current week", thisweek_start, thisweek_end,
         business_totals(session, thisweek_start, thisweek_end)),
        ("Last week", lastweek_start, lastweek_end,
         business_totals(session, lastweek_start, lastweek_end)),
        ("The week before last", weekbefore_start, weekbefore_end,
         business_totals(session, weekbefore_start, weekbefore_end)),
    ]

    currentsession = Session.current(session)

    barsummary = (
        session.query(StockLine)
        .filter(StockLine.location == "Bar")
        .order_by(StockLine.dept_id, StockLine.name)
        .options(joinedload_all("stockonsale.stocktype.unit"))
        .options(undefer_group("qtys"))
        .all()
    )

    stillage = (
        session.query(StockAnnotation)
        .join(StockItem)
        .outerjoin(StockLine)
        .filter(
            tuple_(StockAnnotation.text, StockAnnotation.time).in_(
                select(
                    [StockAnnotation.text, func.max(StockAnnotation.time)],
                    StockAnnotation.atype == "location",
                ).group_by(StockAnnotation.text)
            )
        )
        .filter(StockItem.finished == None)
        .order_by(StockLine.name != null(), StockAnnotation.time)
        .options(joinedload_all("stockitem.stocktype.unit"))
        .options(joinedload_all("stockitem.stockline"))
        .options(undefer_group("qtys"))
        .all()
    )

    return (
        "index.html",
        {"currentsession": currentsession,
         "barsummary": barsummary,
         "stillage": stillage,
         "weeks": weeks},
    )
def drawlines(self, h):
    sl = td.s.query(StockLine).\
        filter(StockLine.location.in_(self.locations)).\
        filter(StockLine.capacity == None).\
        order_by(StockLine.name).\
        options(joinedload('stockonsale')).\
        options(joinedload('stockonsale.stocktype')).\
        options(undefer_group('qtys')).\
        all()
    f = ui.tableformatter("pl l L r rp")
    header = f("Line", "StockID", "Stock", "Used", "Remaining")

    def fl(line):
        if line.stockonsale:
            sos = line.stockonsale[0]
            return (line.name, sos.id, sos.stocktype.format(),
                    sos.used, sos.remaining)
        return (line.name, "", "", "", "")

    ml = [header] + [f(*fl(line)) for line in sl]
    y = 0
    for l in ml:
        for line in l.display(self.w):
            self.win.addstr(y, 0, line)
            y = y + 1
        if y >= h:
            break
def get(self, course_id):
    course = Courses.query.get_or_404(course_id)
    require(READ, course)
    # Get all questions for this course, default order is most recent first
    post = Posts(courses_id=course_id)
    question = PostsForQuestions(post=post)
    base_query = PostsForQuestions.query. \
        options(joinedload("criteria").joinedload("criterion")). \
        options(joinedload("selfevaltype")). \
        options(undefer_group('counts')). \
        join(Posts). \
        options(contains_eager('post').joinedload("user").
                joinedload('usertypeforsystem')). \
        options(contains_eager('post').joinedload("files")). \
        filter(Posts.courses_id == course_id). \
        order_by(desc(Posts.created))
    if allow(MANAGE, question):
        questions = base_query.all()
    else:
        now = datetime.datetime.utcnow()
        questions = base_query. \
            filter(or_(PostsForQuestions.answer_start.is_(None),
                       now >= PostsForQuestions.answer_start)).\
            all()
    restrict_users = not allow(MANAGE, question)

    on_question_list_get.send(
        self,
        event_name=on_question_list_get.name,
        user=current_user,
        course_id=course_id)

    return {
        "questions": marshal(
            questions,
            dataformat.get_posts_for_questions(restrict_users,
                                               include_answers=False))
    }
def meeting_dialogue(id):
    glossary_js = generate_glossary_js()
    try:
        meeting = Meeting.query.filter_by(id=id)\
            .options(undefer_group('extra')).one()
    except NoResultFound, e:
        abort(404)
def department(request, info, session, departmentid, as_spreadsheet=False):
    d = session\
        .query(Department)\
        .get(int(departmentid))
    if d is None:
        raise Http404

    include_finished = request.GET.get("show_finished", "off") == "on"

    items = session\
        .query(StockItem)\
        .join(StockType)\
        .filter(StockType.department == d)\
        .order_by(desc(StockItem.id))\
        .options(joinedload_all('stocktype.unit'),
                 undefer_group('qtys'),
                 joinedload('stockline'),
                 joinedload('delivery'),
                 joinedload('finishcode'))

    if not include_finished:
        items = items.filter(StockItem.finished == None)

    if as_spreadsheet:
        return spreadsheets.stock(
            session, items.all(), tillname=info.tillname,
            filename="{}-dept{}-stock.ods".format(
                info.tillname, departmentid))

    pager = Pager(request, items, preserve_query_parameters=["show_finished"])

    return ('department.html',
            {'tillobject': d,
             'department': d,
             'pager': pager,
             'include_finished': include_finished})
def stocksearch(request, info, session):
    form = StockForm(request.GET)
    pager = None

    if form.is_valid() and form.is_filled_in():
        q = session\
            .query(StockItem)\
            .join(StockType)\
            .order_by(StockItem.id)\
            .options(joinedload_all('stocktype.unit'),
                     joinedload('stockline'),
                     joinedload('delivery'),
                     undefer_group('qtys'))
        q = form.filter(q)
        if not form.cleaned_data['include_finished']:
            q = q.filter(StockItem.finished == None)
        pager = Pager(request, q, preserve_query_parameters=[
            "manufacturer", "name", "include_finished"])

    return ('stocksearch.html',
            {'nav': [("Stock", info.reverse("tillweb-stocksearch"))],
             'form': form,
             'stocklist': pager.items() if pager else [],
             'pager': pager})
def drawlines(self, h):
    sl = td.s.query(StockLine).\
        filter(StockLine.location.in_(self.locations)).\
        order_by(StockLine.name).\
        options(joinedload('stockonsale')).\
        options(joinedload('stockonsale.stocktype')).\
        options(undefer_group('qtys')).\
        all()
    f = ui.tableformatter("pl l L r rp")
    header = f("Line", "StockID", "Stock", "Used", "Remaining")

    def fl(line):
        if line.linetype == "regular" and line.stockonsale:
            sos = line.stockonsale[0]
            return (line.name, sos.id, sos.stocktype.format(),
                    "{} {}".format(sos.used, sos.stocktype.unit_id),
                    "{} {}".format(sos.remaining, sos.stocktype.unit_id))
        elif line.linetype == "continuous":
            return (line.name, "", line.stocktype.format(), "",
                    "{} {}".format(line.stocktype.remaining,
                                   line.stocktype.unit_id))
        elif line.linetype == "display":
            return (line.name, "", line.stocktype.format(), "",
                    "{}+{} {}".format(line.ondisplay, line.instock,
                                      line.stocktype.unit_id))
        return (line.name, "", "", "", "")

    ml = [header] + [f(*fl(line)) for line in sl]
    y = 0
    for l in ml:
        for line in l.display(self.w):
            self.win.addstr(y, 0, line)
            y = y + 1
        if y >= h:
            break
def relate_ca(assessment, template):
  """Generates custom attribute list and relates it to Assessment objects

  Args:
      assessment (model instance): Assessment model
      template: Assessment Template instance (may be None)
  """
  if not template:
    return

  ca_definitions = all_models.CustomAttributeDefinition.query.options(
      orm.undefer_group('CustomAttributeDefinition_complete'),
  ).filter_by(
      definition_id=template.id,
      definition_type="assessment_template",
  ).order_by(
      all_models.CustomAttributeDefinition.id
  )
  for definition in ca_definitions:
    cad = all_models.CustomAttributeDefinition(
        title=definition.title,
        definition=assessment,
        attribute_type=definition.attribute_type,
        multi_choice_options=definition.multi_choice_options,
        multi_choice_mandatory=definition.multi_choice_mandatory,
        mandatory=definition.mandatory,
        helptext=definition.helptext,
        placeholder=definition.placeholder,
    )
    db.session.add(cad)
def _get_snapshot_data(self, assessment, relationships):
  """Get snapshot data for the current assessment.

  Args:
      relationships: List of all relationships related to the current
          assessment.
  """
  relationship_ids = self._filter_rels(relationships, "Snapshot")
  with benchmark("Get assessment snapshot relationships"):
    snapshots = models.Snapshot.query.options(
        orm.undefer_group("Snapshot_complete"),
        orm.joinedload('revision'),
    ).filter(models.Snapshot.id.in_(relationship_ids)).all()
  with benchmark("Set assessment snapshot relationships"):
    data = []
    for snapshot in snapshots:
      data.append({
          "archived": assessment.audit.archived,
          "revision": snapshot.revision.log_json(),
          "related_sources": [],
          "parent": {
              "context_id": assessment.context_id,
              "href": "/api/audits/{}".format(assessment.audit_id),
              "type": "Audit",
              "id": assessment.audit_id,
          },
          "child_type": snapshot.child_type,
          "child_id": snapshot.child_id,
          "related_destinations": [],
          "id": snapshot.id,
          "revisions": [],
          "revision_id": snapshot.revision_id,
          "type": snapshot.type,
      })
  return data
def export_xlsx(self, file):
    """ Exports all votes according to the code book. """
    mapper = ColumnMapper()
    workbook = Workbook(file, {'default_date_format': 'dd.mm.yyyy'})
    workbook.add_worksheet('CITATION')
    worksheet = workbook.add_worksheet('DATA')
    worksheet.write_row(0, 0, mapper.columns.values())

    query = self.query().options(undefer_group("dataset"))
    query = query.order_by(None).order_by(SwissVote.bfs_number)

    row = 0
    for vote in query:
        row += 1
        for column_, value in enumerate(mapper.get_values(vote)):
            if value is None:
                pass
            elif isinstance(value, str):
                worksheet.write_string(row, column_, value)
            elif isinstance(value, date):
                worksheet.write_datetime(row, column_, value)
            elif isinstance(value, int) or isinstance(value, Decimal):
                worksheet.write_number(row, column_, value)
            elif isinstance(value, NumericRange):
                worksheet.write_string(
                    row, column_, f'{value.lower}-{value.upper}')

    workbook.close()
def department(request, info, session, departmentid, as_spreadsheet=False):
    d = session\
        .query(Department)\
        .get(int(departmentid))
    if d is None:
        raise Http404

    include_finished = request.GET.get("show_finished", "off") == "on"

    items = session\
        .query(StockItem)\
        .join(StockType)\
        .filter(StockType.department == d)\
        .order_by(desc(StockItem.id))\
        .options(joinedload_all('stocktype.unit'),
                 undefer_group('qtys'),
                 joinedload('stockline'),
                 joinedload('delivery'),
                 joinedload('finishcode'))

    if not include_finished:
        items = items.filter(StockItem.finished == None)

    if as_spreadsheet:
        return spreadsheets.stock(session, items.all(),
                                  tillname=info['tillname'],
                                  filename="{}-dept{}-stock.ods".format(
                                      info['tillname'], departmentid))

    pager = Pager(request, items)

    return ('department.html', {
        'department': d,
        'pager': pager,
        'include_finished': include_finished,
    })
def update_cycle_task_group_parent_state(objs):
  """Update cycle status for sent cycle task group"""
  if not objs:
    return
  cycles_dict = {}
  cycle_groups_dict = collections.defaultdict(set)
  group_ids = []
  for obj in objs:
    cycle_groups_dict[obj.cycle].add(obj)
    group_ids.append(obj.id)
    cycles_dict[obj.cycle.id] = obj.cycle
  # collect all groups that are in the same cycles as the groups from the
  # sent list
  groups = models.CycleTaskGroup.query.filter(
      models.CycleTaskGroup.cycle_id.in_([c.id for c in cycle_groups_dict]),
  ).options(
      orm.undefer_group("CycleTaskGroup_complete")
  ).distinct().with_for_update().all()
  for group in groups:
    cycle_groups_dict[cycles_dict[group.cycle_id]].add(group)

  updated_cycles = []
  for cycle in cycles_dict.itervalues():
    old_status = cycle.status
    _update_parent_status(cycle, {g.status for g in cycle_groups_dict[cycle]})
    cycle.start_date, cycle.end_date = _get_date_range(
        cycle_groups_dict[cycle])
    cycle.next_due_date = _get_min_next_due_date(cycle_groups_dict[cycle])
    if old_status != cycle.status:
      updated_cycles.append(Signals.StatusChangeSignalObjectContext(
          instance=cycle, old_status=old_status, new_status=cycle.status))
  if updated_cycles:
    Signals.status_change.send(models.Cycle, objs=updated_cycles)
def get(self, node_id, type):
    privilege_type = self.get_argument('type', None)
    limit = int(self.get_argument('limit', 0))
    index = int(self.get_argument('index', 1))
    node_id = int(node_id)
    node = TreeNode(node_id)

    privilege_types = PrivilegeType.query \
        .options(undefer_group("audit")) \
        .order_by(PrivilegeType.ctime.desc())

    if type == "user":
        node_ids = [n.id for n in node.parents]
        node_ids.append(node.id)
        privilege_types = privilege_types \
            .filter(PrivilegeType.node_id.in_(node_ids)) \
            .filter_by(username=self.user.username)
    elif type == "token":
        node_ids = [n.id for n in node.offspring_treenode]
        privilege_types = privilege_types \
            .filter(PrivilegeType.node_id.in_(node_ids)) \
            .filter(PrivilegeType.username.like(
                settings.AUTH_TOKEN_PREFIX + "%"))

    if privilege_type:
        privilege_types = privilege_types.filter_by(
            privilege_type=privilege_type)

    if limit:
        count, privilege_types = privilege_types.paginator(limit, index)
        data = privilege_types.all()
        self.write_json(
            format_paginator_resposne(data, count, limit, index))
    else:
        self.write_data(privilege_types.all())
def update_cycle_task_group_parent_state(objs):
  """Update cycle status for sent cycle task group"""
  objs = [obj for obj in objs or [] if obj.cycle.workflow.kind != "Backlog"]
  if not objs:
    return
  cycles_dict = {}
  cycle_groups_dict = collections.defaultdict(set)
  group_ids = []
  for obj in objs:
    cycle_groups_dict[obj.cycle].add(obj)
    group_ids.append(obj.id)
    cycles_dict[obj.cycle.id] = obj.cycle
  # collect all groups that are in the same cycles as the groups from the
  # sent list
  groups = models.CycleTaskGroup.query.filter(
      models.CycleTaskGroup.cycle_id.in_([c.id for c in cycle_groups_dict]),
  ).options(
      orm.undefer_group("CycleTaskGroup_complete")
  ).distinct().with_for_update().all()
  for group in groups:
    cycle_groups_dict[cycles_dict[group.cycle_id]].add(group)

  updated_cycles = []
  for cycle in cycles_dict.itervalues():
    old_status = cycle.status
    _update_parent_status(cycle, {g.status for g in cycle_groups_dict[cycle]})
    cycle.start_date, cycle.end_date = _get_date_range(
        cycle_groups_dict[cycle])
    cycle.next_due_date = _get_min_next_due_date(cycle_groups_dict[cycle])
    if old_status != cycle.status:
      updated_cycles.append(Signals.StatusChangeSignalObjectContext(
          instance=cycle, old_status=old_status, new_status=cycle.status))
  if updated_cycles:
    Signals.status_change.send(models.Cycle, objs=updated_cycles)
def request_find_password(user_login):
    user = get_user(user_login, orm.undefer_group('profile'))
    if user.email:
        token, expired_at = generate_token(user)
        url = url_for('.change_password_form', user_login=user.login,
                      token=token, _external=True)
        expired_at = datetime.datetime.utcfromtimestamp(expired_at)
        msg = Message('[LangDev.org] Change your password: '******'''
You can change your password through the following link:

{url}

But the above link will be expired at {expired_at} UTC.
''').format(url=url, expired_at=expired_at)
        current_app.mail.send(msg)
        email = hide_email(user.email)
        result = Result(user=user, email=email)
        status_code = 201
    else:
        result = Result(user=user, error='Has no email address')
        status_code = 403
    response = render('user/request_find_password', result, **result)
    response.status_code = status_code
    return response
def find_users(emails):
  """Find or generate users.

  If an Integration Server is specified, users not found in the DB are
  generated with the Creator role.
  """
  if not settings.INTEGRATION_SERVICE_URL:
    return Person.query.filter(Person.email.in_(emails)).options(
        orm.undefer_group('Person_complete')).all()

  # Verify emails
  usernames = [
      email.split('@')[0] for email in emails
      if is_authorized_domain(email) and not is_external_app_user_email(email)
  ]
  service = client.PersonClient()
  ldaps = service.search_persons(usernames)

  authorized_domain = getattr(settings, "AUTHORIZED_DOMAIN", "")
  verified_emails = {
      '%s@%s' % (ldap['username'], authorized_domain) for ldap in ldaps
  }

  # Find users in db
  users = Person.query.filter(Person.email.in_(emails)).all()
  found_emails = {user.email for user in users}

  # Create new users
  new_emails = verified_emails - found_emails
  new_usernames = [email.split('@')[0] for email in new_emails]
  new_users = [('%s@%s' % (ldap['username'], authorized_domain),
                '%s %s' % (ldap['firstName'], ldap['lastName']))
               for ldap in ldaps if ldap['username'] in new_usernames]

  for email, name in new_users:
    user = create_user(email, name=name,
                       modified_by_id=get_current_user_id())
    users.append(user)

  # bulk create people
  if new_users:
    log_event(db.session)
    db.session.commit()

  creator_role_granted = False
  # Grant Creator role to all users
  for user in users:
    if user.system_wide_role == SystemWideRoles.NO_ACCESS:
      add_creator_role(user)
      creator_role_granted = True

  # bulk create people roles
  if creator_role_granted:
    log_event(db.session)
    db.session.commit()

  return users
def drawlines(self, h):
    sl = (
        td.s.query(StockLine)
        .filter(StockLine.location.in_(self.locations))
        .filter(StockLine.capacity == None)
        .order_by(StockLine.name)
        .options(joinedload("stockonsale"))
        .options(joinedload("stockonsale.stocktype"))
        .options(undefer_group("qtys"))
        .all()
    )
    f = ui.tableformatter("pl l L r rp")
    header = f("Line", "StockID", "Stock", "Used", "Remaining")

    def fl(line):
        if line.stockonsale:
            sos = line.stockonsale[0]
            return (line.name, sos.id, sos.stocktype.format(),
                    sos.used, sos.remaining)
        return (line.name, "", "", "", "")

    ml = [header] + [f(*fl(line)) for line in sl]
    y = 0
    for l in ml:
        for line in l.display(self.w):
            self.addstr(y, 0, line)
            y = y + 1
        if y >= h:
            break
def _process(self):
    page = request.args.get('page', '1')
    order_columns = {'start_dt': Event.start_dt,
                     'title': db.func.lower(Event.title)}
    direction = 'desc' if request.args.get('desc', '1') == '1' else 'asc'
    order_column = order_columns[request.args.get('order', 'start_dt')]
    query = (Event.query.with_parent(self.category)
             .options(joinedload('series'),
                      undefer_group('series'),
                      load_only('id', 'category_id', 'created_dt', 'end_dt',
                                'protection_mode', 'start_dt', 'title',
                                'type_', 'series_pos', 'series_count',
                                'visibility'))
             .order_by(getattr(order_column, direction)())
             .order_by(Event.id))
    if page == 'all':
        events = query.paginate(show_all=True)
    else:
        events = query.paginate(page=int(page))
    return WPCategoryManagement.render_template(
        'management/content.html', self.category, 'content',
        subcategories=self.category.children,
        events=events, page=page,
        order_column=request.args.get('order', 'start_dt'),
        direction=direction)
def relate_ca(assessment, template):
  """Generates custom attribute list and relates it to Assessment objects

  Args:
      assessment (model instance): Assessment model
      template: Assessment Template instance (may be None)
  """
  if not template:
    return None

  ca_definitions = all_models.CustomAttributeDefinition.query.options(
      orm.undefer_group('CustomAttributeDefinition_complete'),
  ).filter_by(
      definition_id=template.id,
      definition_type="assessment_template",
  ).order_by(
      all_models.CustomAttributeDefinition.id
  )
  created_cads = []
  for definition in ca_definitions:
    cad = all_models.CustomAttributeDefinition(
        title=definition.title,
        definition=assessment,
        attribute_type=definition.attribute_type,
        multi_choice_options=definition.multi_choice_options,
        multi_choice_mandatory=definition.multi_choice_mandatory,
        mandatory=definition.mandatory,
        helptext=definition.helptext,
        placeholder=definition.placeholder,
    )
    db.session.add(cad)
    created_cads.append(cad)
  return created_cads
def eager_query(cls):
  from sqlalchemy import orm

  query = super(Event, cls).eager_query().order_by(cls.id.desc())
  return query.options(
      orm.undefer_group('Revision_complete'),
      orm.subqueryload('revisions'))
def export_csv(self, file):
    """ Exports all votes according to the code book. """
    mapper = ColumnMapper()
    csv = writer(file)
    csv.writerow(mapper.columns.values())

    query = self.query().options(undefer_group("dataset"))
    query = query.order_by(None).order_by(SwissVote.bfs_number)

    for vote in query:
        row = []
        for value in mapper.get_values(vote):
            if value is None:
                row.append('.')
            elif isinstance(value, str):
                row.append(value)
            elif isinstance(value, date):
                row.append(f'{value:%d.%m.%Y}')
            elif isinstance(value, int):
                row.append(str(value))
            elif isinstance(value, NumericRange):
                row.append(f'{value.lower}-{value.upper}')
            elif isinstance(value, Decimal):
                row.append(
                    f'{value:f}'.replace('.', ',').rstrip('0').rstrip(','))
        csv.writerow(row)
def get(self, sn):
    server = VM.query.options(undefer_group('details'),
                              joinedload(VM.vm_host)).filter_by(sn=sn)\
        .first_or_404(msg="sn %s doesn't exists" % sn)
    data = self.json_decode(server.searchable_info) \
        if server.searchable_info \
        else {"hostname": server.hostname, "sn": server.sn,
              "type": server.type}
    if server.type == ServerType.vm:
        if server.vm_host:
            data.update({"vm_host": {"hostname": server.vm_host.hostname,
                                     "sn": server.vm_host.sn},
                         "vm_name": server.vm_name})
    elif server.type == ServerType.vmh:
        data.update({"vms": [{"hostname": vm.hostname, "sn": vm.sn}
                             for vm in server.vms]})
    data.update({"status": server.validity})

    # Node info
    ns = NodeServers.query.filter_by(server_id=server.id).all()
    data.update(
        on_nodes=[{'id': i.node_id,
                   'path': zk.without_expire.get_node_dir(i.node_id)}
                  for i in ns]
    )
    return self.write_json(data)
class RHDisplayCategoryEventsBase(RHDisplayCategoryBase):
    """Base class for display pages displaying an event list."""

    _category_query_options = (joinedload('children').load_only('id', 'title',
                                                                'protection_mode'),
                               undefer('attachment_count'),
                               undefer('has_events'))
    _event_query_options = (joinedload('person_links'),
                            joinedload('series'),
                            undefer_group('series'),
                            joinedload('label'),
                            load_only('id', 'category_id', 'created_dt',
                                      'start_dt', 'end_dt', 'timezone',
                                      'protection_mode', 'title', 'type_',
                                      'series_pos', 'series_count',
                                      'own_address', 'own_venue_id',
                                      'own_venue_name', 'label_id',
                                      'label_message', 'visibility'))

    def _process_args(self):
        RHDisplayCategoryBase._process_args(self)
        self.now = now_utc(exact=False).astimezone(
            self.category.display_tzinfo)

    def format_event_date(self, event):
        day_month = 'dd MMM'
        tzinfo = self.category.display_tzinfo
        start_dt = event.start_dt.astimezone(tzinfo)
        end_dt = event.end_dt.astimezone(tzinfo)
        if start_dt.year != end_dt.year:
            return '{} - {}'.format(
                to_unicode(format_date(start_dt, timezone=tzinfo)),
                to_unicode(format_date(end_dt, timezone=tzinfo)))
        elif (start_dt.month != end_dt.month) or (start_dt.day != end_dt.day):
            return '{} - {}'.format(
                to_unicode(format_date(start_dt, day_month, timezone=tzinfo)),
                to_unicode(format_date(end_dt, day_month, timezone=tzinfo)))
        else:
            return to_unicode(format_date(start_dt, day_month,
                                          timezone=tzinfo))

    def group_by_month(self, events):
        def _format_tuple(x):
            (year, month), events = x
            return {'name': format_date(date(year, month, 1),
                                        format='MMMM yyyy'),
                    'events': list(events),
                    'is_current': (year == self.now.year and
                                   month == self.now.month)}

        def _key(event):
            start_dt = event.start_dt.astimezone(self.category.tzinfo)
            return start_dt.year, start_dt.month

        months = groupby(events, key=_key)
        return map(_format_tuple, months)

    def happening_now(self, event):
        return event.start_dt <= self.now < event.end_dt

    def is_recent(self, dt):
        return dt > self.now - relativedelta(weeks=1)
def _load_audits(audit_ids):
  """Returns audits for given IDs."""
  return {
      a.id: a for a in all_models.Audit.query.options(
          orm.undefer_group('Audit_complete'),
      ).filter(all_models.Audit.id.in_(audit_ids))
  }
def _load_templates(template_ids):
  """Returns assessment templates for given IDs."""
  return {
      t.id: t for t in all_models.AssessmentTemplate.query.options(
          orm.undefer_group('AssessmentTemplate_complete'),
      ).filter(all_models.AssessmentTemplate.id.in_(template_ids))
  }
def _get_near_flights(flight, location, time, max_distance=1000):
    # calculate max_distance in degrees at the earth's sphere (approximate,
    # cutoff at +-85 deg)
    max_distance_deg = (max_distance / METERS_PER_DEGREE) / \
        math.cos(math.radians(min(abs(location.latitude), 85)))

    # the distance filter is geometric only, so max_distance must be given in
    # SRID units (which is degrees for WGS84). The filter will be more and
    # more inaccurate further to the poles. But it's a lot faster than the
    # geographic filter...
    result = Flight.query() \
        .options(undefer_group('path')) \
        .filter(Flight.id != flight.id) \
        .filter(Flight.takeoff_time <= time) \
        .filter(Flight.landing_time >= time) \
        .filter(func.ST_DWithin(Flight.locations,
                                location.to_wkt_element(),
                                max_distance_deg))

    result = _patch_query(result)

    flights = []
    for flight in result:
        # find point closest to given time
        closest = min(range(len(flight.timestamps)),
                      key=lambda x: abs(
                          (flight.timestamps[x] - time).total_seconds()))

        trace = to_shape(flight.locations).coords

        if closest == 0 or closest == len(trace) - 1:
            point = trace[closest]
        else:
            # interpolate flight trace between two fixes
            next_smaller = closest if flight.timestamps[closest] < time \
                else closest - 1
            next_larger = closest if flight.timestamps[closest] > time \
                else closest + 1
            dx = (time - flight.timestamps[next_smaller]).total_seconds() / \
                (flight.timestamps[next_larger] -
                 flight.timestamps[next_smaller]).total_seconds()

            point_next = trace[next_larger]
            point_prev = trace[next_smaller]
            point = [point_prev[0] + (point_next[0] - point_prev[0]) * dx,
                     point_prev[1] + (point_next[1] - point_prev[1]) * dx]

        point_distance = location.geographic_distance(
            Location(latitude=point[1], longitude=point[0]))

        if point_distance > max_distance:
            continue

        flights.append(flight)

        # limit to 5 flights
        if len(flights) == 5:
            break

    return flights
def find_users(emails):
  """Find or generate users.

  If an Integration Server is specified, users not found in the DB are
  generated with the Creator role.
  """
  # pylint: disable=too-many-locals
  if not settings.INTEGRATION_SERVICE_URL:
    return Person.query.filter(Person.email.in_(emails)).options(
        orm.undefer_group('Person_complete')).all()

  # Verify emails
  usernames = [email.split('@')[0] for email in emails
               if is_authorized_domain(email) and
               not is_external_app_user_email(email)]

  service = client.PersonClient()
  ldaps = service.search_persons(usernames)

  authorized_domain = getattr(settings, "AUTHORIZED_DOMAIN", "")
  verified_emails = {'%s@%s' % (ldap['username'], authorized_domain)
                     for ldap in ldaps}

  # Find users in db
  users = Person.query.filter(Person.email.in_(emails)).all()
  found_emails = {user.email for user in users}

  # Create new users
  new_emails = verified_emails - found_emails
  new_usernames = [email.split('@')[0] for email in new_emails]
  new_users = [('%s@%s' % (ldap['username'], authorized_domain),
                '%s %s' % (ldap['firstName'], ldap['lastName']))
               for ldap in ldaps if ldap['username'] in new_usernames]

  for email, name in new_users:
    user = create_user(email, name=name,
                       modified_by_id=get_current_user_id())
    users.append(user)

  # bulk create people
  if new_users:
    log_event(db.session)
    db.session.commit()

  creator_role_granted = False
  # Grant Creator role to all users
  for user in users:
    if user.system_wide_role == SystemWideRoles.NO_ACCESS:
      add_creator_role(user)
      creator_role_granted = True

  # bulk create people roles
  if creator_role_granted:
    log_event(db.session)
    db.session.commit()

  return users
def _load_snapshots(snapshot_ids):
  """Returns snapshots for given IDs."""
  return {
      s.id: s for s in all_models.Snapshot.query.options(
          orm.undefer_group('Snapshot_complete'),
          orm.Load(all_models.Snapshot).joinedload('revision').undefer_group(
              'Revision_complete')
      ).filter(all_models.Snapshot.id.in_(snapshot_ids))
  }
def post(self):
    data = self.params
    sn, hostname, raw_type = data['sn'], data['hostname'], data['type']
    try:
        server_type = ServerType(raw_type)
    except ValueError:
        raise ParamsInvalidError("server type %s invalid" % raw_type)
    if not sn or not hostname:
        raise ParamsInvalidError("sn or hostname not allowed to be empty")

    server = RawServer.query.options(undefer_group('details'))\
        .filter_by(sn=sn).first()
    if server is None:
        server = RawServer(sn=sn, hostname=hostname, type=server_type,
                           validity=ServerStatus.online)
    else:
        server.hostname = hostname
        server.type = server_type
        # received assets update from vm after first boot up, make it online
        if server.validity == ServerStatus.creating:
            server.validity = ServerStatus.online

    server.idc = data['idc']
    server.MACs = filter(lambda x: not x.is_reserved,
                         [MAC.from_string(s) for s in data.pop('mac', [])])
    data['macs'] = [str(mac) for mac in server.MACs]

    ip_set = [IPv4(s) for s in data.pop('ips', [])]
    server.private_ipv4s = filter(
        lambda x: x.ip.is_private and not x.ip.is_loopback, ip_set)
    server.public_ipv4s = filter(lambda x: not x.ip.is_private, ip_set)
    data['private_ip'] = [str(ip) for ip in server.private_ipv4s]
    data['public_ip'] = [str(ip) for ip in server.public_ipv4s]

    _check = lambda x: len(x) != 0 if isinstance(x, collections.Sized) \
        else x is not None
    server.searchable_info = self.json_encode(
        {k: v for k, v in data.iteritems() if _check(v)})

    vm_macs = data.pop('vms')
    if isinstance(vm_macs, collections.MutableSequence):
        vms = VM.polymorphic_query.with_entities(
            VM.id, VM.MACs, VM.vm_name, VM.vm_host_id).all()
        for vm_name, macs in [(vm['vm_name'], vm['mac']) for vm in vm_macs]:
            macs_set = set(MAC.from_string(s) for s in macs)
            try:
                vm = ifilter(lambda v: set(v.MACs) & macs_set,
                             (v for v in vms if v.MACs)).next()
            except StopIteration:
                continue
            if vm.vm_host_id != server.id or vm.vm_name != vm_name:
                VM.query.filter_by(id=vm.id).update(
                    {VM.vm_name: vm_name, VM.vm_host_id: server.id})

    server.save()
    self.write_json({"message": "asset info update succeed"})
def get_avatar(request):
    """
    Returns the current User object
    """
    log.info("Get avatar")
    login = request.unauthenticated_userid
    if login is not None:
        log.info(" + Returning the user")
        query = request.dbsession.query(User).options(undefer_group('edit'))
        user = query.filter_by(login=login).first()
        return user
def get_avatar(request):
    """
    Returns the current User object
    """
    log.info("Get avatar")
    login = unauthenticated_userid(request)
    if login is not None:
        log.info(" + Returning the user")
        query = request.dbsession.query(User).options(undefer_group('edit'))
        user = query.filter_by(login=login).first()
        return user
def stockline(request, info, session, stocklineid):
    try:
        s = (
            session.query(StockLine)
            .filter_by(id=int(stocklineid))
            .options(joinedload_all("stockonsale.stocktype.unit"))
            .options(undefer_group("qtys"))
            .one()
        )
    except NoResultFound:
        raise Http404
    return ("stockline.html", {"stockline": s})
def related_objects(self, id):
  """Get data for assessment related_objects page."""
  # id name is used as a kw argument and can't be changed here
  # pylint: disable=invalid-name,redefined-builtin
  with benchmark("check assessment permissions"):
    assessment = models.Assessment.query.options(
        orm.undefer_group("Assessment_complete")).get(id)
    if not permissions.is_allowed_read_for(assessment):
      raise Forbidden()
  with benchmark("Get assessment related_objects data"):
    data = self._get_related_data(assessment)
  with benchmark("Make response"):
    return self.json_success_response(data)
def delivery(request, info, session, deliveryid):
    try:
        d = (
            session.query(Delivery)
            .filter_by(id=int(deliveryid))
            .options(joinedload_all("items.stocktype.unit"))
            .options(joinedload_all("items.stockline"))
            .options(undefer_group("qtys"))
            .one()
        )
    except NoResultFound:
        raise Http404
    return ("delivery.html", {"delivery": d})
def related_objects(self, id):
  """Get data for assessment related_objects page."""
  # id name is used as a kw argument and can't be changed here
  # pylint: disable=invalid-name,redefined-builtin
  with benchmark("check assessment permissions"):
    assessment = models.Assessment.query.options(
        orm.undefer_group("Assessment_complete")
    ).get(id)
    if not permissions.is_allowed_read_for(assessment):
      raise Forbidden()
  with benchmark("Get assessment related_objects data"):
    data = self._get_related_data(assessment)
  with benchmark("Make response"):
    return self.json_success_response(data)
def _get_item(self, klass, key, object_name):
    assert self.dbsession is not None, "Missing dbsession"
    dbsession = self.dbsession()
    obj = dbsession.query(klass)\
        .options(undefer_group('edit'))\
        .filter(getattr(klass, self.id_key) == key)\
        .scalar()
    if obj is None:
        raise KeyError
    obj.__name__ = object_name
    return obj
def stock(request, info, session, stockid):
    s = session\
        .query(StockItem)\
        .options(joinedload_all('stocktype.department'),
                 joinedload_all('stocktype.stockline_log.stockline'),
                 joinedload_all('delivery.supplier'),
                 joinedload_all('stockunit.unit'),
                 joinedload_all('annotations.type'),
                 subqueryload_all('out.transline.transaction'),
                 undefer_group('qtys'))\
        .get(int(stockid))
    if not s:
        raise Http404
    return ('stock.html', {'stock': s})
class RHDisplayCategoryEventsBase(RHDisplayCategoryBase):
    """Base class for display pages displaying an event list."""

    _category_query_options = (joinedload('children').load_only('id', 'title',
                                                                'protection_mode'),
                               undefer('attachment_count'),
                               undefer('has_events'))
    _event_query_options = (joinedload('person_links'),
                            joinedload('series'),
                            undefer_group('series'),
                            joinedload('label'),
                            load_only('id', 'category_id', 'created_dt',
                                      'start_dt', 'end_dt', 'timezone',
                                      'protection_mode', 'title', 'type_',
                                      'series_pos', 'series_count',
                                      'own_address', 'own_venue_id',
                                      'own_venue_name', 'label_id',
                                      'label_message', 'visibility'))

    def _process_args(self):
        RHDisplayCategoryBase._process_args(self)
        self.now = now_utc(exact=False).astimezone(
            self.category.display_tzinfo)
def stocktype(request, info, session, stocktype_id):
    try:
        s = session.query(StockType).filter_by(id=int(stocktype_id)).one()
    except NoResultFound:
        raise Http404
    include_finished = request.GET.get("show_finished", "off") == "on"
    items = (
        session.query(StockItem)
        .filter(StockItem.stocktype == s)
        .options(undefer_group("qtys"))
        .order_by(desc(StockItem.id))
    )
    if not include_finished:
        items = items.filter(StockItem.finished == None)
    items = items.all()
    return ("stocktype.html",
            {"stocktype": s,
             "items": items,
             "include_finished": include_finished})
def get_custom_attribute_definitions(cls):
  """Get all applicable CA definitions (even ones without a value yet)."""
  from ggrc.models.custom_attribute_definition import \
      CustomAttributeDefinition as cad

  if cls.__name__ == "Assessment":
    query = cad.query.filter(or_(
        cad.definition_type == utils.underscore_from_camelcase(cls.__name__),
        cad.definition_type == "assessment_template",
    ))
  else:
    query = cad.query.filter(
        cad.definition_type == utils.underscore_from_camelcase(cls.__name__))
  return query.options(
      orm.undefer_group('CustomAttributeDefinition_complete'))
def stock(request, info, session, stockid):
    try:
        s = (
            session.query(StockItem)
            .filter_by(id=int(stockid))
            .options(joinedload_all("stocktype.department"))
            .options(joinedload_all("stocktype.stockline_log.stockline"))
            .options(joinedload_all("delivery.supplier"))
            .options(joinedload_all("stockunit.unit"))
            .options(joinedload_all("annotations.type"))
            .options(subqueryload_all("out.transline.transaction"))
            .options(undefer_group("qtys"))
            .one()
        )
    except NoResultFound:
        raise Http404
    return ("stock.html", {"stock": s})
def stock(request, info, session, stockid):
    s = session\
        .query(StockItem)\
        .options(joinedload_all('stocktype.department'),
                 joinedload_all('stocktype.stockline_log.stockline'),
                 joinedload_all('delivery.supplier'),
                 joinedload_all('stockunit.unit'),
                 joinedload_all('annotations.type'),
                 subqueryload_all('out.transline.transaction'),
                 undefer_group('qtys'))\
        .get(int(stockid))
    if not s:
        raise Http404
    return ('stock.html', {
        'tillobject': s,
        'stock': s,
    })
def stocksearch(request, info, session):
    form = StockForm(request.GET)
    result = []
    q = (
        session.query(StockItem)
        .join(StockType)
        .order_by(StockItem.id)
        .options(joinedload_all("stocktype.unit"))
        .options(joinedload("stockline"))
        .options(undefer_group("qtys"))
    )
    if form.is_valid():
        if form.is_filled_in():
            q = form.filter(q)
            if not form.cleaned_data["include_finished"]:
                q = q.filter(StockItem.finished == None)
            result = q.all()
    return ("stocksearch.html", {"form": form, "stocklist": result})
def department(request, info, session, departmentid):
    d = session.query(Department).get(int(departmentid))
    if d is None:
        raise Http404
    include_finished = request.GET.get("show_finished", "off") == "on"
    items = (
        session.query(StockItem)
        .join(StockType)
        .filter(StockType.department == d)
        .order_by(desc(StockItem.id))
        .options(joinedload_all("stocktype.unit"))
        .options(undefer_group("qtys"))
        .options(joinedload("stockline"))
        .options(joinedload("finishcode"))
    )
    if not include_finished:
        items = items.filter(StockItem.finished == None)
    items = items.all()
    return ("department.html",
            {"department": d,
             "items": items,
             "include_finished": include_finished})
def _process(self):
    page = request.args.get('page', '1')
    order_columns = {'start_dt': Event.start_dt,
                     'title': db.func.lower(Event.title)}
    direction = 'desc' if request.args.get('desc', '1') == '1' else 'asc'
    order_column = order_columns[request.args.get('order', 'start_dt')]
    query = (Event.query.with_parent(self.category)
             .options(joinedload('series'),
                      undefer_group('series'),
                      load_only('id', 'category_id', 'created_dt', 'end_dt',
                                'protection_mode', 'start_dt', 'title',
                                'type_', 'series_pos', 'series_count'))
             .order_by(getattr(order_column, direction)())
             .order_by(Event.id))
    if page == 'all':
        events = query.paginate(show_all=True)
    else:
        events = query.paginate(page=int(page))
    return WPCategoryManagement.render_template(
        'management/content.html', self.category, 'content',
        subcategories=self.category.children,
        events=events, page=page,
        order_column=request.args.get('order', 'start_dt'),
        direction=direction)
def _get_people_data(self, relationships):
  """Get assessment people data.

  This function returns data for people related to the assessment without
  ACL roles. The data does not include the relationships since those are
  sent in a different block.
  """
  relationship_ids = self._filter_rels(relationships, "Person")
  with benchmark("Get assessment snapshot relationships"):
    people = models.Person.query.options(
        orm.undefer_group("Person_complete"),
        orm.joinedload('language'),
        orm.subqueryload('object_people'),
        orm.subqueryload('_custom_attribute_values').undefer_group(
            'CustomAttributeValue_complete'),
    ).filter(models.Person.id.in_(relationship_ids)).all()
  return [person.log_json() for person in people]
def _get_snapshot_data(self, assessment, relationships):
  """Get snapshot data for the current assessment.

  Args:
      relationships: List of all relationships related to the current
          assessment.
  """
  relationship_ids = self._filter_rels(relationships, "Snapshot")
  if not relationship_ids:
    return []
  with benchmark("Get assessment snapshot relationships"):
    snapshots = models.Snapshot.query.options(
        orm.undefer_group("Snapshot_complete"),
        orm.joinedload('revision'),
    ).filter(models.Snapshot.id.in_(relationship_ids)).all()
  with benchmark("Set assessment snapshot relationships"):
    data = []
    for snapshot in snapshots:
      data.append({
          "archived": assessment.audit.archived,
          "revision": snapshot.revision.log_json(),
          "related_sources": [],
          "parent": {
              "context_id": assessment.context_id,
              "href": "/api/audits/{}".format(assessment.audit_id),
              "type": "Audit",
              "id": assessment.audit_id,
          },
          "child_type": snapshot.child_type,
          "child_id": snapshot.child_id,
          "related_destinations": [],
          "id": snapshot.id,
          "revisions": [],
          "revision_id": snapshot.revision_id,
          "type": snapshot.type,
          "original_object_deleted": snapshot.original_object_deleted,
      })
  return data
def update_cycle_task_tree(objs):
  """Update cycle task group status for sent cycle task"""
  objs = [o for o in objs or [] if o.cycle.workflow.kind != "Backlog"]
  if not objs:
    return
  groups_dict = {i.cycle_task_group_id: i.cycle_task_group for i in objs}
  group_task_dict = collections.defaultdict(set)
  # load all tasks that are in the same groups as the tasks being updated
  task_ids = [t.id for t in db.session.deleted
              if isinstance(t, models.CycleTaskGroupObjectTask)]
  for task in itertools.chain(db.session.dirty, db.session.new):
    if not isinstance(task, models.CycleTaskGroupObjectTask):
      continue
    group_task_dict[task.cycle_task_group].add(task)
    if task.id:
      task_ids.append(task.id)
  query = models.CycleTaskGroupObjectTask.query.filter(
      models.CycleTaskGroupObjectTask.cycle_task_group_id.in_(groups_dict)
  ).options(
      orm.undefer_group("CycleTaskGroupObjectTask_complete")
  )
  if task_ids:
    query = query.filter(
        models.CycleTaskGroupObjectTask.id.notin_(task_ids))
  tasks = query.distinct().with_for_update().all()
  for task in tasks:
    group_task_dict[groups_dict[task.cycle_task_group_id]].add(task)

  updated_groups = []
  for group in groups_dict.itervalues():
    old_state = [group.status, group.start_date, group.end_date,
                 group.next_due_date]
    _update_parent_status(group, {t.status for t in group_task_dict[group]})
    group.start_date, group.end_date = _get_date_range(
        group_task_dict[group])
    group.next_due_date = _get_min_end_date(group_task_dict[group])
    if old_state != [group.status, group.start_date, group.end_date,
                     group.next_due_date]:
      # if the group state changed, collect it so the parent cycle state
      # gets updated as well
      updated_groups.append(group)
  if updated_groups:
    update_cycle_task_group_parent_state(updated_groups)
def get(self, course_uuid):
    course = Course.get_active_by_uuid_or_404(course_uuid)
    require(READ, course,
            title="Assignments Unavailable",
            message="Assignments can be seen only by those enrolled in the "
                    "course. Please double-check your enrollment in this "
                    "course.")
    assignment = Assignment(course_id=course.id)
    restrict_user = not allow(MANAGE, assignment)

    # Get all assignments for this course, order by answer_start date,
    # created date
    base_query = Assignment.query \
        .options(joinedload("assignment_criteria").joinedload("criterion")) \
        .options(undefer_group('counts')) \
        .filter(
            Assignment.course_id == course.id,
            Assignment.active == True
        ) \
        .order_by(desc(Assignment.answer_start), desc(Assignment.created))

    if restrict_user:
        now = datetime.datetime.utcnow()
        assignments = base_query \
            .filter(or_(
                Assignment.answer_start.is_(None),
                now >= Assignment.answer_start
            )) \
            .all()
    else:
        assignments = base_query.all()

    on_assignment_list_get.send(
        self,
        event_name=on_assignment_list_get.name,
        user=current_user,
        course_id=course.id)

    return {
        "objects": marshal(assignments,
                           dataformat.get_assignment(restrict_user))
    }