def update_dict(self, data):
    """Update this group from a dict of incoming values.

    Pops ``start_date``/``end_date`` and stores them as the ``during``
    interval, reconciles the ``accounts`` relationship against the
    incoming list (unchanged rows are kept, missing rows deleted, new
    rows appended), and assigns every remaining key as an attribute.
    Keys starting with an underscore are skipped.
    """
    start_date = data.pop('start_date', None)
    end_date = data.pop('end_date', None)
    set_attribute(self, 'during', DateInterval([start_date, end_date]))
    if data.get('accounts') is not None:
        new_accounts = set([(a['type'], a['value'])
                            for a in data.pop('accounts', [])])
        # Iterate over a copy: removing from self.accounts while
        # iterating it directly skips the element that follows each
        # removal, leaving stale accounts behind.
        for account in list(self.accounts):
            key = (account.type, account.value)
            if key in new_accounts:
                # account unchanged -> keep existing row
                new_accounts.remove(key)
            else:
                self.accounts.remove(account)
        # whatever is left in new_accounts is genuinely new
        for account_type, account_value in new_accounts:
            self.accounts.append(
                GroupAccount(type=account_type,
                             value=account_value,
                             group_id=data.get('id')))
    for key, value in data.items():
        if key.startswith('_'):
            continue
        set_attribute(self, key, value)
def update_dict(self, data):
    """Apply incoming values, folding the date bounds into ``during``."""
    during = DateInterval([data.pop('start_date', None),
                           data.pop('end_date', None)])
    set_attribute(self, 'during', during)
    for field, field_value in data.items():
        set_attribute(self, field, field_value)
def svn(self, prn, date):
    """Return the SVN of the vehicle broadcasting *prn* on *date*.

    Scans the collection for entries whose PRN matches and whose
    launch/deactivation interval contains *date*.

    Raises RuntimeError when no matching vehicle is found.
    """
    matching_prn = (candidate for candidate in self if candidate.prn == prn)
    for candidate in matching_prn:
        active = DateInterval([candidate.launch, candidate.deactivation])
        if date in active:
            return candidate.svn
    raise RuntimeError('could not find SVN associated with PRN={} on '
                       '{:%Y-%m-%d}'.format(prn, date))
def validate_interval(self, data):
    """Check if holding blocks the loan/request."""
    # Interval covered by the holding currently being checked:
    # holding['start_date'] -> holding['end_date']
    candidate = DateInterval([data['start_date'], data['end_date']])
    requested = self.context['request_interval']
    # The `&` operator raises IllegalArgument when the intervals do
    # not overlap; an overlap means the requested dates are taken.
    try:
        overlap = candidate & requested
    except IllegalArgument:
        return
    raise ValidationError(overlap)
def validate_holdings(self, data):
    """Check if another holding blocks the loan/request."""
    item = self.context['item']
    start = data.get('start_date', _today())
    end = data.get('end_date', _max_loan_duration(start))
    # Schema that validates each existing holding against the
    # requested/new interval.
    schema = HoldingSchema(
        context={'request_interval': DateInterval([start, end])})
    conflicts = []
    for holding in item.holdings:
        holding_errors = schema.validate(holding)
        if holding_errors:
            conflicts.append((holding['id'], holding_errors))
    if conflicts:
        raise ValidationError(conflicts)
def collection_get(self):
    """List works matching the validated querystring filters."""
    qs = self.request.validated['querystring']
    offset = qs['offset']
    limit = qs['limit']
    fmt = qs.get('format')
    search_text = qs.get('query')
    filters = []
    # Date-range filter: overlap (&&) between Work.during and the
    # requested span.
    if qs.get('start_date') or qs.get('end_date'):
        span = DateInterval([qs.get('start_date'), qs.get('end_date')])
        filters.append(Work.during.op('&&')(span))
    if search_text:
        filters.append(Work.search_terms.match(search_text))
    filter_type = self.request.validated['querystring'].get('filter_type')
    if filter_type:
        # comma-separated list of work types, OR-ed together
        wanted_types = filter_type.split(',')
        filters.append(sql.or_(*[Work.type == t for t in wanted_types]))
    listing = self.context.search(
        filters=filters,
        offset=offset,
        limit=limit,
        order_by=[func.lower(Work.during).desc()],
        format=fmt,
        from_query=None,
        principals=self.request.effective_principals)
    schema = WorkSchema()
    return {
        'total': listing['total'],
        'records': [schema.to_json(work.to_dict())
                    for work in listing['hits']],
        'snippets': [],
        'limit': limit,
        'offset': offset,
        'status': 'ok',
    }
class TestIntervalProperties(object):
    """Property accessors of interval objects (length, radius, ...)."""

    @mark.parametrize(('number_range', 'length'), (
        ([1, 4], 3),
        ([-1, 1], 2),
        ((-inf, inf), inf),
        ((1, inf), inf),
    ))
    def test_length(self, number_range, length):
        assert IntInterval(number_range).length == length

    @mark.parametrize(('number_range', 'radius'), (
        ([1, 4], 1.5),
        ([-1, 1], 1.0),
        ([-4, -1], 1.5),
        ((-inf, inf), inf),
        ((1, inf), inf),
    ))
    def test_radius(self, number_range, radius):
        assert IntInterval(number_range).radius == radius

    @mark.parametrize(('number_range', 'centre'), (
        ([1, 4], 2.5),
        ([-1, 1], 0),
        ([-4, -1], -2.5),
        ((1, inf), inf),
    ))
    def test_centre(self, number_range, centre):
        assert IntInterval(number_range).centre == centre

    @mark.parametrize(('number_range', 'is_open'), (
        ((2, 3), True),
        ('(2, 5)', True),
        ('[3, 4)', False),
        ('(4, 5]', False),
        ('3 - 4', False),
        ([4, 5], False),
        ('[4, 5]', False),
    ))
    def test_open(self, number_range, is_open):
        assert IntInterval(number_range).open == is_open

    @mark.parametrize(('number_range', 'is_closed'), (
        ((2, 3), False),
        ('(2, 5)', False),
        ('[3, 4)', False),
        ('(4, 5]', False),
        ('3 - 4', True),
        ([4, 5], True),
        ('[4, 5]', True),
    ))
    def test_closed(self, number_range, is_closed):
        assert IntInterval(number_range).closed == is_closed

    @mark.parametrize(('number_range', 'empty'), (
        ((2, 3), True),
        ([2, 3], False),
        ([2, 2], False),
        ((2, 2), True),
        ('[2, 2)', True),
        ('(2, 2]', True),
        ('[2, 3)', False),
        ((2, 10), False),
    ))
    def test_empty(self, number_range, empty):
        assert IntInterval(number_range).empty == empty

    @mark.parametrize(('number_range', 'degenerate'), (
        ((2, 4), False),
        ('(2, 2)', True),
        ('[0, 0)', True),
    ))
    def test_degenerate(self, number_range, degenerate):
        assert IntInterval(number_range).degenerate == degenerate

    @mark.parametrize(('interval', 'discrete'), (
        (IntInterval((2, 3)), True),
        (IntInterval(5), True),
        (FloatInterval(3.5), False),
        (DecimalInterval(Decimal('2.4')), False),
        (DateTimeInterval(datetime(2002, 1, 1)), False),
        (DateInterval(date(2002, 1, 1)), True),
    ))
    def test_discrete(self, interval, discrete):
        assert interval.discrete == discrete
class TestComparisonOperators(object):
    """Comparison, containment and hashing behaviour of intervals."""

    @mark.parametrize(('comparison', 'result'), (
        (IntInterval([1, 3]) == IntInterval([1, 3]), True),
        (IntInterval([1, 3]) == IntInterval([1, 4]), False),
        (IntInterval([inf, inf]) == inf, True),
        (IntInterval([3, 3]) == 3, True),
        (IntInterval([3, 3]) == 5, False),
        (IntInterval([3, 3]) == 'something', False),
        (IntInterval([3, 3]) == DateInterval(
            [date(2011, 1, 1), date(2011, 1, 1)]), False),
        (IntInterval.from_string('(,)') == None, False),  # noqa
        (DateInterval(date(2000, 1, 1), date(2001, 1, 1)) ==
            -12312321312312312312123123, False),
    ))
    def test_eq_operator(self, comparison, result):
        assert comparison is result

    @mark.parametrize(('comparison', 'result'), (
        (IntInterval([1, 3]) != IntInterval([1, 3]), False),
        (IntInterval([1, 3]) != IntInterval([1, 4]), True),
        (IntInterval([inf, inf]) != inf, False),
        (IntInterval([3, 3]) != 3, False),
        (IntInterval([3, 3]) != 5, True),
        (IntInterval([3, 3]) != 'something', True),
        (IntInterval.from_string('(,)') != None, True)  # noqa
    ))
    def test_ne_operator(self, comparison, result):
        assert comparison is result

    @mark.parametrize(('comparison', 'result'), (
        (IntInterval([1, 3]) > IntInterval([0, 2]), True),
        (IntInterval((1, 4)) > 1, False),
        (IntInterval((1, 6)) > [1, 6], False),
        (IntInterval((1, 6)) > 0, True),
    ))
    def test_gt_operator(self, comparison, result):
        assert comparison is result

    @mark.parametrize(('comparison', 'result'), (
        (IntInterval([1, 3]) >= IntInterval([0, 2]), True),
        (IntInterval((1, 4)) >= 1, False),
        (IntInterval((1, 6)) >= [1, 6], False),
        (IntInterval((1, 6)) >= 0, True),
    ))
    def test_ge_operator(self, comparison, result):
        assert comparison is result

    @mark.parametrize(('comparison', 'result'), (
        (IntInterval([0, 2]) < IntInterval([1, 3]), True),
        (IntInterval([2, 3]) < IntInterval([2, 3]), False),
        (IntInterval([2, 5]) < 6, True),
        (IntInterval([2, 5]) < 5, False),
        (IntInterval([2, 5]) < inf, True),
    ))
    def test_lt_operator(self, comparison, result):
        assert comparison is result

    @mark.parametrize(('comparison', 'result'), (
        (IntInterval([0, 2]) <= IntInterval([1, 3]), True),
        (IntInterval([1, 3]) <= IntInterval([1, 3]), True),
        (IntInterval([1, 7]) <= 8, True),
        (IntInterval([1, 6]) <= 5, False),
        (IntInterval([1, 5]) <= inf, True),
    ))
    def test_le_operator(self, comparison, result):
        assert comparison is result

    def test_integer_comparison(self):
        assert IntInterval([2, 2]) <= 3
        assert IntInterval([1, 3]) >= 0
        assert IntInterval([2, 2]) == 2
        assert IntInterval([2, 2]) != 3

    @mark.parametrize('value', (
        IntInterval([0, 2]),
        1,
        1.0,
        (-1, 1),
    ))
    def test_contains_operator_for_inclusive_interval(self, value):
        assert value in IntInterval([-1, 2])

    @mark.parametrize('value', (
        IntInterval([0, 2]),
        2,
        [-1, 1],
    ))
    def test_contains_operator_for_non_inclusive_interval(self, value):
        assert value not in IntInterval((-1, 2))

    @mark.parametrize(('interval1', 'interval2', 'expected'), (
        (IntInterval((0, 2)), IntInterval((0, 2)), True),
        (IntInterval([0, 2]), IntInterval([0, 2]), True),
        (IntInterval.from_string('[0, 2)'),
         IntInterval.from_string('[0, 2)'), True),
        (IntInterval.from_string('(0, 2]'),
         IntInterval.from_string('(0, 2]'), True),
        (IntInterval((0, 2)), IntInterval((1, 2)), False),
        (IntInterval((0, 2)), IntInterval((0, 1)), False),
        (IntInterval((0, 2)), IntInterval([0, 1]), False),
        (IntInterval((0, 2)), FloatInterval((0, 1)), False),
    ))
    def test_hash_operator_with_interval_attributes(self, interval1,
                                                    interval2, expected):
        actual = (interval1.__hash__() == interval2.__hash__())
        assert actual == expected

    @mark.parametrize(('contains_check', 'expected'), (
        (IntInterval([0, 2]) in {IntInterval([0, 2]): ''}, True),
        (IntInterval([0, 2]) in {IntInterval((0, 2)): ''}, False),
        (IntInterval([0, 2]) in set([IntInterval([0, 2])]), True),
    ))
    def test_hash_operator_with_collections(self, contains_check, expected):
        assert contains_check is expected
def collection_get(self):
    """List memberships; supports 'record' and 'snippet' output formats.

    Snippet format aggregates per person: earliest/latest membership
    dates, membership count, distinct-work count and the groups the
    person belongs to.
    """
    qs = self.request.validated['querystring']
    offset = qs['offset']
    limit = qs['limit']
    person_id = qs.get('person_id')
    group_id = qs.get('group_id')
    format = qs.get('format')
    order_by = []
    query = qs.get('query')
    filters = []
    if person_id:
        filters.append(Membership.person_id == person_id)
    # Date-range filter: overlap (&&) between the membership duration
    # and the requested span.
    if qs.get('start_date') or qs.get('end_date'):
        duration = DateInterval([qs.get('start_date'), qs.get('end_date')])
        filters.append(Membership.during.op('&&')(duration))
    if group_id:
        if qs['transitive']:
            # find memberships of this group and all its child groups
            group_ids = [group_id]
            group_ids.extend(ResourceFactory(GroupResource)(
                self.request, group_id).child_groups())
            filters.append(
                sql.or_(*[Membership.group_id == g for g in group_ids]))
        else:
            filters.append(Membership.group_id == group_id)
    cte_total = None
    from_query = None
    query_callback = None
    if format == 'record':
        format = None
    elif format == 'snippet':
        from_query = self.context.session.query(Membership)

        # Closure over `query` and `group_id`; applied by
        # self.context.search() after the base filters.
        def query_callback(from_query):
            # Aggregate the filtered memberships per person.
            filtered_members = from_query.cte('filtered_members')
            with_members = self.context.session.query(
                # Sentinel dates stand in for open-ended intervals so
                # min/max aggregation works; unmapped again below.
                func.min(func.coalesce(
                    func.lower(filtered_members.c.during),
                    datetime.date(1900, 1, 1))).label('earliest'),
                func.max(func.coalesce(
                    func.upper(filtered_members.c.during),
                    datetime.date(2100, 1, 1))).label('latest'),
                func.count(
                    filtered_members.c.id.distinct()).label('memberships'),
                func.count(Contributor.work_id.distinct()).label('works'),
                func.array_agg(Group.id.distinct()).label('group_ids'),
                func.array_agg(Group.name.distinct()).label('group_names'),
                func.max(filtered_members.c.id).label('id'),
                Person.id.label('person_id'),
                Person.name.label('person_name')).join(
                    Person,
                    Person.id == filtered_members.c.person_id).join(
                        Group,
                        Group.id == filtered_members.c.group_id).outerjoin(
                            Person.contributors,
                        )
            if query and group_id:
                with_members = with_members.filter(
                    Person.family_name.ilike('%%%s%%' % query))
            with_members = with_members.group_by(Person.id, Person.name)
            return with_members.order_by(Person.name)

    listing = self.context.search(
        from_query=from_query,
        filters=filters,
        offset=offset,
        limit=limit,
        order_by=order_by,
        post_query_callback=query_callback,
        # limits are applied after aggregation only for snippets
        apply_limits_post_query={'snippet': True}.get(format, False),
        principals=self.request.effective_principals)
    schema = MembershipSchema()
    result = {'total': listing['total'] or cte_total,
              'records': [],
              'snippets': [],
              'limit': limit,
              'status': 'ok',
              'offset': offset}
    if format == 'snippet':
        snippets = []
        for hit in listing['hits']:
            #start_date, end_date = parse_duration(hit.during,
            #                                      format='%Y-%m-%d')
            # Sentinel years 1900/2100 mean "no lower/upper bound".
            earliest = hit.earliest
            if earliest:
                if earliest.year == 1900:
                    earliest = None
                else:
                    earliest = earliest.strftime('%Y-%m-%d')
            latest = hit.latest
            if latest:
                if latest.year == 2100:
                    latest = None
                else:
                    latest = latest.strftime('%Y-%m-%d')
            groups = [{'id': i[0], 'name': i[1]}
                      for i in zip(hit.group_ids, hit.group_names)]
            snippets.append({'id': hit.id,
                             'person_id': hit.person_id,
                             'person_name': hit.person_name,
                             'groups': groups,
                             'earliest': earliest,
                             'latest': latest,
                             'works': hit.works,
                             'memberships': hit.memberships})
        result['snippets'] = snippets
    else:
        result['records'] = [schema.to_json(person.to_dict())
                             for person in listing['hits']]
    return result
def listing(self, text_query=None, start_date=None, end_date=None,
            person_ids=None, group_ids=None, offset=0, limit=100,
            order_by=None, principals=None):
    """Per-person membership listing with aggregated group/work counts.

    Builds one aggregate query per person (groups, earliest/latest
    membership dates, membership count), applies the filters, then
    joins the paginated result against Contributor to count works.
    Returns {'total', 'hits', 'limit', 'offset'}.
    """
    query = self.session.query(
        Membership.person_id.label('person_id'),
        Person.name.label('person_name'),
        Person.family_name.label('person_name_sorted'),
        # groups encoded as "id:name" strings; split apart below
        func.array_agg(
            sql.distinct(func.concat(Group.id, ':',
                                     Group.name))).label('groups'),
        # sentinel dates substitute for open-ended intervals so
        # min/max aggregation works; unmapped again below
        func.min(func.coalesce(func.lower(Membership.during),
                               datetime.date(1900, 1, 1))).label('earliest'),
        func.max(func.coalesce(func.upper(Membership.during),
                               datetime.date(2100, 1, 1))).label('latest'),
        func.count(sql.distinct(Membership.id)).label('memberships')
    ).join(Person).join(Group).group_by(Membership.person_id,
                                        Person.name,
                                        Person.family_name)
    if person_ids:
        query = query.filter(sql.or_(*[Membership.person_id == pid
                                       for pid in person_ids]))
    if group_ids:
        query = query.filter(sql.or_(*[Membership.group_id == pid
                                       for pid in group_ids]))
    # overlap (&&) between membership duration and the requested span
    if start_date or end_date:
        duration = DateInterval([start_date, end_date])
        query = query.filter(Membership.during.op('&&')(duration))
    if text_query:
        query = query.filter(
            Person.name.ilike('%%%s%%' % text_query))
    # total is counted before pagination
    total = query.count()
    query = query.order_by(order_by or Person.family_name)
    query = query.limit(limit).offset(offset)
    # join the paginated page against Contributor to count works
    filtered_members = query.cte('members')
    full_listing = self.session.query(
        filtered_members,
        func.count(Contributor.work_id).label('works')
    )
    full_listing = full_listing.outerjoin(
        Contributor,
        filtered_members.c.person_id == Contributor.person_id)
    full_listing = full_listing.group_by(
        filtered_members).order_by(filtered_members.c.person_name_sorted)
    hits = []
    for hit in full_listing.all():
        # sentinel years 1900/2100 mean "no lower/upper bound"
        earliest = hit.earliest
        if earliest:
            if earliest.year == 1900:
                earliest = None
            else:
                earliest = earliest.strftime('%Y-%m-%d')
        latest = hit.latest
        if latest:
            if latest.year == 2100:
                latest = None
            else:
                latest = latest.strftime('%Y-%m-%d')
        groups = []
        for group in hit.groups:
            # undo the "id:name" encoding from the array_agg above
            id, name = group.split(':', 1)
            groups.append(dict(id=id, name=name))
        hits.append({'person_id': hit.person_id,
                     'person_name': hit.person_name,
                     'groups': groups,
                     'earliest': earliest,
                     'latest': latest,
                     'works': hit.works,
                     'memberships': hit.memberships})
    return {'total': total, 'hits': hits, 'limit': limit, 'offset': offset}
def listing(self, text_query=None, type=None, start_date=None,
            end_date=None, contributor_person_ids=None,
            contributor_group_ids=None, affiliation_group_ids=None,
            related_work_ids=None, offset=0, limit=100, order_by=None,
            principals=None):
    """Paginated work listing with contributors/relations/affiliations.

    Pipeline of CTEs: pre-select work ids by contributor/affiliation/
    relation, intersect with ACL-allowed ids, filter by date/text/type,
    paginate, then aggregate contributor, relation and affiliation JSON
    per work.  Returns {'total', 'hits', 'limit', 'offset'}.
    """
    # Step 1: optional pre-selection of work ids.  Note the branches
    # are mutually exclusive (elif): only the first non-empty id list
    # is honoured.
    selected_work_ids = None
    if contributor_person_ids:
        query = self.session.query(Contributor.work_id.label('id'))
        query = query.filter(sql.or_(*[Contributor.person_id == pid
                                       for pid in contributor_person_ids]))
        query = query.group_by(Contributor.work_id)
        selected_work_ids = query.cte('selected_work_ids')
    elif contributor_group_ids:
        query = self.session.query(Contributor.work_id.label('id'))
        query = query.filter(sql.or_(*[Contributor.group_id == gid
                                       for gid in contributor_group_ids]))
        query = query.group_by(Contributor.work_id)
        selected_work_ids = query.cte('selected_work_ids')
    elif affiliation_group_ids:
        query = self.session.query(Affiliation.work_id.label('id'))
        query = query.filter(sql.or_(*[Affiliation.group_id == gid
                                       for gid in affiliation_group_ids]))
        query = query.group_by(Affiliation.work_id)
        selected_work_ids = query.cte('selected_work_ids')
    elif related_work_ids:
        query = self.session.query(Relation.work_id.label('id'))
        query = query.filter(sql.or_(*[Relation.target_id == wid
                                       for wid in related_work_ids]))
        query = query.group_by(Relation.work_id)
        selected_work_ids = query.cte('selected_work_ids')
    work_query = self.session.query(Work.id)
    if selected_work_ids is not None:
        work_query = work_query.join(
            selected_work_ids, selected_work_ids.c.id == Work.id)
    # Step 2: restrict to works the principals may see.  ACL filters
    # are partitioned by the table they apply to.
    acl_filters = self.acl_filters(principals)
    if acl_filters:
        group_filters = [f for f in acl_filters
                         if f.left.table.name == 'affiliations']
        person_filters = [f for f in acl_filters
                          if f.left.table.name == 'contributors']
        if group_filters:
            query = self.session.query(Affiliation.work_id.label('id'))
            query = query.filter(sql.or_(*group_filters))
            query = query.group_by(Affiliation.work_id)
            allowed_work_ids = query.cte('allowed_work_ids')
            allowed_group_query = query
        if person_filters:
            query = self.session.query(Contributor.work_id.label('id'))
            query = query.filter(sql.or_(*person_filters))
            query = query.group_by(Contributor.work_id)
            allowed_work_ids = query.cte('allowed_work_ids')
            allowed_person_query = query
        if group_filters and person_filters:
            # both kinds present: allowed set is the union of the two
            query = allowed_group_query.union(
                allowed_person_query).group_by('id')
            allowed_work_ids = query.cte('allowed_work_ids')
        work_query = work_query.join(
            allowed_work_ids, allowed_work_ids.c.id == Work.id)
    # Step 3: plain filters on the work itself.
    # overlap (&&) between Work.during and the requested span
    if start_date or end_date:
        duration = DateInterval([start_date, end_date])
        work_query = work_query.filter(Work.during.op('&&')(duration))
    if text_query:
        work_query = work_query.filter(
            Work.title.ilike('%%%s%%' % text_query))
    if type:
        work_query = work_query.filter(Work.type == type)
    # total is counted before pagination
    total = work_query.count()
    work_query = work_query.order_by(order_by or Work.issued.desc())
    work_query = work_query.limit(limit).offset(offset)
    filtered_work_ids = work_query.cte('filtered_work_ids')
    listed_works = self.session.query(
        Work.id.label('id'),
        Work.type.label('type'),
        Work.issued.label('issued'),
        Work.title).join(
            filtered_work_ids,
            filtered_work_ids.c.id == Work.id).cte('listed_works')
    Target = aliased(Work)
    # Step 4: aggregate contributors, relations and affiliations per
    # work into JSON / array columns.
    full_listing = self.session.query(
        listed_works,
        func.json_agg(
            func.json_build_object('id', Contributor.id,
                                   'position', Contributor.position,
                                   'name', Person.name,
                                   'person_id', Person.id,
                                   'initials', Person.initials,
                                   'prefix', Person.family_name_prefix,
                                   'given_name', Person.given_name,
                                   'family_name', Person.family_name,
                                   'description', Contributor.description,
                                   'group_id', Contributor.group_id,
                                   'role', Contributor.role)
        ).label('contributors'),
        func.json_agg(
            func.json_build_object('id', Relation.id,
                                   'relation_type', Relation.type,
                                   'type', Target.type,
                                   'location', Relation.location,
                                   'starting', Relation.starting,
                                   'ending', Relation.ending,
                                   'volume', Relation.volume,
                                   'issue', Relation.issue,
                                   'number', Relation.number,
                                   'title', Target.title)
        ).label('relations'),
        # affiliations encoded as "id:name" strings; split apart below
        func.array_agg(
            sql.distinct(
                func.concat(Group.id, ':', Group.name))).label('affiliations')
    )
    full_listing = full_listing.outerjoin(
        Contributor, listed_works.c.id == Contributor.work_id).outerjoin(
            Person, Person.id == Contributor.person_id)
    full_listing = full_listing.outerjoin(
        Affiliation, Contributor.id == Affiliation.contributor_id).outerjoin(
            Group, Group.id == Affiliation.group_id)
    full_listing = full_listing.outerjoin(
        Relation, listed_works.c.id == Relation.work_id).outerjoin(
            Target, Target.id == Relation.target_id)
    full_listing = full_listing.group_by(listed_works).order_by(
        listed_works.c.issued.desc())
    hits = []
    # roles are collected only for the persons the caller asked about
    contributor_role_ids = set(contributor_person_ids or [])
    for hit in full_listing.all():
        aff_labels = dict([tuple(a.split(':', 1))
                           for a in hit.affiliations])
        contributors = []
        roles = set()
        # filter out contributor rows with a null id.
        # this happens with course relations
        contributor_rows = [c for c in hit.contributors if c.get('id')]
        contributor_rows.sort(key=itemgetter('position'))
        for contributor in contributor_rows:
            if contributor['person_id'] in contributor_role_ids:
                roles.add(contributor['role'])
            if (contributors and
                    contributors[-1]['position'] == contributor['position']):
                # same contributor as previous one, (but other affiliation)
                # it's hard to remove this with a distinct clause
                # in the json agg, so we remove it here
                continue
            cg_id = contributor['group_id']
            if cg_id and str(cg_id) in aff_labels:
                # a group contributor is always added as an affiliation
                contributor['group_name'] = aff_labels[str(cg_id)]
            contributors.append(contributor)
        affiliations = []
        for affiliation in hit.affiliations:
            # undo the "id:name" encoding from the array_agg above
            id, name = affiliation.split(':', 1)
            affiliations.append(dict(id=id, name=name))
        hits.append({'id': hit.id,
                     'title': hit.title,
                     'type': hit.type,
                     'roles': list(roles),
                     'issued': hit.issued.strftime('%Y-%m-%d'),
                     'relations': hit.relations,
                     'affiliations': affiliations,
                     'contributors': contributors})
    return {'total': total, 'hits': hits, 'limit': limit, 'offset': offset}
def update_dict(self, data):
    """Update this work from a dict of incoming values.

    Handles the date interval, reconciles the typed (type, value)
    collections (identifiers, measures), and synchronizes the
    contributor / description / relation child collections by id.
    Remaining keys are assigned as attributes; keys starting with an
    underscore are skipped.
    """
    def _sync_typed_pairs(existing, new_dicts, make):
        # Reconcile a (type, value) collection in place: keep rows
        # whose pair is still present, delete the rest, append new
        # ones built with `make`.
        new_pairs = set([(a['type'], a['value']) for a in new_dicts])
        # Iterate over a copy: removing from the relationship while
        # iterating it directly skips the element after each removal,
        # leaving stale rows behind.
        for row in list(existing):
            pair = (row.type, row.value)
            if pair in new_pairs:
                new_pairs.remove(pair)
            else:
                existing.remove(row)
        for pair_type, pair_value in new_pairs:
            existing.append(make(pair_type, pair_value))

    if self.id is None and data.get('id'):
        self.id = data.pop('id')
    start_date = data.pop('start_date', None)
    end_date = data.pop('end_date', None)
    issued = data['issued']
    set_attribute(self, 'during', DateInterval([start_date, end_date]))
    if start_date is None and end_date is None:
        # no explicit range: the work is "during" its issue date
        set_attribute(self, 'during', DateInterval([issued, issued]))
    if 'identifiers' in data:
        _sync_typed_pairs(
            self.identifiers, data.pop('identifiers', []),
            lambda t, v: Identifier(type=t, value=v,
                                    work_id=data.get('id')))
    if 'measures' in data:
        _sync_typed_pairs(
            self.measures, data.pop('measures', []),
            lambda t, v: Measure(type=t, value=v,
                                 work_id=data.get('id')))
    if 'contributors' in data:
        # Sync contributors (and their affiliations) by id: update
        # rows whose id is known, create the rest, drop the leftovers.
        existing_contributors = dict([(c.id, c)
                                      for c in self.contributors])
        new_contributors = []
        for contributor_data in data.pop('contributors', []):
            contributor_data['work_id'] = self.id
            affiliations_data = contributor_data.pop('affiliations', [])
            if contributor_data.get('id') in existing_contributors:
                contributor = existing_contributors.pop(
                    contributor_data['id'])
                contributor.update_dict(contributor_data)
            else:
                contributor = Contributor.from_dict(contributor_data)
            existing_affiliations = dict([
                (c.id, c) for c in contributor.affiliations])
            new_affiliations = []
            for affiliation_data in affiliations_data:
                affiliation_data['work_id'] = self.id
                if affiliation_data.get('id') in existing_affiliations:
                    affiliation = existing_affiliations.pop(
                        affiliation_data['id'])
                    affiliation.update_dict(affiliation_data)
                else:
                    affiliation = Affiliation.from_dict(affiliation_data)
                new_affiliations.append(affiliation)
            contributor.affiliations[:] = new_affiliations
            new_contributors.append(contributor)
        self.contributors[:] = new_contributors
    if 'descriptions' in data:
        # Same id-based sync for descriptions.
        existing_descriptions = dict([(c.id, c)
                                      for c in self.descriptions])
        new_descriptions = []
        for description_data in data.pop('descriptions', []):
            description_data['work_id'] = self.id
            if description_data.get('id') in existing_descriptions:
                description = existing_descriptions.pop(
                    description_data['id'])
                description.update_dict(description_data)
            else:
                description = Description.from_dict(description_data)
            new_descriptions.append(description)
        self.descriptions[:] = new_descriptions
    if 'relations' in data:
        # Same id-based sync for relations.
        existing_relations = dict([(c.id, c) for c in self.relations])
        new_relations = []
        for relation_data in data.pop('relations', []):
            relation_data['work_id'] = self.id
            if relation_data.get('id') in existing_relations:
                relation = existing_relations.pop(relation_data['id'])
                relation.update_dict(relation_data)
            else:
                relation = Relation.from_dict(relation_data)
            new_relations.append(relation)
        self.relations[:] = new_relations
    for key, value in data.items():
        if key.startswith('_'):
            continue
        set_attribute(self, key, value)
def test_canonicalize_date_intervals():
    """Canonical form of [2000-02-02, 2000-02-06] has upper bound Feb 7."""
    raw = DateInterval([date(2000, 2, 2), date(2000, 2, 6)])
    canonical = canonicalize(raw)
    assert canonical.upper.day == 7
def projectEdit(projectID, tableName):
    """
    Update specified table for specified project.

    Check the jwt, csrf token, and the user's roles before doing anything.
    The csrf token gets you in the door, a valid jwt token is a trusted
    source for your roles.
    """
    if projectID:
        p = db.session.query(alch.Description).join(alch.Portfolio)
        p = p.filter_by(projectID=projectID).first()
        # Audit stamp attached to every table update below.
        lastModifiedBy = {
            "tableName": tableName,
            "id": current_identity.get_id()
        }
        if tableName == "description":
            form, errors = updateFromForm(forms.Description, p,
                                          lastModifiedBy)
            if not errors:
                success = "Project description was updated."
        elif tableName == "portfolio":
            form, errors = updateFromForm(forms.Portfolio, p.portfolio[0],
                                          lastModifiedBy)
            if not errors:
                success = "Project portfolio entry was updated."
        elif tableName == "project":
            form, errors = updateFromForm(forms.Project, p.project[0],
                                          lastModifiedBy)
            if not errors:
                success = "Project management entry was updated."
        elif tableName == "disposition":
            request.json["projectID"] = int(projectID)
            request.json["lastModifiedBy"] = current_identity.get_id()
            disposedIn = request.json.get("disposedIn", None)
            if disposedIn:
                disposedIn = DateInterval.from_string(disposedIn)
                d = db.session.query(alch.Disposition) \
                    .filter_by(projectID = projectID) \
                    .filter_by(disposedIn = disposedIn).first()
                if not d:
                    # Create a new object from the request.
                    d = alch.Disposition(projectID=projectID)
                    d_success = "A new disposition was created for cycle "
                else:
                    # Consider the Fiscal Year offset, and decide whether
                    # the primary key value "disposedIn" has changed or
                    # not. If yes, then we are really creating a new
                    # disposition. If not, we need to re-shift the data in
                    # the database to match what is in the request.
                    if (d.disposedIn.lower == disposedIn.lower +
                            app.config["FISCAL_YEAR_OFFSET"] and
                            d.disposedIn.upper == disposedIn.upper +
                            app.config["FISCAL_YEAR_OFFSET"]):
                        d_success = "Updated disposition for cycle "
                    else:
                        d = alch.Disposition(projectID=projectID)
                        d_success = "A new disposition was created for cycle "
                form, errors = updateFromForm(forms.Disposition, d,
                                              lastModifiedBy, disposedIn)
                if not errors:
                    # Re-parse the raw value to build the "FYxx Qn" label.
                    disposedIn = request.json.get("disposedIn")
                    disposedIn = DateInterval.from_string(disposedIn)
                    FY = "FY{}".format(disposedIn.lower.strftime("%y"))
                    month = disposedIn.lower.strftime("%m")
                    # A span of exactly one quarter (3 months minus a
                    # day) selects a fiscal-quarter label.
                    if (disposedIn.lower + relativedelta(months=3) -
                            relativedelta(days=1) == disposedIn.upper):
                        Q = app.config["FISCAL_QUARTERS"][
                            int(int(month) / 3 + 1)]
                    else:
                        Q = ""
                    # NOTE(review): Q[1] takes the second character of the
                    # quarter label; when the span is not exactly one
                    # quarter Q is "" and Q[1] raises IndexError — confirm
                    # this branch is reachable / intended.
                    cycle = "{FY} {Q}.".format(FY=FY, Q=Q[1])
                    success = d_success + cycle
        elif tableName == "comment":
            commentID = request.json.get("commentID", None)
            if commentID:
                # Existing comment: update in place.
                commentID = int(commentID)
                c = db.session.query(alch.Comment). \
                    filter_by(commentID = commentID).first()
                c_success = "The comment was updated."
            else:
                # New comment, stamped with author and creation time.
                c = alch.Comment(projectID=projectID,
                                 commentAuthor=current_identity.get_id(),
                                 commentAuthored=datetime.utcnow(),
                                 commentLastModifiedBy=None)
                c_success = "A new comment was created."
            form, errors = updateFromForm(forms.Comment, c, lastModifiedBy)
            if not errors:
                success = c_success
        response = getProjectAttributes(projectID, tableName)
        if errors:
            response["errors"] = errors
        else:
            response["success"] = success
        return jsonify(**response)