def validate_open_interval(obj, domain_model):
    """Yield every open-interval row of domain_model that conflicts with obj.

    A row is "open" when its end_date is None. When obj is given, the row
    representing obj itself is skipped (an object does not conflict with
    itself); every other open row is yielded as a conflict.
    """
    session = Session()
    # NOTE: "== None" (not "is None") is required for SQLAlchemy to
    # generate "IS NULL" in the emitted SQL.
    query = session.query(domain_model).filter(domain_model.end_date == None)
    results = query.all()
    if obj:
        # hoist: the object's own key is loop-invariant
        obj_key = stringKey(obj)
        for result in results:
            # simplified from the original continue/else anti-idiom
            if stringKey(result) != obj_key:
                yield result
    else:
        # no object to exclude: every open row is a conflict
        for result in results:
            yield result
def validate_open_interval(obj, domain_model):
    """Yield open-interval rows (end_date is NULL) of domain_model,
    skipping the row that represents obj itself when obj is given.
    """
    session = Session()
    # "== None" is intentional: SQLAlchemy translates it to IS NULL
    open_rows = session.query(domain_model).filter(
        domain_model.end_date == None).all()
    if not open_rows:
        return
    if obj:
        for row in open_rows:
            # the object itself may legitimately be open
            if stringKey(row) == stringKey(obj):
                continue
            yield row
    else:
        for row in open_rows:
            yield row
def _jsonValues(self, nodes, fields, context):
    """Filter values from the nodes to represent in json.

    For each field, a custom listing_column getter declared on the domain
    descriptor takes precedence over the plain field.query accessor.
    datetime/date values are rendered as strings.
    """
    values = []
    domain_model = proxy.removeSecurityProxy(context.domain_model)
    domain_interface = queryModelInterface(domain_model)
    domain_annotation = queryModelDescriptor(domain_interface)
    # resolve any custom getters once, instead of re-scanning the
    # descriptor fields for every node
    custom_getters = {}
    for anno_field in domain_annotation.fields:
        # listing_column may be None -> getattr defaults to None
        getter = getattr(anno_field.listing_column, 'getter', None)
        if getter:
            custom_getters[anno_field.name] = getter
    for n in nodes:
        d = {}  # field to dictionaries
        for field in fields:
            f = field.__name__
            getter = custom_getters.get(f)
            if getter is not None:
                # custom getters take (node, field)
                d[f] = v = getter(n, field)
            else:
                # BUGFIX: the original never assigned d[f] (and left v
                # stale from the previous iteration) when the field had
                # no matching annotation entry; always fall back to the
                # plain field accessor.
                d[f] = v = field.query(n)
            if isinstance(v, datetime.datetime):
                d[f] = v.strftime('%F %I:%M %p')
            elif isinstance(v, datetime.date):
                d[f] = v.strftime('%F')
        d['object_id'] = stringKey(n)
        values.append(d)
    return values
def _jsonValues(self, nodes, fields, context):
    """Filter values from the nodes to represent in json, currently
    that means some footwork around, probably better as another
    set of adapters.
    """
    values = []
    domain_model = proxy.removeSecurityProxy(context.domain_model)
    domain_interface = queryModelInterface(domain_model)
    domain_annotation = queryModelDescriptor(domain_interface)
    for node in nodes:
        entry = {}  # field to dictionaries
        for field in fields:
            name = field.__name__
            getter = field.query
            for anno_field in domain_annotation.fields:
                if anno_field.name != name:
                    continue
                # NOTE(review): when no descriptor field matches, entry[name]
                # is never set here -- presumably intentional; confirm.
                if getattr(anno_field.listing_column, 'getter', None):
                    getter = anno_field.listing_column.getter
                    entry[name] = v = getter(node, field)
                else:
                    entry[name] = v = field.query(node)
            if isinstance(v, datetime.datetime):
                entry[name] = v.strftime('%F %I:%M %p')
            elif isinstance(v, datetime.date):
                entry[name] = v.strftime('%F')
        entry['object_id'] = stringKey(node)
        values.append(entry)
    return values
def _jsonValues(self, nodes, fields, context, getters_by_field_name=None):
    """Filter values from the nodes to represent in json, currently
    that means some footwork around, probably better as another
    set of adapters.
    """
    # BUGFIX: default was a mutable dict literal, shared across every
    # call of the method (classic mutable-default pitfall); use None
    # as the sentinel instead -- backward-compatible for all callers.
    if getters_by_field_name is None:
        getters_by_field_name = {}
    values = []
    for n in nodes:
        d = {}
        for field in fields:
            f = field.__name__
            getter = getters_by_field_name.get(f, None)
            if getter is not None:
                # custom getters take (node, field)
                d[f] = v = getter(n, field)
            else:
                d[f] = v = field.query(n)
            # !+i18n_DATE(mr, sep-2010) two problems with the isinstance
            # tests below:
            # a) they seem to always fail
            # b) this is incorrect way to localize dates
            if isinstance(v, datetime.datetime):
                d[f] = v.strftime("%F %I:%M %p")
            elif isinstance(v, datetime.date):
                d[f] = v.strftime("%F")
        d["object_id"] = url.set_url_context(stringKey(n))
        values.append(d)
    return values
def _get_secured_batch(self, query, start, limit):
    """Return the [start, start+limit) slice of the security-filtered
    results of *query*, recording the total visible count in set_size.
    """
    secured_query = secured_iterator("zope.View", query, self.context)
    # wrap each visible object in a location-aware container proxy
    nodes = [
        contained(ob, self, stringKey(ob))
        for ob in secured_query
    ]
    # set_size reflects the count *after* security filtering
    self.set_size = len(nodes)
    return nodes[start:start + limit]
def _get_secured_batch(self, query, start, limit):
    """Slice the security-filtered query results; side effect: records
    the total number of visible items on self.set_size.
    """
    visible = secured_iterator("zope.View", query, self.context)
    batch = []
    for obj in visible:
        # attach location information so each object is URL-addressable
        batch.append(contained(obj, self, stringKey(obj)))
    self.set_size = len(batch)
    end = start + limit
    return batch[start:end]
def validate_date_in_interval(obj, domain_model, date):
    """Yield rows of domain_model whose [start_date, end_date] interval
    contains *date*, excluding the row representing obj itself.

    When obj is None, every overlapping row is yielded as an error.
    """
    session = Session()
    query = session.query(domain_model).filter(
        sql.expression.between(
            date, domain_model.start_date, domain_model.end_date))
    results = query.all()
    if obj:
        # the object itself can overlap; hoist its loop-invariant key
        obj_key = stringKey(obj)
        for result in results:
            # simplified from the original continue/else anti-idiom
            if stringKey(result) != obj_key:
                yield result
    else:
        # all results indicate an error
        for result in results:
            yield result
def validate_date_in_interval(obj, domain_model, date):
    """Yield every row of domain_model whose start/end interval contains
    *date*; the row for obj itself (when given) is not a conflict.
    """
    session = Session()
    overlapping = session.query(domain_model).filter(
        sql.expression.between(
            date, domain_model.start_date, domain_model.end_date)).all()
    if not overlapping:
        return
    if obj:
        # the object itself can overlap
        for row in overlapping:
            if stringKey(row) == stringKey(obj):
                continue
            yield row
    else:
        # all results indicate an error
        for row in overlapping:
            yield row
def __call__(self, context, location):
    """Locate *context* within one of self.containers and return it
    wrapped in a LocationProxy; raise LocationError when no container
    holds it.
    """
    key = stringKey(context)
    for candidate in self.containers:
        if IQueryContent.providedBy(candidate):
            # resolve query-content to a concrete container, keeping
            # the original __parent__ so URL generation still works
            parent = candidate.__parent__
            candidate = candidate.query(location)
            if parent is not None:
                candidate.__parent__ = parent
        if key in candidate:
            return LocationProxy(context, candidate, key)
    raise LocationError(key)
def get_sitting_items(sitting, request, include_actions=False):
    """Build a list of display records for the items scheduled in *sitting*.

    Draft/published agendas list items in planned order; later workflow
    states use the actual (real) order. When include_actions is True each
    record also carries scheduling/workflow/discussion action entries.
    """
    items = []
    if sitting.status in [
            sitting_wf_state[u'draft-agenda'].id,
            sitting_wf_state[u'published-agenda'].id]:
        order = "planned_order"
    else:
        order = "real_order"
    schedulings = map(removeSecurityProxy,
        sitting.items.batch(order_by=order, limit=None))
    for scheduling in schedulings:
        item = ProxyFactory(location_wrapped(scheduling.item, sitting))
        props = (IDCDescriptiveProperties.providedBy(item) and item or
            IDCDescriptiveProperties(item))
        discussions = tuple(scheduling.discussions.values())
        discussion = discussions and discussions[0] or None
        info = IWorkflowInfo(item, None)
        state_title = info.workflow().workflow.states[item.status].title
        record = {
            'title': props.title,
            'description': props.description,
            'name': stringKey(scheduling),
            'status': item.status,
            # BUGFIX: call capitalize() -- the original stored the bound
            # method object itself instead of the capitalized type string
            'type': item.type.capitalize(),
            't': item.type,
            'state_title': state_title,
            'discussion': discussion,
            'delete_url': "%s/delete" % ui_url.absoluteURL(
                scheduling, request),
            'url': ui_url.absoluteURL(item, request),
        }
        if include_actions:
            record['actions'] = get_scheduling_actions(scheduling, request)
            record['workflow'] = get_workflow_actions(item, request)
            discussion_actions = get_discussion_actions(discussion, request)
            if discussion_actions:
                # at most one discussion action is expected per item
                assert len(discussion_actions) == 1
                record['discussion_action'] = discussion_actions[0]
            else:
                record['discussion_action'] = None
        items.append(record)
    return items
def get_sitting_items(sitting, request, include_actions=False):
    """Build a list of display records for the items scheduled in *sitting*.

    Draft/published agendas list items in planned order; later workflow
    states use the actual (real) order. When include_actions is True each
    record also carries scheduling/workflow/discussion action entries.
    """
    items = []
    if sitting.status in get_states("groupsitting",
            keys=["draft_agenda", "published_agenda"]):
        order = "planned_order"
    else:
        order = "real_order"
    schedulings = map(
        removeSecurityProxy,
        sitting.items.batch(order_by=order, limit=None))
    site_url = url.absoluteURL(getSite(), request)
    for scheduling in schedulings:
        item = ProxyFactory(location_wrapped(scheduling.item, sitting))
        props = (IDCDescriptiveProperties.providedBy(item) and item or
            IDCDescriptiveProperties(item))
        discussions = tuple(scheduling.discussions.values())
        discussion = discussions and discussions[0] or None
        info = IWorkflowInfo(item, None)
        state_title = info.workflow().workflow.states[item.status].title
        record = {
            'title': props.title,
            'description': props.description,
            'name': stringKey(scheduling),
            'status': item.status,
            # BUGFIX: call capitalize() -- the original stored the bound
            # method object itself instead of the capitalized type string
            'type': item.type.capitalize(),
            't': item.type,
            'state_title': state_title,
            'discussion': discussion,
            'delete_url': "%s/delete" % url.absoluteURL(scheduling, request),
            'url': url.set_url_context(site_url + (
                '/business/%ss/obj-%s' % (
                    item.type, item.parliamentary_item_id))),
        }
        if include_actions:
            record['actions'] = get_scheduling_actions(scheduling, request)
            record['workflow'] = get_workflow_actions(item, request)
            discussion_actions = get_discussion_actions(discussion, request)
            if discussion_actions:
                # at most one discussion action is expected per item
                assert len(discussion_actions) == 1
                record['discussion_action'] = discussion_actions[0]
            else:
                record['discussion_action'] = None
        items.append(record)
    return items
def render(self, date, template=None):
    """Render the sitting schedule view with its items and actions."""
    # client-side editor/widget resources
    need('yui-rte')
    need('yui-resize')
    need('yui-button')
    if template is None:
        template = self.template
    container = self.context.__parent__
    container_url = url.absoluteURL(container, self.request)
    # determine position in container, to build prev/next links
    key = stringKey(self.context)
    keys = list(container.keys())
    pos = keys.index(key)
    # NOTE(review): links is computed but currently not passed to the
    # template -- kept for behavioral parity; confirm before removing.
    links = {}
    if pos > 0:
        links['previous'] = "%s/%s/%s" % (
            container_url, keys[pos - 1], self.__name__)
    if pos < len(keys) - 1:
        links['next'] = "%s/%s/%s" % (
            container_url, keys[pos + 1], self.__name__)
    site_url = url.absoluteURL(getSite(), self.request)
    # e.g. "Group: 2010-01-01 10:00 - 12:30"
    title = "%s: %s - %s" % (
        self.context.group.short_name,
        self.context.start_date.strftime('%Y-%m-%d %H:%M'),
        self.context.end_date.strftime('%H:%M'))
    return template(
        display="sitting",
        title=title,
        description=_(u"Sitting Info"),
        actions=get_sitting_actions(self.context, self.request),
        items=get_sitting_items(
            self.context, self.request, include_actions=True),
        new_category_url="%s/admin/content/categories/add?next_url=..." % site_url,
        status=self.context.status,
    )
def _jsonValues(self, nodes, fields, context):
    """Filter values from the nodes to represent in json, currently
    that means some footwork around, probably better as another
    set of adapters.
    """
    values = []
    for node in nodes:
        entry = {}  # field to dictionaries
        for field in fields:
            name = field.__name__
            value = field.query(node)
            # datetime before date: datetime is a date subclass
            if isinstance(value, datetime.datetime):
                entry[name] = value.strftime('%F %I:%M %p')
            elif isinstance(value, datetime.date):
                entry[name] = value.strftime('%F')
            else:
                entry[name] = value
        entry['object_id'] = stringKey(node)
        values.append(entry)
    return values
def _jsonValues(self, nodes, fields, context):
    """Filter values from the nodes to represent in json, currently
    that means some footwork around, probably better as another
    set of adapters.
    """
    def _render(value):
        # dates/datetimes become strings; everything else passes through.
        # datetime must be tested before date (it is a date subclass).
        if isinstance(value, datetime.datetime):
            return value.strftime('%F %I:%M %p')
        if isinstance(value, datetime.date):
            return value.strftime('%F')
        return value
    values = []
    for n in nodes:
        d = dict((field.__name__, _render(field.query(n)))
            for field in fields)
        d['object_id'] = stringKey(n)
        values.append(d)
    return values
def publish_to_xml(context):
    """Generates XML for object and saves it to the file. If object contains
    attachments - XML is saved in zip archive with all attached files.

    The serialized file (xml, or zip when there are attachments/binary
    fields) is written under the per-type storage directory, and its
    location is then announced on the serialization output MQ exchange.
    """
    context = zope.security.proxy.removeSecurityProxy(context)
    obj_type = IWorkflow(context).name
    #locking - one serialization at a time per (type, object) pair
    lock_name = "%s-%s" %(obj_type, stringKey(context))
    with LockStore.get_lock(lock_name):
        #root key (used to cache files to zip)
        root_key = make_key()
        #create a fake interaction to ensure items requiring a participation
        #are serialized
        #!+SERIALIZATION(mb, Jan-2013) review this approach
        try:
            zope.security.management.getInteraction()
        except zope.security.interfaces.NoInteraction:
            principal = zope.security.testing.Principal('user', 'manager', ())
            zope.security.management.newInteraction(create_participation(principal))
        include = []
        # data dict to be published
        data = {}
        if interfaces.IFeatureVersion.providedBy(context):
            include.append("versions")
        if interfaces.IFeatureAudit.providedBy(context):
            include.append("event")
        exclude = ["data", "event", "attachments"]
        data.update(
            obj2dict(context, 1,
                parent=None,
                include=include,
                exclude=exclude,
                root_key=root_key
            )
        )
        tags = IStateController(context).get_state().tags
        if tags:
            data["tags"] = tags
        permissions = get_object_state_rpm(context).permissions
        data["permissions"] = get_permissions_dict(permissions)
        # setup path to save serialized data
        path = os.path.join(setupStorageDirectory(), obj_type)
        if not os.path.exists(path):
            os.makedirs(path)
        # xml file path
        file_path = os.path.join(path, stringKey(context))
        #files to zip
        files = []
        if interfaces.IFeatureAttachment.providedBy(context):
            attachments = getattr(context, "attachments", None)
            if attachments:
                data["attachments"] = []
                for attachment in attachments:
                    # serializing attachment
                    attachment_dict = obj2dict(attachment, 1,
                        parent=context,
                        exclude=["data", "event", "versions"])
                    # saving attachment to tmp
                    # delete=False: file must outlive this handle so it
                    # can be zipped below, then removed via remove_files
                    attached_file = tmp(delete=False)
                    attached_file.write(attachment.data)
                    attached_file.flush()
                    attached_file.close()
                    files.append(attached_file.name)
                    attachment_dict["saved_file"] = os.path.basename(
                        attached_file.name
                    )
                    data["attachments"].append(attachment_dict)
        #add explicit origin chamber for this object (used to partition data in
        #if more than one parliament exists)
        data["origin_parliament"] = get_origin_parliament(context)
        #add any additional files to file list
        files = files + PersistFiles.get_files(root_key)
        # zipping xml, attached files plus any binary fields
        # also remove the temporary files
        if files:
            #generate temporary xml file
            temp_xml = tmp(delete=False)
            temp_xml.write(serialize(data, name=obj_type))
            temp_xml.close()
            #write attachments/binary fields to zip
            with ZipFile("%s.zip" % (file_path), "w") as zip_file:
                for f in files:
                    zip_file.write(f, os.path.basename(f))
                # write the xml
                zip_file.write(temp_xml.name,
                    "%s.xml" % os.path.basename(file_path))
            # temp xml joins the cleanup list now that the zip is written
            files.append(temp_xml.name)
        else:
            # save serialized xml to file
            with open("%s.xml" % (file_path), "w") as xml_file:
                xml_file.write(serialize(data, name=obj_type))
                # NOTE(review): close() is redundant inside "with" --
                # the context manager already closes the file
                xml_file.close()
        # publish to rabbitmq outputs queue
        connection = bungeni.core.notifications.get_mq_connection()
        if not connection:
            # no MQ available: serialized file stays on disk, no cleanup
            return
        channel = connection.channel()
        publish_file_path = "%s.%s" %(file_path, ("zip" if files else "xml"))
        channel.basic_publish(
            exchange=SERIALIZE_OUTPUT_EXCHANGE,
            routing_key=SERIALIZE_OUTPUT_ROUTING_KEY,
            body=simplejson.dumps({"type": "file",
                "location": publish_file_path
            }),
            # delivery_mode=2 -> persistent message
            properties=pika.BasicProperties(content_type="text/plain",
                delivery_mode=2
            )
        )
        #clean up - remove any files if zip was/was not created
        # (also removes a stale file of the *other* extension from a
        # previous run of the opposite branch)
        if files:
            files.append("%s.%s" %(file_path, "xml"))
        else:
            files.append("%s.%s" %(file_path, "zip"))
        remove_files(files)
        #clear the cache
        PersistFiles.clear_files(root_key)
def id(self, object):
    """Defines the xapian 'primary key': module.ClassName-stringkey."""
    #TODO Add the language to the index!
    cls = object.__class__
    return "%s.%s-%s" % (
        cls.__module__, cls.__name__, container.stringKey(object))
def editLink(item, formatter):
    """Render an 'Edit' button link for *item*; formatter is unused
    (kept for the table-formatter callback signature)."""
    key = stringKey(item)
    return u'<a class="button-link" href="%s/edit">Edit</a>' % key
def viewLink(item, formatter):
    """Render a 'View' button link for *item*; formatter is unused
    (kept for the table-formatter callback signature)."""
    key = stringKey(item)
    return u'<a class="button-link" href="%s">View</a>' % key
def viewLink(item, formatter):
    """Return the 'View' anchor markup for *item*.

    The formatter argument is part of the callback signature and unused.
    """
    href = stringKey(item)
    markup = u'<a class="button-link" href="%s">View</a>' % (href)
    return markup
def editLink(item, formatter):
    """Return the 'Edit' anchor markup for *item*.

    The formatter argument is part of the callback signature and unused.
    """
    href = stringKey(item)
    markup = u'<a class="button-link" href="%s/edit">Edit</a>' % (href)
    return markup
def publish_to_xml(context):
    """Generates XML for object and saves it to the file. If object contains
    attachments - XML is saved in zip archive with all attached files.

    The serialized file (xml, or zip when there are attachments/binary
    fields) is written under the per-type storage directory, and its
    location is then announced on the serialization output MQ exchange.
    """
    context = zope.security.proxy.removeSecurityProxy(context)
    obj_type = IWorkflow(context).name
    #locking - one serialization at a time per (type, object) pair
    lock_name = "%s-%s" % (obj_type, stringKey(context))
    with LockStore.get_lock(lock_name):
        #root key (used to cache files to zip)
        root_key = make_key()
        #create a fake interaction to ensure items requiring a participation
        #are serialized
        #!+SERIALIZATION(mb, Jan-2013) review this approach
        try:
            zope.security.management.getInteraction()
        except zope.security.interfaces.NoInteraction:
            principal = zope.security.testing.Principal('user', 'manager', ())
            zope.security.management.newInteraction(
                create_participation(principal))
        include = []
        # data dict to be published
        data = {}
        if interfaces.IFeatureVersion.providedBy(context):
            include.append("versions")
        if interfaces.IFeatureAudit.providedBy(context):
            include.append("event")
        exclude = ["data", "event", "attachments"]
        data.update(
            obj2dict(context, 1,
                parent=None,
                include=include,
                exclude=exclude,
                root_key=root_key))
        tags = IStateController(context).get_state().tags
        if tags:
            data["tags"] = tags
        permissions = get_object_state_rpm(context).permissions
        data["permissions"] = get_permissions_dict(permissions)
        # setup path to save serialized data
        path = os.path.join(setupStorageDirectory(), obj_type)
        if not os.path.exists(path):
            os.makedirs(path)
        # xml file path
        file_path = os.path.join(path, stringKey(context))
        #files to zip
        files = []
        if interfaces.IFeatureAttachment.providedBy(context):
            attachments = getattr(context, "attachments", None)
            if attachments:
                data["attachments"] = []
                for attachment in attachments:
                    # serializing attachment
                    attachment_dict = obj2dict(
                        attachment, 1,
                        parent=context,
                        exclude=["data", "event", "versions"])
                    # saving attachment to tmp
                    # delete=False: file must outlive this handle so it
                    # can be zipped below, then removed via remove_files
                    attached_file = tmp(delete=False)
                    attached_file.write(attachment.data)
                    attached_file.flush()
                    attached_file.close()
                    files.append(attached_file.name)
                    attachment_dict["saved_file"] = os.path.basename(
                        attached_file.name)
                    data["attachments"].append(attachment_dict)
        #add explicit origin chamber for this object (used to partition data in
        #if more than one parliament exists)
        data["origin_parliament"] = get_origin_parliament(context)
        #add any additional files to file list
        files = files + PersistFiles.get_files(root_key)
        # zipping xml, attached files plus any binary fields
        # also remove the temporary files
        if files:
            #generate temporary xml file
            temp_xml = tmp(delete=False)
            temp_xml.write(serialize(data, name=obj_type))
            temp_xml.close()
            #write attachments/binary fields to zip
            with ZipFile("%s.zip" % (file_path), "w") as zip_file:
                for f in files:
                    zip_file.write(f, os.path.basename(f))
                # write the xml
                zip_file.write(temp_xml.name,
                    "%s.xml" % os.path.basename(file_path))
            # temp xml joins the cleanup list now that the zip is written
            files.append(temp_xml.name)
        else:
            # save serialized xml to file
            with open("%s.xml" % (file_path), "w") as xml_file:
                xml_file.write(serialize(data, name=obj_type))
                # NOTE(review): close() is redundant inside "with" --
                # the context manager already closes the file
                xml_file.close()
        # publish to rabbitmq outputs queue
        connection = bungeni.core.notifications.get_mq_connection()
        if not connection:
            # no MQ available: serialized file stays on disk, no cleanup
            return
        channel = connection.channel()
        publish_file_path = "%s.%s" % (file_path, ("zip" if files else "xml"))
        channel.basic_publish(exchange=SERIALIZE_OUTPUT_EXCHANGE,
            routing_key=SERIALIZE_OUTPUT_ROUTING_KEY,
            body=simplejson.dumps({
                "type": "file",
                "location": publish_file_path
            }),
            # delivery_mode=2 -> persistent message
            properties=pika.BasicProperties(
                content_type="text/plain",
                delivery_mode=2))
        #clean up - remove any files if zip was/was not created
        # (also removes a stale file of the *other* extension from a
        # previous run of the opposite branch)
        if files:
            files.append("%s.%s" % (file_path, "xml"))
        else:
            files.append("%s.%s" % (file_path, "zip"))
        remove_files(files)
        #clear the cache
        PersistFiles.clear_files(root_key)