def create_initial_application_acl(mapper, connection, target):
    """Mapper event hook: seed the default ACL rows for a new application.

    SVN applications get developer edit+view rules; all other types get
    developer view plus secretary view/edit; trac applications additionally
    allow customers to view.

    NOTE(review): ``SVN`` is compared as a bare name while ``'trac'`` is a
    string literal — confirm ``SVN`` is a module-level constant and the two
    comparisons are intentionally asymmetric.
    """
    if target.application_type == SVN:
        acl_rules = [
            ('internal_developer', 'edit'),
            ('internal_developer', 'view'),
            ('external_developer', 'edit'),
            ('external_developer', 'view'),
        ]
    else:
        acl_rules = [
            ('internal_developer', 'view'),
            ('external_developer', 'view'),
            ('secretary', 'view'),
            ('secretary', 'edit'),
        ]
    if target.application_type == 'trac':
        acl_rules.append(('customer', 'view'))
    for role_id, permission_name in acl_rules:
        # Lookup by composite primary key (application_id, role_id, permission).
        acl = Session.object_session(target).query(ApplicationACL).get((target.id, role_id, permission_name))
        if not acl:
            acl = ApplicationACL(application_id=target.id, role_id=role_id, permission_name=permission_name)
            Session.object_session(target).add(acl)
        else:
            # XXX this should not happen.
            pass
def update_translation_table():
    """Upsert a Translation row and flush it.

    NOTE(review): ``source``, ``language`` and ``value`` are not defined in
    this function — presumably closure or enclosing-scope names; confirm the
    caller binds them before invoking.
    """
    from camelot.model.i18n import Translation
    from sqlalchemy.orm.session import Session
    t = Translation.get_by(source=source, language=language)
    if not t:
        t = Translation(source=source, language=language)
    t.value = value
    # Flush only this object, not the whole session.
    Session.object_session( t ).flush( [t] )
def create_initial_kanban_acl(mapper, connection, target):
    """Mapper event hook: grant the default view permission on a new board."""
    default_rules = (
        ('role:redturtle_developer', 'view'),
    )
    session = Session.object_session(target)
    for principal, permission in default_rules:
        entry = KanbanACL(principal=principal,
                          board_id=target.id,
                          permission_name=permission)
        session.add(entry)
def add_clarification_question(self, question, answer):
    """Validate and persist a new clarification question on this brief."""
    entry = BriefClarificationQuestion(
        brief=self,
        question=question,
        answer=answer,
    )
    entry.validate()
    Session.object_session(self).add(entry)
    return entry
def run(self):
    """Worker-thread body: apply ``update_entity`` to each element of the
    collection, flushing every entity and broadcasting update/progress
    signals as it goes.
    """
    from sqlalchemy.orm.session import Session
    from camelot.view.remote_signals import get_signal_handler
    signal_handler = get_signal_handler()
    # Materialize the collection so its length is known for the progress bar.
    collection = list(self._collection_getter())
    self.update_maximum_signal.emit( len(collection) )
    for i, entity in enumerate(collection):
        message = self.update_entity(entity)
        # Flush only this entity, then tell other views it changed.
        Session.object_session( entity ).flush( [entity] )
        signal_handler.sendEntityUpdate( self, entity )
        self.update_progress_signal.emit( i, message or '')
def request():
    """Run the model function on the entity from ``entity_getter``; when
    ``self._flush`` is set, flush the entity and broadcast its update.

    NOTE(review): ``entity_getter`` and ``self`` come from an enclosing
    scope — this is a nested callback, not a free function.
    """
    from sqlalchemy.orm.session import Session
    from camelot.view.remote_signals import get_signal_handler
    o = entity_getter()
    self._model_function(o)
    if self._flush:
        sh = get_signal_handler()
        Session.object_session(o).flush([o])
        sh.sendEntityUpdate(self, o)
    return True
def update_app_position(mapper, connection, target):
    """Mapper event hook: keep ``project.applications`` ordered after the
    position of ``target`` changes.  ``position == -1`` means "move to the
    end"; otherwise the app is moved to the requested index.
    """
    for n, app in enumerate(target.project.applications):
        if app.id == target.id:
            if target.position == -1:
                # keep last
                app = target.project.applications.pop(n)
                Session.object_session(target).query(Project).get(app.project_id).applications.append(app)
            elif n != target.position:
                app = target.project.applications.pop(n)
                Session.object_session(target).query(Project).get(app.project_id).applications.insert(target.position, app)
            # Re-number the ordering-list after the move.
            target.project.applications.reorder()
            break
def removeFixture( cls, entity, fixture_key, fixture_class ): """Remove a fixture from the database""" # remove the object itself from sqlalchemy.orm.session import Session obj = cls.findFixture( entity, fixture_key, fixture_class) print 'remove', unicode(obj) obj.delete() Session.object_session( obj ).flush( [obj] ) # if this succeeeds, remove the reference reference = cls.findFixtureReference(entity, fixture_key, fixture_class) reference.delete() Session.object_session( reference ).flush( [reference] )
def set_current_version( cls, fixture_class = None, fixture_version = 0 ):
    """Set the current version of the fixtures in the database for a certain
    fixture class.

    :param fixture_class: the fixture class for which to get the version
    :param fixture_version: the version number to which to set the fixture
        version
    """
    from sqlalchemy.orm.session import Session
    obj = cls.query.filter_by( fixture_class = fixture_class ).first()
    if not obj:
        # NOTE(review): assumes a freshly constructed FixtureVersion is
        # auto-attached to a session (Elixir-style); otherwise
        # object_session() below returns None — confirm.
        obj = FixtureVersion( fixture_class = fixture_class )
    obj.fixture_version = fixture_version
    Session.object_session( obj ).flush( [obj] )
def primary_type(self, val):
    """Setter: make the role titled ``val`` this user's primary role.

    Demotes the current primary link if it points at a different role,
    promotes an existing role link when one exists, or creates a new
    RoleXUser link otherwise.  Silently does nothing when no Role with
    that title exists.
    """
    current_prim = DBSession.query(RoleXUser).filter(RoleXUser.is_primary == True, RoleXUser.user_id == self.id).first()
    if current_prim and current_prim.role.title != val:
        current_prim.is_primary = False
    existing_role = DBSession.query(RoleXUser).join(Role).filter(Role.title == val, RoleXUser.user_id == self.id).first()
    if existing_role:
        existing_role.is_primary = True
    else:
        role = DBSession.query(Role).filter(Role.title == val).first()
        if role:
            role_x_user = RoleXUser(role=role, user=self, is_primary=True)
            # NOTE(review): adds via object_session(self) while the queries
            # above use the global DBSession — confirm they are the same
            # session.
            Session.object_session(self).add(role_x_user)
def request():
    """Run the model function on the collection and selection, then flush
    the configured set of objects and broadcast their updates.

    NOTE(review): when both flags are set, ``_collection_flush`` replaces
    (does not extend) the selection flush list — confirm that overriding,
    rather than combining, is intended.
    """
    from sqlalchemy.orm.session import Session
    from camelot.view.remote_signals import get_signal_handler
    sh = get_signal_handler()
    c = list(collection_getter())
    s = list(selection_getter())
    self._model_function( c, s, options )
    to_flush = []
    if self._selection_flush:
        to_flush = s
    if self._collection_flush:
        to_flush = c
    for o in to_flush:
        Session.object_session( o ).flush( [o] )
        sh.sendEntityUpdate( self, o )
def testSaving(self):
    """Test checks for dirtyness and modification"""
    exp = Experiment()
    exp.params['project'] = "P"
    exp.params['experimenter'] = "E"
    # A freshly created object belongs to no session yet.
    obj_session = Session.object_session(exp)
    self.assertTrue(obj_session is None)
    self.m.save(exp)
    # After save it must be attached to the manager's session and clean.
    obj_session = Session.object_session(exp)
    self.assertEqual(obj_session, self.m.session)
    self.assertFalse(self.m.session.is_modified(exp))
    self.assertFalse(exp in self.m.session.dirty)
def add_measurement(self, data_source, quantity_measured, value, weight=1, taken_at=None):
    """Associate a new Measurement with this Identifier.

    Demotes the previous most-recent measurement (when older) and creates
    a new Measurement marked ``is_most_recent=True``.
    """
    _db = Session.object_session(self)
    logging.debug(
        "MEASUREMENT: %s on %s/%s: %s == %s (wt=%d)",
        data_source.name, self.type, self.identifier,
        quantity_measured, value, weight)
    now = datetime.datetime.utcnow()
    taken_at = taken_at or now
    # Is there an existing most recent measurement?
    most_recent = get_one(
        _db, Measurement, identifier=self,
        data_source=data_source,
        quantity_measured=quantity_measured,
        is_most_recent=True, on_multiple='interchangeable'
    )
    if most_recent and most_recent.value == value and taken_at == now:
        # The value hasn't changed since last time. Just update
        # the timestamp of the existing measurement.
        # NOTE(review): this assigns to ``self.taken_at`` (the Identifier),
        # not ``most_recent.taken_at`` as the comment describes — looks
        # like a bug; confirm intent before changing.
        self.taken_at = taken_at
    if most_recent and most_recent.taken_at < taken_at:
        most_recent.is_most_recent = False
    return create(
        _db, Measurement, identifier=self,
        data_source=data_source,
        quantity_measured=quantity_measured,
        taken_at=taken_at, value=value, weight=weight,
        is_most_recent=True)[0]
def Get_flot_data(self):
    """ Returns all versions in a nice format

    Produces three JavaScript variable declarations (quote/trans/revie)
    for flot charts, built from up to ``max_counter`` paired daily
    translation/review statistics for this user.
    """
    max_counter=100
    session = Session.object_session(self)
    if not session:
        # Anonymous user, no stats recorded. Queries below won't work
        # because the user doesn't exist in database and hence no
        # session object.
        output_prozt = "var quote=[];"
        output_total = "var trans=[];"
        output_trans = "var revie=[];"
        return output_prozt+output_total+output_trans
    values = list();
    Statistic2 = aliased(Statistic)
    # Join each day's translation count (Statistic) with the review count
    # (Statistic2) on the same date; first column is reviews per 1000
    # translations.
    values = session.query(Statistic2.value*1000/Statistic.value, Statistic.value, Statistic2.value). \
        filter(Statistic.stat == 'user:translations-'+self.username). \
        filter(Statistic2.stat == 'user:reviews-'+self.username). \
        filter(Statistic.date == Statistic2.date). \
        order_by(Statistic.date.asc()). \
        limit(max_counter). \
        all()
    output_prozt = "var quote=%s;" % ([[i, stat[0]/10] for i, stat in enumerate(values)])
    output_total = "var trans=%s;" % ([[i, stat[1]] for i, stat in enumerate(values)])
    output_trans = "var revie=%s;" % ([[i, stat[2]] for i, stat in enumerate(values)])
    return output_prozt+output_total+output_trans
def get_potential_fuzzy_matches(self, lang):
    """
    Returns a list of pairs (text,Parts) which may be fuzzy matches
    for this description. The part is the already translated version,
    included because we needed to look it up anyway for existence
    """
    session = Session.object_session(self)
    # Find all descriptions which share a part with this description
    PartDescr2=aliased(PartDescription)
    related_descrs = set(
        d for d, in session.query(PartDescr2.description_id).
        join(PartDescription, PartDescription.part_md5==PartDescr2.part_md5).
        filter(PartDescription.description_id==self.description_id))
    # Always add self, as part_description table is not complete
    related_descrs.add(self.description_id)
    # Finally, find all parts of all descriptions which have been
    # translated and and part of a package which share a source or
    # package
    # FIXME: don't use Description.package -> use package_version-tb
    Descr2 = aliased(Description)
    related_parts = session.query(Part, Descr2).join(PartDescription, PartDescription.part_md5 == Part.part_md5). \
        join(Descr2, Descr2.description_id == PartDescription.description_id). \
        join(Description, (Description.package == Descr2.package) | (Description.source == Descr2.source)). \
        filter(Description.description_id.in_(related_descrs)). \
        filter(Part.language == lang).all()
    # First we go through the descriptions, deconstructing them into parts
    descr_map = dict( (part_md5, part)
                      for _, descr in related_parts
                      for part, part_md5 in descr.get_description_parts() )
    # Pair each translated part with the untranslated text it matches
    # (None when the md5 is not in descr_map).
    result = [ (descr_map.get(trans.part_md5), trans)
               for trans, _ in related_parts ]
    return result
def refresh(self, session=None, attrs=None):
    """Re-load this object's state (optionally only ``attrs``) from the DB.

    Uses the given session, else the owning session, else the default one.
    """
    session = session or Session.object_session(self) or get_session()
    session.refresh(self, attrs)
def expire(self, session=None, attrs=None):
    """Mark this object (optionally only ``attrs``) as stale.

    Uses the given session, else the owning session, else the default one.
    """
    session = session or Session.object_session(self) or get_session()
    session.expire(self, attrs)
def get_description_predecessors(self):
    """ get all descriptions of the predecessors

    Collects the description ids of other descriptions for the same
    package (via package_version_tb and, temporarily, the legacy
    ``package`` column) and returns the matching Description rows.
    """
    session = Session.object_session(self)
    PackageVersion2=aliased(PackageVersion)
    #SELECT B.description_id from package_version_tb AS A LEFT JOIN package_version_tb AS B ON A.package = B.package where A.description_id='79246' group by B.description_id;
    DescriptionIDs = [x for x, in session.query(PackageVersion2.description_id). \
        join(PackageVersion, PackageVersion2.package == PackageVersion.package). \
        filter(PackageVersion.description_id == self.description_id).\
        filter(PackageVersion2.description_id != self.description_id). \
        group_by(PackageVersion2.description_id).\
        all()]
    # START REMOVE AFTER FIX
    # FIXME
    # use later only package_version_tb and not the old package field
    # SELECT B.description_id from description_tb AS A left join description_tb AS B ON A.package = B.package where A.description_id='79246' group by B.description_id;
    Description2=aliased(Description)
    DescriptionIDs2 = [x for x, in session.query(Description2.description_id). \
        join(Description, Description2.package == Description.package). \
        filter(Description.description_id == self.description_id).\
        filter(Description.description_id != self.description_id). \
        group_by(Description2.description_id). \
        all()]
    DescriptionIDs += DescriptionIDs2
    # END REMOVE AFTER FIX
    #return dict.fromkeys(DescriptionIDs).keys()
    result = session.query(Description).filter(Description.description_id.in_(DescriptionIDs)).all()
    return result
def __nonzero__(self):
    """Truth value: does this image set contain at least one row?"""
    session = Session.object_session(self.instance)
    if session is None:
        # Detached instance: fall back to counting in Python.
        return bool(self.count())
    # Attached: let the database answer via EXISTS; the query yields at
    # most one row.
    for (flag,) in session.query(exists(self.as_scalar())):
        return bool(flag)
    return False
def charge_network_fees(self, broadcast, fee):
    """Account network fees due to transaction broadcast.

    By default this creates a new accounting entry on a special
    account (`GenericAccount.NETWORK_FEE_ACCOUNT`) where the
    network fees are put.

    :param txs: Internal transactions participating in send

    :param txid: External transaction id

    :param fee: Fee as the integer
    """
    session = Session.object_session(self)
    fee_account = self.get_or_create_network_fee_account()
    # TODO: Not sure which one is better approach
    # assert fee_account.id, "Fee account is not properly constructed, flush() DB"
    session.flush()
    # Record the fee as a transaction from the fee account to nowhere.
    transaction = self.coin_description.Transaction()
    transaction.sending_account = fee_account
    transaction.receiving_account = None
    transaction.amount = fee
    transaction.state = "network_fee"
    transaction.wallet = self
    transaction.label = "Network fees for {}".format(broadcast.txid)
    # The fee leaves both the fee account and the wallet balance.
    fee_account.balance -= fee
    self.balance -= fee
    session.add(fee_account)
    session.add(transaction)
def open_file(self, store=current_store, use_seek=False):
    """Opens the file-like object which is a context manager
    (that means it can used for :keyword:`with` statement).

    If ``use_seek`` is ``True`` (though ``False`` by default)
    it guarentees the returned file-like object is also seekable
    (provides :meth:`~file.seek()` method).

    :param store: the storage which contains image files.
                  :data:`~sqlalchemy_imageattach.context.current_store`
                  by default
    :type store: :class:`~sqlalchemy_imageattach.store.Store`
    :returns: the file-like object of the image, which is a context
              manager (plus, also seekable only if ``use_seek``
              is ``True``)
    :rtype: :class:`file`,
            :class:`~sqlalchemy_imageattach.file.FileProxy`,
            file-like object
    """
    if not isinstance(store, Store):
        raise TypeError('store must be an instance of '
                        'sqlalchemy_imageattach.store.Store, not ' +
                        repr(store))
    if Session.object_session(self) is None:
        # Detached instance: serve the locally attached file, if any.
        try:
            file = self.file
        except AttributeError:
            raise IOError('no stored original image file')
        return ReusableFileProxy(file)
    return store.open(self, use_seek)
def open_file(self, store=current_store, use_seek=False):
    """The shorthand of :meth:`~Image.open_file()` for
    the :attr:`original`.

    :param store: the storage which contains the image files
                  :data:`~sqlalchemy_imageattach.context.current_store`
                  by default
    :type store: :class:`~sqlalchemy_imageattach.store.Store`
    :param use_seek: whether the file should seekable.
                     if ``True`` it maybe buffered in the memory.
                     default is ``False``
    :type use_seek: :class:`bool`
    :returns: the file-like object of the image, which is a context
              manager (plus, also seekable only if ``use_seek``
              is ``True``)
    :rtype: :class:`file`,
            :class:`~sqlalchemy_imageattach.file.FileProxy`,
            file-like object
    """
    original = self.require_original()
    if Session.object_session(self.instance) is None:
        # Detached instance: serve the locally attached file, if any.
        try:
            file = original.file
        except AttributeError:
            raise IOError('no stored original image file')
        return ReusableFileProxy(file)
    return original.open_file(store, use_seek)
def is_configured(cls, library):
    """Return True when analytics is enabled globally or for ``library``."""
    if cls.GLOBAL_ENABLED is None:
        # First call: constructing Analytics populates the class-level
        # GLOBAL_ENABLED / LIBRARY_ENABLED flags as a side effect.
        Analytics(Session.object_session(library))
    if cls.GLOBAL_ENABLED:
        return True
    return library.id in cls.LIBRARY_ENABLED
def register(self, license_pool, type, source, detail, resolved=None):
    """Register a problem detail document as a Complaint against the
    given LicensePool.
    """
    if not license_pool:
        raise ValueError("No license pool provided")
    _db = Session.object_session(license_pool)
    if type not in self.VALID_TYPES:
        raise ValueError("Unrecognized complaint type: %s" % type)
    now = datetime.datetime.utcnow()
    if source:
        # One complaint per (pool, source, type): re-use the row and
        # refresh its timestamp/detail.
        complaint, is_new = get_one_or_create(
            _db, Complaint,
            license_pool=license_pool,
            source=source, type=type,
            resolved=resolved,
            on_multiple='interchangeable',
            create_method_kwargs = dict(
                timestamp=now,
            )
        )
        complaint.timestamp = now
        complaint.detail = detail
    else:
        # Anonymous complaints always get a fresh row.
        complaint, is_new = create(
            _db,
            Complaint,
            license_pool=license_pool,
            source=source,
            type=type,
            timestamp=now,
            detail=detail,
            resolved=resolved
        )
    return complaint, is_new
def original(self):
    """(:class:`Image`) The original image. It could be ``None`` if
    there are no stored images yet.
    """
    if Session.object_session(self.instance) is None:
        # Detached instance: search the locally tracked image sets
        # instead of querying the database.
        for image, store in self._stored_images:
            if image.original:
                return image
        state = instance_state(self.instance)
        try:
            added = state.committed_state[self.attr.key].added_items
        except KeyError:
            pass
        else:
            for image in added:
                if image.original:
                    return image
        if self.session:
            for image in self.session.new:
                if image.original:
                    return image
        # Nothing found locally: implicit None.
        return
    query = self.filter_by(original=True)
    try:
        return query.one()
    except NoResultFound:
        # No original stored yet: fall through to implicit None.
        pass
def receive_set(attribute, config, target):
    """Record cache keys to purge when ``attribute`` changes on ``target``."""
    key_maker = config.registry["cache_keys"][attribute]
    session = Session.object_session(target)
    # Accumulate purge keys on the session so they are flushed once.
    pending = session.info.setdefault("warehouse.cache.origin.purges", set())
    pending.update(list(key_maker(target).purge))
def metadata_to_list_entry(self, custom_list, data_source, now, metadata):
    """Convert a Metadata object to a CustomListEntry."""
    _db = Session.object_session(data_source)
    title_from_external_list = self.metadata_to_title(now, metadata)
    list_entry, was_new = title_from_external_list.to_custom_list_entry(
        custom_list, self.metadata_client, self.overwrite_old_data)
    e = list_entry.edition
    if not e:
        # We couldn't create an Edition, probably because we
        # couldn't find a useful Identifier.
        self.log.info("Could not create edition for %s", metadata.title)
    else:
        # Log whether a matching Work already exists in the collection.
        q = _db.query(Work).join(Work.presentation_edition).filter(
            Edition.permanent_work_id==e.permanent_work_id)
        if q.count() > 0:
            self.log.info("Found matching work in collection for %s",
                          metadata.title
            )
        else:
            self.log.info("No matching work found for %s",
                          metadata.title
            )
    return list_entry
def register_drm_device_identifier(self, device_identifier):
    """Fetch or create the DRMDeviceIdentifier tied to this credential."""
    db = Session.object_session(self)
    return get_one_or_create(
        db,
        DRMDeviceIdentifier,
        credential=self,
        device_identifier=device_identifier,
    )
def to_custom_list_entry(self, custom_list, metadata_client, overwrite_old_data=False):
    """Turn this object into a CustomListEntry with associated Edition.

    Widens the entry's first/most-recent appearance window when this
    object's appearances fall outside it, then updates annotation and work.
    """
    _db = Session.object_session(custom_list)
    edition = self.to_edition(_db, metadata_client, overwrite_old_data)
    list_entry, is_new = get_one_or_create(
        _db, CustomListEntry, edition=edition, customlist=custom_list
    )
    if (not list_entry.first_appearance
        or list_entry.first_appearance > self.first_appearance):
        if list_entry.first_appearance:
            self.log.info(
                "I thought %s first showed up at %s, but then I saw it earlier, at %s!",
                self.metadata.title, list_entry.first_appearance,
                self.first_appearance
            )
        list_entry.first_appearance = self.first_appearance
    if (not list_entry.most_recent_appearance
        or list_entry.most_recent_appearance < self.most_recent_appearance):
        if list_entry.most_recent_appearance:
            self.log.info(
                "I thought %s most recently showed up at %s, but then I saw it later, at %s!",
                self.metadata.title, list_entry.most_recent_appearance,
                self.most_recent_appearance
            )
        list_entry.most_recent_appearance = self.most_recent_appearance
    list_entry.annotation = self.annotation
    list_entry.set_work(self.metadata, metadata_client)
    return list_entry, is_new
def update_listener(mapper, connection, target):
    """Mapper event hook: persist Translation rows for a modified entity's
    translatable fields, in the request's translation language.

    When editing in a non-default language, the previous (default) field
    value is restored on the entity so the base record stays untranslated.
    """
    modified = Session.object_session(target).is_modified(target, include_collections=False)
    if modified:
        translations = {}
        insp = inspect(target)
        db = kernel.db.Database()
        # Index existing translations: lng_code -> trl_code -> Translation.
        for t in get_translations(target, db):
            if not t.lng_code in translations:
                translations[t.lng_code] = {}
            translations[t.lng_code][t.trl_code] = t
        default_lang = get_default_language().lng_code
        # Language being edited: from the request form, else the session's
        # translation language.
        trans_lang = request.forms.get('translation_language')
        if not trans_lang:
            trans_lang = get_translation_language().lng_code
        if not trans_lang in translations:
            translations[trans_lang] = {}
        for f in target.translatable_fields:
            if getattr(target, f) is not None and getattr(target, f) != 'None' and modified:
                attr_state = insp.attrs[f]
                code = get_entity_code(target, f)
                # Re-use the existing Translation row or start a new one.
                if code in translations[trans_lang]:
                    translation = translations[trans_lang][code]
                else:
                    translation = Translation()
                    translation.trl_code = code
                    translation.lng_code = trans_lang
                translation.trl_text = str(getattr(target, f))
                db.add(translation)
                # Cache the translated value on the entity itself.
                if not hasattr(target, '__translation__'):
                    setattr(target, '__translation__', {})
                if not trans_lang in target.__translation__:
                    target.__translation__[trans_lang] = {}
                target.__translation__[trans_lang][f] = getattr(target, f)
                if trans_lang != default_lang and len(attr_state.history.deleted) > 0:
                    # Editing a non-default language: restore the previous
                    # (default-language) value onto the entity.
                    setattr(target, f, attr_state.history.deleted[0])
        db.commit()
        db.close()
def raw_analyses(self):
    """Worker results for this package analysis; [] when detached."""
    session = Session.object_session(self)
    if not session:
        return []
    return session.query(PackageWorkerResult).filter(
        PackageWorkerResult.package_analysis_id == self.id)
def add_link(self, rel, href, data_source, media_type=None, content=None,
             content_path=None, rights_status_uri=None, rights_explanation=None,
             original_resource=None, transformation_settings=None):
    """Create a link between this Identifier and a (potentially new)
    Resource.

    TODO: There's some code in metadata_layer for automatically
    fetching, mirroring and scaling Representations as links are
    created. It might be good to move that code into here.
    """
    from resource import (
        Resource,
        Hyperlink,
        Representation,
    )
    _db = Session.object_session(self)
    # Find or create the Resource.
    if not href:
        href = Hyperlink.generic_uri(data_source, self, rel, content)
    rights_status = None
    if rights_status_uri:
        rights_status = RightsStatus.lookup(_db, rights_status_uri)
    resource, new_resource = get_one_or_create(
        _db, Resource, url=href,
        create_method_kwargs=dict(data_source=data_source,
                                  rights_status=rights_status,
                                  rights_explanation=rights_explanation))
    # Find or create the Hyperlink.
    link, new_link = get_one_or_create(
        _db, Hyperlink, rel=rel, data_source=data_source,
        identifier=self, resource=resource,
    )
    if content or content_path:
        # We have content for this resource.
        resource.set_fetched_content(media_type, content, content_path)
    elif (media_type and not resource.representation):
        # We know the type of the resource, so make a
        # Representation for it.
        resource.representation, is_new = get_one_or_create(
            _db, Representation, url=resource.url, media_type=media_type)
    if original_resource:
        original_resource.add_derivative(link.resource, transformation_settings)
    # TODO: This is where we would mirror the resource if we
    # wanted to.
    return link, new_link
def equivalent_identifier_ids(self, levels=5, threshold=0.5):
    """Delegate to the recursive class-level equivalence lookup for this id."""
    db = Session.object_session(self)
    return Identifier.recursively_equivalent_identifier_ids(
        db, [self.id], levels, threshold)
def software_config_delete(context, config_id):
    """Delete the software config identified by ``config_id`` and flush."""
    config = software_config_get(context, config_id)
    owning_session = Session.object_session(config)
    owning_session.delete(config)
    owning_session.flush()
def __init__(self, library, vendor_id, node_value, authenticator):
    """Wire up the Adobe Vendor ID request handler and model for ``library``.

    :param vendor_id: vendor id the request handler will answer for
    :param node_value: node value passed through to the model
    :param authenticator: used by AdobeVendorIDModel to validate patrons
    """
    self.library = library
    self._db = Session.object_session(library)
    self.request_handler = AdobeVendorIDRequestHandler(vendor_id)
    self.model = AdobeVendorIDModel(library, authenticator, node_value)
def works_on_loan_or_on_hold(self):
    """Return the set of Works this patron has on hold or on loan.

    Holds without an associated work are skipped.  (The original also
    computed an unused session handle and an unused ``results`` set;
    both removed.)
    """
    held = [hold.work for hold in self.holds if hold.work]
    loaned = self.works_on_loan()
    return set(held + loaned)
def __init__(self, custom_list, log=None):
    """Bind this membership manager to ``custom_list`` and its session."""
    default_log = logging.getLogger(
        "Membership manager for %s" % custom_list.name)
    self.log = log or default_log
    self._db = Session.object_session(custom_list)
    self.custom_list = custom_list
def active_card_count(self):
    """Count cards in this swimlane that are neither discarded nor removed."""
    session = Session.object_session(self)
    active = session.query(EntryState).filter(and_(
        EntryState.swimlane == self,
        ~EntryState.status.in_(['discarded', 'removed'])
    ))
    return active.count()
def session(self):
    """The SQLAlchemy session this instance is attached to, or None."""
    return Session.object_session(self)
def from_config(cls, library):
    """Initialize an AuthdataUtility from site configuration.

    :return: An AuthdataUtility if one is configured; otherwise None.

    :raise CannotLoadConfiguration: If an AuthdataUtility is incompletely
        configured.
    """
    _db = Session.object_session(library)
    # Try to find an external integration with a configured Vendor ID.
    integrations = _db.query(
        ExternalIntegration
    ).outerjoin(
        ExternalIntegration.libraries
    ).filter(
        ExternalIntegration.protocol==ExternalIntegration.OPDS_REGISTRATION,
        ExternalIntegration.goal==ExternalIntegration.DISCOVERY_GOAL,
        Library.id==library.id
    )
    integration = None
    for possible_integration in integrations:
        # First integration with a Vendor ID setting wins.
        vendor_id = ConfigurationSetting.for_externalintegration(
            cls.VENDOR_ID_KEY, possible_integration).value
        if vendor_id:
            integration = possible_integration
            break
    library_uri = ConfigurationSetting.for_library(
        Configuration.WEBSITE_URL, library).value
    if not integration:
        return None
    vendor_id = integration.setting(cls.VENDOR_ID_KEY).value
    library_short_name = ConfigurationSetting.for_library_and_externalintegration(
        _db, ExternalIntegration.USERNAME, library, integration
    ).value
    secret = ConfigurationSetting.for_library_and_externalintegration(
        _db, ExternalIntegration.PASSWORD, library, integration
    ).value
    other_libraries = None
    # Optional: mappings for other libraries via the Adobe Vendor ID
    # integration.
    adobe_integration = ExternalIntegration.lookup(
        _db, ExternalIntegration.ADOBE_VENDOR_ID,
        ExternalIntegration.DRM_GOAL, library=library
    )
    if adobe_integration:
        other_libraries = adobe_integration.setting(cls.OTHER_LIBRARIES_KEY).json_value
    other_libraries = other_libraries or dict()
    if (not vendor_id or not library_uri
        or not library_short_name or not secret
    ):
        raise CannotLoadConfiguration(
            "Short Client Token configuration is incomplete. "
            "vendor_id, username, password and "
            "Library website_url must all be defined.")
    if '|' in library_short_name:
        # The pipe is the token field separator.
        raise CannotLoadConfiguration(
            "Library short name cannot contain the pipe character."
        )
    return cls(vendor_id, library_uri, library_short_name, secret, other_libraries)
def snapshot_delete(context, snapshot_id):
    """Delete the snapshot identified by ``snapshot_id`` and flush."""
    snapshot = snapshot_get(context, snapshot_id)
    owning_session = Session.object_session(snapshot)
    owning_session.delete(snapshot)
    owning_session.flush()
def software_deployment_delete(context, deployment_id):
    """Delete the software deployment identified by ``deployment_id`` and flush."""
    deployment = software_deployment_get(context, deployment_id)
    owning_session = Session.object_session(deployment)
    owning_session.delete(deployment)
    owning_session.flush()
def collapse_to_seq(self):
    """Return the Sequence this record collapses into."""
    session = Session.object_session(self)
    query = session.query(Sequence).filter(
        Sequence.sample_id == self.collapse_to_subject_sample_id,
        Sequence.ai == self.collapse_to_subject_seq_ai)
    # Exactly one target sequence must exist.
    return query.one()
def _dbsession(self):
    """The SQLAlchemy session that ``self.manager`` is attached to, or None."""
    return Session.object_session(self.manager)
def from_config(cls, library: Library, _db=None):
    """Initialize an AuthdataUtility from site configuration.

    The library must be successfully registered with a discovery
    integration in order for that integration to be a candidate
    to provide configuration for the AuthdataUtility.

    :return: An AuthdataUtility if one is configured; otherwise None.

    :raise CannotLoadConfiguration: If an AuthdataUtility is incompletely
        configured.
    """
    _db = _db or Session.object_session(library)
    if not _db:
        raise ValueError(
            "No database connection provided and could not derive one from Library object!"
        )
    # Use a version of the library
    library = _db.merge(library, load=False)
    # Try to find an external integration with a configured Vendor ID.
    integrations = (_db.query(ExternalIntegration).outerjoin(
        ExternalIntegration.libraries).filter(
            ExternalIntegration.protocol == ExternalIntegration.OPDS_REGISTRATION,
            ExternalIntegration.goal == ExternalIntegration.DISCOVERY_GOAL,
            Library.id == library.id,
        ))
    for possible_integration in integrations:
        vendor_id = ConfigurationSetting.for_externalintegration(
            cls.VENDOR_ID_KEY, possible_integration).value
        registration_status = (
            ConfigurationSetting.for_library_and_externalintegration(
                _db,
                RegistrationConstants.LIBRARY_REGISTRATION_STATUS,
                library,
                possible_integration,
            ).value)
        if (vendor_id and registration_status == RegistrationConstants.SUCCESS_STATUS):
            # Only a successfully registered integration qualifies.
            integration = possible_integration
            break
    else:
        # No qualifying integration: no AuthdataUtility.
        return None
    library_uri = ConfigurationSetting.for_library(
        Configuration.WEBSITE_URL, library).value
    vendor_id = integration.setting(cls.VENDOR_ID_KEY).value
    library_short_name = ConfigurationSetting.for_library_and_externalintegration(
        _db, ExternalIntegration.USERNAME, library, integration).value
    secret = ConfigurationSetting.for_library_and_externalintegration(
        _db, ExternalIntegration.PASSWORD, library, integration).value
    other_libraries = None
    # Optional: mappings for other libraries via the Adobe Vendor ID
    # integration.
    adobe_integration = ExternalIntegration.lookup(
        _db,
        ExternalIntegration.ADOBE_VENDOR_ID,
        ExternalIntegration.DRM_GOAL,
        library=library,
    )
    if adobe_integration:
        other_libraries = adobe_integration.setting(
            cls.OTHER_LIBRARIES_KEY).json_value
    other_libraries = other_libraries or dict()
    if not vendor_id or not library_uri or not library_short_name or not secret:
        raise CannotLoadConfiguration(
            "Short Client Token configuration is incomplete. "
            "vendor_id (%s), username (%s), password (%s) and "
            "Library website_url (%s) must all be defined." %
            (vendor_id, library_uri, library_short_name, secret))
    if "|" in library_short_name:
        # The pipe is the token field separator.
        raise CannotLoadConfiguration(
            "Library short name cannot contain the pipe character.")
    return cls(vendor_id, library_uri, library_short_name, secret,
               other_libraries)
def move(self, how_many, where, delay, sector=0, conf=None):
    """Issue marching orders moving ``how_many`` loyalists along the path
    ``where`` (a region or list of regions), or teleport immediately when
    ``delay`` is not positive.

    Raises InProgressException when already marching or fighting (unless a
    configured sector retreat is allowed), InsufficientException when too
    few loyalists, NoSuchSectorException for an invalid sector, and
    NonAdjacent/TeamException for illegal path legs.  Returns the list of
    MarchingOrder rows created (empty for an instant move).
    """
    sess = Session.object_session(self)
    where = forcelist(where)
    already = sess.query(MarchingOrder).filter_by(leader=self).first()
    if already:
        raise InProgressException(already)
    fighting = (sess.query(SkirmishAction).filter_by(
        participant=self).first())
    if fighting:
        # A sector retreat inside the current region may be allowed by config.
        allow = False
        if conf:
            if len(where) == 1 and where[0] == self.region:
                allow = conf["game"].get("allow_sector_retreat", False)
        if not allow:
            raise InProgressException(fighting)
    if how_many > self.loyalists:
        # TODO: Attempt to pick up loyalists
        raise InsufficientException(how_many, self.loyalists, "loyalists")
    # Is that sector even real?
    if conf:
        num_sectors = conf["game"].get("num_sectors", 1)
        if sector < 0 or sector > num_sectors:
            raise NoSuchSectorException(sector, num_sectors)
        elif sector == 0:
            # Assign a random sector
            sector = random.randint(1, num_sectors)
    # TODO: Drop off loyalists
    # Validate every leg of the path before creating any orders.
    locations = [self.region] + where
    for src, dest in pairwise(locations):
        if src == dest:
            continue
        if dest not in src.borders:
            raise NonAdjacentException(src, dest)
        traverse_neutrals = False
        if conf:
            traverse_neutrals = conf["game"].get("traversable_neutrals", False)
        if not dest.enterable_by(self.team, traverse_neutrals=traverse_neutrals):
            raise TeamException(dest)
    orders = []
    if delay > 0:
        # Timed march: one MarchingOrder per leg with cumulative delay.
        orders = []
        total_delay = 0
        for src, dest in pairwise(locations):
            travel_mult = max(src.travel_multiplier, dest.travel_multiplier)
            if src != dest:
                total_delay += (delay * travel_mult)
            else:
                if conf:
                    intrasector = conf["game"].get("intrasector_travel", 900)
                    # Travel multiplier doesn't apply to intrasector
                    total_delay += intrasector
            mo = MarchingOrder(arrival=time.mktime(time.localtime()) + total_delay,
                               leader=self,
                               source=src,
                               dest=dest,
                               dest_sector=sector)
            orders.append(mo)
            sess.add(mo)
    else:
        # Instant move: jump straight to the final destination.
        self.region = where[-1]
        self.sector = sector
        # TODO: Change number of loyalists
        self.defectable = False
    sess.commit()
    return orders
def works_on_loan(self):
    """Return the Works for this patron's loans, skipping loans with no work.

    The original built (but never executed or used) a ``db.query(Loan)``
    and then iterated the ``self.loans`` relationship instead; the dead
    query and unused session handle are removed.
    """
    return [loan.work for loan in self.loans if loan.work]
def remove_role(self, role, library=None):
    """Delete the matching AdminRole for this admin, if one exists."""
    db = Session.object_session(self)
    existing = get_one(db, AdminRole, admin=self, role=role, library=library)
    if existing:
        db.delete(existing)
def state(self, state):
    """Set the state, stamp the transition time, and commit if attached."""
    self._state = state
    self.state_time = int(time.time())
    owning_session = Session.object_session(self)
    if owning_session:
        owning_session.commit()
def register_drm_device_identifier(self, device_identifier):
    """Look up or create the DRMDeviceIdentifier for this credential."""
    session = Session.object_session(self)
    return get_one_or_create(session, DRMDeviceIdentifier,
                             credential=self,
                             device_identifier=device_identifier)
def self_and_subgenres(self):
    """This genre plus all its subgenres, as Genre model objects."""
    db = Session.object_session(self)
    return [self.lookup(db, genre_data.name)[0]
            for genre_data in self.genredata.self_and_subgenres]
def add_role(self, role, library=None):
    """Ensure an AdminRole exists for this admin and return it."""
    db = Session.object_session(self)
    created_role, _ = get_one_or_create(
        db, AdminRole, admin=self, role=role, library=library)
    return created_role
def flush(self, entity_instance):
    """Flush the pending changes of this entity instance to the backend"""
    from sqlalchemy.orm.session import Session
    owning_session = Session.object_session(entity_instance)
    if owning_session:
        # Flush only this instance, not the whole session.
        owning_session.flush([entity_instance])
def stats(self):
    """Stats wrapper for this user on the active server, ordered by name."""
    session = Session.object_session(self)
    rows = session.query(Stat).filter(
        Stat.user_id == self.id,
        Stat.server_id == self.active_server_id
    ).order_by(Stat.name).all()
    return data_models.Stats(self, rows)
def expunge(self, entity_instance):
    """Expunge the entity from the session"""
    from sqlalchemy.orm.session import Session
    owning_session = Session.object_session(entity_instance)
    if owning_session:
        owning_session.expunge(entity_instance)
def merge_into(self, destination):
    """Two Contributor records should be the same. Merge this one into
    the other one.

    For now, this should only be used when the exact same record comes
    in through two sources. It should not be used when two Contributors
    turn out to represent different names for the same human being,
    e.g. married names or (especially) pen names. Just because we
    haven't thought that situation through well enough.
    """
    if self == destination:
        # They're already the same.
        return
    logging.info(
        u"MERGING %r (%s) into %r (%s)",
        self, self.viaf, destination, destination.viaf
    )
    # make sure we're not losing any names we know for the contributor
    existing_aliases = set(destination.aliases)
    new_aliases = list(destination.aliases)
    for name in [self.sort_name] + self.aliases:
        if name != destination.sort_name and name not in existing_aliases:
            new_aliases.append(name)
    if new_aliases != destination.aliases:
        destination.aliases = new_aliases
    # Fill in fields the destination is missing; never overwrite a
    # value the destination already has.
    if not destination.family_name:
        destination.family_name = self.family_name
    if not destination.display_name:
        destination.display_name = self.display_name
    # keep sort_name if one of the contributor objects has it.
    if not destination.sort_name:
        destination.sort_name = self.sort_name
    if not destination.wikipedia_name:
        destination.wikipedia_name = self.wikipedia_name
    # merge non-name-related properties
    for k, v in self.extra.items():
        if not k in destination.extra:
            destination.extra[k] = v
    if not destination.lc:
        destination.lc = self.lc
    if not destination.viaf:
        destination.viaf = self.viaf
    if not destination.biography:
        destination.biography = self.biography
    _db = Session.object_session(self)
    for contribution in self.contributions:
        # Is the new contributor already associated with this
        # Edition in the given role (in which case we delete
        # the old contribution) or not (in which case we switch the
        # contributor ID)?
        existing_record = _db.query(Contribution).filter(
            Contribution.contributor_id == destination.id,
            Contribution.edition_id == contribution.edition.id,
            Contribution.role == contribution.role)
        if existing_record.count():
            _db.delete(contribution)
        else:
            contribution.contributor_id = destination.id
    _db.commit()
    # Finally remove the now-redundant source record.
    _db.delete(self)
    _db.commit()
def generate_thumbnail(self, ratio=None, width=None, height=None,
                       filter='undefined', store=current_store,
                       _preprocess_image=None, _postprocess_image=None):
    """Resizes the :attr:`original` (scales up or down) and then
    store the resized thumbnail into the ``store``.

    :param ratio: resize by its ratio.  if it's greater than 1
                  it scales up, and if it's less than 1 it scales
                  down.  exclusive for ``width`` and ``height``
                  parameters
    :type ratio: :class:`numbers.Real`
    :param width: resize by its width.  exclusive for ``ratio``
                  and ``height`` parameters
    :type width: :class:`numbers.Integral`
    :param height: resize by its height.  exclusive for ``ratio``
                   and ``width`` parameters
    :type height: :class:`numbers.Integral`
    :param filter: a filter type to use for resizing.  choose one in
                   :const:`wand.image.FILTER_TYPES`.  default is
                   ``'undefined'`` which means ImageMagick will try
                   to guess best one to use
    :type filter: :class:`basestring`, :class:`numbers.Integral`
    :param store: the storage to store the resized image file.
                  :data:`~sqlalchemy_imageattach.context.current_store`
                  by default
    :type store: :class:`~sqlalchemy_imageattach.store.Store`
    :param _preprocess_image: internal-use only option for
                              preprocessing original image before
                              resizing.  it has to be callable which
                              takes a :class:`wand.image.Image` object
                              and returns a new :class:`wand.image.Image`
                              object
    :type _preprocess_image: :class:`collections.Callable`
    :param _postprocess_image: internal-use only option for
                               postprocessing the resized image before
                               it is saved.  it has to be callable which
                               takes a :class:`wand.image.Image` object
                               and returns a new
                               :class:`wand.image.Image` object
    :type _postprocess_image: :class:`collections.Callable`
    :returns: the resized thumbnail image.  it might be an already
              existing image if the same size already exists
    :rtype: :class:`Image`
    :raises exceptions.IOError: when there's no :attr:`original`
                                image yet
    """
    # Exactly one of ratio/width/height must be supplied.
    params = ratio, width, height
    param_count = sum(p is not None for p in params)
    if not param_count:
        raise TypeError('pass an argument ratio, width, or height')
    elif param_count > 1:
        raise TypeError('pass only one argument in ratio, width, or '
                        'height; these parameters are exclusive for '
                        'each other')
    # Transient = owning instance not attached to a session yet, so
    # existing thumbnails can only be found among pending added_items,
    # not via a query.
    transient = Session.object_session(self.instance) is None
    state = instance_state(self.instance)
    try:
        added = state.committed_state[self.attr.key].added_items
    except KeyError:
        added = []
    # Whichever dimension was NOT given is replaced by a lambda that
    # computes it from the original size, preserving aspect ratio.
    if width is not None:
        if not isinstance(width, numbers.Integral):
            raise TypeError('width must be integer, not ' + repr(width))
        elif width < 1:
            raise ValueError('width must be natural number, not ' +
                             repr(width))
        # find the same-but-already-generated thumbnail
        for image in added:
            if image.width == width:
                return image
        if not transient:
            query = self.filter_by(width=width)
            try:
                return query.one()
            except NoResultFound:
                pass
        height = lambda sz: sz[1] * (width / sz[0])
    elif height is not None:
        if not isinstance(height, numbers.Integral):
            raise TypeError('height must be integer, not ' +
                            repr(height))
        elif height < 1:
            raise ValueError('height must be natural number, not ' +
                             repr(height))
        # find the same-but-already-generated thumbnail
        for image in added:
            if image.height == height:
                return image
        if not transient:
            query = self.filter_by(height=height)
            try:
                return query.one()
            except NoResultFound:
                pass
        width = lambda sz: sz[0] * (height / sz[1])
    elif ratio is not None:
        if not isinstance(ratio, numbers.Real):
            raise TypeError('ratio must be an instance of numbers.Real, '
                            'not ' + repr(ratio))
        width = lambda sz: sz[0] * ratio
        height = lambda sz: sz[1] * ratio
    data = io.BytesIO()
    with self.open_file(store=store) as f:
        if _preprocess_image is None:
            img = WandImage(file=f)
        else:
            with WandImage(file=f) as img:
                img = _preprocess_image(img)
        with img:
            original_size = img.size
            # Resolve the deferred dimension(s) now that the original
            # size is known.
            if callable(width):
                width = width(original_size)
            if callable(height):
                height = height(original_size)
            width = int(width)
            height = int(height)
            # find the same-but-already-generated thumbnail
            for image in added:
                if image.width == width and image.height == height:
                    return image
            if not transient:
                query = self.filter_by(width=width, height=height)
                try:
                    return query.one()
                except NoResultFound:
                    pass
            img.resize(width, height, filter=filter)
            if _postprocess_image is None:
                mimetype = img.mimetype
                img.save(file=data)
            else:
                with _postprocess_image(img) as img:
                    mimetype = img.mimetype
                    img.save(file=data)
    return self.from_raw_file(data, store, size=(width, height),
                              mimetype=mimetype, original=False)
def unseen_message_count(self):
    """Count messages in this subscription's group chat newer than the last one seen."""
    session = Session.object_session(self)
    unseen = (
        session.query(Message.id)
        .join(
            GroupChatSubscription,
            GroupChatSubscription.group_chat_id == Message.conversation_id,
        )
        .filter(GroupChatSubscription.id == self.id)
        .filter(Message.id > GroupChatSubscription.last_seen_message_id)
    )
    return unseen.count()
def for_library(cls, key, library):
    """Find or create a ConfigurationSetting for the given Library."""
    session = Session.object_session(library)
    # A library-only setting is one with no external integration.
    return cls.for_library_and_externalintegration(session, key, library,
                                                   None)
def delete(self):
    """Remove this object from its session and commit immediately."""
    sess = Session.object_session(self)
    sess.delete(self)
    sess.commit()