Example #1
def _create_object_in_session( obj ):
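    # note: the guard below checks the imported object_session callable itself
    # (presumably bound to None when SQLAlchemy is unavailable), not the
    # session returned for obj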
    session = object_session( obj ) if object_session is not None else None
    if session is not None:
        object_session( obj ).add( obj )
        object_session( obj ).flush()
    else:
        raise Exception( NO_SESSION_ERROR_MESSAGE )
Example #2
def test_invalid_token_during_connect(db, patch_access_token_getter,
                                      account_with_single_auth_creds):
    account_id = account_with_single_auth_creds.id

    patch_access_token_getter.revoke_refresh_token(
        account_with_single_auth_creds.auth_credentials[0].refresh_token)
    account_with_single_auth_creds.verify_all_credentials()
    assert len(account_with_single_auth_creds.valid_auth_credentials) == 0
    g_token_manager.clear_cache(account_with_single_auth_creds)

    # connect_account() takes an /expunged/ account object
    # that has the necessary relationships eager-loaded
    object_session(account_with_single_auth_creds).expunge(
        account_with_single_auth_creds)
    assert not object_session(account_with_single_auth_creds)

    account = db.session.query(GmailAccount).options(
        joinedload(GmailAccount.auth_credentials)).get(
        account_id)
    db.session.expunge(account)
    assert not object_session(account)

    g = GmailAuthHandler('gmail')

    with pytest.raises(OAuthError):
        g.connect_account(account)

    invalid_account = db.session.query(GmailAccount).get(account_id)
    for auth_creds in invalid_account.auth_credentials:
        assert not auth_creds.is_valid
Example #3
    def last_modified(self):
        """ Returns last change of the elections. """

        changes = [self.last_change, self.last_result_change]
        session = object_session(self)
        election_ids = [election.id for election in self.elections]

        # Get the last election change
        result = object_session(self).query(Election.last_change)
        result = result.order_by(desc(Election.last_change))
        result = result.filter(Election.id.in_(election_ids))
        changes.append(result.first()[0] if result.first() else None)

        # Get the last candidate change
        result = object_session(self).query(Candidate.last_change)
        result = result.order_by(desc(Candidate.last_change))
        result = result.filter(Candidate.election_id.in_(election_ids))
        changes.append(result.first()[0] if result.first() else None)

        # Get the last list connection change
        result = session.query(ListConnection.last_change)
        result = result.order_by(desc(ListConnection.last_change))
        result = result.filter(ListConnection.election_id.in_(election_ids))
        changes.append(result.first()[0] if result.first() else None)

        # Get the last list change
        result = session.query(List.last_change)
        result = result.order_by(desc(List.last_change))
        result = result.filter(List.election_id == self.id)
        changes.append(result.first()[0] if result.first() else None)

        changes = [change for change in changes if change]
        return max(changes) if changes else None
Example #4
    def test_explicit_expunge_deleted(self):
        users, User = self.tables.users, self.classes.User

        mapper(User, users)
        sess = Session()
        sess.add(User(name='x'))
        sess.commit()

        u1 = sess.query(User).first()
        sess.delete(u1)

        sess.flush()

        assert was_deleted(u1)
        assert u1 not in sess
        assert object_session(u1) is sess

        sess.expunge(u1)
        assert was_deleted(u1)
        assert u1 not in sess
        assert object_session(u1) is None

        sess.rollback()
        assert was_deleted(u1)
        assert u1 not in sess
        assert object_session(u1) is None
Example #5
def answer_mode(survey_node: AnswerableSurveyNode):
    """Get the mode of the answers."""
    type_constraint = survey_node.the_type_constraint
    allowable_types = {
        'text', 'integer', 'decimal', 'date', 'time', 'timestamp', 'location',
        'facility', 'multiple_choice'
    }
    if type_constraint not in allowable_types:
        raise InvalidTypeForOperation((type_constraint, 'mode'))
    answer_cls = ANSWER_TYPES[survey_node.the_type_constraint]
    result = (
        object_session(survey_node)
        .execute(
            sa.text(
                'SELECT MODE() WITHIN GROUP (ORDER BY main_answer)'
                ' FROM {table} JOIN {answer_table} ON'
                ' {table}.id = {answer_table}.id'
                ' WHERE {answer_table}.survey_node_id = :sn_id'.format(
                    table=answer_cls.__table__,
                    answer_table=Answer.__table__,
                )
            ),
            {'sn_id': survey_node.id}
        )
        .scalar()
    )
    if type_constraint == 'multiple_choice' and result:
        result = object_session(survey_node).query(Choice).get(str(result))
    return result
Example #6
    def __init__(self, namespace_id=None, subject=None, thread_public_id=None,
                 started_before=None, started_after=None,
                 last_message_before=None, last_message_after=None,
                 any_email=None, to_addr=None, from_addr=None, cc_addr=None,
                 bcc_addr=None, filename=None, tag=None, detached=True):

        self.namespace_id = namespace_id
        self.subject = subject
        self.thread_public_id = thread_public_id
        self.started_before = started_before
        self.started_after = started_after
        self.last_message_before = last_message_before
        self.last_message_after = last_message_after
        self.any_email = any_email
        self.to_addr = to_addr
        self.from_addr = from_addr
        self.cc_addr = cc_addr
        self.bcc_addr = bcc_addr
        self.filename = filename
        self.tag = tag

        if detached and object_session(self) is not None:
            s = object_session(self)
            s.expunge(self)
            # Note, you can later add this object to a session by doing
            # session.merge(detached_object)

        # For transaction filters
        self.filters = []
        self.setup_filters()
Example #7
def on_after_update(mapper, connection, target):
    request = getattr(target, '_request', None)
    from .documents import BaseDocument

    # Reindex old one-to-one related object
    committed_state = attributes.instance_state(target).committed_state
    columns = set()
    for field, value in committed_state.items():
        if isinstance(value, BaseDocument):
            obj_session = object_session(value)
            # Make sure object is not updated yet
            if not obj_session.is_modified(value):
                obj_session.expire(value)
            index_object(value, with_refs=False,
                         request=request)
        else:
            id_pos = field.rfind('_id')
            if id_pos >= 0:
                rel_name = field[:id_pos]
                rel = mapper.relationships.get(rel_name, False)
                if rel and any(c.name == field for c in rel.local_columns):
                    columns.add(rel_name)

    # Reload `target` to get access to processed fields values
    columns = columns.union([c.name for c in class_mapper(target.__class__).columns])
    object_session(target).expire(target, attribute_names=columns)
    index_object(target, request=request, with_refs=False, nested_only=True)

    # Reindex the item's parents. This must be done after the child has been processed
    for parent, children_field in target.get_parent_documents(nested_only=True):
        columns = [c.name for c in class_mapper(parent.__class__).columns]
        object_session(parent).expire(parent, attribute_names=columns)
        ES(parent.__class__.__name__).index_nested_document(parent, children_field, target)
Example #8
 def __getattr__( self, name ):
     if name in self.spec:
         if name in self.parent._metadata:
             return self.spec[name].wrap( self.parent._metadata[name], object_session( self.parent ) )
         return self.spec[name].wrap( self.spec[name].default, object_session( self.parent ) )
     if name in self.parent._metadata:
         return self.parent._metadata[name]
Example #9
 def test_i18n( self ):
     from camelot.model.i18n import Translation, ExportAsPO
     session = Session()
     session.execute( Translation.__table__.delete() )
     self.assertEqual( Translation.translate( 'bucket', 'nl_BE' ), None )
     # run twice to check all branches in the code
     Translation.translate_or_register( 'bucket', 'nl_BE' )
     Translation.translate_or_register( 'bucket', 'nl_BE' )
     self.assertEqual( Translation.translate( 'bucket', 'nl_BE' ), 'bucket' )
     self.assertEqual( Translation.translate( '', 'nl_BE' ), '' )
     self.assertEqual( Translation.translate_or_register( '', 'nl_BE' ), '' )
     # clear the cache
     Translation._cache.clear()
     # fill the cache again
     translation = Translation( language = 'nl_BE', source = 'bucket',
                                value = 'emmer', uid=1 )
     orm.object_session( translation ).flush()
     self.assertEqual( Translation.translate( 'bucket', 'nl_BE' ), 'emmer' )
     export_action = ExportAsPO()
     model_context = MockModelContext()
     model_context.obj = translation
     try:
         generator = export_action.model_run( model_context )
         file_step = generator.next()
         generator.send( ['/tmp/test.po'] )
     except StopIteration:
         pass
Example #10
    def longslit_calibrate_to_database(self, gmos_long_slit_arc_calibration=None, destination_dir='.', force=False):

        session = object_session(self)

        if self.wave_cal is not None:
            if not force:
                raise GMOSDatabaseDuplicate('This arc already has a wavelength calibration {0}. '
                                            'Use force=True to override'.format(self.wave_cal))
            else:
                session.delete(self.wave_cal)
                session.commit()

        arctab, linesall, shift, fake = self.longslit_calibrate(gmos_long_slit_arc_calibration=gmos_long_slit_arc_calibration)

        wavecal_store_fname = 'arccal-{}.h5'.format(self.raw.fits.fname.replace('.fits',''))
        wavecal_store_full_path = os.path.join(destination_dir, wavecal_store_fname)
        arctab.write(wavecal_store_full_path, path='arc', overwrite=True, format='hdf5')
        #linesall.write(wavecal_store_full_path, path='lines', append=True, format='hdf5')

        session = object_session(self)

        gmos_long_slit_wavecal = GMOSLongSlitArcWavelengthSolution(id = self.id, fname=wavecal_store_fname,
                                                                   path=os.path.abspath(destination_dir))

        session.add(gmos_long_slit_wavecal)
        session.commit()

        return gmos_long_slit_wavecal
Example #11
def set_session_profile(survey, survey_session, profile):
    """Set up the survey session using a given profile.

    :param survey: the survey to use
    :type survey: :py:class:`euphorie.content.survey.Survey`
    :param survey_session: survey session to update
    :type survey_session: :py:class:`euphorie.client.model.SurveySession`
    :param dict profile: desired profile
    :rtype: :py:class:`euphorie.client.model.SurveySession`
    :return: the updated session (this might be a new session)

    This will rebuild the survey session tree if the profile has changed.
    """
    if not survey_session.hasTree():
        BuildSurveyTree(survey, profile, survey_session)
        return survey_session

    current_profile = extractProfile(survey, survey_session)
    if current_profile == profile and not treeChanges(survey_session, survey):
        survey_session.touch()
        return survey_session

    new_session = create_survey_session(
            survey_session.title, survey, survey_session.account)
    BuildSurveyTree(survey, profile, new_session, survey_session)
    new_session.copySessionData(survey_session)
    object_session(survey_session).delete(survey_session)
    return new_session
Example #12
 def __exit__( self, exc_type, exc_val, exc_tb ):
     if exc_type != None:
         self.add_exception_to_message( exc_type, exc_val, exc_tb )
         self.status = 'errors'
     elif self.status == 'running':
         self.status = 'success'
     orm.object_session( self ).commit()
     return True
Example #13
 def should_suppress_transaction_creation(self):
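      # never suppress for objects that are pending or deleted in the session;
      # otherwise suppress unless name, description or read_only actually changed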
     if (self in object_session(self).new or
             self in object_session(self).deleted):
         return False
     obj_state = inspect(self)
     return not (obj_state.attrs.name.history.has_changes() or
                 obj_state.attrs.description.history.has_changes() or
                 obj_state.attrs.read_only.history.has_changes())
Example #14
    def fix(self, geolocation):
        if self.latitude == 0 or self.longitude == 0:
            position = geolocation.get_position_by_name(self.address)

            self.latitude = position[0]
            self.longitude = position[1]

            object_session(self).commit()
            object_session(self).flush()
Example #15
 def make_copy( self, value, target_context, source_context ):
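     # wrap the value in the target session; if it resolves to an existing
     # MetadataFile, create a new one bound to the target dataset and copy
     # the underlying file contents across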
     value = self.wrap( value, object_session( target_context.parent ) )
     if value:
         new_value = galaxy.model.MetadataFile( dataset = target_context.parent, name = self.spec.name )
         object_session( target_context.parent ).add( new_value )
         object_session( target_context.parent ).flush()
         shutil.copy( value.file_name, new_value.file_name )
         return self.unwrap( new_value )
     return None
Example #16
  def log_activity(self, sender, actor, verb, object, target=None):
    assert self.running
    entry = ActivityEntry(actor=actor, verb=verb, object=object, target=target)
    entry.object_type = object.entity_type

    if target is not None:
        entry.target_type = target.entity_type

    object_session(object).add(entry)
Example #17
 def _record(self, mapper, model, operation):
     pk = tuple(mapper.primary_key_from_instance(model))
     # FIXME: A hack to prevent crashing when looking for the
     # _model_changes attribute. Happens when loading fixtures with
     # the fixture library.
     if not hasattr(orm.object_session(model), '_model_changes'):
         orm.object_session(model)._model_changes = dict()
     orm.object_session(model)._model_changes[pk] = (model, operation)
     return EXT_CONTINUE
Example #18
 def get_object_state(self):
     if object_session(self) is None and not has_identity(self):
         return 'transient'
     elif object_session(self) is not None and not has_identity(self):
         return 'pending'
     elif object_session(self) is None and has_identity(self):
         return 'detached'
     elif object_session(self) is not None and has_identity(self):
         return 'persistent'
     raise Exception
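
For comparison, the same four states can also be read directly from SQLAlchemy's inspection API; a minimal equivalent sketch, assuming obj is any mapped instance:

from sqlalchemy import inspect

def get_object_state_via_inspect(obj):
    # InstanceState exposes the transient/pending/detached/persistent flags,
    # so no object_session()/has_identity() combination is needed
    state = inspect(obj)
    if state.transient:
        return 'transient'
    if state.pending:
        return 'pending'
    if state.detached:
        return 'detached'
    return 'persistent'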
Example #19
    def test_current_authentication(self):
        from camelot.model.authentication import get_current_authentication

        authentication = get_current_authentication()
        # current authentication cache should survive
        # a session expire + expunge
        orm.object_session(authentication).expire_all()
        orm.object_session(authentication).expunge_all()
        authentication = get_current_authentication()
        self.assertTrue(authentication.username)
Example #20
 def is_canceled( self ):
     """Verifies if this Batch Job is canceled.  Returns :keyword:`True` if 
     it is.  This method is thus suiteable to call inside a running batch job 
     to verifiy if another user has canceled the running job.  Create a
     batch job object through the :meth:`create` method to make sure
     requesting the status does not interfer with the normal session.
     
     :return: :keyword:`True` or :keyword:`False`
     """
     orm.object_session( self ).expire( self, ['status'] )
     return self.current_status == 'canceled'
Example #21
 def new_file( self, dataset = None, **kwds ):
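     #if the dataset is already attached to a session, create a real
     #MetadataFile and flush so that it gets an id assigned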
     if object_session( dataset ):
         mf = galaxy.model.MetadataFile( name = self.spec.name, dataset = dataset, **kwds )
         object_session( dataset ).add( mf )
         object_session( dataset ).flush() #flush to assign id
         return mf
     else:
         #we need to make a tmp file that is accessible to the head node,
         #we will be copying its contents into the MetadataFile object's filename after restoring from JSON
         #we do not include 'dataset' in the kwds passed, as from_JSON_value() will handle this for us
         return MetadataTempFile( **kwds )
Example #22
 def also_set_tag(self, key, folderitem, is_remove):
     # Also add or remove the associated tag whenever a folder is added or
     # removed.
     with object_session(self).no_autoflush:
         folder = folderitem.folder
         tag = folder.get_associated_tag(object_session(self))
         if is_remove:
             self.remove_tag(tag)
         else:
             self.apply_tag(tag)
     return folderitem
Example #23
 def refresh_total_usage(self):
     accepted_status_list = (
         201, 301, 311, 511, 601, 521, 621, 701, 321, 231, 241)
     if object_session(self):
         value = object_session(self).query(Deal)\
             .filter(Deal.coupon == self)\
             .filter(Deal.deal_status_id.in_(accepted_status_list))\
             .count()
     else:
         value = 0
     self.used_total = value
Example #24
 def _create_versions(self):
     # be careful! makes flush!
     if not self._front_item:
         object_session(self).flush()
         # XXX it is better to do this automatically on before_insert or
         #     after_insert
         self._create_front_object()
     # the item is created, we set PRIVATE state as default
     # XXX hasattr looks hacky
     if hasattr(self, 'state') and (
             self.state is None or self.state == self.ABSENT):
         self.state = self.PRIVATE
Example #25
def print_state(obj):
    from sqlalchemy.orm import object_session 
    from sqlalchemy.orm.util import has_identity 
    obj = obj
    if object_session(obj) is None and not has_identity(obj):
        print "transient:" 
    if object_session(obj) is not None and not has_identity(obj):
        print "pending: "
    if object_session(obj) is None and has_identity(obj):
        print "# detached: "
    if object_session(obj) is not None and has_identity(obj):
        print "# persistent: "


    print type(obj)
Example #26
 def test_getitem_removed_session(self):
     # Corner case: admin removed survey, but survey session still exists
     import mock
     from sqlalchemy.orm import object_session
     from euphorie.client.tests.utils import addAccount
     from euphorie.client.model import SurveySession
     account = addAccount()
     survey_session = SurveySession(title=u'Dummy',
             zodb_path='does/not/exist', account=account)
     object_session(account).add(survey_session)
     sessions = self.Sessions('sessions', None, account)
     with mock.patch('euphorie.client.api.sessions.get_survey') \
             as mock_get:
         mock_get.return_value = None
         self.assertRaises(KeyError, sessions.__getitem__, '1')
Example #27
    def __getitem__(self, version):
        session = orm.object_session(self)
        canonical_version = packaging.utils.canonicalize_version(version)

        try:
            return (
                session.query(Release)
                .filter(
                    (Release.project == self)
                    & (Release.canonical_version == canonical_version)
                )
                .one()
            )
        except MultipleResultsFound:
            # There are multiple releases of this project which have the same
            # canonical version that were uploaded before we checked for
            # canonical version equivalence, so return the exact match instead
            try:
                return (
                    session.query(Release)
                    .filter((Release.project == self) & (Release.version == version))
                    .one()
                )
            except NoResultFound:
                # There are multiple releases of this project which have the
                # same canonical version, but none that have the exact version
                # specified, so just 404
                raise KeyError from None
        except NoResultFound:
            raise KeyError from None
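
For context, the canonical-version collision described in the comments above comes from packaging.utils.canonicalize_version, which maps differently spelled but equivalent version strings to the same canonical form; a minimal sketch:

from packaging.utils import canonicalize_version

# "1.0.0" and "1.0" normalize to the same canonical form, which is why the
# first query can match more than one Release row for a single canonical version
assert canonicalize_version("1.0.0") == canonicalize_version("1.0")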
Example #28
 def from_fits_object(cls, fits_object):
     session = object_session(fits_object)
     mask_name = fits_object.header['DATALAB'].lower().strip()
     mask_program = session.query(gemini_alchemy.Program).filter_by(name=fits_object.header['GEMPRGID'].lower().strip()).one()
     mask_object = cls(mask_name, mask_program.id)
     mask_object.fits_id = fits_object.id
     return mask_object
Example #29
 def _get_loan_details(self):
     where = [tables.loans.c.movie_id==self.movie_id]
     if self.collection_id is not None:
         where.append(tables.loans.c.collection_id==self.collection_id)
     if self.volume_id is not None:
         where.append(tables.loans.c.volume_id==self.volume_id)
     return object_session(self).query(Loan).filter(and_(tables.loans.c.return_date==None, or_(*where))).first()
Example #30
def start_taxonomy_check():
    """run the batch taxonomy check (BTC)
    """

    view = GenericEditorView(
        os.path.join(paths.lib_dir(), "plugins", "plants", "taxonomy_check.glade"),
        parent=None,
        root_widget_name="dialog1",
    )
    model = type("BTCStatus", (object,), {})()
    model.page = 1
    model.selection = view.get_selection()
    model.tick_off = None
    model.report = None
    model.file_path = ""

    if model.selection is None:
        return
    from sqlalchemy.orm import object_session

    presenter = BatchTaxonomicCheckPresenter(model, view, refresh_view=True, session=object_session(model.selection[0]))
    error_state = presenter.start()
    if error_state:
        presenter.session.rollback()
    else:
        presenter.commit_changes()
        from bauble import gui

        view = gui.get_view()
        if hasattr(view, "update"):
            view.update()
    presenter.cleanup()
    return error_state
Example #31
    def contents(self):
        # type: () -> List[str]
        from ..astro_object import AstronomicalObject

        # Get a valid database session:
        #
        #   I'm not sure if the object_session is required, but it guarantees
        #   that we are working with the session that this archive belongs to.
        #   In theory, fidia.mappingdb_session should be the only session present.
        session = object_session(self)
        if session is None:
            session = fidia.mappingdb_session

        query = session.query(AstronomicalObject._identifier)

        query_results = query.filter_by(
            _db_archive_id=self._db_archive_id).all()
        # Results will contain a list of tuples, so we must get the first column out so we have a simple list.

        contents = [i[0] for i in query_results]  # type: List[str]
        return contents
Example #32
 def test_do_GET_use_vocabulary_token(self):
     from sqlalchemy.orm import object_session
     from zope.publisher.browser import TestRequest
     from euphorie.client.model import Risk
     self.loginAsPortalOwner()
     (account, survey, survey_session) = _setup_session(self.portal)
     risk = survey['1']['2']
     risk.title = u'Everything is under control.'
     risk.problem_description = u'Not everything under control.'
     risk.description = None
     risk.evaluation_method = 'calculated'
     request = TestRequest()
     request.survey = survey
     risk = object_session(survey_session).query(Risk).first()
     risk.probability = 3
     risk.frequency = 7
     view = self.View(risk, request)
     response = view.do_GET()
     self.assertEqual(response['probability'], 'medium')
     self.assertEqual(response['frequency'], 'constant')
     self.assertEqual(response['effect'], None)
Example #33
    def mds_hw_listen(instance, args, kwargs):
        print(args, kwargs)
        session.add(instance)

        def got_id(session, flush_context):
            printD(instance)
            try:
                for mds in instance.type.metadatasources:  #FIXME test to see if we can get stuff from args/kwargs
                    session.add(instance.id, mds.id, mds.hardware_id)
                    session.flush()
            except:
                printD(
                    "it would seem your exp doesn't have a type or the type has no mdses, what kind of experiment is this!?"
                )
                #raise
            finally:
                pass
                #rem_got_id() #FIXME why does this... fail on Project...?

        hld.store(got_id)
        event.listen(object_session(instance), 'after_flush', got_id, instance)
Example #34
    def __acl__(self):
        session = orm.object_session(self)
        acls = [
            (Allow, "group:admins", "admin"),
            (Allow, "group:moderators", "moderator"),
        ]

        # Get all of the users for this project.
        query = session.query(Role).filter(Role.project == self)
        query = query.options(orm.lazyload("project"))
        query = query.options(orm.joinedload("user").lazyload("emails"))
        query = query.join(User).order_by(User.id.asc())
        for role in sorted(
                query.all(),
                key=lambda x: ["Owner", "Maintainer"].index(x.role_name)):
            if role.role_name == "Owner":
                acls.append(
                    (Allow, str(role.user.id), ["manage:project", "upload"]))
            else:
                acls.append((Allow, str(role.user.id), ["upload"]))
        return acls
Example #35
    def set_inherit_security(self, obj, inherit_security):
        """
    """
        assert isinstance(obj, InheritSecurity)
        assert isinstance(obj, Entity)

        obj.inherit_security = inherit_security
        session = object_session(obj) if obj is not None else db.session
        session.add(obj)

        manager = self._current_user_manager(session=session)
        op = (SecurityAudit.SET_INHERIT
              if inherit_security else SecurityAudit.UNSET_INHERIT)
        audit = SecurityAudit(manager=manager,
                              op=op,
                              object=obj,
                              object_id=obj.id,
                              object_type=obj.entity_type,
                              object_name=obj.name)
        session.add(audit)
        self._needs_flush()
Example #36
    def first_publication_number(self):
        """ Returns the first publication number of this issue based on the
        last issue of the same year. """

        from onegov.gazette.models.notice import GazetteNotice  # circular

        session = object_session(self)

        issues = session.query(Issue.name)
        issues = issues.filter(extract('year', Issue.date) == self.date.year)
        issues = issues.filter(Issue.date < self.date)
        issues = [issue[0] for issue in issues]
        if not issues:
            return 1

        numbers = []
        for issue in issues:
            query = session.query(GazetteNotice._issues[issue])
            query = query.filter(GazetteNotice._issues.has_key(issue))  # noqa
            numbers.extend([int(x[0]) for x in query if x[0]])
        return max(numbers) + 1 if numbers else 1
Example #37
    def date_observer(self, date_):
        """ Changes the issue date of the notices when updating the date
        of the issue.

        At this moment, the transaction is not yet committed: querying the
        current issue returns the old date.

        """

        issues = object_session(self).query(Issue.name, Issue.date)
        issues = dict(issues.order_by(Issue.date))
        issues[self.name] = date_
        issues = {
            key: standardize_date(as_datetime(value), 'UTC')
            for key, value in issues.items()
        }

        for notice in self.notices():
            dates = [issues.get(issue, None) for issue in notice._issues]
            dates = [date for date in dates if date]
            notice.first_issue = min(dates)
Example #38
 def composition(self):
     composition = CompositionType()
     session = object_session(self)
     for fragment_composition_relation in self._fragment_composition:
         symbol = fragment_composition_relation.brick_string
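          # split an optional leading isotope number from the element symbol,
          # e.g. a brick string like "13C" -> isotope "13", element "C"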
         isotope, element = re.search(r"(?P<isotope>\d*)?(?P<element>\S+)", symbol).groups()
         count = fragment_composition_relation.count
         if count is not None:
             count = int(count)
         else:
             count = 1
         if isotope != "":
             name = _make_isotope_string(element, isotope)
         else:
             name = element
         is_brick = session.query(Brick).filter(Brick.brick == name).first()
         if is_brick is None:
             composition[str(name)] += count
         else:
             composition += is_brick.composition * count
     return composition
Example #39
 def merged_sections(self):
     """
     Returns a list of all the sections that share an active, merged
     gradebook with this section. Merged sections DO NOT have to have the
     same course_code.
     """
     merged_sections = []
     query = (
         object_session(self).query(GradebookTeacherSection).filter(
             self.school_year == GradebookTeacherSection.school_year
         ).filter(self.school_code == GradebookTeacherSection.school_code).
         filter(self.gb_course_id == GradebookTeacherSection.course_id).
         filter(GradebookTeacherSection.active == True).filter(
             or_(
                 self.course_section !=
                 GradebookTeacherSection.course_section, self.course_code !=
                 GradebookTeacherSection.course_code))  # filter out ourself
     )
     for gb_teacher_section in query:
         merged_sections.append(gb_teacher_section.section)
     return merged_sections
Example #40
 def test_get(self):
     import datetime
     import json
     from sqlalchemy.orm import object_session
     from euphorie.client.model import Risk
     from euphorie.client.model import ActionPlan
     from euphorie.client.api.authentication import generate_token
     from euphorie.client.api.tests.test_risk import _setup_session
     self.loginAsPortalOwner()
     (account, survey, survey_session) = _setup_session(self.portal)
     risk = object_session(survey_session).query(Risk).one()
     risk.action_plans.append(
         ActionPlan(action_plan=u'This is the plan',
                    planning_start=datetime.date(2012, 6, 3)))
     browser = Browser()
     browser.addHeader('X-Euphorie-Token', generate_token(account))
     browser.open('http://nohost/plone/client/api/users/1/'
                  'sessions/1/1/1/actionplans/1')
     self.assertEqual(browser.headers['Content-Type'], 'application/json')
     response = json.loads(browser.contents)
     self.assertEqual(response['plan'], u'This is the plan')
Example #41
    def add_strings_to_message( self, strings, color = None ):
        """Add strings to the message of this batch job.

        This method executes within its own transaction, to make sure the state
        of the session is rolled back on failure.

        :param strings: a list or generator of strings
        :param color: the html color to be used for the strings (`'red'`, 
        `'green'`, ...), None if the color needs no change. 
        """
        if color:
            strings = [u'<font color="%s">'%color] + strings + [u'</font>']
        session = orm.object_session( self )
        with session.begin():
            # message might be changed in the orm
            session.flush()
            batch_table = self.__table__
            update = batch_table.update().where( batch_table.c.id == self.id )
            update = update.values( message = sql.func.coalesce( batch_table.c.message, '' ) + sql.bindparam('line') )
            for line in strings:
                session.execute( update, params = {'line':line + '<br/>'} )
Example #42
 def from_fits_object(cls, fits_object):
     session = object_session(fits_object)
     category_keyword_value = fits_object.header[
         cls.category_keyword].lower().strip()
     if session.query(cls).filter_by(
             name=category_keyword_value).count() == 0:
         logger.info('%s %s mentioned in %s not found in database - adding',
                     cls.__name__, category_keyword_value,
                     fits_object.fname)
         new_category_object = cls(category_keyword_value)
         session.add(new_category_object)
         session.commit()
         return new_category_object
     else:
         category_object = session.query(cls).filter_by(
             name=category_keyword_value).one()
         logger.debug(
             '%s %s mentioned in %s found in database - returning existing object %s',
             cls.__name__, category_keyword_value, fits_object.fname,
             category_object)
         return category_object
Example #43
    def get_role_assignements(self, object):
        session = object_session(object) if object is not None else db.session
        if not session:
            session = db.session()
        q = session.query(RoleAssignment)
        q = q.filter(RoleAssignment.object == object)\
             .options(subqueryload('user.groups'))

        role_assignments = q.all()

        results = []
        for ra in role_assignments:
            principal = None
            if ra.anonymous:
                principal = AnonymousRole
            elif ra.user:
                principal = ra.user
            else:
                principal = ra.group
            results.append((principal, ra.role))
        return results
Example #44
def make_aliquot_label(aliquot):
    db_session = orm.object_session(aliquot)
    specimen = aliquot.specimen
    study = specimen.cycle.study
    cycle = specimen.cycle
    patient = specimen.patient
    pid = patient.pid
    collect_date_label = aliquot.collect_date.strftime('%m/%d/%Y')
    collect_time_label = aliquot.collect_time.strftime('%H:%M')
    study_label = study.short_title
    cycle_label = cycle.week or cycle.title
    type_label = aliquot.aliquot_type.name

    # A major blunder of LIMS is that it does not store the enrollment the
    # sample was collected for, so we take a best guess by finding
    # a distinct enrollment based on the sample
    enrollment_query = (db_session.query(studies.Enrollment).filter_by(
        patient=patient,
        study=study).filter(studies.Enrollment.reference_number != sa.null()))

    try:
        enrollment = enrollment_query.one()
    except (orm.exc.NoResultFound, orm.exc.MultipleResultsFound):
        enrollment_number = u''
    else:
        enrollment_number = enrollment.reference_number

    if aliquot.amount:
        units = aliquot.aliquot_type.units or ''
        type_label += ' {}{}'.format(aliquot.amount, units)

    barcode = 0
    lines = [
        u'{}'.format(aliquot.id),
        u'{}   {}   {}'.format(aliquot.id, pid, enrollment_number),
        u'{} {}'.format(collect_date_label, collect_time_label),
        u'{} - {} - {}'.format(study_label, cycle_label, type_label)
    ]

    return barcode, lines
Example #45
    def get_as_of(self, revision=None):
        '''Get this domain object at the specified revision.
        
        If no revision is specified, the revision will be looked up on the
        global session object. If none is found, return HEAD.

        get_as_of does most of the crucial work in supporting the
        versioning.
        '''
        sess = object_session(self)
        if revision:  # set revision on the session so dom traversal works
            # TODO: should we test for overwriting current session?
            # if rev != revision:
            #     msg = 'The revision on the session does not match the one you' + \
            #     'requesting.'
            #     raise Exception(msg)
            logger.debug('get_as_of: setting revision and not_as_HEAD: %s',
                         revision)
            SQLAlchemySession.set_revision(sess, revision)
            SQLAlchemySession.set_not_at_HEAD(sess)
        else:
            revision = SQLAlchemySession.get_revision(sess)

        if SQLAlchemySession.at_HEAD(sess):
            return self
        else:
            revision_class = self.__revision_class__
            # TODO: when dealing with multi-col pks will need to update this
            # (or just use continuity)
            out = sess.query(revision_class).join('revision').\
                filter(
                    Revision.timestamp <= revision.timestamp
                ).\
                filter(
                    revision_class.id == self.id
                ).\
                order_by(
                    Revision.timestamp.desc()
                )
            return out.first()
Example #46
    def remove_tag(self, tag, execute_action=False):
        """Remove the given Tag instance from this thread. Does nothing if the
        tag isn't present. Contains extra logic for validating input and
        triggering dependent changes. Callers should use this method instead of
        directly calling Thread.tags.discard(tag).

        Parameters
        ----------
        tag: Tag instance
        execute_action: bool
            True if removing the tag should trigger a syncback action.
        """
        if tag not in self.tags:
            return
        self.tags.remove(tag)

        if execute_action:
            schedule_action_for_tag(tag.public_id,
                                    self,
                                    object_session(self),
                                    tag_added=False)

        # Add or remove dependent tags.
        inbox_tag = self.namespace.tags['inbox']
        archive_tag = self.namespace.tags['archive']
        unread_tag = self.namespace.tags['unread']
        unseen_tag = self.namespace.tags['unseen']
        spam_tag = self.namespace.tags['spam']
        trash_tag = self.namespace.tags['trash']
        if tag == unread_tag:
            # Remove the 'unseen' tag when the unread tag is removed.
            self.tags.discard(unseen_tag)
        if tag == inbox_tag:
            self.tags.add(archive_tag)
        elif tag == archive_tag:
            self.tags.add(inbox_tag)
        elif tag == trash_tag:
            self.tags.add(inbox_tag)
        elif tag == spam_tag:
            self.tags.add(inbox_tag)
Example #47
    def get_associations(self, kind=None, include_parents=False):
        """Get associated files, optionally limiting by association kind.

        Parameters
        ----------
        kind : str
            The kind of association to return (e.g., "Child").
            By default, all associations are returned.
        include_parents : bool
            If True, files related through inheritance
            are included in the returned list. If False, only directly
            associated files are returned. For example, a file's JSON
            sidecar will always be returned, but other JSON files from
            which the sidecar inherits will only be returned if
            include_parents=True.

        Returns
        -------
        list
            A list of BIDSFile instances.
        """
        if kind is None:
            return self._associations
        session = object_session(self)
        q = (session.query(BIDSFile).join(
            FileAssociation,
            BIDSFile.path == FileAssociation.dst).filter_by(kind=kind,
                                                            src=self.path))
        associations = q.all()

        if not include_parents:
            return associations

        def collect_associations(results, bidsfile):
            results.append(bidsfile)
            for p in bidsfile.get_associations('Child'):
                results = collect_associations(results, p)
            return results

        return chain(*[collect_associations([], bf) for bf in associations])
Example #48
    def create_datasource(self, path: str, type, datatype, commit=False, **args):
        """
        """
        # 
        if self.datasource is not None:
            raise MetadataMissingError('Datasource already exists. You can edit that one.')

        # get a session
        session = object_session(self)

        # load the datasource type
        if isinstance(type, int):
            ds_type = api.find_datasource_type(session=session, id=type, return_iterator=True).one()
        elif isinstance(type, str):
            ds_type = api.find_datasource_type(session=session, name=type, return_iterator=True).first()
        else:
            raise AttributeError('type has to be of type int or str')
        
        # TODO need the API for DataTypes here!!
        dtype = session.query(models.DataType).filter(models.DataType.name==datatype).one()
        
        # build the datasource object
        ds = models.DataSource(type=ds_type, datatype=dtype, path=path)

        # add the args
        ds.save_args_from_dict(args)

        # append to self
        self.datasource = ds

        if commit:
            try:
                session.add(self)
                session.commit()
            except Exception as e:
                session.rollback()
                raise e
        
        # return
        return ds
Example #49
    def apply_tag(self, tag, execute_action=False):
        """Add the given Tag instance to this thread. Does nothing if the tag
        is already applied. Contains extra logic for validating input and
        triggering dependent changes. Callers should use this method instead of
        directly calling Thread.tags.add(tag).

        Parameters
        ----------
        tag: Tag instance
        execute_action: bool
            True if adding the tag should trigger a syncback action.
        """
        if tag in self.tags:
            return
        self.tags.add(tag)

        if execute_action:
            schedule_action_for_tag(tag.public_id,
                                    self,
                                    object_session(self),
                                    tag_added=True)

        # Add or remove dependent tags.
        # TODO(emfree) this should eventually live in its own utility function.
        inbox_tag = self.namespace.tags['inbox']
        archive_tag = self.namespace.tags['archive']
        sent_tag = self.namespace.tags['sent']
        drafts_tag = self.namespace.tags['drafts']
        spam_tag = self.namespace.tags['spam']
        trash_tag = self.namespace.tags['trash']
        if tag == inbox_tag:
            self.tags.discard(archive_tag)
        elif tag == archive_tag:
            self.tags.discard(inbox_tag)
        elif tag == sent_tag:
            self.tags.discard(drafts_tag)
        elif tag == spam_tag:
            self.tags.discard(inbox_tag)
        elif tag == trash_tag:
            self.tags.discard(inbox_tag)
Example #50
    def update_config(self, key, value):
        """ Creates or updates db config of the VarDictGroup. Requires bound to db tree. """
        dataset = self._top._config.dataset
        session = object_session(self._top._config)
        logger.debug(
            'Updating VarDictGroup config. dataset: {}, type: {}, key: {}, value: {}'
            .format(dataset, self._top._type, key, value))

        if not self._parent._config:
            self._parent.update_config()

        # create or update group config
        self._config, created = get_or_create(session,
                                              Config,
                                              d_vid=dataset.vid,
                                              type=self._top._type,
                                              parent=self._parent._config,
                                              group=self._key,
                                              key=self._key,
                                              dataset=dataset)
        self._top._add_valid(self._config)

        # create or update value config
        config, created = get_or_create(session,
                                        Config,
                                        parent=self._config,
                                        d_vid=dataset.vid,
                                        type=self._top._type,
                                        key=key,
                                        dataset=dataset)

        if config.value != value:
            # sync db value with term value.
            config.value = value
            session.merge(config)
            session.commit()
            logger.debug(
                'Config bound to the VarDictGroup key updated. config: {}'.
                format(config))
        self._top._add_valid(config)
Example #51
def dependent_foreign_keys(model_class):
    """
    Returns dependent foreign keys as dicts for given model class.

    ** Experimental function **
    """
    session = object_session(model_class)

    engine = session.bind
    inspector = reflection.Inspector.from_engine(engine)
    table_names = inspector.get_table_names()

    dependent_foreign_keys = {}

    for table_name in table_names:
        fks = inspector.get_foreign_keys(table_name)
        if fks:
            dependent_foreign_keys[table_name] = []
            for fk in fks:
                if fk['referred_table'] == model_class.__tablename__:
                    dependent_foreign_keys[table_name].append(fk)
    return dependent_foreign_keys
Example #52
    def __getitem__(self, version):
        session = orm.object_session(self)
        canonical_version = packaging.utils.canonicalize_version(version)

        try:
            return (session.query(Release).filter((Release.project == self) & (
                Release.canonical_version == canonical_version)).one())
        except MultipleResultsFound:
            # There are multiple releases of this project which have the same
            # canonical version that were uploaded before we checked for
            # canonical version equivalence, so return the exact match instead
            try:
                return (session.query(
                    Release).filter((Release.project == self)
                                    & (Release.version == version)).one())
            except NoResultFound:
                # There are multiple releases of this project which have the
                # same canonical version, but none that have the exact version
                # specified, so just 404
                raise KeyError from None
        except NoResultFound:
            raise KeyError from None
Example #53
            def __enter__(self):
                self.session = s = object_session(node)
                if s.new or s.dirty:
                    raise Exception(
                        "Refusing to create a new tagged node version. Session must be clean!"
                    )

                uow = versioning_manager.unit_of_work(s)
                tx = uow.create_transaction(s)

                if user is not None:
                    tx.user = user

                if tag:
                    if node.get_tagged_version(tag):
                        raise ValueError("tag already exists")
                    tx.meta[u"tag"] = tag
                elif publish:
                    if node.get_published_version():
                        raise ValueError("publish version already exists")
                    tx.meta[u"publish"] = publish
                else:
                    NodeVersion = version_class(node.__class__)
                    # in case you were wondering: order_by(None) resets the default order_by
                    last_tagged_version = node.tagged_versions.order_by(
                        None).order_by(
                            NodeVersion.transaction_id.desc()).first()
                    if last_tagged_version is not None:
                        next_version = int(last_tagged_version.tag) + 1
                    else:
                        node.versions[-1].tag = u"1"
                        next_version = 2

                    tx.meta[u"tag"] = unicode(next_version)

                if comment:
                    tx.meta[u"comment"] = comment

                return tx
Example #54
    def get_roles_stats(self, date_start=None) -> dict:
        """
        Returns stats for all roles for the player

        :param date_start: DateTime to start the stats at
        :return: [role]['games', 'wins',]
        """
        session = object_session(self)

        query = session.query(
            GameParticipant.role,
            func.count().label('games'),
            type_coerce(func.sum(GameParticipant.team == Game.winner), Integer).label('wins')) \
            .select_from(Game) \
            .join(GameParticipant) \
            .filter(GameParticipant.player_id == self.discord_id) \
            .group_by(GameParticipant.role)

        if date_start:
            query = query.filter(Game.date > date_start)

        return {row.role: row for row in query}
Example #55
    def scoreboard_packet(self):
        packet = smpacket.SMPacketServerNSCGON(nb_players=0, ids=[])

        session = object_session(self)

        options = ("score", "grade", "difficulty", "miss", "bad", "good",
                   "great", "perfect", "flawless", "held", "max_combo",
                   "options")

        for option in options:
            packet[option] = []

        for songstat in (session.query(song_stat.SongStat).filter_by(
                game_id=self.id).order_by(desc(song_stat.SongStat.score))):

            packet["nb_players"] += 1
            packet["ids"].append(
                user.User.user_index(songstat.user.id, self.room_id, session))
            for option in options:
                packet[option].append(getattr(songstat, option, None))

        return packet
Example #56
    def returned_on(self, date=None):
        """
        Marks the loan as returned and clears the loaned flag in related movies.
        """

        if date is None:
            date = func.current_date()
        # note that SQLAlchemy will convert YYYYMMDD strings to datetime, no need to touch it

        if self.return_date: # already returned, just update the date
            self.return_date = date
            return True

        session = object_session(self)

        if self.collection_id:
            self.collection.loaned = False # will update the loaned flag in all associated movies as well
        if self.volume_id:
            self.volume.loaned = False # will update the loaned flag in all associated movies as well
        if self.movie_id:
            self.movie.loaned = False
        self.return_date = date
Example #57
 def getResultsForStudent(self, student):
     session = object_session(self)
     pointsQuery = self.exercise_points
     pointsQuery = pointsQuery.filter(
         ExerciseStudent.student_id == student.id)
     results = {}
     for points in pointsQuery.all():
         results[points.exercise_id] = {
             'points': points.points,
             'exercise': points.exercise
         }
     nonNullPoints = [
         x for x in [r['points'] for r in results.values()] if x
     ]
     if nonNullPoints:
         results['sum'] = sum(nonNullPoints)
     else:
         results['sum'] = None
     for e in self.exercises:
         if e.id not in results:
             results[e.id] = {'points': None, 'exercise': e}
     return results
Example #58
    def _generate_properties_cache(self):
        live_calcs = live_calculation.parser.parse_property_names("dbid()",*self.with_calculations)
        session = object_session(self.base_halo)

        all_halo_ids = self._link_cache.keys()
        if self.base_halo.id not in all_halo_ids:
            all_halo_ids=list(all_halo_ids)+[self.base_halo.id]

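        # evaluate the requested live calculations for the base halo and all
        # linked halos in one query against a temporary halo-list table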
        with temporary_halolist.temporary_halolist_table(session,
                                                         all_halo_ids) as temptable:
            query = temporary_halolist.halo_query(temptable)
            query = live_calcs.supplement_halo_query(query)
            sql_query_results = query.all()
            calculation_results = live_calcs.values(sql_query_results)

        self._properties_cache = {}
        for result in calculation_results.T:
            properties_this = {}
            for name, value in zip(self.with_calculations, result[1:]):
                properties_this[name]=value
            halo_id = result[0]
            self._properties_cache[halo_id] = properties_this
Example #59
def _expire_relationship(target, relationship_prop):
    """Expire relationship backrefs

    used when an object with relationships is deleted
    """

    session = object_session(target)
    # get peer objects to be expired
    peers = getattr(target, relationship_prop.key)
    if peers is None:
        # no peer to clear
        return
    # many-to-many and one-to-many have a list of peers
    # many-to-one has only one
    if (
        relationship_prop.direction is interfaces.MANYTOONE
        or not relationship_prop.uselist
    ):
        peers = [peers]
    for obj in peers:
        if inspect(obj).persistent:
            session.expire(obj, [relationship_prop.back_populates])
Example #60
    def get_last_5_quests(self):
        session = object_session(self)

        completed_quests = session.query(Quest).filter_by(
            user=self.user).filter_by(
                confirmed=True).filter(Quest.id != self.id).order_by(
                    desc(Quest.completed_date)).limit(5).all()

        # gets last 5 quests that were not completed and have expired
        current_time = datetime.datetime.now()
        expired_quests = session.query(Quest).filter_by(
            user=self.user).filter_by(confirmed=False).filter(
                Quest.expiry_date < current_time).order_by(
                    desc(Quest.expiry_date)).limit(5).all()

        quests = completed_quests
        for q in expired_quests:
            q.completed_date = q.expiry_date
            q.actual_reward = 0
            quests.append(q)
        quests.sort(key=lambda x: x.completed_date, reverse=True)
        return quests[:5]