Example #1
    def edit(self, pid=None):
        """Edit deposit."""
        pid = pid or self.pid

        def _edit(record):
            """Update selected keys."""
            data = record.dumps()
            # Keep current record revision for merging.
            data['_deposit']['pid']['revision_id'] = record.revision_id
            data['_deposit']['status'] = 'draft'
            data['$schema'] = self.build_deposit_schema(record)
            return data

        with db.session.begin_nested():
            before_record_update.send(self)

            record_pid, record = self.fetch_published()
            assert PIDStatus.REGISTERED == record_pid.status
            assert record['_deposit'] == self['_deposit']

            self.model.json = _edit(record)

            flag_modified(self.model, 'json')
            db.session.merge(self.model)

        after_record_update.send(self)
        return self.__class__(self.model.json, model=self.model)
Example #2
    def set_setting(self, key, value):
        if key not in org_settings:
            raise KeyError(key)

        self.settings.setdefault('settings', {})
        self.settings['settings'][key] = value
        flag_modified(self, 'settings')
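
A note on the pattern these examples share: mutating a key inside a plain JSON column happens behind SQLAlchemy's back, so no UPDATE is emitted on commit unless the attribute is flagged by hand. A minimal self-contained sketch of this (assuming SQLAlchemy 1.4+; the Org model is hypothetical, not taken from the example above):

from sqlalchemy import JSON, Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base
from sqlalchemy.orm.attributes import flag_modified

Base = declarative_base()

class Org(Base):  # hypothetical model, for illustration only
    __tablename__ = 'org'
    id = Column(Integer, primary_key=True)
    settings = Column(JSON, default=dict)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    org = Org(settings={'theme': 'light'})
    session.add(org)
    session.commit()

    org.settings['theme'] = 'dark'  # in-place mutation: the session sees nothing
    flag_modified(org, 'settings')  # mark the attribute dirty by hand
    session.commit()                # now the UPDATE is emitted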
Example #3
    def update_model(self, entity, instance):
        """ Update an instance from entity dict by merging the fields

        - Properties are copied over
        - JSON dicts are shallowly merged

        :param entity: Entity dict
        :type entity: dict
        :param instance: The instance to update
        :type instance: sqlalchemy.ext.declarative.DeclarativeMeta
        :return: The updated instance (same object, mutated in place)
        :rtype: sqlalchemy.ext.declarative.DeclarativeMeta
        :raises AssertionError: validation errors
        """
        assert isinstance(entity, dict), 'Update model: entity should be a dict'

        # Check columns
        unk_cols = self.check_columns(entity.keys())
        assert not unk_cols, 'Update model: unknown fields: {}'.format(unk_cols)

        # Update
        for name, val in entity.items():
            if isinstance(val, dict) and self.mongomodel.model_bag.columns.is_column_json(name):
                # JSON column with a dict: do a shallow merge
                getattr(instance, name).update(val)
                # Tell SqlAlchemy that a mutable collection was updated
                flag_modified(instance, name)
            else:
                # Other columns: just assign
                setattr(instance, name, val)

        # Finish
        return instance
Example #4
def accept_reservation(self, request):
    if not self.data or not self.data.get('accepted'):
        collection = ResourceCollection(request.app.libres_context)
        resource = collection.by_id(self.resource)
        scheduler = resource.get_scheduler(request.app.libres_context)
        reservations = scheduler.reservations_by_token(self.token)

        send_html_mail(
            request=request,
            template='mail_reservation_accepted.pt',
            subject=_("Your reservation was accepted"),
            receivers=(self.email, ),
            content={
                'model': self,
                'resource': resource,
                'reservations': reservations
            }
        )

        for reservation in reservations:
            reservation.data = reservation.data or {}
            reservation.data['accepted'] = True

            # libres does not automatically detect changes yet
            flag_modified(reservation, 'data')

        request.success(_("The reservation was accepted"))
    else:
        request.warning(_("The reservation has already been accepted"))

    return morepath.redirect(request.params['return-to'])
Example #5
    def update(self, instance, log=True, modified_attrs=(),
               validate_global=False):
        """Add `instance` to the DB session, and attempt to commit

        :param instance: Instance to be updated in the DB
        :param log: Should the update message be logged
        :param modified_attrs: Names of attributes that have been modified.
                               This is only required for some nested
                               attributes (e.g. when sub-keys of a runtime
                               properties dict have been modified).
                               If DB updates aren't happening but no errors
                               are reported then you probably need this.
        :param validate_global: Verify that modification of this global
                                resource is permitted
        :return: The updated instance
        """
        if instance.is_resource and validate_global:
            validate_global_modification(instance)
        if log:
            current_app.logger.debug('Update {0}'.format(instance))
        db.session.add(instance)
        self._validate_unique_resource_id_per_tenant(instance)
        for attr in modified_attrs:
            flag_modified(instance, attr)
        self._safe_commit()
        return instance
Example #6
def update_email_log_state(log_entry, failed=False):
    if failed:
        log_entry.data['state'] = 'failed'
    else:
        log_entry.data['state'] = 'sent'
        log_entry.data['sent_dt'] = now_utc(False).isoformat()
    flag_modified(log_entry, 'data')
Example #7
    def update_data_association(self, event, vc_room, event_vc_room, data):
        super(DummyPlugin, self).update_data_association(event, vc_room, event_vc_room, data)
        event_vc_room.data.update({key: data.pop(key) for key in [
            'show_phone_numbers'
        ]})

        flag_modified(event_vc_room, 'data')
Example #8
    def update_model(self, model_ins, new_data, overwrite=False):
        '''Updates a SQLAlchemy model instance with a dict object.
        If a key's item is a list or dict the attribute will
        be marked as changed.

        :param model_ins: SQLAlchemy model instance
        :param new_data: dict
        :param overwrite: boolean
        '''
        try:
            for key in new_data:
                if not hasattr(model_ins, key):
                    continue
                if isinstance(new_data[key], dict) and not overwrite:
                    getattr(model_ins, key).update(new_data[key])
                    flag_modified(model_ins, key)
                elif isinstance(new_data[key], list) and not overwrite:
                    setattr(model_ins, key, 
                        list(set(
                            getattr(model_ins, key) + new_data[key]
                        ))
                    )
                    flag_modified(model_ins, key)
                else:
                    setattr(model_ins, key, new_data[key])
        except Exception as e:
            raise TypeError(
                'Update model failed for the following key: {} with error: {}'.format(
                    key,
                    e,
                )
            )
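
A rough usage sketch for the helper above (the helper instance, the user object, and its columns are all hypothetical): dict values are shallow-merged, list values become deduplicated unions, and both paths call flag_modified; anything else is assigned directly.

# Hypothetical usage of update_model from Example #8.
# Suppose user.prefs == {'lang': 'en'} and user.tags == ['a'].
helper.update_model(user, {'prefs': {'theme': 'dark'}, 'tags': ['a', 'b']})
# user.prefs == {'lang': 'en', 'theme': 'dark'}   (shallow merge, flagged)
# user.tags now contains 'a' and 'b' once each    (set union, order not kept)
# Passing overwrite=True would replace both values wholesale instead.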
Example #9
    def post(self):
        i = request.form
        archive_id = i.get('archive_id')
        name = i.get('name')
        desc = i.get('description')

        try:
            b = api.Book.get(archive_id=archive_id)
        except:
            b = api.Book(archive_id=archive_id)
            b.create()

        author_ids = i.get('aids')
        if author_ids:
            author_ids = [a.strip() for a in author_ids.split(',')]
            for aid in author_ids:
                b.authors.append(api.Author.get(aid))
        if name:
            b.name = name
        if desc:
            from sqlalchemy.orm.attributes import flag_modified
            b.data[u'description'] = desc
            flag_modified(b, 'data')
        b.save()
        return b.dict()
Example #10
 def meta(self, meta):
     if isinstance(meta, Metadata):
         self.content_hash = meta.content_hash
         self.foreign_id = meta.foreign_id
         meta = meta.data
     self._meta = meta
     flag_modified(self, '_meta')
Example #11
def add_image_size(context):
    images = FileCollection(context.session, type='image')

    for image in images.query():
        if not hasattr(image.reference, 'size'):

            # potentially dangerous and might not work with other storage
            # providers, so don't reuse unless you are sure about the
            # consequences
            image.reference._thaw()

            if image.reference.content_type == 'image/svg+xml':
                image.reference.size = get_svg_size_or_default(
                    image.reference.file
                )
            else:
                image.reference.size = get_image_size(
                    Image.open(image.reference.file)
                )

                thumbnail_metadata = copy(image.reference.thumbnail_small)
                thumbnail_metadata['size'] = get_image_size(
                    Image.open(
                        context.app.bound_depot.get(
                            image.get_thumbnail_id(size='small')
                        )
                    )
                )

                image.reference.thumbnail_small = thumbnail_metadata

            flag_modified(image, 'reference')
Example #12
	def gain_xp (self, xp):
		rules = self.game_session.rules

		sum_xp = self.xp + xp
		new_level = rules.get_level(sum_xp)
		if new_level is None:
			# TODO: log error
			return

		self.xp = sum_xp

		# Level up?
		level_diff = new_level - self.level
		for i in range(0, level_diff):
			level_info = rules.get_level_info(self.level + 1)
			if not level_info:
				break # TODO: WTF?
			self.level += 1

			# inc skills
			for cat_id, _formula in level_info.get('skills_categories_formulas', {}).iteritems():
				self.skill_points.setdefault(cat_id, 0)
				points_gained = formula.FormulaEvaluator({}, _formula, self).evaluate()
				self.skill_points[cat_id] += points_gained

			flag_modified(self, 'skill_points')

		self.add()
Example #13
    def proc(record):
        try:
            if 'authors' not in record.json:
                error('no authors for record %s' % record.json['control_number'])
                return

            for author_index, author_data in enumerate(record.json['authors']):
                if 'affiliations' not in author_data:
                    error('no affiliations for record %s' % record.json['control_number'])
                    continue

                for aff_index, aff_data in enumerate(author_data['affiliations']):
                    counts['all'] += 1

                    new_country = find_country(aff_data['value'])
                    if aff_data['country'] != new_country:
                        counts['changed'] += 1

                        info('Changed country for record with id %s from %s to %s' % (record.json['control_number'],
                                                                                      aff_data['country'], new_country))
                        record.json['authors'][author_index]['affiliations'][aff_index]['country'] = new_country

            if not dry_run:
                flag_modified(record, 'json')
        except Exception as e:
            error(str(e))
Example #14
    def commit(self):
        """Store changes on current instance in database.

        Procedure followed:

        #. The signal :data:`invenio_records.signals.before_record_update` is
            called with the record as function parameter.

        #. The record data is validated.

        #. The record is committed to the database.

        #. The signal :data:`invenio_records.signals.after_record_update` is
            called with the record as function parameter.

        :returns: The Record instance.
        """
        if self.model is None or self.model.json is None:
            raise MissingModelError()

        with db.session.begin_nested():
            before_record_update.send(self)

            self.validate()

            self.model.json = dict(self)
            flag_modified(self.model, 'json')

            db.session.merge(self.model)

        after_record_update.send(self)
        return self
Example #15
    def _update_metadata(self, **options):
        """ Updates the underlying metadata with the give values. This
        operats on low-level interfaces of Depot and assumes local storage.

        You should have a good reason for using this.

        """
        assert set(options.keys()).issubset({'content_type', 'filename'})

        if not hasattr(self.reference.file, '_metadata_path'):
            raise NotImplementedError(
                "The current depot storage backend does not support "
                "in-place metadata updates"
            )

        path = Path(self.reference.file._metadata_path)

        # store the pending metadata on the session to commit them later
        session = object_session(self)

        if 'pending_metadata_changes' not in session.info:
            session.info['pending_metadata_changes'] = []

        # only support updating existing values - do not create new ones
        for key, value in options.items():
            session.info['pending_metadata_changes'].append((path, key, value))

        # make sure we cause a commit here
        flag_modified(self, 'reference')
Example #16
    def config(self, request):
        if request.method == 'OPTIONS':
            request.app.fire('on_preflight', request, methods=('GET', 'PATCH'))
            return request.response

        user = self.model.get_instance(request).obj
        resource = 'applications:%s:config' % user.application_id.hex
        with self.model.session(request) as session:
            session.add(user)
            if request.method == 'GET':
                Resource(resource, 'read')(request)
                result = user.application.config or {}
            else:
                form_class = get_form_class(request, 'application-config')
                Resource(resource, 'update')(request)
                data, files = request.data_and_files()
                form = form_class(request, data=data, files=files)

                if form.is_valid(exclude_missing=True):
                    application = user.application
                    result = application.config
                    if result is None:
                        result = {}
                        application.config = result
                    session.add(application)
                    for key, value in form.cleaned_data.items():
                        if not value:
                            result.pop(key, None)
                        else:
                            result[key] = value
                    flag_modified(application, 'config')
                else:
                    result = form.tojson()
        return self.json_response(request, result)
Example #17
    def _update_uid_resync_status(self, uid=None, status=None):
        # Helper function to make it easier to update resync data.
        with session_scope(self.namespace_id) as db_session:
            account = db_session.query(Account).options(
                load_only('_sync_status')).get(self.account_id)

            folder_id = str(self.folder_id)

            if 's3_resync_status' not in account._sync_status:
                account._sync_status['s3_resync_status'] = {}

            s3_resync_status = account._sync_status.get('s3_resync_status')

            if folder_id not in s3_resync_status:
                s3_resync_status[folder_id] = {}

            if uid is not None:
                s3_resync_status[folder_id]['last_synced_uid'] = uid

            if status is not None:
                s3_resync_status[folder_id]['status'] = status

            # We need to do this because SQLAlchemy doesn't pick up updates
            # to the fields of a MutableDict.
            flag_modified(account, '_sync_status')

            db_session.commit()
Example #18
def downgrade(pyramid_env):
    with context.begin_transaction():
        op.add_column(
            'agent_email_account',
            sa.Column("preferred", sa.SmallInteger,
                      default=False, server_default='0'))
    # Do stuff with the app's models here.
    from assembl import models as m
    db = m.get_session_maker()()

    with transaction.manager:
        # get from previous values
        db.execute("""UPDATE agent_email_account SET preferred=(
            SELECT abstract_agent_account.preferred
            FROM abstract_agent_account
            WHERE abstract_agent_account.id = agent_email_account.id
            AND abstract_agent_account."type" = 'agent_email_account')""")
        # Force update, transaction manager saw nothing
        aaa = db.query(m.Role).first()
        flag_modified(aaa, 'name')

    with context.begin_transaction():
        db.execute('ALTER TABLE agent_email_account ADD CHECK (preferred IN (0, 1))')
        op.drop_column(
            'abstract_agent_account', "preferred")
Example #19
 def proc(r):
     for k, v in dict(r.results).iteritems():
         new_k = COUNTRIES_DEFAULT_MAPPING.get(k, k)
         if k != new_k:
             info('%d: %s => %s' % (r.control_number, k, new_k))
             r.results[new_k] = v
             r.results.pop(k)
             flag_modified(r, 'results')
Example #20
File: db.py, Project: digideskio/puffin
def update_model_with_json(model):
    # Needed for JSON fields, see https://bashelton.com/2014/03/updating-postgresql-json-fields-via-sqlalchemy/
    mapper = object_mapper(model)
    for column in mapper.columns.values():
        if isinstance(column.type, JSON):
            flag_modified(model, column.name)
    
    db.session.add(model)
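
A short usage sketch for the helper above (the Page model and its layout JSON column are hypothetical): after any in-place JSON mutation, a single call flags every JSON column on the model and stages it for commit.

# Hypothetical usage of update_model_with_json from Example #20.
page = Page.query.get(1)
page.layout['columns'] = 3    # in-place change, invisible to the session
update_model_with_json(page)  # flags every JSON column, then session.add()
db.session.commit()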
Example #21
    def update_data_vc_room(self, vc_room, data):
        super(VidyoPlugin, self).update_data_vc_room(vc_room, data)

        for key in ['description', 'owner', 'room_pin', 'moderation_pin', 'auto_mute']:
            if key in data:
                vc_room.data[key] = data.pop(key)

        flag_modified(vc_room, 'data')
Example #22
 def run(self, failed_migration=None, verbose=None):
     """Run the upgrade."""
     if not self.loaded:
         self.load()
     alembic = current_app.extensions['invenio-db'].alembic
     migration = Migration(
         version = self.dst_version,
         data = dict(steps=[], error=None, status='start')
     )
     # save the migration state
     if db.engine.dialect.has_table(db.engine, 'b2share_migrations'):
         db.session.add(migration)
         db.session.commit()
     for step in self.steps:
         step_log = dict(
             name=step.run.__name__,
             status='start'
         )
         migration.data['steps'].append(step_log)
         try:
             alembic.migration_context.bind.close()
             if step.condition is None or step.condition(alembic,
                                                         failed_migration):
                 if verbose:
                     click.secho(step.run.__doc__, fg='green')
                 step.run(alembic, verbose)
                 step_log['status'] = 'success'
             else:
                 step_log['status'] = 'skip'
         except BaseException as e:
             db.session.rollback()
             migration.data['steps'].append(dict(
                 name=step.run.__name__,
                 status='error'
             ))
             migration.data['error'] = traceback.format_exc()
             migration.data['status'] = 'error'
             if not db.engine.dialect.has_table(db.engine,
                                                'b2share_migrations'):
                 click.secho(
                     'Failed to upgrade while running upgrade {0} -> {1}. '
                     'Step {2}.\nTraceback:\n{3}'.format(
                         self.src_version, self.dst_version,
                         step.run.__name__, traceback.format_exc())
                 )
             raise e
         finally:
             # save the migration state
             if db.engine.dialect.has_table(db.engine,
                                            'b2share_migrations'):
                 flag_modified(migration, 'data')
                 db.session.add(migration)
                 db.session.commit()
     # mark the migration as successful and save it
     migration.data['status'] = 'success'
     db.session.add(migration)
     flag_modified(migration, 'data')
     db.session.commit()
Example #23
    def update_room(self, vc_room, event):
        client = AdminClient(self.settings)

        try:
            room_obj = self.get_room(vc_room)
        except RoomNotFoundAPIException:
            raise VCRoomNotFoundError(_("This room has been deleted from Vidyo"))

        owner = retrieve_principal(vc_room.data['owner'], allow_groups=False, legacy=False)
        changed_owner = room_obj.ownerName not in iter_user_identities(owner)
        if changed_owner:
            login_gen = iter_user_identities(owner)
            login = next(login_gen, None)
            if login is None:
                raise VCRoomError(_("No valid Vidyo account found for this user"), field='owner')
            room_obj.ownerName = login

        room_obj.name = vc_room.name
        room_obj.description = vc_room.data['description']

        room_obj.RoomMode.hasPIN = vc_room.data['room_pin'] != ""
        room_obj.RoomMode.hasModeratorPIN = vc_room.data['moderation_pin'] != ""

        if room_obj.RoomMode.hasPIN:
            room_obj.RoomMode.roomPIN = vc_room.data['room_pin']
        if room_obj.RoomMode.hasModeratorPIN:
            room_obj.RoomMode.moderatorPIN = vc_room.data['moderation_pin']

        vidyo_id = vc_room.data['vidyo_id']
        while True:
            try:
                client.update_room(vidyo_id, room_obj)
            except RoomNotFoundAPIException:
                raise VCRoomNotFoundError(_("This room has been deleted from Vidyo"))
            except APIException as err:
                err_msg = err.message
                if err_msg.startswith('Room exist for name'):
                    raise VCRoomError(_("Room name already in use"), field='name')

                elif err_msg.startswith('Member not found for ownerName'):
                    if changed_owner:
                        login = next(login_gen, None)
                    if not changed_owner or login is None:
                        raise VCRoomError(_("No valid Vidyo account found for this user"), field='owner')
                    room_obj.ownerName = login

                else:
                    raise
            else:
                updated_room_obj = self.get_room(vc_room)

                update_room_from_obj(self.settings, vc_room, updated_room_obj)
                flag_modified(vc_room, 'data')

                client.set_automute(vidyo_id, vc_room.data['auto_mute'])
                break
Example #24
def process_event(self, event_id):
    """Process event in Celery."""
    with db.session.begin_nested():
        event = Event.query.get(event_id)
        event._celery_task = self  # internal binding to a Celery task
        event.receiver.run(event)  # call run directly to avoid circular calls
        flag_modified(event, 'response')
        flag_modified(event, 'response_headers')
        db.session.add(event)
    db.session.commit()
Example #25
 def save(self, status=None):
     """Save object to persistent storage."""
     with db.session.begin_nested():
         self.modified = datetime.now()
         if status is not None:
             self.status = status
         if self.extra_data is None:
             self.extra_data = dict()
         flag_modified(self, 'extra_data')
         db.session.merge(self)
Example #26
    def refresh_room(self, vc_room, event):
        client = AdminClient(self.settings)
        try:
            room_obj = self.get_room(vc_room)
        except RoomNotFoundAPIException:
            raise VCRoomNotFoundError(_("This room has been deleted from Vidyo"))

        update_room_from_obj(self.settings, vc_room, room_obj)
        vc_room.data['auto_mute'] = client.get_automute(room_obj.roomID)
        flag_modified(vc_room, 'data')
Example #27
def handle_note_update(self, request):
    request.assert_valid_csrf_token()
    self.note = request.POST.get('note')

    # when updating the note we offer the option not to update the
    # modified date, which is helpful if the files are in modified order
    # and the order should remain when the note is changed
    if request.POST.get('keep-timestamp') in ('1', 'true', 'yes'):
        self.modified = self.modified
        flag_modified(self, 'modified')
Example #28
    def proc(record):
        if not record.json:
            rerror('no json.', record)
            return

        if 'record_creation_year' not in record.json:
            date = parse_date(record.json['record_creation_date'])
            if not date:
                rerror("Date couldn't be parsed: %s" % record.json['record_creation_date'], record)
                return

            record.json['record_creation_year'] = date.year
            flag_modified(record, 'json')
Example #29
    def proc_delete(record):
        to_delete = []
        for i, a in enumerate(record.json['authors']):
            s = sum(map(bool, a.values()))
            if s == 0:
                to_delete.append(i)

        if to_delete:
            # delete from the end so earlier indices stay valid
            for d in reversed(to_delete):
                del record.json['authors'][d]
            flag_modified(record, 'json')
        info('DELETE %d authors' % len(to_delete))
Example #30
    def update_every_login_social_data(self, user:IUserModel, data:dict):
        """Update internal user data on every login.

        By default, sets user.user_data["social"]["facebook"] (or the corresponding key for another social network) to reflect the raw data given to us by ``import_social_media_user()``.
        """

        # Non-destructive update - don't remove values which might not be present in the new data
        user.user_data["social"][self.provider_id] = user.user_data["social"].get(self.provider_id) or {}
        user.user_data["social"][self.provider_id].update(data)

        # Because we are mutating the dict in place, SQLAlchemy cannot detect
        # the change on its own, so flag the attribute explicitly.
        flag_modified(user, "user_data")
Example #31
def api_ci_commit(commit_id=None):
  if request.method == 'POST':
    hexsha = request.json.get('commit_sha', request.json['git_commit_sha']) if not commit_id else commit_id
    try:
      commit = CiCommit.get_or_create(
        session=db_session,
        hexsha=hexsha,
        project_id=request.json['project'],
        data=request.json,
      )
    except:
      return f"404 ERROR:\n ({request.json['project']}): There is an issue with your commit id ({request.json['git_commit_sha']})", 404
    if not commit.data:
      commit.data = {}
    # Clients can store any metadata with each commit.
    # We've been using it to store code quality metrics per subproject in our monorepo,
    # Then we use other tools (e.g. metabase) to create dashboards.
    commit_data = request.json.get('data', {})
    commit.data = {**commit.data, **commit_data}
    flag_modified(commit, "data")
    if commit.deleted:
      commit.deleted = False
    db_session.add(commit)
    db_session.commit()
    return jsonify({"status": "OK"})


  project_id = request.args['project']
  if not commit_id:
    commit_id = request.args.get('commit', None)
    try:
      project = Project.query.filter(Project.id==project_id).one()
      default_branch = project.data['qatools_config']['project']['reference_branch']
    except:
      default_branch = 'master'
    branch = request.args.get('branch', default_branch)
    ci_commit = latest_successful_commit(db_session, project_id=project_id, branch=branch, batch_label=request.args.get('batch'))
    if not ci_commit:
      return jsonify({'error': f'Sorry, we cant find any commit with results for the {project_id} on {branch}.'}), 404
  else:
    try:
      ci_commit = (db_session
                   .query(CiCommit)
                   .options(
                     joinedload(CiCommit.batches).
                     joinedload(Batch.outputs)
                    )
                   .filter(
                     CiCommit.project_id==project_id,
                     CiCommit.hexsha.startswith(commit_id),
                   )
                   .one()
                  )
    except NoResultFound:
      try:
        # TODO: This is a valid use case for having read-rights to the repo,
        #       we can identify a commit by the tag/branch
        #       To replace this without read rights, we should listen for push events and build a database
        project = Project.query.filter(Project.id==project_id).one()
        commit = project.repo.tags[commit_id].commit
        try:
          commit = project.repo.commit(commit_id)
        except:
          try:
            commit = project.repo.refs[commit_id].commit
          except:
            commit = project.repo.tags[commit_id].commit
        ci_commit = CiCommit(commit, project=project)
        db_session.add(ci_commit)
        db_session.commit()
      except:
        return jsonify({'error': f'Sorry, we could not find any data on commit {commit_id} in project {project_id}.'}), 404
    except BadName:
      return jsonify({'error': f'Sorry, we could not understand the commit ID {commit_id} for project {project_id}.'}), 404
    except Exception as e:
      raise(e)
      return jsonify({'error': 'Sorry, the request failed.'}), 500

  batch = request.args.get('batch', None)
  with_batches = [batch] if batch else None # by default we show all batches
  with_aggregation = json.loads(request.args.get('metrics', '{}'))
  response = make_response(ujson.dumps(ci_commit.to_dict(with_aggregation, with_batches=with_batches, with_outputs=True)))
  response.headers['Content-Type'] = 'application/json'
  return response
Example #32
    def update_room(self, vc_room, event):
        client = AdminClient(self.settings)

        try:
            room_obj = self.get_room(vc_room)
        except RoomNotFoundAPIException:
            raise VCRoomNotFoundError(
                _("This room has been deleted from Vidyo"))

        owner = retrieve_principal(vc_room.data['owner'])
        changed_owner = room_obj.ownerName not in iter_user_identities(owner)
        if changed_owner:
            login_gen = iter_user_identities(owner)
            login = next(login_gen, None)
            if login is None:
                raise VCRoomError(
                    _("No valid Vidyo account found for this user"),
                    field='owner_user')
            room_obj.ownerName = login

        room_obj.name = vc_room.name
        room_obj.description = vc_room.data['description']

        room_obj.RoomMode.hasPIN = bool(vc_room.data['room_pin'])
        room_obj.RoomMode.hasModeratorPIN = bool(
            vc_room.data['moderation_pin'])

        if room_obj.RoomMode.hasPIN:
            room_obj.RoomMode.roomPIN = vc_room.data['room_pin']
        if room_obj.RoomMode.hasModeratorPIN:
            room_obj.RoomMode.moderatorPIN = vc_room.data['moderation_pin']

        vidyo_id = vc_room.data['vidyo_id']
        while True:
            try:
                client.update_room(vidyo_id, room_obj)
            except RoomNotFoundAPIException:
                raise VCRoomNotFoundError(
                    _("This room has been deleted from Vidyo"))
            except APIException as err:
                err_msg = err.message
                if err_msg.startswith('Room exist for name'):
                    raise VCRoomError(_("Room name already in use"),
                                      field='name')

                elif err_msg.startswith('Member not found for ownerName'):
                    if changed_owner:
                        login = next(login_gen, None)
                    if not changed_owner or login is None:
                        raise VCRoomError(
                            _("No valid Vidyo account found for this user"),
                            field='owner_user')
                    room_obj.ownerName = login

                else:
                    raise
            else:
                updated_room_obj = self.get_room(vc_room)

                update_room_from_obj(self.settings, vc_room, updated_room_obj)
                flag_modified(vc_room, 'data')

                client.set_automute(vidyo_id, vc_room.data['auto_mute'])
                break
Example #33
 def add_to_pit(self, pit_num):
     self.pits[pit_num] += 1
     flag_modified(self, 'pits')
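
The same rule covers JSON-backed lists such as the pits column above: item assignment is an in-place mutation the session cannot see. A self-contained sketch (hypothetical Game model, assuming SQLAlchemy 1.4+):

from sqlalchemy import JSON, Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base
from sqlalchemy.orm.attributes import flag_modified

Base = declarative_base()

class Game(Base):  # hypothetical, in the spirit of Example #33
    __tablename__ = 'game'
    id = Column(Integer, primary_key=True)
    pits = Column(JSON, default=list)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    game = Game(pits=[4, 4, 4, 4, 4, 4])
    session.add(game)
    session.commit()

    game.pits[2] += 1            # in-place list mutation: not tracked
    flag_modified(game, 'pits')  # mark the JSON column dirty
    session.commit()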
Example #34
def update_batch():
    data = request.get_json()
    try:
        ci_commit = CiCommit.get_or_create(
            session=db_session,
            hexsha=request.json['git_commit_sha'],
            project_id=request.json['project'],
            data=data,
        )
    except:
        return f"404 ERROR:\n ({request.json['project']}): There is an issue with your commit id ({request.json['git_commit_sha']})", 404

    batch = ci_commit.get_or_create_batch(data['batch_label'])
    # Clients can store any metadata in each batch.
    # Currently it's used by `qa optimize` to store info on iterations
    if not batch.data:
        batch.data = {}
    batch_data = request.json.get('data', {})
    # And each batch can have changes vs its commit's config and metrics.
    # The use case is usually working locally with `qa --share` and
    # seeing updated visualizations and metrics.
    if "qaboard_config" in data and data["qaboard_config"] != ci_commit.data[
            "qatools_config"]:
        batch.data["config"] = data["qaboard_config"]
    if "qaboard_metrics" in data and data["qaboard_metrics"] != ci_commit.data[
            "qatools_metrics"]:
        batch.data["qatools_config"] = data["qaboard_metrics"]
    batch.data = {**batch.data, **batch_data}

    # Save info on each "qa batch" command in the batch, mainly to list them in logs
    command = request.json.get('command')
    if command:
        batch.data["commands"] = {**batch.data.get('commands', {}), **command}
        flag_modified(batch, "data")

    # It's a `qa optimize` experiment
    if batch_data.get('optimization'):
        if 'best_iter' in batch_data:
            # we will save the outputs from the best iteration in the batch,
            # so first we need to remove any previous best results
            for o in batch.outputs:
                if o.output_type != 'optim_iteration':
                    o.delete(soft=False)
            db_session.add(batch)
            db_session.commit()
            # Move results from the best iteration in this batch
            batch_batch_label = batch_data['last_iteration_label']
            best_batch = ci_commit.get_or_create_batch(batch_batch_label)
            for o in best_batch.outputs:
                o.output_dir_override = str(o.output_dir)
                o.batch = batch
                db_session.add(o)
            db_session.commit()

            # Deleting old iterations
            for b in ci_commit.batches:
                if b.label.startswith(
                        f"{data['batch_label']}|iter"
                ) and b.label != batch_data['last_iteration_label']:
                    print(f'Deleting {b.label}')
                    if b.label != batch_data['last_iteration_label']:
                        b.delete(db_session)

    db_session.add(batch)
    db_session.commit()
    return jsonify({"status": "OK", "id": batch.id})
Example #35
    def run(self, event):
        """Run AVC workflow for video transcoding.

        Steps:
          * Download the video file (if not done yet).
          * Extract metadata from the video.
          * Run video transcoding.
          * Extract frames from the video.

        Mandatory fields in the payload:
          * uri, if the video needs to be downloaded.
          * bucket_id, only if URI is provided.
          * key, only if URI is provided.
          * version_id, if the video has been downloaded via HTTP (the previous
            fields are not needed in this case).
          * deposit_id

        Optional:
          * sse_channel, if set all the tasks will publish their status update
            to it.
          * video_presets, if not set the default presets will be used.
          * frames_start, if not set the default value will be used.
          * frames_end, if not set the default value will be used.
          * frames_gap, if not set the default value will be used.

        For more info see the tasks used in the workflow:
          * :func: `~cds.modules.webhooks.tasks.download_to_object_version`
          * :func: `~cds.modules.webhooks.tasks.video_metadata_extraction`
          * :func: `~cds.modules.webhooks.tasks.video_extract_frames`
          * :func: `~cds.modules.webhooks.tasks.video_transcode`
        """
        assert ('uri' in event.payload and 'bucket_id' in event.payload
                and 'key' in event.payload) or ('version_id' in event.payload)
        assert 'deposit_id' in event.payload

        event_id = str(event.id)

        with db.session.begin_nested():
            if 'version_id' in event.payload:
                object_version = as_object_version(event.payload['version_id'])
                first_step = video_metadata_extraction.si(
                    uri=object_version.file.uri,
                    object_version=str(object_version.version_id),
                    deposit_id=event.payload['deposit_id'])
            else:
                object_version = ObjectVersion.create(
                    bucket=event.payload['bucket_id'],
                    key=event.payload['key'])
                ObjectVersionTag.create(object_version, 'uri_origin',
                                        event.payload['uri'])
                first_step = group(
                    download_to_object_version.si(
                        #  event.payload['uri'],
                        object_version=str(object_version.version_id),
                        event_id=event_id,
                        **event.payload),
                    video_metadata_extraction.si(
                        #  event.payload['uri'],
                        object_version=str(object_version.version_id),
                        event_id=event_id,
                        **event.payload),
                )

            ObjectVersionTag.create(object_version, '_event_id', event_id)

        mypayload = event.payload
        obj_id = str(object_version.version_id)
        obj_key = object_version.key
        obj_tags = object_version.get_tags()
        db.session.expunge(event)
        db.session.commit()

        result = chain(
            first_step,
            group(
                video_transcode.si(object_version=obj_id,
                                   event_id=event_id,
                                   **mypayload),
                video_extract_frames.si(object_version=str(obj_id),
                                        event_id=event_id,
                                        **mypayload),
            ),
        ).apply_async()

        with db.session.begin_nested():
            self._serialize_result(event=event, result=result)

            event.response.update(
                links=dict(),
                key=object_version.key,
                version_id=obj_id,
                tags=obj_tags,
            )
            flag_modified(event, 'response')
            flag_modified(event, 'response_headers')
            db.session.add(event)
        db.session.commit()
Example #36
    def get_or_create(session, hexsha, project_id, data=None):
        try:
            ci_commit = (session.query(CiCommit).filter(
                CiCommit.project_id == project_id,
                CiCommit.hexsha.startswith(hexsha),
            ).one())
        except NoResultFound:
            # FIXME: if not a complete hash, fall back to git...? or raise an error? (then docs: ask for the full hash..) or symmetric: if hexsha.startswith(query)
            try:
                from backend.models import Project
                project = Project.get_or_create(session=session, id=project_id)
                if data and data.get('qaboard_config'):
                    is_initialization = not project.data or 'qatools_config' not in data
                    reference_branch = data["qaboard_config"]['project'].get(
                        'reference_branch', 'master')
                    is_reference = data.get(
                        "commit_branch") == reference_branch
                    if is_initialization or is_reference:
                        # FIXME: We put in Project.data.git the content of
                        #       https://docs.gitlab.com/ee/user/project/integrations/webhooks.html#push-events
                        # FIXME: We should really have Project.data.gitlab/github/...
                        if "git" not in project.data:
                            project.data["git"] = {}
                        if "path_with_namespace" not in project.data[
                                "git"] and "name" in data["qaboard_config"].get(
                                    "project", {}
                                ):  # FIXME: it really should be Project.root
                            # FIXME: Doesn't support updates for now... again should have .id: int, name: str, root: str...
                            project.data["git"]["path_with_namespace"] = data[
                                "qaboard_config"]["project"]["name"]
                        project.data.update(
                            {'qatools_config': data['qaboard_config']})
                        if "qaboard_metrics" in data:
                            project.data.update(
                                {'qatools_metrics': data["qaboard_metrics"]})
                        flag_modified(project, "data")
                else:
                    # For backward-compatibility we fallback to reading the data from the commit itself
                    # But in regular use QA-Board doesn't require read rights on repositories
                    try:
                        git_commit = project.repo.commit(hexsha)
                    except Exception as e:
                        error = f'[ERROR] Could not find information on commit {hexsha}. {e}'
                        print(error)
                        raise ValueError(error)

                ci_commit = CiCommit(
                    hexsha,
                    project=project,
                    commit_type='git',  # we don't use anything else
                    parents=data["commit_parents"] if
                    (data and "commit_parents" in data) else
                    [c.hexsha for c in git_commit.parents],
                    message=data["commit_message"] if
                    (data
                     and "commit_message" in data) else git_commit.message,
                    committer_name=data["commit_committer_name"] if
                    (data and "commit_committer_name" in data) else
                    git_commit.committer.name,
                    authored_datetime=data["commit_authored_datetime"] if
                    (data and "commit_authored_datetime" in data) else
                    git_commit.authored_datetime,
                    # commits belong to many branches, so this is a guess
                    branch=data["commit_branch"] if
                    (data and "commit_branch" in data) else find_branch(
                        hexsha, project.repo),
                )
                if data and data.get('qaboard_config'):
                    ci_commit.data.update(
                        {'qatools_config': data['qaboard_config']})
                    if "qaboard_metrics" in data:
                        ci_commit.data.update(
                            {'qatools_metrics': data['qaboard_metrics']})
                    flag_modified(ci_commit, "data")
                session.add(ci_commit)
                session.commit()
            except ValueError:
                error = f'[ERROR] ValueError: could not create a commit for {hexsha}'
                print(error)
                raise ValueError(error)
        if not ci_commit.data:
            ci_commit.data = {}
        return ci_commit
Example #37
 def save_data(cls, state: BaseState):
     flag_modified(state.user, 'external')
Example #38
 def update_vulnerability(self, vulnerability):
     self.session.add(vulnerability)
     flag_modified(vulnerability, "custom_fields")
     self.session.commit()
     return self._get_create_command_object(vulnerability, 'vulnerability')
Example #39
 def del_value(cls, key, only_children=False, subspecifier_value=None):
     # filter() takes SQL expressions, not keyword arguments; use filter_by()
     # and fetch an instance so the attribute can be mutated and flagged.
     setting = sa.get_scoped_session().query(DbSetting).filter_by(key=key).first()
     setting.val = None
     setting.time = timezone.datetime.utcnow()
     flag_modified(setting, "val")
     setting.save()
Example #40
def update_jsonb_row(stowed):
    """Jump through some hoops to commit changes to a JSONB column."""
    flag_modified(stowed, 'json')
    db.session.merge(stowed)
    std_commit()
Example #41
def new_output_webhook():
    """Updates the database when we get new results."""
    data = request.get_json()
    hexsha = data.get('commit_sha', data['git_commit_sha'])
    # We get a handle on the Commit object related to our new output
    try:
        ci_commit = CiCommit.get_or_create(
            session=db_session,
            hexsha=hexsha,
            project_id=data['project'],
            data=data,
        )
    except Exception as e:
        return jsonify({
            "error":
            f"Could not find your commit ({data['git_commit_sha']}). {e}"
        }), 404

    ci_commit.project.latest_output_datetime = datetime.datetime.utcnow()
    ci_commit.latest_output_datetime = datetime.datetime.utcnow()

    # We make sure the Test on which we ran exists in the database
    test_input_path = data.get('input_path')
    if not test_input_path:
        return jsonify({"error": "the input path was not provided"}), 400
    test_input = TestInput.get_or_create(
        db_session,
        path=test_input_path,
        database=data.get('database', ci_commit.project.database),
    )

    # We save the basic information about our result
    batch = ci_commit.get_or_create_batch(data['batch_label'])
    if not batch.data:
        batch.data = {}
    batch.data.update({"type": data['job_type']})
    if data.get('input_metadata'):
        test_input.data['metadata'] = data['input_metadata']
        flag_modified(test_input, "data")

    platform = data['platform']
    # if platform == 'lsf':
    #   platform = 'linux'
    # elif platform == 'windows':
    #   platform = 'win32'

    configurations = deserialize_config(
        data['configuration']
    ) if 'configuration' in data else data['configurations']
    output = Output.get_or_create(
        db_session,
        batch=batch,
        platform=platform,
        configurations=configurations,
        extra_parameters=data['extra_parameters'],
        test_input=test_input,
    )
    output.output_type = data.get('input_type', '')

    output.data = data.get('data', {})
    # we can only trust CI outputs to run on the exact code from the commit
    output.data["ci"] = data['job_type'] == 'ci'
    if output.deleted:
        output.deleted = False

    # We allow users to save their data in custom locations
    # at the commit and output levels
    if Path(data.get('commit_ci_dir', ci_commit.commit_dir)).resolve() != Path(
            ci_commit.commit_dir):
        ci_commit.commit_dir_override = data.get('commit_ci_dir')
    if Path(data.get('output_directory',
                     output.output_dir)) != output.output_dir:
        output.output_dir_override = data.get('output_directory')

    # We update the output's status
    output.is_running = data.get('is_running', False)
    if output.is_running:
        output.is_pending = True
    else:
        output.is_pending = data.get('is_pending', False)

    # We save the output's metrics
    if not output.is_pending:
        metrics = data.get('metrics', {})
        output.metrics = metrics
        output.is_failed = data.get('is_failed',
                                    False) or metrics.get('is_failed')

    db_session.add(output)
    db_session.commit()
    return jsonify(output.to_dict())
Example #42
    def run(self, event):
        """Create object version and send celery task to download.

        Mandatory fields in the payload:
          * uri, location of the file to download.
          * bucket_id
          * key, file name.
          * deposit_id

        Optional:
          * sse_channel, if set all the tasks will publish their status update
            to it.

        For more info see the task
        :func: `~cds.modules.webhooks.tasks.download_to_object_version` this
        receiver is using.
        """
        assert 'bucket_id' in event.payload
        assert 'uri' in event.payload
        assert 'key' in event.payload
        assert 'deposit_id' in event.payload

        event_id = str(event.id)

        with db.session.begin_nested():
            object_version = ObjectVersion.create(
                bucket=event.payload['bucket_id'], key=event.payload['key'])

            ObjectVersionTag.create(object_version, 'uri_origin',
                                    event.payload['uri'])
            ObjectVersionTag.create(object_version, '_event_id', event_id)
            db.session.expunge(event)
        db.session.commit()

        task = download_to_object_version.s(
            #event.payload['uri'],
            object_version=str(object_version.version_id),
            event_id=event_id,
            **event.payload)

        self._serialize_result(event=event, result=task.apply_async())

        with db.session.begin_nested():
            object_version = as_object_version(object_version.version_id)
            event.response.update(
                links={
                    'self':
                    url_for(
                        'invenio_files_rest.object_api',
                        bucket_id=str(object_version.bucket_id),
                        key=object_version.key,
                        _external=True,
                    ),
                    'version':
                    url_for(
                        'invenio_files_rest.object_api',
                        bucket_id=str(object_version.bucket_id),
                        key=object_version.key,
                        versionId=str(object_version.version_id),
                        _external=True,
                    ),
                    'cancel':
                    url_for(
                        'invenio_webhooks.event_list',
                        receiver_id='downloader',
                        _external=True,
                    )
                },
                key=object_version.key,
                version_id=str(object_version.version_id),
                tags=object_version.get_tags(),
            )
            flag_modified(event, 'response')
            flag_modified(event, 'response_headers')
            db.session.add(event)
        db.session.commit()
Example #43
 def remove_from_pit(self, pit_num):
     in_pit = self.pits[pit_num]
     self.pits[pit_num] = 0
     flag_modified(self, 'pits')
     return in_pit
Example #44
    def post_track_status(self, allow_check=True):
        allow_post = True
        if allow_check:
            last_post_date = self.last_post_date
            if last_post_date:
                today = date.today()
                previous_date = today - timedelta(days=7)

                if previous_date != self.last_post_date:
                    allow_post = False

        if allow_post:
            top_tracks = self.get_top_tracks()

            posted_tracks_ids = self.posted_tracks
            if not posted_tracks_ids:
                posted_tracks_ids = []
                track = top_tracks[0]
            else:
                # posted_tracks_ids = json.loads(posted_tracks_ids)
                min_found = 5
                for track_item in top_tracks:
                    if track_item['id'] in posted_tracks_ids:
                        found = posted_tracks_ids.index(track_item['id'])

                        if found < min_found:
                            min_found = found
                            track = track_item
                    else:
                        track = track_item
                        break

            twitter_profile = self.twitter_profile

            credentials = TwitterProfile.get_credentials()
            api = twitter.Api(consumer_key=credentials[0],
                              consumer_secret=credentials[1],
                              access_token_key=twitter_profile.token,
                              access_token_secret=twitter_profile.token_secret)

            domain_url = settings.domain_url
            tweet = (
                f"{track['name']} - {track['artist']}: {track['url']}\n"
                "This is one of my most listened songs on Spotify the last few weeks.\n\n"
                f"Check yours at: {domain_url}")

            try:
                status = api.PostUpdate(tweet)
            except Exception as e:
                print(f'Unable to post status on user {self.id}. Error: {e}')
            else:
                if status:
                    status_url = f'https://twitter.com/{twitter_profile.username}/status/{status.id}'
                    self.last_post_date = date.today()

                    if len(posted_tracks_ids) == 4:
                        posted_tracks_ids.pop(0)
                    posted_tracks_ids.append(track['id'])
                    self.posted_tracks = posted_tracks_ids

                    flag_modified(self, 'posted_tracks')

                    db.session.commit()

                    return status_url

        return None
Example #45
 def _merge_users(self, target, source, **kwargs):
     super(VidyoPlugin, self)._merge_users(target, source, **kwargs)
     for ext in VidyoExtension.find(owned_by_user=source):
         ext.owned_by_user = target
         flag_modified(ext.vc_room, 'data')
Example #46
 def update_meta(self):
     flag_modified(self, 'meta')
Example #47
File: user.py, Project: MrBean355/pajbot
 def login(self, new_login: str) -> None:
     self._login = new_login
     # force SQLAlchemy to update the value in the database even if the value did not change
     # see above comment for details on why this is implemented this way
     flag_modified(self, "_login")
Example #48
def signup():
    if ("name" not in request.json or "phoneNumber" not in request.json
            or "cards" not in request.json or "fcmToken" not in request.json):
        print('ERROR: One of the required fields is missing')
        return "", constants.STATUS_BAD_REQUEST

    if ("password" not in request.json and "phoneAuth" not in request.json):
        print('ERROR: No password or phone verification found.')
        return "", constants.STATUS_BAD_REQUEST

    name = request.json["name"]
    phoneNumber = request.json["phoneNumber"]
    inviteCodeUsed = request.json["inviteCode"]
    fcmToken = ""
    profileImgUrl = "" if "profileImgUrl" not in request.json else request.json[
        "profileImgUrl"]
    fcmToken = "" if "fcmToken" not in request.json else request.json[
        "fcmToken"]
    created_time = utils.getDateTimeAsString(
        datetime.datetime.now(tz=pytz.timezone(constants.TIMEZONE_KOLKATA)))

    # Cards
    selectedCards = json.loads(request.json["cards"])
    cards = []
    for selectedCard in selectedCards:
        cards.append(selectedCard["id"])

    strPhone = str(phoneNumber)
    print('Adding user: ' + strPhone)
    # The scraped source redacted a block here: it checked for an existing
    # user with this phone number and built `idCode` (used below). The check
    # that follows is an assumed reconstruction, not the project's code.
    if User.query.filter_by(phoneNumber=phoneNumber).first() is not None:
        print('ERROR: User with phone number already exists')
        db.session.close()
        return "", constants.STATUS_CONFLICT_ERROR

    newUser = User(name=name,
                   fcmToken=fcmToken,
                   upiID="",
                   phoneNumber=phoneNumber,
                   inviteCodeUsed=inviteCodeUsed,
                   cards=cards,
                   suspended=False,
                   joined=created_time,
                   idCode=idCode,
                   profileImgUrl=profileImgUrl)

    # SQLAlchemy not aware of ARRAY changes unless flag_modified is called
    flag_modified(newUser, 'cards')
    db.session.add(newUser)

    try:
        db.session.flush()
        for selectedCard in selectedCards:
            theCard = Card.query.get(selectedCard["id"])
            if theCard is not None:
                if theCard.users is None:
                    theCard.users = []
                theCard.users.append(newUser.id)
                flag_modified(theCard, 'users')
        db.session.commit()
        newUserId = newUser.id
        newUserPhone = newUser.phoneNumber
        access_token = newUser.generate_auth_token(
            expiration=constants.TOKEN_EXPIRATION,
            key=os.environ['SECRET_KEY']).decode('ascii')
    except exc.IntegrityError as ex:
        db.session.rollback()
        print('ERROR: For User: ' + strPhone + '. Exception while committing to database: ' + str(ex))
        return "", constants.STATUS_CONFLICT_ERROR
    except Exception as err:
        db.session.rollback()
        print('ERROR: For User: ' + strPhone +
              '. Exception while committing to database: ' + str(err))
        return "", constants.STATUS_SERVER_ERROR
    finally:
        db.session.close()

    return jsonify({
        'access_token': access_token,
        'id': newUserId,
        "phoneNumber": newUserPhone
    }), constants.STATUS_OK
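The signup handler mutates an ARRAY column in place (theCard.users.append), which the unit of work cannot see without an explicit flag. A reduced sketch of the Card side of that pattern; the column types are assumptions:

from sqlalchemy import Column, Integer
from sqlalchemy.dialects.postgresql import ARRAY
from sqlalchemy.orm import declarative_base
from sqlalchemy.orm.attributes import flag_modified

Base = declarative_base()

class Card(Base):
    __tablename__ = 'card'
    id = Column(Integer, primary_key=True)
    users = Column(ARRAY(Integer))

def add_user_to_card(session, card, user_id):
    if card.users is None:
        card.users = []
    card.users.append(user_id)    # in-place append: invisible to the ORM
    flag_modified(card, 'users')  # mark the column dirty explicitly
    session.commit()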
Example #49
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import db
import os
from sqlalchemy.orm.attributes import flag_modified

# One-off cleanup: strip the stale '/home/provas/dados/' prefix from the
# scanned-sheet paths stored in the Attachments.sheets_data JSON column.
# folha_nao_digitalizada = '/var/data/nfs/provas/SGA/folha-nao-digitalizada.png'

for att in db.session.query(db.Attachments).all():
    if att.sheets_data is not None:
        modificado = False
        for pageInfo in att.sheets_data:
            if pageInfo['path'].startswith(
                    '/var/data/nfs/provas//home/provas/dados/SGA/2019b3/provas/'
            ):
                arquivo = pageInfo['path'].replace('/home/provas/dados/', '')
                print(pageInfo['path'], '--->', arquivo)
                pageInfo['path'] = arquivo
                modificado = True

        if modificado:
            # mutating dicts nested in the JSON list is invisible to the ORM
            flag_modified(att, "sheets_data")
            db.session.commit()
            print('Updated')
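Generalized, the migration above is a sweep-and-flag loop. A sketch under assumed names (old_prefix, new_prefix), which also batches the work into a single commit instead of one per changed row:

from sqlalchemy.orm.attributes import flag_modified

def rewrite_sheet_paths(session, Attachments, old_prefix, new_prefix):
    """Sweep all rows, fixing stale path prefixes nested in a JSON column."""
    for att in session.query(Attachments).all():
        changed = False
        for page in att.sheets_data or []:
            if page['path'].startswith(old_prefix):
                page['path'] = new_prefix + page['path'][len(old_prefix):]
                changed = True
        if changed:
            # dicts nested inside the JSON list are plain dicts: mutating
            # them never marks the row dirty, so flag before committing
            flag_modified(att, 'sheets_data')
    session.commit()  # this sketch uses one transaction for the whole sweep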
Example #50
0
def send_notification_emails(sqla):
    __banned_users_to_check = sqla.session.query(
        sqlm.User).filter_by(banned=True).all()
    for _u in __banned_users_to_check:
        notifications = sqla.session.query(sqlm.Notification) \
            .filter_by(seen=False, acknowledged=False, emailed=False) \
            .filter_by(user=_u) \
            .order_by(sqla.desc(sqlm.Notification.created)).all()
        for n in notifications:
            n.emailed = True
            sqla.session.add(n)
            sqla.session.commit()

    __muted_users_to_check = sqla.session.query(
        sqlm.User).filter_by(emails_muted=True).all()
    for _u in __muted_users_to_check:
        notifications = sqla.session.query(sqlm.Notification) \
            .filter_by(seen=False, acknowledged=False, emailed=False) \
            .filter_by(user=_u) \
            .order_by(sqla.desc(sqlm.Notification.created)).all()
        for n in notifications:
            n.emailed = True
            sqla.session.add(n)
            sqla.session.commit()

    _users_to_check = sqla.session.query(sqlm.User).filter_by(
        banned=False, validated=True).all()

    notification_formats = {}
    notification_full_names = {}
    for t in sqlm.Notification.NOTIFICATION_CATEGORIES:
        notification_formats[t[0]] = t[3]
        notification_full_names[t[0]] = (t[4], t[5])

    for u in _users_to_check:
        if u.banned:
            continue

        if u.minimum_time_between_emails is None:
            u.minimum_time_between_emails = 360

        notifications = sqla.session.query(sqlm.Notification) \
            .filter_by(seen=False, acknowledged=False, emailed=False) \
            .filter_by(user=u) \
            .order_by(sqla.desc(sqlm.Notification.created)).all()
        notifications_count = len(notifications)

        try:
            if u.last_sent_notification_email > arrow.utcnow().replace(
                    minutes=-u.minimum_time_between_emails).datetime.replace(
                        tzinfo=None):
                continue
        except TypeError:
            # last_sent_notification_email may still be None
            pass

        _list = []
        _list_k = {}
        _list_url = {}

        _details = []
        _details_k = {}

        _summaries = []
        _summaries_k = {}
        _total = 0

        for n in notifications:
            if u.notification_preferences is None:
                u.notification_preferences = {}
                flag_modified(u, "notification_preferences")
                sqla.session.add(u)
                sqla.session.commit()

            if not u.notification_preferences.get(n.category, {
                    "email": True
            }).get("email"):
                n.emailed = True
                sqla.session.add(n)
                sqla.session.commit()
                continue
            else:
                _total += 1

            if notification_formats[n.category] == "summarized":
                if n.category not in _summaries_k:
                    _summaries_k[n.category] = 1
                    _summaries.append(n.category)
                else:
                    _summaries_k[n.category] += 1

            if notification_formats[n.category] == "listed":
                if n.category not in _summaries_k:
                    _summaries_k[n.category] = 1
                    _summaries.append(n.category)
                else:
                    _summaries_k[n.category] += 1

                if _base_url + n.url in _list_url:
                    _list_url[_base_url + n.url] += 1
                    continue
                else:
                    _list_url[_base_url + n.url] = 1

                if n.category not in _list_k:
                    _list_k[n.category] = [{
                        "message":
                        n.author.display_name + " " + n.message,
                        "url":
                        _base_url + n.url
                    }]
                    _list.append(n.category)
                else:
                    _list_k[n.category].append({
                        "message":
                        n.author.display_name + " " + n.message,
                        "url":
                        _base_url + n.url
                    })

            if notification_formats[n.category] == "detailed":
                if n.category not in _summaries_k:
                    _summaries_k[n.category] = 1
                    _summaries.append(n.category)
                else:
                    _summaries_k[n.category] += 1

                if n.category not in _details_k:
                    _details_k[n.category] = [{
                        "url":
                        _base_url + n.url,
                        "message":
                        n.author.display_name + " " + n.message,
                        "description":
                        get_preview_for_email(n.snippet)
                    }]
                    _details.append(n.category)
                else:
                    _details_k[n.category].append({
                        "url":
                        _base_url + n.url,
                        "message":
                        n.author.display_name + " " + n.message,
                        "description":
                        get_preview_for_email(n.snippet)
                    })

        if not u.emails_muted:
            _to_email_address = False
            if _debug:
                if not u.is_admin and not u.is_allowed_during_construction:
                    continue
                else:
                    _to_email_address = u.email_address
            else:
                _to_email_address = u.email_address

            if len(_list) == 0 and len(_details) == 0 and len(_summaries) == 0:
                continue

            _template = get_template_lookup().get_template("notification")
            _rendered = _template.render(
                _user=u,
                _base=_base_url,
                _list=_list,
                _list_k=_list_k,
                _list_url=_list_url,
                _details=_details,
                _details_k=_details_k,
                _summaries=_summaries,
                _summaries_k=_summaries_k,
                _notification_names=notification_full_names)

            u.last_sent_notification_email = arrow.utcnow().datetime.replace(
                tzinfo=None)
            sqla.session.add(u)
            sqla.session.commit()

            notifications_update = sqla.session.query(sqlm.Notification) \
                .filter_by(seen=False, acknowledged=False, emailed=False) \
                .filter_by(user=u) \
                .all()
            for n in notifications_update:
                n.emailed = True
                sqla.session.add(n)
            sqla.session.commit()

            if _total == 1:
                subject = "You have a notification at %s" % (
                    app.get_site_config("core.site-name"), )
            else:
                subject = "You have %s notifications at %s" % (
                    _total, app.get_site_config("core.site-name"))

            if not app.settings_file.get("lockout_on", False):
                result = requests.post(
                    _mgurl + "/messages",
                    auth=("api", _api),
                    data={
                        "from":
                        "%s <%s>" %
                        (app.get_site_config("core.site-email-name"),
                         app.get_site_config("core.site-email")),
                        "to":
                        _to_email_address,
                        "subject":
                        subject,
                        "text":
                        _rendered
                    })
            else:
                result = "LOCKDOWN ON"

            new_email_log = sqlm.EmailLog()
            new_email_log.to = u
            new_email_log.sent = arrow.utcnow().datetime.replace(tzinfo=None)
            new_email_log.subject = subject
            new_email_log.body = _rendered
            new_email_log.result = str(result)
            sqla.session.add(new_email_log)
            sqla.session.commit()
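Buried in the loop above is a reusable idiom: lazily initializing a nullable JSON column before reading preferences out of it. A sketch of that idiom on its own; the function name is assumed:

from sqlalchemy.orm.attributes import flag_modified

def wants_email(session, user, category):
    """Lazily materialize the preferences dict; email defaults to on."""
    if user.notification_preferences is None:
        user.notification_preferences = {}
        # reassignment is normally detected; the flag makes the write explicit
        flag_modified(user, 'notification_preferences')
        session.add(user)
        session.commit()
    return user.notification_preferences.get(
        category, {'email': True}).get('email')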
Example #51
0
def main(argv):
    """main()"""
    try:
        opts, args = getopt.getopt(argv, "r")
    except getopt.GetoptError:
        print('update_stats.py -r')
        sys.exit(2)
    refresh = False
    for opt, arg in opts:
        if opt == '-r':
            refresh = True
            print("Recalculating statistics from the scratch")

    logger = logging.getLogger('collector')
    logger.propagate = False
    logger.setLevel(logging.INFO)
    handler = RotatingFileHandler(os.path.join(ROOT, 'logs/collector.log'),
                                  maxBytes=10000000,
                                  backupCount=5,
                                  encoding='utf-8',
                                  mode='a')
    handler.setFormatter(
        logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
    logger.addHandler(handler)
    st = StatsHandler()
    matches_folder = st.matches_folder()

    start_date = DT.date.today() - DT.timedelta(days=1)

    agent = bb2.Agent(app.config['BB2_API_KEY'])

    stats = st.get_stats(refresh)

    logger.info("Getting matches since %s", start_date)

    try:
        data = agent.matches(start=start_date,
                             league=','.join(app.config['LEAGUES']))
    except Exception as exc:
        logger.error(exc)
        raise

    logger.info("Matches colleted")

    st.set_folders()

    for match in data['matches']:
        filename = os.path.join(matches_folder, f"{match['uuid']}.json")
        if os.path.isfile(filename):
            logger.info("File %s exists", filename)
            continue
        logger.info("Collecting match %s", match['uuid'])
        try:
            detailed = agent.match(id=match['uuid'])
            with open(filename, "a") as file:
                file.write(json.dumps(detailed))
        except Exception as exc:
            logger.error(exc)
            raise
        logger.info("Match %s saved", match['uuid'])

    # stats rebuilding
    matchfiles = [
        f for f in os.listdir(matches_folder)
        if os.path.isfile(os.path.join(matches_folder, f))
    ]

    if not 'matchfiles' in stats:
        stats['matchfiles'] = []

    for file_name in matchfiles:
        with open(os.path.join(matches_folder, file_name), "r") as file:
            data = json.loads(file.read())

        match = bb2.Match(data)

        st.create_folder(st.competition_folder(match.competition_id()))
        st.write_file(
            os.path.join(st.competition_folder(match.competition_id()),
                         match.uuid()), data)

        if data['uuid'] in stats['matchfiles']:
            continue
        stats['matchfiles'].append(data['uuid'])

        logger.info("Processing stat calculation of match %s ", data['uuid'])

        # ignore concedes
        if bb2.match.is_concede(data):
            logger.info("Match %s is concede", data['uuid'])
            continue

        # initialize coaches
        coach1 = data['match']['coaches'][0]
        if not coach1['coachname'] in stats['coaches']:
            stats['coaches'][coach1['coachname']] = {
                'wins': 0,
                'losses': 0,
                'draws': 0,
                'matches': 0,
                'points': 0,
                'max': {}
            }
            stats['coaches'][coach1['coachname']]['name'] = coach1['coachname']
            stats['coaches'][coach1['coachname']]['teams'] = {}
        coach2 = data['match']['coaches'][1]
        if not coach2['coachname'] in stats['coaches']:
            stats['coaches'][coach2['coachname']] = {
                'wins': 0,
                'losses': 0,
                'draws': 0,
                'matches': 0,
                'points': 0,
                'max': {}
            }
            stats['coaches'][coach2['coachname']]['name'] = coach2['coachname']
            stats['coaches'][coach2['coachname']]['teams'] = {}

        # initialize teams
        team1 = data['match']['teams'][0]
        idraces1 = str(team1['idraces'])
        if not idraces1 in stats['teams']:
            stats['teams'][idraces1] = {
                'wins': 0,
                'losses': 0,
                'draws': 0,
                'matches': 0,
                'points': 0
            }
            stats['teams'][idraces1]['idraces'] = idraces1
        team2 = data['match']['teams'][1]
        idraces2 = str(team2['idraces'])
        if not idraces2 in stats['teams']:
            stats['teams'][idraces2] = {
                'wins': 0,
                'losses': 0,
                'draws': 0,
                'matches': 0,
                'points': 0
            }
            stats['teams'][idraces2]['idraces'] = idraces2

        # alias coaches and teams
        coach1_stats = stats['coaches'][coach1['coachname']]
        coach2_stats = stats['coaches'][coach2['coachname']]
        team1_stats = stats['teams'][idraces1]
        team2_stats = stats['teams'][idraces2]

        # initialize the team under coach
        if idraces1 not in coach1_stats['teams']:
            coach1_stats['teams'][idraces1] = {
                'wins': 0,
                'losses': 0,
                'draws': 0,
                'matches': 0,
                'points': 0
            }
        if idraces2 not in coach2_stats['teams']:
            coach2_stats['teams'][idraces2] = {
                'wins': 0,
                'losses': 0,
                'draws': 0,
                'matches': 0,
                'points': 0
            }

        # coach team alias
        coach1_team_stats = coach1_stats['teams'][idraces1]
        coach2_team_stats = coach2_stats['teams'][idraces2]

        coach1_stats['matches'] += 1
        team1_stats['matches'] += 1
        coach1_team_stats['matches'] += 1
        coach2_stats['matches'] += 1
        team2_stats['matches'] += 1
        coach2_team_stats['matches'] += 1

        for stat in [
                "inflictedtouchdowns",
                "inflictedtackles",
                "inflictedcasualties",
                'inflictedinjuries',
                'inflictedko',
                'inflicteddead',
                'inflictedmetersrunning',
                'inflictedpasses',
                'inflictedcatches',
                'inflictedinterceptions',
                'sustainedexpulsions',
                'sustainedcasualties',
                'sustainedko',
                'sustainedinjuries',
                'sustaineddead',
                'inflictedmeterspassing',
        ]:
            if not stat in coach1_stats:
                coach1_stats[stat] = 0
            coach1_stats[stat] += team1[stat]
            if not stat in coach2_stats:
                coach2_stats[stat] = 0
            coach2_stats[stat] += team2[stat]

            if not stat in team1_stats:
                team1_stats[stat] = 0
            team1_stats[stat] += team1[stat]
            if not stat in team2_stats:
                team2_stats[stat] = 0
            team2_stats[stat] += team2[stat]

            if not stat in coach1_team_stats:
                coach1_team_stats[stat] = 0
            coach1_team_stats[stat] += team1[stat]
            if not stat in coach2_team_stats:
                coach2_team_stats[stat] = 0
            coach2_team_stats[stat] += team2[stat]

        # sustained-touchdowns workaround
        if not 'sustainedtouchdowns' in coach1_stats:
            coach1_stats['sustainedtouchdowns'] = 0
        coach1_stats['sustainedtouchdowns'] += team2['inflictedtouchdowns']
        if not 'sustainedtouchdowns' in coach2_stats:
            coach2_stats['sustainedtouchdowns'] = 0
        coach2_stats['sustainedtouchdowns'] += team1['inflictedtouchdowns']

        if not 'sustainedtouchdowns' in team1_stats:
            team1_stats['sustainedtouchdowns'] = 0
        team1_stats['sustainedtouchdowns'] += team2['inflictedtouchdowns']
        if not 'sustainedtouchdowns' in team2_stats:
            team2_stats['sustainedtouchdowns'] = 0
        team2_stats['sustainedtouchdowns'] += team1['inflictedtouchdowns']

        if not 'sustainedtouchdowns' in coach1_team_stats:
            coach1_team_stats['sustainedtouchdowns'] = 0
        coach1_team_stats['sustainedtouchdowns'] += team2[
            'inflictedtouchdowns']
        if not 'sustainedtouchdowns' in coach2_team_stats:
            coach2_team_stats['sustainedtouchdowns'] = 0
        coach2_team_stats['sustainedtouchdowns'] += team1[
            'inflictedtouchdowns']

        # inflictedpushouts fix
        if not 'inflictedpushouts' in coach1_stats:
            coach1_stats['inflictedpushouts'] = 0
        coach1_stats['inflictedpushouts'] += sum([
            player['stats']['inflictedpushouts'] for player in team1['roster']
        ])
        if not 'inflictedpushouts' in coach2_stats:
            coach2_stats['inflictedpushouts'] = 0
        coach2_stats['inflictedpushouts'] += sum([
            player['stats']['inflictedpushouts'] for player in team2['roster']
        ])

        if not 'inflictedpushouts' in team1_stats:
            team1_stats['inflictedpushouts'] = 0
        team1_stats['inflictedpushouts'] += sum([
            player['stats']['inflictedpushouts'] for player in team1['roster']
        ])

        if not 'inflictedpushouts' in team2_stats:
            team2_stats['inflictedpushouts'] = 0
        team2_stats['inflictedpushouts'] += sum([
            player['stats']['inflictedpushouts'] for player in team2['roster']
        ])

        if not 'inflictedpushouts' in coach1_team_stats:
            coach1_team_stats['inflictedpushouts'] = 0
        coach1_team_stats['inflictedpushouts'] += sum([
            player['stats']['inflictedpushouts'] for player in team1['roster']
        ])

        if not 'inflictedpushouts' in coach2_team_stats:
            coach2_team_stats['inflictedpushouts'] = 0
        coach2_team_stats['inflictedpushouts'] += sum([
            player['stats']['inflictedpushouts'] for player in team2['roster']
        ])

        # max tracking
        for stat in [
                "inflictedtouchdowns", "inflictedtackles",
                "inflictedcasualties", 'inflictedinjuries',
                'inflictedinterceptions'
        ]:
            if not stat in coach1_stats['max']:
                coach1_stats['max'][stat] = 0
            if team1[stat] > coach1_stats['max'][stat]:
                coach1_stats['max'][stat] = team1[stat]

            if not stat in coach2_stats['max']:
                coach2_stats['max'][stat] = 0
            if team2[stat] > coach2_stats['max'][stat]:
                coach2_stats['max'][stat] = team2[stat]

        if not 'max_cas_win' in coach1_stats['max']:
            coach1_stats['max']['max_cas_win'] = 0
        if not 'max_cas_win' in coach2_stats['max']:
            coach2_stats['max']['max_cas_win'] = 0
        if not 'max_tvdiff_win' in coach1_stats['max']:
            coach1_stats['max']['max_tvdiff_win'] = 0
        if not 'max_tvdiff_win' in coach2_stats['max']:
            coach2_stats['max']['max_tvdiff_win'] = 0
        # wins/draws/losses
        if team1['inflictedtouchdowns'] > team2['inflictedtouchdowns']:
            coach1_stats['wins'] += 1
            coach1_stats['points'] += 3
            coach2_stats['losses'] += 1

            team1_stats['wins'] += 1
            team1_stats['points'] += 3
            team2_stats['losses'] += 1

            coach1_team_stats['wins'] += 1
            coach1_team_stats['points'] += 3
            coach2_team_stats['losses'] += 1

            # cas achievement check
            if team1['sustainedcasualties'] > coach1_stats['max'][
                    'max_cas_win']:
                coach1_stats['max']['max_cas_win'] = team1[
                    'sustainedcasualties']

            # down TV achievement check
            tv_diff = team2['value'] - team1['value']
            if tv_diff > coach1_stats['max']['max_tvdiff_win']:
                coach1_stats['max']['max_tvdiff_win'] = tv_diff

        elif team1['inflictedtouchdowns'] < team2['inflictedtouchdowns']:
            coach2_stats['wins'] += 1
            coach2_stats['points'] += 3
            coach1_stats['losses'] += 1

            team2_stats['wins'] += 1
            team2_stats['points'] += 3
            team1_stats['losses'] += 1

            coach2_team_stats['wins'] += 1
            coach2_team_stats['points'] += 3
            coach1_team_stats['losses'] += 1

            # cas achievement check
            if team2['sustainedcasualties'] > coach2_stats['max'][
                    'max_cas_win']:
                coach2_stats['max']['max_cas_win'] = team2[
                    'sustainedcasualties']

            # down TV achievement check
            tv_diff = team1['value'] - team2['value']
            if tv_diff > coach2_stats['max']['max_tvdiff_win']:
                coach2_stats['max']['max_tvdiff_win'] = tv_diff
        else:
            coach1_stats['draws'] += 1
            coach1_stats['points'] += 1
            coach2_stats['draws'] += 1
            coach2_stats['points'] += 1

            team1_stats['draws'] += 1
            team1_stats['points'] += 1
            team2_stats['draws'] += 1
            team2_stats['points'] += 1

            coach1_team_stats['draws'] += 1
            coach1_team_stats['points'] += 1
            coach2_team_stats['draws'] += 1
            coach2_team_stats['points'] += 1

        tcoach = CoachService.link_bb2_coach(coach1['coachname'],
                                             team1['teamname'])
        if tcoach:
            msg = f"{coach1['coachname']} account linked to {tcoach.short_name()}"
            Notificator("achievement").notify(msg)
            logger.info(msg)
        tcoach = CoachService.link_bb2_coach(coach2['coachname'],
                                             team2['teamname'])
        if tcoach:
            msg = f"{coach2['coachname']} account linked to {tcoach.short_name()}"
            Notificator("achievement").notify(msg)
            logger.info(msg)
        db.session.commit()
        logger.info("Stats calculation of match %s completed", data['uuid'])

    try:
        all_coaches = Coach.query.all()
        stats['coaches_extra'] = leaderboard_coach_schema.dump(
            all_coaches).data
        stats['coaches'].pop('', None)
        st.save_stats(stats)
    except Exception as exp:
        logger.error(exp)
        raise
    logger.info("Stats recalculated")

    logger.info("Achievement processing")
    # update achievements
    for coach in Coach.query.all():
        if not coach.bb2_name:
            continue
        coach_stats = stats['coaches'].get(coach.bb2_name, None)
        if not coach_stats:
            continue
        coach.achievements['match']['winwithall']['best'] = 0
        # team achievements
        for team_id, data in coach_stats['teams'].items():
            team_id = str(team_id)
            # win for all achievement
            if data['wins'] > 0:
                coach.achievements['match']['winwithall']['best'] += 1

            for key, ach in coach.achievements['team'][team_id][
                    'played'].items():
                ach['best'] = data['matches']
            for key, ach in coach.achievements['team'][team_id]['wins'].items(
            ):
                ach['best'] = data['wins']
            for key, ach in coach.achievements['team'][team_id][
                    'touchdowns'].items():
                ach['best'] = data['inflictedtouchdowns']
            for key, ach in coach.achievements['team'][team_id][
                    'casualties'].items():
                ach['best'] = data['inflictedcasualties']
            for key, ach in coach.achievements['team'][team_id]['kills'].items(
            ):
                ach['best'] = data['inflicteddead']
            for key, ach in coach.achievements['team'][team_id][
                    'passes'].items():
                ach['best'] = data['inflictedpasses']

        # match achievements
        coach.achievements['match']['passingtotal1']['best'] = coach_stats[
            'inflictedmeterspassing']
        coach.achievements['match']['passingtotal2']['best'] = coach_stats[
            'inflictedmeterspassing']

        coach.achievements['match']['runningtotal1']['best'] = coach_stats[
            'inflictedmetersrunning']
        coach.achievements['match']['runningtotal2']['best'] = coach_stats[
            'inflictedmetersrunning']

        coach.achievements['match']['surfstotal1']['best'] = coach_stats[
            'inflictedpushouts']
        coach.achievements['match']['surfstotal2']['best'] = coach_stats[
            'inflictedpushouts']

        coach.achievements['match']['blocks1game1']['best'] = coach_stats[
            'max']['inflictedtackles']
        coach.achievements['match']['blocks1game2']['best'] = coach_stats[
            'max']['inflictedtackles']

        coach.achievements['match']['breaks1game1']['best'] = coach_stats[
            'max']['inflictedinjuries']
        coach.achievements['match']['breaks1game2']['best'] = coach_stats[
            'max']['inflictedinjuries']

        coach.achievements['match']['cas1game1']['best'] = coach_stats['max'][
            'inflictedcasualties']
        coach.achievements['match']['cas1game2']['best'] = coach_stats['max'][
            'inflictedcasualties']

        coach.achievements['match']['score1game1']['best'] = coach_stats[
            'max']['inflictedtouchdowns']
        coach.achievements['match']['score1game2']['best'] = coach_stats[
            'max']['inflictedtouchdowns']

        coach.achievements['match']['int1game1']['best'] = coach_stats['max'][
            'inflictedinterceptions']

        coach.achievements['match']['sufferandwin1']['best'] = coach_stats[
            'max']['max_cas_win']
        coach.achievements['match']['sufferandwin2']['best'] = coach_stats[
            'max']['max_cas_win']

        coach.achievements['match']['win500down']['best'] = coach_stats['max'][
            'max_tvdiff_win']

        flag_modified(coach, "achievements")
        db.session.commit()

        # update achievements
        coach_mention = f'<@{coach.disc_id}>'
        for key, achievement in coach.achievements['match'].items():
            if achievement['target'] <= achievement[
                    'best'] and not achievement['completed']:
                achievement_bank_text = f"{achievement['award_text']} awarded - {achievement['desc']}"
                Notificator("achievement").notify(
                    f"{coach.short_name()}: {achievement['desc']} - completed")
                call, arg = achievement['award'].split(",")
                res, error = getattr(coach, call)(arg, achievement['desc'])
                if res:
                    logger.info("%s: %s awarded", coach_mention,
                                {achievement['desc']})
                    Notificator("bank").notify(
                        f"{coach_mention}: {achievement_bank_text}")
                    coach.achievements['match'][key]['completed'] = True
                    flag_modified(coach, "achievements")
                else:
                    logger.error(error)
                    Notificator("bank").notify(
                        f"{coach_mention}: {achievement['award_text']} " +
                        f"could not be awarded - {error}")
        for key1, stat in coach.achievements['team'].items():
            for key2, item in stat.items():
                for key3, achievement in item.items():
                    if (achievement['target'] <= achievement['best']
                            and not achievement['completed']):
                        achievement_bank_text = f"{achievement['award_text']} awarded - {achievement['desc']}"
                        Notificator("achievement").notify(
                            f"{coach.short_name()}: {achievement['desc']} - completed"
                        )
                        call, arg = achievement['award'].split(",")
                        res, error = getattr(coach, call)(arg,
                                                          achievement['desc'])
                        if res:
                            logger.info("%s: %s awarded", coach_mention,
                                        {achievement['desc']})
                            coach.achievements['team'][key1][key2][key3][
                                'completed'] = True
                            flag_modified(coach, "achievements")
                            Notificator("bank").notify(
                                f"{coach_mention}: {achievement_bank_text}")
                        else:
                            logger.error(error)
                            Notificator("bank").notify(
                                f"{coach_mention}: {achievement['award_text']} could "
                                + f"not be awarded - {error}")
        db.session.commit()
    logger.info("Achievement processed")
Example #52
0
    def _db_store(self, with_transaction=True):
        """
        Store a new node in the DB, also saving its repository directory
        and attributes.

        After being called attributes cannot be
        changed anymore! Instead, extras can be changed only AFTER calling
        this store() function.

        :note: After successful storage, those links that are in the cache, and
            for which also the parent node is already stored, will be
            automatically stored. The others will remain unstored.

        :param with_transaction: if False, no transaction is used. This
          is meant to be used ONLY if the outer calling function already
          has a transaction open!
        """
        from aiida.backends.sqlalchemy import get_scoped_session
        session = get_scoped_session()

        # TODO: This needs to be generalized, allowing for flexible methods
        # for storing data and its attributes.

        # I save the corresponding django entry
        # I set the folder
        # NOTE: I first store the files, then only if this is successful,
        # I store the DB entry. In this way,
        # I assume that if a node exists in the DB, its folder is in place.
        # On the other hand, periodically the user might need to run some
        # bookkeeping utility to check for lone folders.
        self._repository_folder.replace_with_folder(
            self._get_temp_folder().abspath, move=True, overwrite=True)

        import aiida.backends.sqlalchemy
        try:
            # aiida.backends.sqlalchemy.get_scoped_session().add(self._dbnode)
            session.add(self._dbnode)
            # Save its attributes 'manually' without incrementing
            # the version for each add.
            self._dbnode.attributes = self._attrs_cache
            flag_modified(self._dbnode, "attributes")
            # This should not be used anymore: I delete it to
            # possibly free memory
            del self._attrs_cache

            self._temp_folder = None
            self._to_be_stored = False

            # Here, I store those links that were in the cache and
            # that are between stored nodes.
            self._store_cached_input_links(with_transaction=False)

            if with_transaction:
                try:
                    # aiida.backends.sqlalchemy.get_scoped_session().commit()
                    session.commit()
                except SQLAlchemyError as e:
                    # print "Cannot store the node. Original exception: {" \
                    #      "}".format(e)
                    session.rollback()
                    raise

        # This is one of the few cases where it is ok to do a 'global'
        # except, also because I am re-raising the exception
        except:
            # I put back the files in the sandbox folder since the
            # transaction did not succeed
            self._get_temp_folder().replace_with_folder(
                self._repository_folder.abspath, move=True, overwrite=True)
            raise

        self._dbnode.set_extra(_HASH_EXTRA_KEY, self.get_hash())
        return self
Example #53
0
 def manager_save(cls, req, data):
     super().manager_save(req, data)
     req.data['custom_webcast_url'] = data.get('custom_webcast_url')
     req.data['webcast_hidden'] = data.get('webcast_hidden', False)
     flag_modified(req, 'data')
Example #54
0
 def add_to_store(self, player_store):
     self.stores[player_store - 1] += 1
     flag_modified(self, 'stores')
Example #55
0
def proccess_create_or_modify_user_request(
        attribute_dict,
        organisation=None,
        allow_existing_user_modify=False,
        is_self_sign_up=False,
        modify_only=False
):
    """
    Takes a create or modify user request and determines the response. Normally what's in the top level API function,
    but here it's one layer down because there's multiple entry points for 'create user':
    - The admin api
    - The register api

    :param attribute_dict: attributes that can be supplied by the request maker
    :param organisation:  what organisation the request maker belongs to. The created user is bound to the same org
    :param allow_existing_user_modify: whether to return an error when the user already exists for the supplied IDs
    :param is_self_sign_up: does the request come from the register api?
    :param modify_only: whether to allow the creation of a  new user
    :return: An http response
    """

    if not attribute_dict.get('custom_attributes'):
        attribute_dict['custom_attributes'] = {}

    user_id = attribute_dict.get('user_id')

    email = attribute_dict.get('email')
    phone = attribute_dict.get('phone')

    account_types = attribute_dict.get('account_types', [])

    referred_by = attribute_dict.get('referred_by')

    blockchain_address = attribute_dict.get('blockchain_address')

    provided_public_serial_number = attribute_dict.get('public_serial_number')

    uuid = attribute_dict.get('uuid')

    require_identifier = attribute_dict.get('require_identifier', True)

    if not user_id:
        # Extract ID from Combined User ID and Name String if it exists
        try:
            user_id_name_string = attribute_dict.get('user_id_name_string')

            user_id_str = user_id_name_string and user_id_name_string.split(':')[0]

            if user_id_str:
                user_id = int(user_id_str)

        except ValueError:
            # int() fails when the prefix is not numeric
            pass

    if not blockchain_address and provided_public_serial_number:

        try:
            blockchain_address = to_checksum_address(
                provided_public_serial_number)

            # Since it's actually an ethereum address set the provided public serial number to None
            # so it doesn't get used as a transfer card
            provided_public_serial_number = None
        except Exception:
            pass

    require_transfer_card_exists = attribute_dict.get(
        'require_transfer_card_exists', g.active_organisation.require_transfer_card)

    public_serial_number = (provided_public_serial_number
                            or attribute_dict.get('payment_card_qr_code')
                            or attribute_dict.get('payment_card_barcode'))

    location = attribute_dict.get('location')  # address location

    # Yes, we know "GPS" refers to a technology, but "gps_location" is less ambiguous for end users than "geo_location"
    gps_location = attribute_dict.get('gps_location')  # geo location as str of lat, lng

    use_precreated_pin = attribute_dict.get('use_precreated_pin')
    use_last_4_digits_of_id_as_initial_pin = attribute_dict.get(
        'use_last_4_digits_of_id_as_initial_pin')

    transfer_account_name = attribute_dict.get('transfer_account_name')
    first_name = attribute_dict.get('first_name')
    last_name = attribute_dict.get('last_name')

    business_usage_name = attribute_dict.get('business_usage_name')
    business_usage_id = None
    if business_usage_name:
        usage = TransferUsage.find_or_create(business_usage_name)
        business_usage_id = usage.id

    preferred_language = attribute_dict.get('preferred_language')

    primary_user_identifier = attribute_dict.get('primary_user_identifier')
    primary_user_pin = attribute_dict.get('primary_user_pin')

    initial_disbursement = attribute_dict.get('initial_disbursement', None)
    if not account_types:
        account_types = ['beneficiary']
    roles_to_set = []
    for at in account_types:
        if at not in g.active_organisation.valid_roles:
            raise Exception(f'{at} not a valid role for this organisation. Please choose one of the following: {g.active_organisation.valid_roles}')
        roles_to_set.append((ASSIGNABLE_TIERS[at], at))

    chain = get_chain()
    if current_app.config['CHAINS'][chain]['IS_USING_BITCOIN']:
        try:
            base58.b58decode_check(blockchain_address)
        except ValueError:
            response_object = {
                'message': 'Blockchain Address {} Not Valid'.format(blockchain_address)}
            return response_object, 400

    if isinstance(phone, bool):
        phone = None

    if phone and not is_self_sign_up:
        # phone has already been parsed if self sign up
        try:
            phone = proccess_phone_number(phone)
        except NumberParseException as e:
            response_object = {'message': 'Invalid Phone Number: ' + str(e)}
            return response_object, 400

    # Work out if there's an existing transfer account to bind to
    existing_transfer_account = None
    if primary_user_identifier:
        primary_user, _ = find_user_from_public_identifier(
            primary_user_identifier)

        if not primary_user:
            response_object = {'message': 'Primary User not Found'}
            return response_object, 400

        if not primary_user.verify_password(primary_user_pin):
            response_object = {'message': 'Invalid PIN for Primary User'}
            return response_object, 400

        primary_user_transfer_account = primary_user.transfer_account

        if not primary_user_transfer_account:
            response_object = {
                'message': 'Primary User has no transfer account'}
            return response_object, 400

        # bind the new or updated user to the primary user's account
        existing_transfer_account = primary_user_transfer_account

    if not (phone or email or public_serial_number or blockchain_address or user_id or uuid or not require_identifier):
        response_object = {'message': 'Must provide a unique identifier'}
        return response_object, 400

    if use_precreated_pin and not public_serial_number:
        response_object = {
            'message': 'Must provide public serial number to use a transfer card or pre-created pin'
        }
        return response_object, 400

    if public_serial_number:
        public_serial_number = str(public_serial_number)

        if use_precreated_pin or require_transfer_card_exists:
            transfer_card = TransferCard.query.filter_by(
                public_serial_number=public_serial_number).first()

            if not transfer_card:
                response_object = {'message': 'Transfer card not found'}
                return response_object, 400

    business_usage = None
    if business_usage_id:
        business_usage = TransferUsage.query.get(business_usage_id)
        if not business_usage:
            response_object = {
                'message': f'Business Usage not found for id {business_usage_id}'
            }
            return response_object, 400

    referred_by_user, _ = find_user_from_public_identifier(referred_by)

    if referred_by and not referred_by_user:
        response_object = {
            'message': f'Referrer user not found for public identifier {referred_by}'
        }
        return response_object, 400

    existing_user, _ = find_user_from_public_identifier(
        email, phone, public_serial_number, blockchain_address, uuid)

    if not existing_user and user_id:
        existing_user = User.query.get(user_id)

    if modify_only and existing_user is None:
        response_object = {'message': 'User not found'}
        return response_object, 404

    if existing_user:
        if not allow_existing_user_modify:
            response_object = {'message': 'User already exists for Identifier'}
            return response_object, 400

        try:

            user = update_transfer_account_user(
                existing_user,
                first_name=first_name,
                last_name=last_name,
                preferred_language=preferred_language,
                phone=phone,
                email=email,
                public_serial_number=public_serial_number,
                use_precreated_pin=use_precreated_pin,
                existing_transfer_account=existing_transfer_account,
                roles=roles_to_set,
                business_usage=business_usage
            )

            set_location_conditionally(user, location, gps_location)

            if referred_by_user:
                user.referred_by.clear()  # otherwise prior referrals will remain...
                user.referred_by.append(referred_by_user)

            set_custom_attributes(attribute_dict, user)
            flag_modified(user, "custom_attributes")

            db.session.commit()

            response_object = {
                'message': 'User Updated',
                'data': {'user': user_schema.dump(user).data}
            }

            return response_object, 200

        except Exception as e:
            response_object = {
                'message': str(e)
            }

            return response_object, 400

    user = create_transfer_account_user(
        first_name=first_name, last_name=last_name, preferred_language=preferred_language,
        phone=phone, email=email, public_serial_number=public_serial_number, uuid=uuid,
        organisation=organisation,
        blockchain_address=blockchain_address,
        transfer_account_name=transfer_account_name,
        use_precreated_pin=use_precreated_pin,
        use_last_4_digits_of_id_as_initial_pin=use_last_4_digits_of_id_as_initial_pin,
        existing_transfer_account=existing_transfer_account,
        roles=roles_to_set,
        is_self_sign_up=is_self_sign_up,
        business_usage=business_usage, initial_disbursement=initial_disbursement)

    set_location_conditionally(user, location, gps_location)

    if referred_by_user:
        user.referred_by.append(referred_by_user)

    if attribute_dict.get('gender'):
        attribute_dict['custom_attributes']['gender'] = attribute_dict.get('gender')

    if attribute_dict.get('bio'):
        attribute_dict['custom_attributes']['bio'] = attribute_dict.get('bio')

    set_custom_attributes(attribute_dict, user)

    if is_self_sign_up and attribute_dict.get('deviceInfo', None) is not None:
        save_device_info(device_info=attribute_dict.get(
            'deviceInfo'), user=user)
    # Location fires an async task that needs to know user ID
    db.session.flush()

    if phone:
        if is_self_sign_up:
            send_one_time_code(phone=phone, user=user)
            return {'message': 'User Created. Please verify phone number.', 'otp_verify': True}, 200

        elif current_app.config['ONBOARDING_SMS']:
            try:
                send_onboarding_sms_messages(user)
            except Exception as e:
                print(e)
                sentry_sdk.capture_exception(e)

    response_object = {
        'message': 'User Created',
        'data': {
            'user': user_schema.dump(user).data
        }
    }

    return response_object, 200
Example #56
0
    def create_room(self, vc_room, event):
        """Create a new Vidyo room for an event, given a VC room.

        In order to create the Vidyo room, the function will try to do so with
        all the available identities of the user based on the authenticators
        defined in Vidyo plugin's settings, in that order.

        :param vc_room: VCRoom -- The VC room from which to create the Vidyo
                        room
        :param event: Event -- The event to which the Vidyo room will be attached
        """
        client = AdminClient(self.settings)
        owner = retrieve_principal(vc_room.data['owner'])
        login_gen = iter_user_identities(owner)
        login = next(login_gen, None)
        if login is None:
            raise VCRoomError(_("No valid Vidyo account found for this user"),
                              field='owner_user')

        extension_gen = iter_extensions(
            self.settings.get('indico_room_prefix'), event.id)
        extension = next(extension_gen)

        while True:
            room_mode = {
                'isLocked': False,
                'hasPIN': bool(vc_room.data['room_pin']),
                'hasModeratorPIN': bool(vc_room.data['moderation_pin'])
            }
            if room_mode['hasPIN']:
                room_mode['roomPIN'] = vc_room.data['room_pin']
            if room_mode['hasModeratorPIN']:
                room_mode['moderatorPIN'] = vc_room.data['moderation_pin']

            room_obj = client.create_room_object(
                name=vc_room.name,
                RoomType='Public',
                ownerName=login,
                extension=extension,
                groupName=self.settings.get('room_group_name'),
                description=vc_room.data['description'],
                RoomMode=room_mode)

            if room_obj.RoomMode.hasPIN:
                room_obj.RoomMode.roomPIN = vc_room.data['room_pin']
            if room_obj.RoomMode.hasModeratorPIN:
                room_obj.RoomMode.moderatorPIN = vc_room.data['moderation_pin']

            try:
                client.add_room(room_obj)
            except APIException as err:
                err_msg = err.message

                if err_msg.startswith('Room exist for name'):
                    raise VCRoomError(_("Room name already in use"),
                                      field='name')
                elif err_msg.startswith('Member not found for ownerName'):
                    login = next(login_gen, None)
                    if login is None:
                        raise VCRoomError(
                            _("No valid Vidyo account found for this user"),
                            field='owner_user')
                elif err_msg.startswith('Room exist for extension'):
                    extension = next(extension_gen)
                else:
                    raise

            else:
                # get room back, in order to fetch Vidyo-set parameters
                created_room = client.find_room(extension)

                if not created_room:
                    raise VCRoomNotFoundError(
                        _("Could not find newly created room in Vidyo"))
                vc_room.data.update({
                    'vidyo_id': unicode(created_room.roomID),
                    'url': created_room.RoomMode.roomURL,
                    'owner_identity': created_room.ownerName
                })
                flag_modified(vc_room, 'data')
                vc_room.vidyo_extension = VidyoExtension(
                    vc_room_id=vc_room.id,
                    extension=int(created_room.extension),
                    owned_by_user=owner)

                client.set_automute(created_room.roomID,
                                    vc_room.data['auto_mute'])
                break
Example #57
0
def post_tag(osm_type, osm_id, item_id):
    changeset_id = request.form['changeset_id']

    t0 = time()
    osm_backend, auth = get_backend_and_auth()
    save_timing('backend and auth', t0)

    wikidata_id = 'Q{:d}'.format(item_id)

    t0 = time()
    osm = ItemCandidate.query.filter_by(item_id=item_id, osm_type=osm_type, osm_id=osm_id).one_or_none()
    save_timing('get candidate', t0)

    if not osm:
        database.session.commit()
        return Response('not found', mimetype='text/plain')

    url = '{}/{}/{}'.format(osm_api_base, osm_type, osm_id)
    t0 = time()
    r = requests.get(url, headers=user_agent_headers())
    content = r.content
    save_timing('OSM API get', t0)
    if b'wikidata' in content:
        root = etree.fromstring(content)
        existing = root.find('.//tag[@k="wikidata"]')
        if existing is not None and really_save:
            osm.tags['wikidata'] = existing.get('v')
            flag_modified(osm, 'tags')
        database.session.commit()
        return Response('already tagged', mimetype='text/plain')

    if r.status_code == 410 or r.content == b'':
        database.session.commit()
        return Response('deleted', mimetype='text/plain')

    t0 = time()
    root = etree.fromstring(r.content)
    tag = etree.Element('tag', k='wikidata', v=wikidata_id)
    root[0].set('changeset', changeset_id)
    root[0].append(tag)
    save_timing('build tree', t0)

    element_data = etree.tostring(root).decode('utf-8')
    if really_save:
        t0 = time()
        try:
            r = osm_backend.request(url,
                                    method='PUT',
                                    data=element_data,
                                    auth=auth,
                                    headers=user_agent_headers())
        except requests.exceptions.HTTPError as e:
            mail.error_mail('error saving element', element_data, e.response)
            database.session.commit()
            return Response('save error', mimetype='text/plain')
        if not r.text.strip().isdigit():
            database.session.commit()
            return Response('save error', mimetype='text/plain')
        save_timing('add tag via OSM API', t0)

    t0 = time()
    if really_save:
        osm.tags['wikidata'] = wikidata_id
        flag_modified(osm, 'tags')

    if changeset_id:
        change = Changeset.query.get(changeset_id)
        change.update_count = change.update_count + 1
    save_timing('update database', t0)

    database.session.commit()

    return Response('done', mimetype='text/plain')
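Both branches above follow the same read-modify-flag shape when recording the wikidata tag on a JSON dict column. Isolated as a helper (names assumed, not part of the source):

from sqlalchemy.orm.attributes import flag_modified

def record_wikidata_tag(session, osm, wikidata_id):
    # osm.tags is a plain JSON dict: item assignment is an in-place
    # mutation the session cannot see, hence the explicit flag.
    osm.tags['wikidata'] = wikidata_id
    flag_modified(osm, 'tags')
    session.commit()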
Example #58
0
    def create_room(self, vc_room, event):
        """Create a new Zoom room for an event, given a VC room.

        In order to create the Zoom room, the function will try to get
        a valid e-mail address for the user in question, which can be
        used with the Zoom API.

        :param vc_room: the VC room from which to create the Zoom room
        :param event: the event to which the Zoom room will be attached
        """
        client = ZoomIndicoClient()
        host = principal_from_identifier(vc_room.data['host'])
        host_email = find_enterprise_email(host)

        # get the object that this booking is linked to
        vc_room_assoc = vc_room.events[0]
        link_obj = vc_room_assoc.link_object
        is_webinar = vc_room.data.setdefault('meeting_type',
                                             'regular') == 'webinar'
        scheduling_args = get_schedule_args(
            link_obj) if link_obj.start_dt else {}

        try:
            settings = {
                'host_video': not vc_room.data['mute_host_video'],
            }

            kwargs = {}
            if is_webinar:
                kwargs['type'] = (ZoomMeetingType.webinar
                                  if scheduling_args else
                                  ZoomMeetingType.recurring_webinar_no_time)
                settings['alternative_hosts'] = host_email
            else:
                kwargs = {
                    'type':
                    (ZoomMeetingType.scheduled_meeting if scheduling_args else
                     ZoomMeetingType.recurring_meeting_no_time),
                    'schedule_for':
                    host_email
                }
                settings.update({
                    'mute_upon_entry':
                    vc_room.data['mute_audio'],
                    'participant_video':
                    not vc_room.data['mute_participant_video'],
                    'waiting_room':
                    vc_room.data['waiting_room'],
                    'join_before_host':
                    self.settings.get('join_before_host'),
                })

            kwargs.update({
                'topic': vc_room.name,
                'agenda': vc_room.data['description'],
                'password': vc_room.data['password'],
                'timezone': event.timezone,
                'settings': settings
            })
            kwargs.update(scheduling_args)
            if is_webinar:
                meeting_obj = client.create_webinar(host_email, **kwargs)
            else:
                meeting_obj = client.create_meeting(host_email, **kwargs)
        except HTTPError as e:
            self.logger.exception('Error creating Zoom Room: %s',
                                  e.response.content)
            raise VCRoomError(
                _('Could not create the room in Zoom. Please contact support if the error persists'
                  ))

        vc_room.data.update({
            'zoom_id':
            str(meeting_obj['id']),
            'start_url':
            meeting_obj['start_url'],
            'host':
            host.identifier,
            'alternative_hosts':
            process_alternative_hosts(meeting_obj['settings'].get(
                'alternative_hosts', ''))
        })
        vc_room.data.update(get_url_data_args(meeting_obj['join_url']))
        flag_modified(vc_room, 'data')

        # e-mail Host URL to meeting host
        if self.settings.get('send_host_url'):
            notify_host_start_url(vc_room)
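The Zoom plugin batches its writes: several dict.update() merges into vc_room.data followed by a single flag_modified. A tiny helper capturing that shape (hypothetical, not part of the plugin):

from sqlalchemy.orm.attributes import flag_modified

def merge_room_data(vc_room, **fields):
    """Merge several keys into the JSON 'data' column, flagging it once."""
    vc_room.data.update(fields)
    flag_modified(vc_room, 'data')  # one flag covers every merged key

# e.g. merge_room_data(vc_room, zoom_id=str(meeting_obj['id']),
#                      start_url=meeting_obj['start_url'])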
Example #59
0
    def update_data_association(self, event, vc_room, room_assoc, data):
        # XXX: This feels slightly hacky. Maybe we should change the API on the core?
        association_is_new = room_assoc.vc_room is None
        old_link = room_assoc.link_object

        # in a new room, `meeting_type` comes in `data`, otherwise it's already in the VCRoom
        is_webinar = data.get(
            'meeting_type', vc_room.data
            and vc_room.data.get('meeting_type')) == 'webinar'

        super().update_data_association(event, vc_room, room_assoc, data)

        if vc_room.data:
            try:
                # this is not a new room
                if association_is_new:
                    # this means we are updating an existing meeting with a new vc_room-event association
                    update_zoom_meeting(
                        vc_room.data['zoom_id'], {
                            'start_time':
                            None,
                            'duration':
                            None,
                            'type': (ZoomMeetingType.recurring_webinar_no_time
                                     if is_webinar else
                                     ZoomMeetingType.recurring_meeting_no_time)
                        })
                elif room_assoc.link_object != old_link:
                    # the booking should now be linked to something else
                    new_schedule_args = (get_schedule_args(room_assoc.link_object)
                                         if room_assoc.link_object.start_dt else {})
                    meeting = fetch_zoom_meeting(vc_room)
                    current_schedule_args = {k: meeting[k]
                                             for k in {'start_time', 'duration'} if k in meeting}

                    # check whether the start time / duration of the scheduled meeting differs
                    if new_schedule_args != current_schedule_args:
                        if new_schedule_args:
                            update_zoom_meeting(vc_room.data['zoom_id'],
                                                new_schedule_args)
                        else:
                            update_zoom_meeting(vc_room.data['zoom_id'], {
                                'start_time': None,
                                'duration': None,
                                'type': (ZoomMeetingType.recurring_webinar_no_time
                                         if is_webinar
                                         else ZoomMeetingType.recurring_meeting_no_time),
                            })
            except VCRoomNotFoundError as exc:
                raise UserValueError(str(exc)) from exc

        room_assoc.data['password_visibility'] = data.pop('password_visibility')
        flag_modified(room_assoc, 'data')
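
A hedged alternative sketch (the `RoomAssocStub` model below is hypothetical, not the plugin's actual association class): mapping the column with `MutableDict` makes top-level key assignment self-tracking, so the explicit `flag_modified` call above would become unnecessary for changes like the `password_visibility` assignment.

from sqlalchemy import JSON, Column, Integer
from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class RoomAssocStub(Base):
    """Hypothetical stand-in; not the plugin's real association model."""
    __tablename__ = 'room_assocs'
    id = Column(Integer, primary_key=True)
    # MutableDict intercepts top-level key assignment and deletion,
    # flagging the attribute as changed without any manual call.
    data = Column(MutableDict.as_mutable(JSON), default=dict)

# assoc.data['password_visibility'] = True  # detected automatically
# assoc.data['nested']['key'] = 1           # NOT detected: one level too deep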
Example #60
def do_add_tags(place, table):
    osm_backend, auth = get_backend_and_auth()

    comment = request.form['comment']
    changeset = new_changeset(comment)

    r = osm_backend.request(osm_api_base + '/changeset/create',
                            method='PUT',
                            data=changeset,
                            auth=auth,
                            headers=user_agent_headers())
    changeset_id = r.text.strip()
    update_count = 0

    for item, osm in table:
        wikidata_id = 'Q{:d}'.format(item.item_id)
        url = '{}/{}/{}'.format(osm_api_base, osm.osm_type, osm.osm_id)
        r = requests.get(url, headers=user_agent_headers())
        if r.status_code == 410 or r.content == b'':
            continue  # element has been deleted

        if 'wikidata' in r.text:  # done already
            print('skip:', wikidata_id)
            continue

        root = etree.fromstring(r.content)
        tag = etree.Element('tag', k='wikidata', v=wikidata_id)
        root[0].set('changeset', changeset_id)
        root[0].append(tag)

        element_data = etree.tostring(root).decode('utf-8')
        r = osm_backend.request(url,
                                method='PUT',
                                data=element_data,
                                auth=auth,
                                headers=user_agent_headers())
        assert r.text.strip().isdigit()  # the OSM API returns the new version number

        osm.tags['wikidata'] = wikidata_id
        flag_modified(osm, 'tags')
        database.session.commit()
        database.session.expire(osm)
        assert osm.tags['wikidata'] == wikidata_id
        update_count += 1

    osm_backend.request(osm_api_base + '/changeset/{}/close'.format(changeset_id),
                        method='PUT',
                        auth=auth,
                        headers=user_agent_headers())

    change = Changeset(id=changeset_id,
                       place=place,
                       created=func.now(),
                       comment=comment,
                       update_count=update_count,
                       user=g.user)

    database.session.add(change)
    database.session.commit()

    mail.announce_change(change)

    return update_count
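
A minimal sketch of the commit/expire/assert round trip used above (the `OSMStub` model, table name and value are hypothetical): expiring the instance discards its cached attribute state, so the final attribute access reloads from the database and genuinely verifies that `flag_modified` caused the tag to be persisted rather than only mutated in memory.

from sqlalchemy import JSON, Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base
from sqlalchemy.orm.attributes import flag_modified

Base = declarative_base()


class OSMStub(Base):
    """Hypothetical stand-in for the project's OSM element model."""
    __tablename__ = 'osm_elements'
    id = Column(Integer, primary_key=True)
    tags = Column(JSON, default=dict)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    element = OSMStub(tags={})
    session.add(element)
    session.commit()

    element.tags['wikidata'] = 'Q42'
    flag_modified(element, 'tags')
    session.commit()
    session.expire(element)                   # drop cached attribute state
    assert element.tags['wikidata'] == 'Q42'  # access reloads from the database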