Example #1
0
    def _create_event_sequence(self,
                               length,
                               brand_involved=False,
                               customer=None):
        '''
        Build a list of `length` events for `customer` (defaults to
        self.customer). With `brand_involved`, events alternate between
        inbound (customer) and outbound (brand), and each outbound event
        is linked to the inbound event that precedes it.
        '''
        if customer is None:
            customer = self.customer

        # Set the user in thread local storage
        from solariat_bottle.db.user import set_user
        set_user(self.user)

        events = []
        event = None  # most recently created event; parent for outbound ones
        for idx in range(length):
            if not brand_involved:
                params = {
                    'actor_id': customer.id,
                    'is_inbound': True,
                    'channels': [self.sc.inbound],
                }
            elif idx % 2 == 0:
                # Even positions: inbound event from the customer.
                params = {
                    'actor_id': customer.id,
                    'is_inbound': True,
                    'channels': [self.sc.inbound],
                }
            else:
                # Odd positions: outbound reply from the brand, linked to
                # the inbound event created on the previous iteration.
                params = {
                    'actor_id': self.brand.id,
                    'is_inbound': False,
                    'parent_event': event,
                    'channels': [self.sc.outbound],
                }
            event = Event.objects.create_by_user(user=self.user, **params)
            events.append(event)
        return events
Example #2
0
def postprocess_events(user):
    """Run dynamic-event postprocessing for the given user's account.

    Stores `user` in thread-local storage, runs `_postprocess_events`
    over the account, and always clears the account's processing lock
    and restart flag, logging the total elapsed time.
    """
    from solariat_bottle.db.user import set_user

    set_user(user)
    account = user.account

    start = time.time()
    try:
        _postprocess_events(account)
        # NOTE: restarts are handled by the scheduler re-invoking this task
        # on its next iteration, so no retry loop is needed here.
    except Exception:
        # Was a bare `except:`; narrowed to Exception so SystemExit /
        # KeyboardInterrupt still propagate. The traceback is preserved
        # via exc_info.
        LOGGER.critical('[DynamicEvents Postprocessing] Cannot process events:', exc_info=True)
    finally:
        # Always release the lock, even when processing failed.
        account.update(event_processing_lock=False, event_processing_needs_restart=False)

    LOGGER.info('[DynamicEvents Postprocessing] took: %s sec', time.time() - start)
Example #3
0
 def _wrapped(*args, **kw):
     """Resolve the current user; run the view as that user or redirect
     anonymous callers to the login page."""
     inject_tz_offset()
     user = _get_user()
     log_staff_request(user)
     if not user:
         # Anonymous request: bounce to login, preserving the target path.
         return redirect(
             url_for('login', next=urllib.quote_plus(request.path)))
     set_user(user)
     kw['user'] = user
     return view_func(*args, **kw)
Example #4
0
def create_post(user, sync=False, **kw):
    """ Creates a proper platform Post given a user and post components.

        Special args:

        sync - <bool:default=False> forces synchronous postprocessing
        skip_acl_check - <bool:default=False> creates post w/o checking acl permissions on parent channel (e.g. bots)

        Returns None (after logging) when the post language is not
        supported; otherwise returns the platform-specific Post created
        by the resolved platform class.
    """
    # NOTE: the original had a second, duplicate docstring here (a dead
    # string statement); merged into the one above.

    # Set user in thread local storage
    from solariat_bottle.db.user import set_user
    set_user(user)

    from solariat_bottle.db.post.utils import get_platform_class
    from solariat_bottle.utils.post import get_language
    from solariat_bottle.db.channel.base import Channel
    from solariat_bottle.utils.posts_tracking import log_state, PostState, get_post_natural_id

    log_state(kw.get('channel', kw.get('channels', None)),
              get_post_natural_id(kw), PostState.DELIVERED_TO_TANGO)

    post_lang = get_language(kw)
    if post_lang.support_level == Support.NO:
        logger.info("Detect message for unsupported language: %s" %
                    post_lang.lang)
        logger.info("Unsupported message value is: %s" % str(kw))
        return
    kw['lang'] = post_lang

    kw = normalize_post_params(user, kw)
    klass = get_platform_class(kw['_platform'], event_type=kw['event_type'])

    # we have channels resolved in normalize_post_params
    channels = kw['channels']
    accounts = set(ch.account for ch in channels)
    for account in accounts:
        # Volume thresholds only emit warnings; post creation proceeds.
        if _check_account_volume(user, account):
            msg = u"Account '{} ({})' has reached its monthly volume threshold.".format(
                account.name, account.id)
            LOGGER.warning(msg)
        if _check_account_daily_volume(user, account):
            msg = u"Account '{} ({})' has reached its daily volume threshold.".format(
                account.name, account.id)
            LOGGER.warning(msg)

    return klass.objects.create_by_user(user,
                                        safe_create=True,
                                        sync=sync,
                                        **kw)
Example #5
0
    def wrapper(*args, **kwargs):
        """Authenticate the caller and invoke the wrapped API view method.

        Applied to `BaseAPIView` methods: merges request data with URL
        kwargs, authenticates the user (optionally accepting an existing
        session when `allow_basic_auth` is set), stores the user in
        thread-local storage, then calls `func(view, user, *args, **params)`.
        Authorization failures are logged and returned as formatted API
        error responses.

        NOTE(review): this chunk appears truncated — `resp` is presumably
        returned by code below this span; confirm against the full file.
        """
        #LOGGER.debug("API Request Args: {} | Params: {}".format(args, kwargs))
        assert args
        assert isinstance(args[0], BaseAPIView)
        view = args[0]  # Method decorator
        try:
            args = args[1:]
        except IndexError:
            args = ()

        params = _get_request_data()
        params.update(kwargs)  # Pass URL variables to the view function

        start_execution = datetime.utcnow()
        # Execute API method
        try:
            # Assert authentication
            LOGGER.debug("Started executing API call: %s.%s(%s, %s) " % (
                view.__class__.__name__, func.__name__, args, kwargs))
            if allow_basic_auth is True:
                # Prefer an already-authenticated session user; fall back
                # to authenticating from the request parameters.
                user = _get_user()
                if user is None:
                    user = authenticate_api_user(params)
                    if user is None:
                        raise api_exc.AuthorizationError("User is not authenticated. Parameters for call: " + str(params))
            else:
                user = authenticate_api_user(params)

            # Set user in thread local storage
            set_user(user)

            resp = func(view, user, *args, **params)
            elapsed = datetime.utcnow() - start_execution
            # Slow calls (>= 2 seconds) are logged at INFO so they stand out.
            if elapsed.total_seconds() >= 2:
                log = LOGGER.info
            else:
                log = LOGGER.debug

            log("API call: %s.%s(%s,%s) finished after %s Parameters: %s" % (view.__class__.__name__, func.__name__,
                                                                             str(args)[:500], str(kwargs)[:500],
                                                                             elapsed, str(params)[:500]))

        # auth token expiration and other auth errors
        except api_exc.AuthorizationError, exc:
            LOGGER.info(exc)
            return view.format_api_error_response(exc, msg=str(exc), description=exc.description)
Example #6
0
 def setUp(self):
     """Install the base fixture, then run the suite as a superuser."""
     super(DataLoaderCase, self).setUp()
     user = self.user
     # Subsequent DB operations are attributed to this user.
     set_user(user)
     user.is_superuser = True
     user.save()
Example #7
0
    def setUp(self):
        """Reset the database and test settings, then create the baseline
        account, admin user and Twitter channel fixtures.

        Order matters here: settings are mutated before the mail sender is
        re-initialized, and `set_user` is called last so thread-local state
        points at the freshly created admin user.
        """
        # Refuse to run destructive resets outside the 'test' app mode.
        assert get_var('APP_MODE') == 'test', \
            "Attempt to run test in '{}' app mode.".format(get_var('APP_MODE'))

        reset_db()

        self.db = get_connection()

        # Clear all collections except the RPC and system ones. Capped
        # collections cannot be emptied in place, so they are dropped.
        for coll_name in self.db.collection_names():
            if coll_name != RPC_COLLECTION and not coll_name.startswith('system.'):
                coll = self.db[coll_name]
                if coll.options().get('capped'):
                    coll.drop()
                else:
                    if coll_name.startswith('dataset') or coll_name.startswith('agent_profile') or coll_name.startswith('customer'):
                         # Drop custom collections
                        coll.drop()
                    elif coll.count():
                        # Remove rows
                        coll.remove({})

        settings.DEBUG             = False
        settings.ON_TEST           = True
        settings.EVENT_LOG_ENABLED = False
        settings.TESTING           = True
        settings.SECRET_KEY = os.urandom(32).encode('base64')  # test session
        settings.INBOUND_SPAM_LIMIT = 30
        settings.ENFORCE_API_HTTPS = True
        # NOTE(review): 127.0.0.0 (not 127.0.0.1) looks like a typo, but it
        # may be intentional for these tests — confirm before changing.
        settings.HOST_DOMAIN       = "http://127.0.0.0:3031"

        # reinitialize mail sender to pick up testing config
        from solariat_bottle.app import app, MAIL_SENDER
        MAIL_SENDER.init_app(app)

        # Optionally push the actor counter past 2**24 to exercise
        # overflow behavior in actor-number allocation.
        if os.environ.get('ACTOR_NUM_OVERFLOW'):
            from solariat_bottle.db.sequences import NumberSequences
            NumberSequences.objects.coll.find_and_modify(
                {'name': 'ActorCounter'},
                {'$set': {'_next': 2 ** 24}},
                upsert=True,
                new=True)
        # make sure persistent <User> instance don't have any cached refs
        get_tango_handler().user.clear_ref_cache()

        init_task_pool()

        # Baseline fixtures: pricing, account, admin user, channel.
        ensure_pricing_packages()
        self.account = Account.objects.create(name="TEST-ACCOUNT")
        self.email = '*****@*****.**'
        self.password = '******'
        self.user = self._create_db_user(
            email    = self.email,
            password = self.password,
            account  = self.account,
            roles    = [ADMIN],
        )
        self._create_static_events(self.user)
        self.channel = TwitterChannel.objects.create_by_user(
            self.user, title='TestChannel_Old',
            type='twitter', intention_types=SA_TYPES)

        self.channel.add_perm(self.user)
        self._post_last_created = now()
        # Make the admin user the thread-local current user for the tests.
        set_user(self.user)
Example #8
0
        def import_data(self, user, data_loader, skip_cardinalities=False):
            """Bulk-import rows from `data_loader` into this dataset's
            collection, enforcing the current schema and tracking progress.

            Rows that fail schema enforcement are collected (capped at
            ERROR_MAX_OUTPUT_ITEMS) and recorded via `_put_sync_error`;
            inserts are flushed in batches of 20000 via bulk operations.

            NOTE(review): this chunk is truncated mid-loop — the final
            flush and return handling are not visible here.
            """
            # TODO: IMPLEMENT IMPORT ERRORS
            set_user(user)
            start = time.time()
            LOGGER.info('Start importing data')

            # Importing is only legal from a synced/out-of-sync state.
            status = self.sync_status
            if status not in (self.IN_SYNC, self.OUT_OF_SYNC):
                raise ImproperStateError(self)

            if not self.schema and not self.discovered_schema:
                raise ImproperStateError('No schema to import with. You need '
                                         'to have at least discovered_schema')

            self.update(sync_status=self.IMPORTING, load_progress=0)

            sync_errors = defaultdict(list)
            failed = []
            failed_cnt = 0
            total = data_loader.total()
            # Progress is reported roughly once per percent of `total`.
            step = (total / 100) or 1
            log_once = True
            inserted = 0

            data_cls = self.get_data_class()
            auth_doc_fields = {}
            AuthDocument.objects.populate_acl(user, auth_doc_fields)

            # TODO: (speedup)
            # for frame_chunk in dataframe: !
            bulk_insert = data_cls.objects.coll.initialize_unordered_bulk_op()

            failed_cnt = 0
            batch_size = 20000
            _was_inserted = True
            for idx, raw_data in enumerate(data_loader.load_data(), start=1):
                _was_inserted = False
                mongo_data = None
                try:
                    # Validate/coerce the raw row against the schema and
                    # stamp it with the caller's ACL fields before queueing.
                    mongo_data = self.enforce_schema(raw_data, status)
                    mongo_data.update(auth_doc_fields)
                    self.preprocess_imported_data(mongo_data)
                    bulk_insert.insert(data_cls(**mongo_data).data)
                    inserted += 1
                except Exception, ex:
                    failed_cnt += 1
                    failed.append({
                        'error': str(ex),
                        'val': mongo_data,
                        'ex': ex,
                    })
                    # Cap the retained failure samples to avoid unbounded growth.
                    if len(failed) > self.ERROR_MAX_OUTPUT_ITEMS:
                        del failed[self.ERROR_MAX_OUTPUT_ITEMS:]

                    self._put_sync_error(sync_errors, str(ex), mongo_data, ex)
                    if log_once:
                        # Log the schema mismatch details only once per import.
                        LOGGER.debug(
                            'Data cannot be imported with current'
                            ' schema: %s\n\nor discovered_schema: %s. Error is %s',
                            self.schema, self.discovered_schema, ex)
                        log_once = False

                if idx % step == 0:
                    if total:
                        progress = round(float(idx) / total, 2) * 100
                    else:
                        progress = 100
                    self.update_import_progress(progress)
                    # Abort quietly if the dataset was archived mid-import.
                    if self.is_archived:
                        return

                # Flush queued inserts once a full batch has accumulated.
                if inserted % batch_size == 0:
                    self.handle_bulk_insert(user,
                                            inserted,
                                            failed,
                                            total,
                                            bulk_insert,
                                            failed_cnt,
                                            sync_errors,
                                            op_name='import')
                    failed = []
                    _was_inserted = True
                    bulk_insert = data_cls.objects.coll.initialize_unordered_bulk_op(
                    )