Example No. 1
class ApplicationToken(Document):
    """ Non-expiring key which is tied in to a user which was granted an application """
    STATUS_REQUESTED = 'requested'  # State a token holds from the moment a user requests it
    # until a superuser accepts the request
    STATUS_VALID = 'valid'  # State a token holds from the moment a superuser creates/accepts it
    # until it is explicitly invalidated
    STATUS_INVALID = 'invalid'  # State once the token has been revoked by a superuser

    TYPE_BASIC = 'basic'  # Cannot create account/users, only basic access (nlp endpoints)
    TYPE_ACCOUNT = 'account'  # Can create one account, has admin level access on that account
    TYPE_CORPORATE = 'corporate'  # Can create multiple accounts, has staff level access across them

    manager = ApplicationTokenManager

    creator = fields.ReferenceField(User)
    status = fields.StringField(
        choices=[STATUS_INVALID, STATUS_VALID, STATUS_REQUESTED])
    type = fields.StringField(
        choices=[TYPE_BASIC, TYPE_CORPORATE, TYPE_ACCOUNT])
    app_key = fields.StringField()

    def validate(self):
        self.status = self.STATUS_VALID
        self.save()

    def invalidate(self):
        self.status = self.STATUS_INVALID
        self.save()
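
A minimal lifecycle sketch (not from the source): requesting_user and the literal app key are placeholders.

token = ApplicationToken(creator=requesting_user,   # placeholder User instance
                         status=ApplicationToken.STATUS_REQUESTED,
                         type=ApplicationToken.TYPE_BASIC,
                         app_key='generated-key')
token.save()
token.validate()      # superuser approval: status becomes 'valid'
token.invalidate()    # later revocation: status becomes 'invalid'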
Example No. 2
class FacetCache(AuthDocument):

    collection = "FacetCache"

    hashcode = fields.StringField(db_field='hc', required=True)
    page_type = fields.StringField(db_field='pe', required=True)
    account_id = fields.ObjectIdField(db_field='aid', required=True)
    value = fields.StringField(db_field='ve', required=True)
    created_at = fields.DateTimeField(db_field='ct', required=True)

    def is_up_to_date(self):
        delta = datetime.now() - self.created_at
        return delta.total_seconds() < get_var(
            'MONGO_CACHE_EXPIRATION')  # 30 mins

    @classmethod
    def upsert_cache_record(cls, hashcode, data, page_type, account_id):
        now = datetime.now()
        if 'time_stats' in data:
            del data['time_stats']
        if 'pipelines' in data:
            del data['pipelines']
        cache_records = FacetCache.objects(hashcode=hashcode,
                                           account_id=account_id,
                                           page_type=page_type)
        cache_records_num = cache_records.count()
        if cache_records_num >= 2:
            raise Exception('Too many cache records')
        elif cache_records_num == 1:
            cache = cache_records[0]
            cache.value = json.dumps(data)
            cache.created_at = datetime.now()
            cache.save()
        else:
            cache = FacetCache(hashcode=hashcode,
                               value=json.dumps(data),
                               account_id=account_id,
                               page_type=page_type,
                               created_at=now)
            cache.save()

    @classmethod
    def get_cache(cls, params, account_id, page_type):
        hash_arg = params.copy()
        for field in ['force_recompute', 'range_alias']:
            if field in hash_arg:
                del hash_arg[field]
        hashcode = md5(str(hash_arg)).hexdigest()
        cache_candidates = FacetCache.objects(hashcode=hashcode,
                                              account_id=account_id,
                                              page_type=page_type)
        if not cache_candidates:
            return hashcode, None
        elif 1 == len(cache_candidates):
            return hashcode, cache_candidates[0]
        else:
            for cache in cache_candidates:
                cache.remove()
            return hashcode, None
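
A sketch of how a caller might combine the two classmethods; params, account_id and compute_facets are placeholders, not from the source.

hashcode, cache = FacetCache.get_cache(params, account_id, 'dashboard')
if cache is not None and cache.is_up_to_date():
    data = json.loads(cache.value)         # cached facet payload, still fresh
else:
    data = compute_facets(params)          # hypothetical recompute step
    FacetCache.upsert_cache_record(hashcode, data, 'dashboard', account_id)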
Example No. 3
class NPSProfile(UserProfile):

    first_name = fields.StringField(db_field='fe')
    last_name = fields.StringField(db_field='le')

    phone = fields.StringField(db_field='pe')
    company_name = fields.StringField(db_field='cm')
    industry = fields.StringField(db_field='iy')
    department = fields.StringField(db_field='de')
    region = fields.StringField(db_field='rn')
    country = fields.StringField(db_field='cy')
    genesys_account = fields.StringField(db_field='gt')
    nps_user_id = fields.StringField(db_field='nuid')

    manager = NPSProfileManager

    def to_dict(self):
        return {
            'id': str(self.id),
            'user_name': '%s %s' % (self.first_name, self.last_name),
            'screen_name': '%s %s' % (self.first_name, self.last_name),
            'user_id': None,
            'location': self.country,
            'profile_url': None,
            'profile_image_url': '',
            # 'actor_counter': self.customer_profile.actor_counter,
            'klout_score': None
        }
Example No. 4
class FacebookRateLimitInfo(Document):
    access_token = fields.StringField()
    failed_request_time = fields.DateTimeField()
    error_code = fields.NumField(null=True, choices=FB_RATE_LIMIT_ERRORS + [None])
    path = fields.StringField()
    wait_until = fields.DateTimeField()
    channel = fields.StringField()
    log_item = fields.ObjectIdField()

    indexes = [('token', 'error_code')]
    manager = FacebookRateLimitInfoManager
    LIMITS_CONFIG = {
        THROTTLING_USER: BackOffStrategy(30*60, 30*60, 1.0),
        THROTTLING_APP: BackOffStrategy(225, 60*60, 2.0),
        ERROR_MISUSE: BackOffStrategy(60 * 60, 24 * 60 * 60, 3.0),
        THROTTLING_API_PATH: BackOffStrategy(60, 60*60, 2.0)
    }

    @property
    def wait_time(self):
        return (utc(self.wait_until) - utc(self.failed_request_time)).total_seconds()

    @property
    def remaining_time(self):
        return (utc(self.wait_until) - now()).total_seconds()

    @property
    def exc(self):
        return FacebookRateLimitError(
            code=self.error_code,
            remaining_time=self.remaining_time,
            path=self.path)
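
A sketch of a retry guard built on the timing properties; the query form is an assumption modeled on the objects(...) calls used elsewhere in these examples.

for info in FacebookRateLimitInfo.objects(access_token=access_token, path=path):
    if info.remaining_time > 0:
        raise info.exc    # FacebookRateLimitError carrying code, path and remaining wait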
Example No. 5
class TextChannelFilterItem(Document):
    content = fields.StringField(db_field='ct')
    channel_filter = fields.ReferenceField(AuthTextClassifier, db_field='cr')
    vector = fields.DictField(db_field='vr')
    filter_type = fields.StringField(choices=['rejected', 'accepted'],
                                     default='rejected',
                                     db_field='fe')
Example No. 6
class BaseEventType(ArchivingAuthDocument):  # or still Document?

    collection = 'EventType'
    manager = BaseEventTypeManager
    allow_inheritance = True

    SEP = ' -> '

    platform = fields.StringField(required=True)
    # TODO: check uniqueness
    name = fields.StringField(required=True)  # unique=True
    account_id = fields.ObjectIdField(required=True)

    @property
    def display_name(self):
        return self.SEP.join((self.platform, self.name))

    @staticmethod
    def parse_display_name(display_name):
        platform, name = display_name.split(BaseEventType.SEP)
        return platform, name

    def to_dict(self, fields2show=None):
        data = super(BaseEventType, self).to_dict(fields2show)
        data['display_name'] = self.display_name
        return data
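
Since display_name is just platform and name joined by SEP, parsing is a straight round-trip:

display = 'Twitter' + BaseEventType.SEP + 'Tweet'          # 'Twitter -> Tweet'
platform, name = BaseEventType.parse_display_name(display)
assert (platform, name) == ('Twitter', 'Tweet')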
Example No. 7
class TaskMessage(Document):
    '''
    Internal structure representing the integration
    data structure with a data stream provider.
    '''
    _created = fields.DateTimeField(db_field='ca', default=datetime.now)  # callable, evaluated per document
    content = fields.StringField(db_field='ct', required=True)
    type = fields.StringField(db_field='tp', required=True)
    user = fields.ListField(fields.ReferenceField(User))

    manager = TaskMessageManager

    def add_item(self):
        ''' Increment counters'''
        self._update_item(1)

    def remove_item(self):
        ''' Decrement counters or remove if empty '''
        if self.entry_count >= 2:
            self._update_item(-1)
        else:
            self.delete()

    def set_datasift_hash(self, datasift_hash):
        " set atomically datasift hash and update last_sync "

        return self.objects.coll.find_and_modify(
            query={'_id': self.id},
            update={
                '$set': {
                    self.fields['datasift_hash'].db_field: datasift_hash,
                    self.fields['last_sync'].db_field: datetime.now()
                }
            },
            new=True)
Example No. 8
class NPSOutcome(Post):

    manager = NPSOutcomeManager
    PROFILE_CLASS = NPSProfile

    case_number = fields.StringField(db_field='cr', required=True)
    response_type = fields.StringField(db_field='rp', required=True)
    score = fields.NumField(db_field='se', required=True)
    profile_data = fields.DictField(db_field='pd')

    indexes = [('response_type', ), ('_created', )]

    @property
    def computed_tags(self):
        return list(
            set(self._computed_tags +
                [str(smt.id)
                 for smt in self.accepted_smart_tags] + self.assigned_tags))

    @classmethod
    def gen_id(cls,
               is_inbound,
               actor_id,
               _created,
               in_reply_to_native_id,
               parent_event=None):
        actor_num = cls.get_actor(True, actor_id).actor_num
        packed = pack_event_id(actor_num, _created)
        return packed

    def to_dict(self, fields2show=None):
        base_dict = super(NPSOutcome, self).to_dict(fields2show=fields2show)
        base_dict.pop('profile_data')
        return base_dict
Example No. 9
class FacebookHistoricalSubscription(BaseHistoricalSubscription):

    # Ids of objects (pages, events, groups) which have already been handled
    finished = fields.ListField(fields.StringField())
    # Ids of objects which still need to be handled
    actionable = fields.ListField(fields.StringField())

    @property
    def _handler(self):
        return fb_process_subscription

    def get_progress(self):

        total_count = len(self.finished) + len(self.actionable)
        return round(len(self.finished) /
                     float(total_count), 2) if total_count > 0 else 0

    def get_history_targets(self):

        channel = self.channel
        targets = [channel.facebook_handle_id]

        for page in channel.facebook_page_ids:
            targets.append(page)

        for event in channel.tracked_fb_event_ids:
            targets.append(event)

        for group in channel.tracked_fb_group_ids:
            targets.append(group)

        return targets
Example No. 10
class FAQDocumentInfo(ArchivingAuthDocument):
    collection = 'FAQDocInfo'

    channel = fields.ReferenceField('Channel', db_field='ch')
    _answer_df = fields.StringField()
    _query_df = fields.StringField()
    _stemmer = fields.StringField()
    query_count = fields.NumField()

    indexes = [('channel'), ]
    # indexes = [('channel'), (('_query_df', TEXT), None, 'english')]

    def __get_answer_df(self):
        return json.loads(self._answer_df)

    def __set_answer_df(self, answer_df):
        self._answer_df = json.dumps(answer_df)

    answer_df = property(__get_answer_df, __set_answer_df)

    def __get_query_df(self):
        return json.loads(self._query_df)

    def __set_query_df(self, query_df):
        self._query_df = json.dumps(query_df)

    query_df = property(__get_query_df, __set_query_df)

    def __get_stemmer(self):
        return json.loads(self._stemmer)

    def __set_stemmer(self, stemmer):
        self._stemmer = json.dumps(stemmer)

    stemmer = property(__get_stemmer, __set_stemmer)
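
The three properties serialize Python structures to JSON strings on assignment and parse them back on read; a minimal sketch (some_channel is a placeholder reference):

doc = FAQDocumentInfo(channel=some_channel)
doc.query_df = {'reset password': 3}            # stored as JSON text in _query_df
assert doc.query_df == {'reset password': 3}    # parsed back into a dict on access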
Example No. 11
class ContactLabel(AuthDocument):

    admin_roles = [ADMIN, STAFF, ANALYST]

    manager = ContactLabelManager

    title = fields.StringField(db_field='te')
    created = fields.DateTimeField(db_field='cd', default=datetime.utcnow)  # callable, evaluated per document
    platform = fields.StringField(db_field='pm')
    status = fields.StringField(db_field='st')
    users = fields.ListField(fields.StringField())

    allow_inheritance = True

    @classmethod
    def class_based_access(cls, account):
        """ Based on the AUTH class we are creating, we might offer some default access
        to certain groups from the account. By default, permissions should only be given to
        admin type users. This can be overwritten in specific classes as needed. E.G. messages -> agents ?
        """
        if account is None:
            return []
        return [
            default_admin_group(account),
            default_analyst_group(account),
            default_reviewer_group(account)
        ]

    @property
    def type_id(self):
        return 0
Example No. 12
class WebProfile(UserProfile):
    sessions = fields.ListField(fields.StringField())
    user_id = fields.StringField()
    browser_cookies = fields.ListField(fields.StringField())
    browser_signatures = fields.ListField(fields.StringField())

    manager = WebProfileManager
Example No. 13
class FooBar(Document):
    name = fields.StringField(db_field='nm')
    status = fields.StringField(db_field='stts', choices=['active', 'deactivated', 'suspended'])
    counter = fields.NumField(db_field='cntr')
    created_at = fields.DateTimeField(db_field='crtd')
    updated_at = fields.DateTimeField(db_field='updtd')
    active = fields.BooleanField(db_field='actv')
    stages = fields.ListField(fields.StringField(), db_field='stgs')
Example No. 14
class WidgetModel(Document):
    """
    A WidgetModel is a abstract widget that can be instantiated to ConcreteWidget
    and used in corresponding typed dashboard
    """
    title = fields.StringField(required=True, unique=True)
    description = fields.StringField()
    settings = fields.DictField()
    created = fields.DateTimeField(default=datetime.now)
Example No. 15
class EventTag(ABCPredictor):

    indexes = [('account_id', 'is_multi', ), ]

    display_name = fields.StringField()
    account_id = fields.ObjectIdField()
    status = fields.StringField(default="Active")
    description = fields.StringField()
    created = fields.DateTimeField()
    channels = fields.ListField(fields.ObjectIdField())

    manager = EventTagManager

    default_threshold = 0.49

    @property
    def inclusion_threshold(self):
        return self.default_threshold

    def save(self):
        self.packed_clf = self.clf.packed_model
        super(EventTag, self).save()

    def match(self, event):
        assert isinstance(event, Event), "EventTag expects Event objects"
        if self.score(event) > self.inclusion_threshold:
            return True
        return False

    def score(self, event):
        assert isinstance(event, Event), "EventTag expects Event objects"
        return super(EventTag, self).score(event)

    def accept(self, event):
        assert isinstance(event, Event), "EventTag expects Event objects"
        return super(EventTag, self).accept(event)

    def reject(self, event):
        assert isinstance(event, Event), "EventTag expects Event objects"
        return super(EventTag, self).reject(event)

    def check_preconditions(self, event):
        if self.precondition:
            return eval(self.precondition)
        return self.feature_extractor.check_preconditions(event, self.features_metadata)

    def rule_based_match(self, event):
        if self.acceptance_rule:
            return eval(self.acceptance_rule)
        return False

    def to_dict(self, fields_to_show=None):
        result_dict = super(EventTag, self).to_dict()
        result_dict.pop('counter')
        result_dict.pop('packed_clf')
        result_dict['channels'] = [str(c) for c in result_dict['channels']]
        return result_dict
Example No. 16
class FollowerTrackingStatus(Document):
    channel = fields.ObjectIdField(db_field='cl')
    twitter_handle = fields.StringField(db_field='th')
    followers_count = fields.NumField(default=0, db_field='fc')
    followers_synced = fields.NumField(default=0, db_field='fs')
    sync_status = fields.StringField(default='idle',
                                     db_field='sy',
                                     choices=('idle', 'sync'))

    indexes = [Index(('channel', 'twitter_handle'), unique=True)]
Example No. 17
class PostFilterEntryPassive(Document):

    entry = fields.StringField(db_field='kd')

    channels = fields.ListField(fields.ReferenceField('Channel'),
                                db_field='cs')

    twitter_handles = fields.ListField(fields.StringField(), db_field='th')

    indexes = [Index(('entry'), unique=True)]
Example No. 18
class MultilanguageChannelMixin(object):

    langs = fields.ListField(fields.StringField(), default=[LangCode.EN])
    post_langs = fields.ListField(fields.StringField(), default=[])

    def add_post_lang(self, post):
        code = post.language
        if code not in self.post_langs:
            self.post_langs.append(code)
            self.save()
Example No. 19
class EventLog(AuthDocument):
    "Store informmation about variouse events in db"
    type_id = fields.NumField(required=True, db_field='ti')
    name = fields.NameField(required=True, db_field='ne')
    timestamp = fields.NumField(default=time.time)
    ip_address = fields.StringField(db_field='ia', default=get_remote_ip)
    user = fields.StringField(default='anonymous', db_field='ur')
    account = fields.StringField(default='solariat', db_field='at')
    note = fields.StringField(db_field='nte')
    extra_info = fields.DictField(db_field='ei')
Example No. 20
class FacebookUserMixin(object):
    _cached_facebook_me = fields.StringField(db_field='fb_me')
    _cached_facebook_me_ts = fields.DateTimeField(db_field='fb_me_ts')

    # Cache channel description on GSA side and update once per day
    _cached_channel_description = fields.StringField()
    _cached_last_description_update = fields.DateTimeField(default=datetime.now)  # callable, evaluated per document

    def _set_channel_description(self, channel_info):
        self._cached_channel_description = json.dumps(channel_info)
        self._cached_last_description_update = datetime.now()
        self.save()

    def _get_channel_description(self):
        if self._cached_channel_description:
            if self._cached_last_description_update + timedelta(days=1) < datetime.now():
                # After 1 day just consider this to be too old and basically invalid
                self._cached_channel_description = ""
                self.save()
                return None

            # We're still in 'safe' 1 day range, return cached value
            try:
                return json.loads(self._cached_channel_description)
            except Exception:
                return None
        return None

    channel_description = property(_get_channel_description, _set_channel_description)

    def facebook_me(self, force=False):
        default_timeout = 60 * 60  # 1 hour

        def _graph_me():
            graph = get_facebook_api(self)
            return graph.request('/me')

        def _invalidate(timeout=default_timeout,
                        value_attr='_cached_facebook_me',
                        ts_attr='_cached_facebook_me_ts',
                        value_getter=_graph_me):
            date_now = now()

            if not getattr(self, ts_attr) or (date_now - utc(getattr(self, ts_attr))).total_seconds() > timeout:
                self.update(**{ts_attr: date_now,
                               value_attr: json.dumps(value_getter())})

            return json.loads(getattr(self, value_attr))

        timeout = 0 if force is True else default_timeout
        return _invalidate(timeout)

    def set_facebook_me(self, fb_user):
        self.update(_cached_facebook_me_ts=now(),
                    _cached_facebook_me=json.dumps(fb_user))
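
A sketch of the caching behaviour from the caller's side; channel stands in for any object that mixes in FacebookUserMixin.

me = channel.facebook_me()             # first call hits the Graph API and caches /me
me = channel.facebook_me()             # within an hour, returns the cached JSON
me = channel.facebook_me(force=True)   # timeout of 0 forces a refresh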
Example No. 21
class Action(AuthDocument):

    name = fields.StringField()
    tags = fields.ListField(fields.ObjectIdField())
    channels = fields.ListField(fields.ObjectIdField())
    account_id = fields.ObjectIdField()
    type = fields.StringField()

    def to_dict(self, fields_to_show=None):
        return dict(id=str(self.id),
                    account_id=str(self.account_id),
                    name=str(self.name))
Example No. 22
class ABCPredictor(AuthDocument, ClassifierMixin):

    allow_inheritance = True
    collection = 'ABCPredictor'
    # Holds the precondition as a string in a grammar
    precondition = fields.StringField()
    acceptance_rule = fields.StringField()  # Holds any acceptance rule
    is_dirty = fields.BooleanField()
    # Any hints the classifier can use will be stored here as JSON
    features_metadata = fields.DictField()

    feature_extractor = BaseFeatureExtractor()

    def get_features(self, object):
        return self.feature_extractor.construct_feature_space(
            object, self.features_metadata)

    def save(self):
        self.packed_clf = self.clf.packed_model
        super(ABCPredictor, self).save()

    def match(self, object):
        if self.score(object) > self.inclusion_threshold:
            return True
        return False

    def score(self, object):
        if not self.check_preconditions(object):
            return 0
        if self.rule_based_match(object):
            return 1
        return self.clf.score(self.get_features(object))

    def accept(self, object):
        features = self.get_features(object)
        self.clf.train([features], [1])
        self.is_dirty = True
        self.save()

    def reject(self, object):
        self.clf.train([self.get_features(object)], [0])
        self.is_dirty = True
        self.save()

    @abstractmethod
    def check_preconditions(self, object):
        return self.feature_extractor.check_preconditions(
            object, self.features_metadata)

    @abstractmethod
    def rule_based_match(self, object):
        pass
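
A sketch of the scoring/feedback loop a concrete subclass would expose; predictor and item are placeholders, and the subclass must implement the two abstract hooks.

score = predictor.score(item)   # 0 if preconditions fail, 1 on a rule match,
                                # otherwise the classifier probability
if predictor.match(item):       # score above inclusion_threshold
    predictor.accept(item)      # positive feedback: trains on label 1, marks dirty
else:
    predictor.reject(item)      # negative feedback: trains on label 0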
Example No. 23
class QueueMessage(Document):

    manager = QueueMessageManager

    channel_id = fields.ListField(fields.StringField())
    created_at = fields.DateTimeField()
    reserved_until = fields.DateTimeField()
    post_data = fields.DictField()
    batch_token = fields.StringField()

    indexes = [
        ('channel_id', 'reserved_until'),
        ('batch_token', ),
    ]
Example No. 24
class Funnel(AuthDocument):
    """
    """
    name = fields.StringField(required=True, unique=True)
    description = fields.StringField()
    journey_type = fields.ObjectIdField()
    steps = fields.ListField(fields.ObjectIdField(), required=True)
    owner = fields.ReferenceField(User)
    created = fields.DateTimeField(default=datetime.now)

    def to_dict(self, fields_to_show=None):
        rv = super(Funnel, self).to_dict()
        rv['steps'] = map(str, self.steps)
        return rv
Example No. 25
class PostState(Document):
    INITIALIZED = False

    STATES = ARRIVED_IN_BOT, ARRIVED_IN_RECOVERY, ADDED_TO_WORKER_QUEUE, \
        REMOVED_FROM_WORKER_QUEUE, DELIVERED_TO_TANGO, DELIVERED_TO_GSE_QUEUE, \
        FETCHED_FROM_GSE_QUEUE, CONFIRMED_FROM_GSE_QUEUE = \
        'ARRIVED_IN_BOT', 'ARRIVED_IN_RECOVERY', 'ADDED_TO_WORKER_QUEUE', \
        'REMOVED_FROM_WORKER_QUEUE', 'DELIVERED_TO_TANGO', 'DELIVERED_TO_GSE_QUEUE', \
        'FETCHED_FROM_GSE_QUEUE', 'CONFIRMED_FROM_GSE_QUEUE'

    channel_id = fields.ObjectIdField()
    post_id = fields.StringField()
    state = fields.StringField(choices=STATES)

    indexes = [('post_id', ), ('channel_id', )]
Example No. 26
class BaseProfile(AuthDocument):
    manager = ProfileManager

    allow_inheritance = True
    collection = "BaseProfiles"

    account_id = fields.ObjectIdField()
    first_name = fields.StringField()
    last_name = fields.StringField()
    age = fields.NumField()
    sex = fields.StringField()
    location = fields.StringField()
    seniority = fields.StringField()
    assigned_labels = fields.ListField(fields.ObjectIdField())
    date_of_birth = fields.StringField()
    attached_data = fields.DictField()
    products = fields.ListField(fields.StringField())
    actor_num = AutoIncrementField(counter_name='ActorCounter', db_field='ar')
    created_at = fields.DateTimeField(default=now)

    linked_profile_ids = fields.ListField(fields.StringField())

    indexes = ['actor_num', 'linked_profile_ids']

    @property
    def linked_profiles(self):
        from solariat_bottle.db.user_profiles.user_profile import UserProfile
        return UserProfile.objects(id__in=self.linked_profile_ids)[:]

    def get_profile_of_type(self, typename):
        if not isinstance(typename, basestring):
            typename = typename.__name__

        for profile in self.linked_profiles:
            if profile.__class__.__name__ == typename:
                return profile

    def add_profile(self, profile):
        new_id = str(profile.id)
        if new_id not in self.linked_profile_ids:
            self.linked_profile_ids.append(new_id)
        self.update(addToSet__linked_profile_ids=new_id)

    def get_age(self):
        # Best guess we can make is by date of birth if present and properly formatted
        if self.date_of_birth:
            try:
                dob = datetime.strptime(self.date_of_birth, AGE_FORMAT)
                return relativedelta(datetime.now(), dob).years
            except Exception, ex:
                LOGGER.error(ex)
        # Next, if actual age is present, use that but also store updated dob
        if self.age:
            dob = datetime.now() - relativedelta(years=self.age)
            self.date_of_birth = dob.strftime(AGE_FORMAT)
            self.save()
            return self.age
        return None
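
A worked sketch of the get_age fallback, assuming AGE_FORMAT is a date format such as '%Y-%m-%d'; a stored date of birth wins over the age field.

profile = BaseProfile(first_name='Jane', date_of_birth='1990-06-15')
profile.get_age()    # years since 1990-06-15, via relativedelta
profile = BaseProfile(first_name='Joe', age=40)
profile.get_age()    # returns 40 and back-fills date_of_birth before saving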
Example No. 27
class QueuedHistoricData(Document):
    DATASIFT_DEFAULT = 0
    TWITTER_API_DM = 1
    SOLARIAT_POST_DATA = 2
    TWITTER_API_PUBLIC = 3

    DATA_FORMATS = (DATASIFT_DEFAULT, TWITTER_API_DM, SOLARIAT_POST_DATA)

    subscription = fields.ReferenceField(BaseHistoricalSubscription,
                                         db_field='sub')
    timestamp = fields.NumField(db_field='tsp')
    post_data = fields.StringField(db_field='pd')
    post_data_format = fields.NumField(choices=DATA_FORMATS,
                                       default=DATASIFT_DEFAULT,
                                       db_field='fmt')

    indexes = [('subscription', 'timestamp')]

    @property
    def solariat_post_data(self):
        data = json.loads(self.post_data)
        transform = {
            self.SOLARIAT_POST_DATA: lambda x: x,
            self.DATASIFT_DEFAULT: datasift_to_post_dict,
            self.TWITTER_API_DM: twitter_dm_to_post_dict,
            self.TWITTER_API_PUBLIC: twitter_status_to_post_dict
        }[self.post_data_format]
        try:
            data = transform(data)
        except KeyError:
            data['_transform_error'] = True
        return data
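
The solariat_post_data property dispatches on post_data_format to pick a converter; a sketch of reading one queued record back (raw_dm is a placeholder native payload):

item = QueuedHistoricData(post_data=json.dumps(raw_dm),
                          post_data_format=QueuedHistoricData.TWITTER_API_DM)
post_dict = item.solariat_post_data   # twitter_dm_to_post_dict(raw_dm), or the original
                                      # dict flagged with '_transform_error' on a KeyError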
Example No. 28
class DynamicImportedProfile(AuthDocument):

    id = fields.CustomIdField()
    actor_num = AutoIncrementField(counter_name='ActorCounter', db_field='ar')
    linked_profile_ids = fields.ListField(fields.StringField())
    account_id = fields.ObjectIdField()

    @property
    def linked_profiles(self):
        from solariat_bottle.db.user_profiles.user_profile import UserProfile
        return UserProfile.objects(id__in=self.linked_profile_ids)[:]

    def get_profile_of_type(self, typename):
        if not isinstance(typename, basestring):
            typename = typename.__name__

        for profile in self.linked_profiles:
            if profile.__class__.__name__ == typename:
                return profile

    def add_profile(self, platform_profile):
        self.linked_profile_ids.append(str(platform_profile.id))
        self.save()

    def has_linked_profile(self, platform_profile):
        return str(platform_profile.id) in self.linked_profile_ids

    def to_dict(self, **kw):
        base_dict = super(DynamicImportedProfile, self).to_dict(**kw)
        for key, val in base_dict.iteritems():
            if len(str(val)) > 100:
                base_dict[key] = FIELD_TOO_LONG
        return base_dict
Example No. 29
class StaticEventType(BaseEventType):

    attributes = fields.ListField(fields.StringField())

    is_static = True

    EVENT_TYPES = {
        'Facebook': ['Comment'],
        'Twitter': ['Tweet'],
        'Chat': ['Message'],
        'Voice': ['Call'],
        'Email': ['Message'],
        'Web': ['Click'],
        'FAQ': ['Search'],
        'Branch': ['Visit'],
        'VOC': ['Score'],
    }

    @staticmethod
    def generate_static_event_types(user, event_types=EVENT_TYPES):
        types = []
        for platform, names in event_types.iteritems():
            for name in names:
                types.append(StaticEventType.objects.create_by_user(
                    user,
                    account_id=user.account.id,
                    platform=platform,
                    name=name,
                    attributes=['stage_metadata']
                ))
        return types
Example No. 30
class PredictorModelData(SonDocument):
    """Embedded model information to be used in Predictor
    """
    model_id = fields.ObjectIdField()  # reference to PredictorModel

    # denormalized from PredictorModel
    display_name = fields.StringField()
    weight = fields.NumField()
    task_data = fields.EmbeddedDocumentField(TaskData)

    @staticmethod
    def _get_model_data(model):
        return dict(model_id=model.id,
                    display_name=model.display_name,
                    weight=model.weight,
                    task_data=model.task_data)

    @classmethod
    def init_with_model(cls, model):
        return cls(**cls._get_model_data(model))

    def sync_with_model_instance(self, model):
        self.__dict__.update(self._get_model_data(model))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and other.model_id == self.model_id

    def __hash__(self):
        return hash(str(self.model_id))
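
A sketch of keeping the embedded snapshot in sync with its source model; model is a placeholder PredictorModel instance.

snapshot = PredictorModelData.init_with_model(model)    # copies id, display_name, weight, task_data
model.weight = 0.75                                     # source model changes later
snapshot.sync_with_model_instance(model)                # refresh the denormalized copy
assert snapshot == PredictorModelData.init_with_model(model)   # equality is by model_id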