class WebProfile(UserProfile):
    """Profile for web-channel visitors.

    Links browser-level artifacts (cookies, signatures, sessions) to a
    user identity.  NOTE(review): field semantics inferred from names --
    confirm against the web-tracking pipeline that populates them.
    """
    sessions = fields.ListField(fields.StringField())            # session ids
    user_id = fields.StringField()                               # external user id
    browser_cookies = fields.ListField(fields.StringField())     # raw cookie values
    browser_signatures = fields.ListField(fields.StringField())  # browser fingerprints

    manager = WebProfileManager
class FacebookHistoricalSubscription(BaseHistoricalSubscription):
    """Historical-data subscription for a Facebook channel."""

    # ids of objects (pages, events, groups) which were already handled
    finished = fields.ListField(fields.StringField())
    # ids of objects which still need to be handled
    actionable = fields.ListField(fields.StringField())

    @property
    def _handler(self):
        return fb_process_subscription

    def get_progress(self):
        """Return the completion ratio in [0, 1], rounded to 2 decimals."""
        done = len(self.finished)
        pending = len(self.actionable)
        total = done + pending
        if not total:
            return 0
        return round(done / float(total), 2)

    def get_history_targets(self):
        """Collect every id whose history this subscription must fetch."""
        channel = self.channel
        return ([channel.facebook_handle_id]
                + list(channel.facebook_page_ids)
                + list(channel.tracked_fb_event_ids)
                + list(channel.tracked_fb_group_ids))
class BaseProfile(AuthDocument):
    """Base class for actor/customer profiles across channels.

    Holds identity data, demographic fields, and links to
    platform-specific ``UserProfile`` documents via ``linked_profile_ids``.
    """
    manager = ProfileManager
    allow_inheritance = True
    collection = "BaseProfiles"

    account_id = fields.ObjectIdField()
    first_name = fields.StringField()
    last_name = fields.StringField()
    age = fields.NumField()
    sex = fields.StringField()
    location = fields.StringField()
    seniority = fields.StringField()
    assigned_labels = fields.ListField(fields.ObjectIdField())
    date_of_birth = fields.StringField()   # string formatted per AGE_FORMAT
    attached_data = fields.DictField()
    products = fields.ListField(fields.StringField())
    actor_num = AutoIncrementField(counter_name='ActorCounter', db_field='ar')
    created_at = fields.DateTimeField(default=now)
    # string ids of linked platform-specific UserProfile documents
    linked_profile_ids = fields.ListField(fields.StringField())

    indexes = ['actor_num', 'linked_profile_ids']

    @property
    def linked_profiles(self):
        """Resolve linked_profile_ids to UserProfile documents (materialized list)."""
        # local import to avoid a circular dependency at module load
        from solariat_bottle.db.user_profiles.user_profile import UserProfile
        return UserProfile.objects(id__in=self.linked_profile_ids)[:]

    def get_profile_of_type(self, typename):
        """Return the first linked profile whose class name matches ``typename``.

        ``typename`` may be a class or a string; returns None if no match.
        """
        if not isinstance(typename, basestring):
            typename = typename.__name__
        for profile in self.linked_profiles:
            if profile.__class__.__name__ == typename:
                return profile

    def add_profile(self, profile):
        """Link ``profile`` to this document, skipping duplicates."""
        new_id = str(profile.id)
        if new_id not in self.linked_profile_ids:
            self.linked_profile_ids.append(new_id)
            # addToSet keeps the stored list duplicate-free as well
            self.update(addToSet__linked_profile_ids=new_id)

    def get_age(self):
        """Best-effort age computation; may persist a derived date_of_birth."""
        # Best guess we can make is by date of birth if present and properly formatted
        if self.date_of_birth:
            try:
                dob = datetime.strptime(self.date_of_birth, AGE_FORMAT)
                return relativedelta(datetime.now(), dob).years
            except Exception, ex:
                LOGGER.error(ex)
        # Next, if actual age is present, use that but also store updated dob
        if self.age:
            dob = datetime.now() - relativedelta(years=self.age)
            self.date_of_birth = dob.strftime(AGE_FORMAT)
            self.save()
            return self.age
        return None
class EventSequenceStatsMixin(object):
    """Mixin with journey/event-sequence fields and helpers shared by
    journey statistics documents."""

    account_id = fields.ObjectIdField(db_field='aid')
    channels = fields.ListField(fields.ObjectIdField(), db_field='chs')
    stage_sequence_names = fields.ListField(fields.StringField(), db_field='sseqnm')
    status = fields.NumField(db_field='ss',
                             choices=JourneyStageType.STATUSES,
                             default=JourneyStageType.IN_PROGRESS)
    smart_tags = fields.ListField(fields.ObjectIdField(), db_field='sts')
    journey_tags = fields.ListField(fields.ObjectIdField(), db_field='jts')
    journey_type_id = fields.ObjectIdField(db_field='jt')
    journey_attributes = fields.DictField(db_field='jyas')

    def __get_journey_type(self):
        # Lazily fetch and cache the JourneyType on the instance.
        if hasattr(self, '_f_journey_type'):
            return self._f_journey_type
        else:
            self._f_journey_type = JourneyType.objects.get(self.journey_type_id)
            return self._f_journey_type

    def __set_journey_type(self, journey_type):
        self._f_journey_type = journey_type

    journey_type = property(__get_journey_type, __set_journey_type)

    @classmethod
    def translate_static_key_name(cls, key_name):
        """Map the status field's raw db_field to its friendly name."""
        # translate any static key, leave anything else the same
        if key_name == cls.status.db_field:
            return 'status'
        return key_name

    @classmethod
    def translate_static_key_value(cls, key_name, key_value):
        """Map a stored status value to its display text."""
        # translate any static key, leave anything else the same
        if key_name == cls.status.db_field:
            return JourneyStageType.STATUS_TEXT_MAP[key_value]
        return key_value

    @property
    def full_journey_attributes(self):
        """Dynamic attributes plus static attributes worth considering in
        facets or analysis."""
        from copy import deepcopy
        base_attributes = deepcopy(self.journey_attributes)
        base_attributes['status'] = self.status
        return base_attributes

    @property
    def account(self):
        # TODO Check this for performance. Should cache.
        return Account.objects.get(self.account_id)

    # BUG FIX: removed two stray, duplicated statements that sat at class
    # scope and referenced an undefined name:
    #     event_id = EventIdField().to_mongo(event_id)
    # They would raise NameError the moment the class body executes and
    # look like a paste error rather than intentional code.
class PostFilterEntryPassive(Document):
    """A passively tracked keyword entry mapped to channels/handles."""

    entry = fields.StringField(db_field='kd')                   # tracked keyword
    channels = fields.ListField(fields.ReferenceField('Channel'), db_field='cs')
    twitter_handles = fields.ListField(fields.StringField(), db_field='th')

    # NOTE(review): ('entry') is just the string 'entry', not a tuple --
    # works for a single-field index but looks like a missing comma.
    indexes = [Index(('entry'), unique=True)]
class MultilanguageChannelMixin(object):
    """Mixin tracking the languages a channel is configured for and the
    languages actually seen in its posts."""

    # NOTE(review): mutable list defaults -- assumed safe only if the ODM
    # copies defaults per document; confirm.
    langs = fields.ListField(fields.StringField(), default=[LangCode.EN])
    post_langs = fields.ListField(fields.StringField(), default=[])

    def add_post_lang(self, post):
        """Record the post's language code the first time it is seen."""
        lang_code = post.language
        if lang_code in self.post_langs:
            return
        self.post_langs.append(lang_code)
        self.save()
class StreamLog(Document):
    """Created on streamref creation, updated on stream stops"""

    accounts = fields.ListField(fields.ObjectIdField())   # accounts covered by the stream
    channels = fields.ListField(fields.ObjectIdField())   # channels covered by the stream
    stream_ref_id = fields.BytesField()                   # hash id of the owning StreamRef
    started_at = fields.DateTimeField(null=True)
    stopped_at = fields.DateTimeField(null=True)

    indexes = [('accounts', ), ('channels', ), ('stream_ref_id', )]
class Action(AuthDocument):
    """A named action associated with an account, tags and channels."""

    name = fields.StringField()
    tags = fields.ListField(fields.ObjectIdField())
    channels = fields.ListField(fields.ObjectIdField())
    account_id = fields.ObjectIdField()
    type = fields.StringField()

    def to_dict(self, fields_to_show=None):
        """Serialize the minimal identifying attributes as strings."""
        return {
            'id': str(self.id),
            'account_id': str(self.account_id),
            'name': str(self.name),
        }
class StaticEventType(BaseEventType):
    """Built-in (non user-defined) event type, one per platform/name pair."""

    attributes = fields.ListField(fields.StringField())
    is_static = True

    # platform -> list of built-in event names for that platform
    EVENT_TYPES = {
        'Facebook': ['Comment'],
        'Twitter': ['Tweet'],
        'Chat': ['Message'],
        'Voice': ['Call'],
        'Email': ['Message'],
        'Web': ['Click'],
        'FAQ': ['Search'],
        'Branch': ['Visit'],
        'VOC': ['Score'],
    }

    @staticmethod
    def generate_static_event_types(user, event_types=EVENT_TYPES):
        """Create one StaticEventType per (platform, name) for user's account.

        Returns the list of created documents.
        """
        types = []
        for platform, names in event_types.iteritems():
            for name in names:
                types.append(StaticEventType.objects.create_by_user(
                    user,
                    account_id=user.account.id,
                    platform=platform,
                    name=name,
                    attributes=['stage_metadata']
                ))
        return types
class ContactLabel(AuthDocument):
    """Label attached to contacts/handles on a given platform."""

    admin_roles = [ADMIN, STAFF, ANALYST]
    manager = ContactLabelManager

    title = fields.StringField(db_field='te')
    # BUG FIX: pass the callable `datetime.utcnow`, not `datetime.utcnow()`.
    # The call form is evaluated once at import time, so every document
    # would share the process-start timestamp.  (Funnel in this file uses
    # the callable form `default=datetime.now` correctly.)
    created = fields.DateTimeField(db_field='cd', default=datetime.utcnow)
    platform = fields.StringField(db_field='pm')
    status = fields.StringField(db_field='st')
    users = fields.ListField(fields.StringField())

    allow_inheritance = True

    @classmethod
    def class_based_access(cls, account):
        """ Based on the AUTH class we are creating, we might offer some
        default access to certain groups from the account. By default,
        permissions should only be given to admin type users. This can be
        overwritten in specific classes as needed. E.G. messages -> agents ? """
        if account is None:
            return []
        return [
            default_admin_group(account),
            default_analyst_group(account),
            default_reviewer_group(account)
        ]

    @property
    def type_id(self):
        return 0
class DynamicImportedProfile(AuthDocument):
    """Dynamically-imported profile with links to platform UserProfiles."""

    id = fields.CustomIdField()
    actor_num = AutoIncrementField(counter_name='ActorCounter', db_field='ar')
    linked_profile_ids = fields.ListField(fields.StringField())
    account_id = fields.ObjectIdField()

    @property
    def linked_profiles(self):
        """Resolve linked_profile_ids to UserProfile documents."""
        # local import to avoid a circular dependency at module load
        from solariat_bottle.db.user_profiles.user_profile import UserProfile
        return UserProfile.objects(id__in=self.linked_profile_ids)[:]

    def get_profile_of_type(self, typename):
        """Return the first linked profile whose class name matches; else None."""
        if not isinstance(typename, basestring):
            typename = typename.__name__
        for profile in self.linked_profiles:
            if profile.__class__.__name__ == typename:
                return profile

    def add_profile(self, platform_profile):
        # NOTE(review): unlike BaseProfile.add_profile, this appends without
        # a duplicate check -- callers appear expected to use
        # has_linked_profile() first; confirm.
        self.linked_profile_ids.append(str(platform_profile.id))
        self.save()

    def has_linked_profile(self, platform_profile):
        """True if the platform profile is already linked."""
        return str(platform_profile.id) in self.linked_profile_ids

    def to_dict(self, **kw):
        """Serialize, truncating any oversized values to a marker string."""
        base_dict = super(DynamicImportedProfile, self).to_dict(**kw)
        for key, val in base_dict.iteritems():
            if len(str(val)) > 100:
                base_dict[key] = FIELD_TOO_LONG
        return base_dict
class TaskMessage(Document):
    ''' Internal structure representing the integration data structure
    with a data stream provider. '''

    # BUG FIX: default must be the callable `datetime.now`, not the value
    # `datetime.now()` -- the call form is evaluated once at import time,
    # so every message would share the process-start timestamp.
    _created = fields.DateTimeField(db_field='ca', default=datetime.now)
    content = fields.StringField(db_field='ct', required=True)
    type = fields.StringField(db_field='tp', required=True)
    user = fields.ListField(fields.ReferenceField(User))

    manager = TaskMessageManager

    def add_item(self):
        ''' Increment counters'''
        self._update_item(1)

    def remove_item(self):
        ''' Decrement counters or remove if empty '''
        # NOTE(review): `entry_count` and `_update_item` are not defined on
        # this class -- presumably supplied by a subclass/mixin; verify.
        if self.entry_count >= 2:
            self._update_item(-1)
        else:
            self.delete()

    def set_datasift_hash(self, datasift_hash):
        " set atomically datasift hash and update last_sync "
        # NOTE(review): 'datasift_hash' and 'last_sync' are not declared on
        # TaskMessage; this method looks copied from another document class
        # and would raise KeyError here -- confirm intended owner.
        return self.objects.coll.find_and_modify(
            query={'_id': self.id},
            update={
                '$set': {
                    self.fields['datasift_hash'].db_field: datasift_hash,
                    self.fields['last_sync'].db_field: datetime.now()
                }
            },
            new=True)
class StreamRef(Document):
    """Reference to a (Twitter) stream defined by track/follow/language
    filters; its id is a hash of those filters."""

    QUEUED = 'queued'
    RUNNING = 'running'
    ERROR = 'error'
    STOPPED = 'stopped'
    STREAM_STATUSES = [QUEUED, RUNNING, ERROR, STOPPED]

    # id is a deterministic 128-bit hash of the filters (see `key`)
    id = fields.BytesField(db_field='_id', unique=True, required=True)
    track = fields.ListField(fields.StringField())     # tracked keywords
    follow = fields.ListField(fields.StringField())    # user_ids
    languages = fields.ListField(fields.StringField(), db_field='lng')
    status = fields.StringField(choices=STREAM_STATUSES)
    log = fields.ReferenceField('StreamLog')

    manager = StreamRefManager
    indexes = [('status', )]

    def is_stopped(self):
        """True if status says stopped OR the attached log records a stop time."""
        return self.status == self.STOPPED or (self.log and self.log.stopped_at is not None)

    @property
    def key(self):
        """Deterministic id derived from the filters; computed lazily."""
        if not self.id:
            footprint = self.filters
            self.id = mhash(footprint, n=128)
        return self.id

    @property
    def filters(self):
        """Hashable snapshot of (track, follow, languages)."""
        return tuple(
            [freeze(self.track), freeze(self.follow), freeze(self.languages)])

    def set_added(self):
        """Mark the stream running and stamp the log's start time."""
        self.update(status=self.RUNNING)
        self.log.update(started_at=now())

    def set_removed(self):
        """Mark the stream stopped and stamp the log's stop time."""
        self.update(status=self.STOPPED)
        self.log.update(stopped_at=now())

    def save(self, **kw):
        self.id = self.key  # fill hash id
        super(StreamRef, self).save(**kw)
class LocalStoreChannelFilter(ChannelFilter):
    """Channel filter that stores accepted/rejected items on the document
    itself rather than in an external service."""

    _accepted_items = fields.ListField(fields.DictField(), db_field='as')
    _rejected_items = fields.ListField(fields.DictField(), db_field='rs')

    @property
    def accepted_items(self):
        """Items the filter has accepted."""
        return self._accepted_items

    @property
    def rejected_items(self):
        """Items the filter has rejected."""
        return self._rejected_items

    def reset(self):
        """Drop every accepted and rejected item and persist the change."""
        self._accepted_items, self._rejected_items = [], []
        self.save()
class FooBar(Document):
    """Sample/test document with a representative spread of field types."""

    name = fields.StringField(db_field='nm')
    status = fields.StringField(db_field='stts',
                                choices=['active', 'deactivated', 'suspended'])
    counter = fields.NumField(db_field='cntr')
    created_at = fields.DateTimeField(db_field='crtd')
    updated_at = fields.DateTimeField(db_field='updtd')
    active = fields.BooleanField(db_field='actv')
    stages = fields.ListField(fields.StringField(), db_field='stgs')
class ABCMultiClassPredictor(AuthDocument):
    """Multi-class predictor built as a grouping of binary ABC predictors.

    Each option (class) is an ABCPredictor; matching picks the option with
    the highest score, subject to `inclusion_threshold`.
    """

    collection = 'ABCMultiPreditor'

    abc_predictors = fields.ListField(
        fields.ObjectIdField())  # Just a grouping of binary predictors
    inclusion_threshold = fields.NumField(default=0.25)
    is_dirty = fields.BooleanField()

    __classes = None  # lazily-resolved ABCPredictor instances

    @property
    def classes(self):
        """Resolve and cache the ABCPredictor documents for each option."""
        if not self.__classes:
            self.__classes = [ABCPredictor.objects.get(o_id)
                              for o_id in self.abc_predictors]
        return self.__classes

    def to_dict(self, fields_to_show=None):
        """Serialize, embedding each option's own dict under 'classes'."""
        base_dict = super(ABCMultiClassPredictor,
                          self).to_dict(fields_to_show=fields_to_show)
        base_dict['classes'] = [seg.to_dict() for seg in self.classes]
        return base_dict

    def score(self, customer_profile):
        """Return [(display_name, score), ...] for every option."""
        return [(option.display_name, option.score(customer_profile))
                for option in self.classes]

    def match(self, customer_profile):
        """Return (True, best_option) if the top score clears the threshold,
        else (False, None)."""
        best_option, max_score = None, 0
        for option in self.classes:
            option_score = option.score(customer_profile)
            if option_score > max_score:
                best_option, max_score = option, option_score
        if max_score > self.inclusion_threshold:
            return True, best_option
        return False, None

    def accept(self, customer_profile, accepted_option):
        """Positive feedback for one option, negative for all the others."""
        for option in self.classes:
            if option.id == accepted_option.id:
                option.accept(customer_profile)
            else:
                option.reject(customer_profile)
        self.is_dirty = True
        self.save()

    def reject(self, customer_profile, rejected_option):
        """Negative feedback for a single option."""
        rejected_option.reject(customer_profile)
        self.is_dirty = True
        self.save()
class EventTag(ABCPredictor):
    """Predictor-backed tag applied to Event documents."""

    indexes = [('account_id', 'is_multi', ), ]

    display_name = fields.StringField()
    account_id = fields.ObjectIdField()
    status = fields.StringField(default="Active")
    description = fields.StringField()
    created = fields.DateTimeField()
    channels = fields.ListField(fields.ObjectIdField())

    manager = EventTagManager

    default_threshold = 0.49

    @property
    def inclusion_threshold(self):
        """Minimum score for a match; fixed at the class default."""
        return self.default_threshold

    def save(self):
        # persist the classifier's packed model alongside the document
        self.packed_clf = self.clf.packed_model
        super(EventTag, self).save()

    def match(self, event):
        """True if the predictor score clears the inclusion threshold."""
        assert isinstance(event, Event), "EventTag expects Event objects"
        if self.score(event) > self.inclusion_threshold:
            return True
        return False

    def score(self, event):
        assert isinstance(event, Event), "EventTag expects Event objects"
        return super(EventTag, self).score(event)

    def accept(self, event):
        assert isinstance(event, Event), "EventTag expects Event objects"
        return super(EventTag, self).accept(event)

    def reject(self, event):
        assert isinstance(event, Event), "EventTag expects Event objects"
        return super(EventTag, self).reject(event)

    def check_preconditions(self, event):
        # SECURITY NOTE(review): eval() of a stored precondition string --
        # safe only if preconditions are authored by trusted staff; verify.
        if self.precondition:
            return eval(self.precondition)
        return self.feature_extractor.check_preconditions(event, self.features_metadata)

    def rule_based_match(self, event):
        # SECURITY NOTE(review): eval() of a stored rule string -- same
        # trust caveat as check_preconditions.
        if self.acceptance_rule:
            return eval(self.acceptance_rule)
        return False

    def to_dict(self, fields_to_show=None):
        """Serialize, dropping bulky internals and stringifying channel ids."""
        result_dict = super(EventTag, self).to_dict()
        result_dict.pop('counter')
        result_dict.pop('packed_clf')
        result_dict['channels'] = [str(c) for c in result_dict['channels']]
        return result_dict
class AuthDocument(Document, AuthMixin):
    """Base document carrying an ACL used for permission checks."""

    acl = fields.ListField(fields.StringField(), db_field='acl')

    manager = AuthManager

    def to_dict(self, fields_to_show=None):
        """Serialize, exposing ACL entries as string ids under 'groups'
        so the result is JSON-safe."""
        data = super(AuthDocument, self).to_dict(fields_to_show)
        data['groups'] = map(str, self.acl)
        return data
class QueueMessage(Document):
    """Queued post payload awaiting delivery, reservable in batches."""

    manager = QueueMessageManager

    channel_id = fields.ListField(fields.StringField())
    created_at = fields.DateTimeField()
    reserved_until = fields.DateTimeField()   # reservation lease expiry
    post_data = fields.DictField()            # serialized post payload
    batch_token = fields.StringField()        # token of the reserving batch

    indexes = [
        ('channel_id', 'reserved_until'),
        ('batch_token', ),
    ]
class Funnel(AuthDocument):
    """A named, ordered sequence of journey-stage steps owned by a user."""

    name = fields.StringField(required=True, unique=True)
    description = fields.StringField()
    journey_type = fields.ObjectIdField()
    steps = fields.ListField(fields.ObjectIdField(), required=True)
    owner = fields.ReferenceField(User)
    created = fields.DateTimeField(default=datetime.now)

    def to_dict(self, fields_to_show=None):
        """Serialize with step ids rendered as strings."""
        data = super(Funnel, self).to_dict()
        data['steps'] = [str(step) for step in self.steps]
        return data
class PostFilterEntry(Document):
    """A tracked keyword entry tied to a PostFilter, channels and language."""

    entry = fields.StringField(db_field='kd')                # tracked keyword
    filter_type_id = fields.NumField(db_field='ee', choices=FILTER_TYPE_IDS)
    post_filter = fields.ReferenceField(PostFilter, db_field='pr')
    twitter_handles = fields.ListField(fields.StringField(), db_field='th')
    channels = fields.ListField(fields.ReferenceField('Channel'), db_field='cs')
    lang = fields.StringField(default=LangCode.EN)

    indexes = [('entry', 'channels', 'lang'),
               Index(('filter_type_id', 'entry', 'lang'), unique=True),
               ('channels', )]

    @property
    def filter_type(self):
        """Resolve filter_type_id to its FILTER_TYPES entry; None if unset."""
        if self.filter_type_id is not None:
            return FILTER_TYPES[int(self.filter_type_id)]
class Gallery(Document):
    """ A gallery is dashboard_type specific, will contain collection of
    predefined widget models """

    dashboard_type = fields.ReferenceField(DashboardType, required=True)
    widget_models = fields.ListField(fields.ReferenceField(WidgetModel))
    created = fields.DateTimeField(default=datetime.now)

    def to_dict(self):
        """Serialize, surfacing the dashboard type's display name and type."""
        data = super(Gallery, self).to_dict()
        dashboard_type = self.dashboard_type
        data['display_name'] = dashboard_type.display_name
        data['type'] = dashboard_type.type
        return data
class PredictorModel(Document):
    """One trained model version belonging to a BasePredictor."""

    collection = 'PredictorModel'
    allow_inheritance = True

    version = fields.NumField()
    predictor = fields.ReferenceField('BasePredictor')
    parent = fields.ObjectIdField()       # model this version was derived from
    weight = fields.NumField()
    display_name = fields.StringField()
    description = fields.StringField()
    # is_active = fields.BooleanField(default=False)
    task_data = fields.EmbeddedDocumentField(TaskData)
    last_run = fields.DateTimeField()
    context_features = fields.ListField(fields.DictField())
    action_features = fields.ListField(fields.DictField())
    train_data_percentage = fields.NumField(default=80)   # train/test split %
    n_rows = fields.NumField()
    # NOTE(review): field name is misspelled ("thresould") but it is a
    # persisted attribute name -- renaming would break stored data/callers.
    min_samples_thresould = fields.NumField(default=1)
    from_dt = fields.DateTimeField()
    to_dt = fields.DateTimeField()

    # Subclasses implement the actual scoring/feedback/search behavior.
    def score(self, *args, **kwargs):
        pass

    def feedback(self, *args, **kwargs):
        pass

    def search(self, *args, **kwargs):
        pass

    def to_json(self, *args, **kwargs):
        """Serialize via the predictor configuration conversion helper."""
        from solariat_bottle.db.predictors.base_predictor import PredictorConfigurationConversion
        data = super(PredictorModel, self).to_json(*args, **kwargs)
        data = PredictorConfigurationConversion.python_to_json(data)
        return data
class LandingPage(ArchivingAuthDocument):
    """A landing page with weighted content fields used for matching."""

    manager = LandingPageManager

    url = fields.StringField(required=True, unique=True, sparse=True,
                             db_field='ul')
    display_field = fields.StringField(db_field='df')
    weighted_fields = fields.ListField(
        fields.EmbeddedDocumentField(WeightedContentField), db_field='wf')

    # indexes = [ ('url', ) ]

    def to_dict(self, fields2show=None):
        """Serialize; expand weighted_fields unless explicitly filtered out."""
        info = ArchivingAuthDocument.to_dict(self, fields2show)
        if fields2show and 'weighted_fields' not in fields2show:
            return info
        info['weighted_fields'] = [wf.to_dict() for wf in self.weighted_fields]
        return info
class FAQQueryEvent(Event):
    """Event recorded when a user runs an FAQ search query."""

    query = fields.StringField()
    speech_acts = fields.ListField(fields.DictField())

    manager = FAQEventManager

    PROFILE_CLASS = WebProfile

    @classmethod
    def patch_post_kw(cls, kw):
        # FAQ events need no post-keyword adjustments.
        pass

    @property
    def platform(self):
        return 'FAQ'

    def to_dict(self, fields2show=None):
        """Serialize, embedding the acting profile's own dict."""
        data = super(FAQQueryEvent, self).to_dict()
        data['actor'] = self.actor.to_dict()
        return data
class EnterpriseFacebookChannel(FacebookUserMixin, Channel):
    "channel with facebook specific information for daemon"

    # user access_token for EnterpriseFacebookChannel
    facebook_access_token = fields.StringField(db_field = 'fat')
    facebook_handle_id = fields.StringField(db_field = 'fid')
    facebook_screen_name = fields.StringField(db_field = 'fsn')
    user_access_token = fields.StringField(db_field = 'uat')
    # Keep track of all the page accounts this user has access to
    facebook_account_ids = fields.ListField(fields.StringField())
    # monitored facebook pages
    facebook_page_ids = fields.ListField(fields.StringField())
    tracked_fb_group_ids = fields.ListField(fields.StringField())
    tracked_fb_event_ids = fields.ListField(fields.StringField())
    review_outbound = fields.BooleanField(default=False, db_field='ro')
    review_team = fields.ReferenceField(Group, db_field='rg')
    is_inbound = fields.BooleanField(db_field='in', default=False)

    @property
    def is_authenticated(self):
        # truthy token doubles as the authenticated flag
        return self.facebook_access_token

    @property
    def type_id(self):
        return 2

    @property
    def type_name(self):
        return "Enterprise Facebook"

    @property
    def base_url(self):
        return "https://facebook.com"

    @property
    def platform(self):
        return "Facebook"

    @property
    def is_dispatchable(self):
        return True

    @property
    def is_dispatch_channel(self):
        return True

    def get_attached_service_channels(self):
        """Service channels that name this channel as their dispatch channel."""
        candidates = FacebookServiceChannel.objects(account=self.account,
                                                    _dispatch_channel=self)[:]
        return candidates

    def get_service_channel(self, lookup_by_page_ids=True):
        """Find the matching service channel, falling back to token/page lookups.

        Returns None when there is no unambiguous match.
        """
        candidates = self.get_attached_service_channels()
        if not candidates and lookup_by_page_ids:
            # Fallback to lookup by token/page ids
            if self.facebook_access_token:
                candidates = FacebookServiceChannel.objects.find(
                    account=self.account,
                    facebook_access_token=self.facebook_access_token)[:]
            if not candidates:
                candidates = FacebookServiceChannel.objects.find(
                    account=self.account,
                    facebook_page_ids__in=self.facebook_page_ids)[:]
        if not candidates:
            return None
        if len(candidates) == 1:
            return candidates[0]
        else:
            LOGGER.error(
                "We have multiple candidates for service channel matching for enterprise channel %s" % self)
            return None
        # NOTE(review): everything below is unreachable -- both branches of
        # the if/else above return.  Looks like a leftover earlier version
        # of the ambiguity handling; candidate for deletion after confirming.
        if len(candidates) > 1:
            LOGGER.warn("We have multiple candidates for service channel matching "
                        "for enterprise channel %s" % self)
        if candidates:
            return candidates[0]

    def send_message(self, dry_run, creative, post, user, direct_message=False):
        """
        TODO: for now we always response publicly, will need to extend once
        we want to integrate private messages based on response type.
        """
        if post.can_comment:
            post_id = post.native_data['facebook_post_id']
        else:
            if post.parent:
                # This means we also have picked up the parent for the post. We just need
                # to issue a reply on that comment instead
                post_id = post.parent.native_data['facebook_post_id']
            else:
                # resolve the parent post id via the Graph API
                G = facebook_driver.GraphAPI(self.facebook_access_token, channel=self)
                comment = G.get_object(post.native_data['facebook_post_id'],
                                       fields='object.fields(id)')
                post_id = comment['parent']['id']
        LOGGER.info("Sending '%s' to %s using %s" % (creative, post_id, self))
        if post.is_pm:
            fb_answer_pm.ignore(post, creative)
        else:
            fb_comment_by_page.ignore(post, post_id, creative)

    def get_outbound_channel(self, user):
        # dispatch channels answer for themselves
        return self
class FacebookServiceChannel(FacebookUserMixin, FacebookChannel, ServiceChannel):
    """Service channel for Facebook: tracks pages, groups and events, and
    syncs credentials with its EnterpriseFacebookChannel dispatch channel."""

    # -- pages --
    # monitored facebook pages
    facebook_page_ids = fields.ListField(fields.StringField())  # name should be same as in EnterpriseFacebookChannel
    facebook_pages = fields.ListField(fields.DictField())  # current pages data
    all_facebook_pages = fields.ListField(fields.DictField())  # all accessible pages data
    page_admins = fields.DictField()  # {page_id: [list of facebook users json]...}
    tracked_fb_message_threads_ids = fields.ListField(fields.StringField())

    # -- groups --
    tracked_fb_group_ids = fields.ListField(fields.StringField())  # name should be same as in EnterpriseFacebookChannel
    tracked_fb_groups = fields.ListField(fields.DictField())
    all_fb_groups = fields.ListField(fields.DictField())

    # -- events --
    tracked_fb_event_ids = fields.ListField(fields.StringField())  # name should be same as in EnterpriseFacebookChannel
    tracked_fb_events = fields.ListField(fields.DictField())
    all_fb_events = fields.ListField(fields.DictField())

    # -- user --
    facebook_handle_id = fields.StringField()
    facebook_access_token = fields.StringField()

    pull_activity_md = fields.DictField()  # there stored info about last data pull operations time
    fb_pull_mode = fields.NumField(default=PULL_MODE_RARE)
    last_post_received = fields.StringField()
    last_pull_success = fields.StringField()

    @property
    def InboundChannelClass(self):
        return InboundFacebookChannel

    @property
    def OutboundChannelClass(self):
        return OutboundFacebookChannel

    @property
    def DispatchChannelClass(self):
        return EnterpriseFacebookChannel

    def find_direction(self, post):
        # For now just assume all posts are actionable if posted in one
        # of the users pages.
        return 'direct'

    def set_dispatch_channel(self, value):
        super(FacebookServiceChannel, self).set_dispatch_channel(value)
        self.sync_with_account_channel(value)

    def sync_with_account_channel(self, efc):
        """Copy credentials from the enterprise channel, or clear tracked
        data when it has no token."""
        self.update(_cached_channel_description=None)
        if efc.facebook_access_token:
            self.update(
                facebook_handle_id=efc.facebook_handle_id,
                facebook_access_token=efc.facebook_access_token,
            )
            self.set_facebook_me(efc.facebook_me())
        else:
            # no token: wipe page/event tracking state
            self.update(
                facebook_page_ids=[],
                facebook_pages=[],
                all_facebook_pages=[],
                tracked_fb_event_ids=[],
                tracked_fb_events=[],
                all_fb_events=[]
            )

    def get_access_token(self, user):
        """ Try to get the access token for this channel. """
        # if self.facebook_handle_id:
        #     return self.facebook_access_token
        # else:
        try:
            efc = self.get_outbound_channel(user)
            if efc:
                if efc.facebook_access_token:
                    self.sync_with_account_channel(efc)  # TODO: do we need sync here?
                    return efc.facebook_access_token
                error_msg = "Channel %s has no access token. Did you login to facebook from configuration page?" % (efc.title)
                raise FacebookConfigurationException(error_msg)
            else:
                error_msg = 'Please create and configure a channel of type "Facebook : Account" first.'
                raise FacebookConfigurationException(error_msg)
        except Exception as ex:
            # re-wrap anything (including the raises above) as a config error
            LOGGER.error(ex)
            raise FacebookConfigurationException(ex.message)

    def get_outbound_channel(self, user):
        '''
        Get the outbound channel based on user access,
        channel configuration, and as a last resort, channel configurations
        '''
        # The configured channel is only necessary, or correct, if this is no service
        # channel, or if there is a service channel with multiple candidates
        if self.dispatch_channel:
            return self.dispatch_channel
        configured_user_channel = user.get_outbound_channel(self.platform)
        configured_account_channel = self.account.get_outbound_channel(self.platform)
        candidates = EnterpriseFacebookChannel.objects.find_by_user(
            user, account=self.account, status='Active')[:]
        # If there are no candidates for the service channel, then do not return anything.
        if not candidates:
            return None
        else:
            if len(candidates) == 1:
                return candidates[0]
            if configured_user_channel in candidates:
                return configured_user_channel
            if configured_account_channel in candidates:
                return configured_account_channel
            error_msg = "There are multiple Facebook : Account channels on this account: "
            error_msg += "Channels: (%s), Account: %s. You need to set one in user profile or account settings." % (
                [c.title for c in candidates], self.account.name)
            raise FacebookConfigurationException(error_msg)

    def track_fb_group(self, group, user):
        assert isinstance(group, dict) and {'id', 'name'} <= set(group), 'Wrong group object'
        self.__add_to_tracking(user, group, 'tracked_fb_group_ids', 'tracked_fb_groups')

    def untrack_fb_group(self, group, user):
        assert isinstance(group, dict) and {'id', 'name'} <= set(group), 'Wrong group object'
        self.__remove_from_tracking(user, group, 'tracked_fb_group_ids', 'tracked_fb_groups')

    def track_fb_event(self, event, user):
        assert isinstance(event, dict) and {'id', 'name'} <= set(event), 'Wrong event object'
        self._handle_tracking("add", [], [event['id']])
        self.__add_to_tracking(user, event, 'tracked_fb_event_ids', 'tracked_fb_events')

    def untrack_fb_event(self, event, user):
        assert isinstance(event, dict) and {'id', 'name'} <= set(event), 'Wrong event object'
        self._handle_tracking("remove", [], [event['id']])
        self.__remove_from_tracking(user, event, 'tracked_fb_event_ids', 'tracked_fb_events')

    def add_facebook_page(self, page, user):
        assert isinstance(page, dict) and {'id', 'name'} <= set(page), 'Wrong page object'
        self._handle_tracking("add", [page['id']])
        self.__add_to_tracking(user, page, 'facebook_page_ids', 'facebook_pages')
        update_page_admins(self, page)

    def remove_facebook_page(self, page, user):
        assert isinstance(page, dict) and {'id', 'name'} <= set(page), 'Wrong page object'
        self._handle_tracking("remove", [page['id']])
        self.__remove_from_tracking(user, page, 'facebook_page_ids', 'facebook_pages')

    def post_received(self, post):
        """ Adds post to conversations. """
        from solariat_bottle.db.post.base import UntrackedPost
        assert set(post.channels).intersection([self.inbound, self.outbound])
        assert not isinstance(post, UntrackedPost), "It should be tracked if we received it."
        conv = self.upsert_conversation(post, contacts=False)
        if post.is_pm:
            # remember when the last private message arrived (epoch string)
            self.last_post_received = datetime.utcnow().strftime("%s")
            self.save()

    def _handle_tracking(self, action, pages=None, events=None):
        """Register/unregister pages and events with FacebookTracking.

        pages/events may be 'all' to mean every currently tracked id.
        """
        LOGGER.info(u"Invoked {}[{}]._handle_tracking action={} pages={} events={}".format(
            self.__class__.__name__, self.id, action, pages, events))
        if pages == 'all':
            pages = self.facebook_page_ids
        if events == 'all':
            events = self.tracked_fb_event_ids
        if pages:
            FacebookTracking.objects.handle_channel_event(action, self, pages, PAGE)
        if events:
            FacebookTracking.objects.handle_channel_event(action, self, events, EVENT)

    def on_active(self):
        """Activate this channel, its sub-channels and realtime subscriptions."""
        self.status = 'Active'
        self.update(set__status='Active')
        self._handle_tracking("add", "all", "all")
        subscribe_realtime_updates(self.facebook_pages)
        self.inbound_channel.on_active()
        self.outbound_channel.on_active()

    def on_suspend(self):
        """Suspend this channel, its sub-channels and realtime subscriptions."""
        self.status = 'Suspended'
        self.update(set__status='Suspended')
        self._handle_tracking("remove", "all", "all")
        unsubscribe_realtime_updates(self.facebook_pages)
        self.inbound_channel.on_suspend()
        self.outbound_channel.on_suspend()

    def archive(self):
        # stop tracking everything before archiving
        self._handle_tracking("remove", "all", "all")
        return super(FacebookServiceChannel, self).archive()

    def list_outbound_channels(self, user):
        return EnterpriseFacebookChannel.objects.find_by_user(user, account=self.account)

    def __invalidate_channel_descriptions(self, user):
        """Clear cached descriptions on self and all candidate dispatch channels."""
        self._cached_channel_description = None  # Make sure we no longer consider same cached channel
        candidates = EnterpriseFacebookChannel.objects.find_by_user(
            user, account=self.account, status='Active')[:]
        for candidate in candidates:
            candidate._cached_channel_description = None
            candidate.save()

    def __add_to_tracking(self, user, item, id_fields, tracked_field):
        """Track a page/group/event dict on this channel and mirror the id
        onto the outbound enterprise channel."""
        if str(item['id']) in getattr(self, id_fields):
            return
        getattr(self, tracked_field).append(item)
        getattr(self, id_fields).append(item['id'])
        self.__invalidate_channel_descriptions(user)
        self.save()
        if self.status == 'Active' and 'access_token' in item and item.get('type') != "event":
            subscribe_realtime_updates([item])
        efc = self.get_outbound_channel(user)
        if efc:
            getattr(efc, id_fields).append(str(item['id']))
            efc.save()

    def __remove_from_tracking(self, user, item, ids_field, tracked_field):
        """Untrack a page/group/event dict here and on the outbound channel."""
        tracked = getattr(self, tracked_field)
        setattr(self, tracked_field,
                filter(lambda p: str(p['id']) != str(item['id']), list(tracked)))
        try:
            getattr(self, ids_field).remove(str(item['id']))
        except ValueError:
            pass
        self.__invalidate_channel_descriptions(user)
        self.save()
        # Now also add page to dispatch so facebook bot will have access
        efc = self.get_outbound_channel(user)
        if efc:
            try:
                getattr(efc, ids_field).remove(str(item['id']))
            except ValueError:
                pass
            efc.save()
        if self.status == 'Active' and 'access_token' in item and item.get('type') != "event":
            unsubscribe_realtime_updates([item])

    def get_outbound_ids(self):
        """All ids this channel may post from: pages, page admins, handle."""
        outbound_ids = set()
        for page_id, users in (self.page_admins or {}).viewitems():
            outbound_ids.add(page_id)
            for user in users:
                if user.get('role') == 'Admin' and user.get('id'):
                    outbound_ids.add(user['id'])
        if self.facebook_handle_id:
            outbound_ids.add(str(self.facebook_handle_id))
        for page_data in list(self.all_facebook_pages):
            if page_data.get('id'):
                outbound_ids.add(page_data['id'])
        return outbound_ids
class Dashboard(AuthDocument):
    """A user-owned, shareable collection of dashboard widgets."""

    collection = 'Dashboard'
    manager = DashboardManager

    type_id = fields.ObjectIdField(required=True)
    title = fields.StringField(required=True)
    description = fields.StringField()
    owner = fields.ReferenceField(User)
    author = fields.ReferenceField(User)
    widgets = fields.ListField(fields.ObjectIdField())
    shared_to = fields.ListField(fields.ObjectIdField())
    filters = fields.DictField()
    created = fields.DateTimeField(default=datetime.now)

    admin_roles = {STAFF, ADMIN, REVIEWER, ANALYST}

    def to_dict(self, fields_to_show=None):
        """Serialize for the API, resolving owner/author display fields."""
        def display_name(user):
            return '%s %s' % (user.first_name or '', user.last_name or '')

        rv = super(Dashboard, self).to_dict()
        rv['widgets'] = [str(widget_id) for widget_id in self.widgets]
        rv['shared_to'] = [str(user_id) for user_id in self.shared_to]
        rv['owner_name'] = display_name(self.owner)
        rv['author_name'] = display_name(self.author)
        rv['owner_email'] = self.owner.email
        rv['author_email'] = self.author.email
        rv['account_id'] = str(self.owner.account.id)
        rv['type'] = DashboardType.objects.get(self.type_id).type
        return rv

    def __repr__(self):
        return "<Dashboard: {}; id: {}>".format(self.title, self.id)

    def _add_widget(self, widget):
        """Attach *widget* to this dashboard and persist."""
        self.widgets.append(widget.id)
        self.save()

    def _remove_widget(self, widget):
        """Detach *widget* from this dashboard and persist.

        The widget document is not automatically deleted.  To delete, use
        `.delete_widget()` instead.  `widget.dashboard_id` will still point
        to this dashboard.
        """
        self.widgets.remove(widget.id)
        self.save()

    def delete_widget(self, widget):
        """Delete a widget, given either a widget object or its id."""
        if isinstance(widget, (basestring, fields.ObjectId)):
            widget = DashboardWidget.objects.get(widget)
        widget.delete()

    def delete(self):
        """Delete this dashboard along with every widget it contains."""
        for widget_id in self.widgets:
            self.delete_widget(widget_id)
        super(Dashboard, self).delete_by_user(self.owner)

    def copy_to(self, user, title=None, description=None):
        """Clone this dashboard (and its widgets) for *user*.

        The clone's author is this dashboard's owner; title/description
        default to the originals when not given.
        """
        # FIX: create_by_user is having role error
        dashboard = Dashboard.objects.create_by_user(
            user,
            type_id=self.type_id,
            title=title or self.title,
            description=description or self.description,
            author=self.owner,
            owner=user,
            widgets=[],
            shared_to=[],
            filters=self.filters,
        )
        for widget_id in self.widgets:
            DashboardWidget.objects.get(widget_id).copy_to(dashboard)
        return dashboard
class InsightsAnalysis(Document):
    """Persisted record of one analysis run (classification or regression)
    over journey/predictive-matching data, including progress tracking and
    JSON-serialized results."""

    # Keys used inside the serialized results payloads.
    KEY_WEIGHT = 'discriminative_weight'
    KEY_RANK = 'rank'
    KEY_SCORE = 'score'
    KEY_VALUES = 'values'
    KEY_CROSSTAB = 'crosstab_results'
    KEY_VALUE_TYPE = 'value_type'
    KEY_PIE = 'pie'
    KEY_BAR = 'bar'
    KEY_BOX = 'boxplot'
    KEY_SCATTER = 'scatter'

    CLASSIFICATION_TYPE = 'classification'
    REGRESSION_TYPE = 'regression'

    BOOLEAN_METRIC = 'Boolean'
    NUMERIC_METRIC = 'Numeric'
    LABEL_METRIC = 'Label'

    METRIC_CONVERTED = "converted"
    METRIC_ABANDONED = "abandoned"
    METRIC_STUCK = "stuck"

    # Sentinel class indexes for items that could not be classified / were skipped.
    IDX_UNKNOWN = -1
    IDX_SKIP = -2

    NUM_TIMERANGE_SLOTS = 7

    user = fields.ObjectIdField(db_field='usr')
    title = fields.StringField(db_field='te')
    created_at = fields.NumField(db_field='ca')
    account_id = fields.ObjectIdField(db_field='ac')
    filters = fields.DictField(db_field='ft', required=True)
    analysis_type = fields.StringField(choices=[CLASSIFICATION_TYPE, REGRESSION_TYPE], db_field='at')
    application = fields.StringField(db_field='an')  # e.g. application which's used for the analysis
    analyzed_metric = fields.StringField(db_field='me')
    metric_type = fields.StringField(choices=[BOOLEAN_METRIC, NUMERIC_METRIC, LABEL_METRIC], db_field='mt')
    metric_values = fields.ListField(fields.StringField(), db_field='mv')
    metric_values_range = fields.ListField(fields.NumField(), db_field='mvr')  # e.g. min/max Numeric values or unique labels
    progress = fields.NumField(db_field='pg', default=0)
    _results = fields.StringField(db_field='rt')        # JSON-encoded results blob
    _timerange_results = fields.StringField(db_field='trt')  # JSON-encoded per-timeslot results
    status_message = fields.StringField(db_field='msg')

    # Transient (non-persisted) state used while computing timeslot counts.
    _cached_from_date = None
    _cached_to_date = None
    time_increment = None

    @property
    def status_progress(self):
        """Map the numeric ``progress`` field to a (status, progress) pair.

        The PROGRESS_*/STATUS_* constants come from elsewhere in the module.
        """
        if self.progress == PROGRESS_STOPPED:
            return STATUS_STOPPED, 0
        elif self.progress == 0:
            return STATUS_QUEUE, self.progress
        elif self.progress == PROGRESS_DONE:
            return STATUS_DONE, self.progress
        elif self.progress == PROGRESS_ERROR:
            return STATUS_ERROR, 0
        else:
            return STATUS_IN_PROGRESS, self.progress

    def is_stopped(self):
        return self.progress == PROGRESS_STOPPED

    def compute_class_names(self):
        """Return human-readable class names for the analyzed metric.

        For "stage-paths" / "paths-comparison" metrics the values are JSON
        blobs that get formatted; for Numeric classification the three
        buckets (below/between/above the two thresholds) are labelled.
        Falls back to the raw ``metric_values`` on any error.
        """
        import json
        metric_names = []
        try:
            if self.analyzed_metric == "stage-paths":
                for metric in self.metric_values:
                    metric_info = json.loads(metric)
                    metric_names.append("%s at step %s" % (metric_info['stage'], metric_info['step']))
                return metric_names
            if self.analyzed_metric == "paths-comparison":
                for metric in self.metric_values:
                    metric_info = json.loads(metric)
                    metric_names.append(
                        "%s %s (%s)" % (metric_info['measure'], metric_info['path'], metric_info['metric_value']))
                return metric_names
            if self.metric_type == self.NUMERIC_METRIC and self.analysis_type == self.CLASSIFICATION_TYPE:
                # Three buckets: [range-min .. v0], [v0 .. v1], [v1 .. range-max]
                metric_values = [
                    '%s(%s:%s)' % (self.analyzed_metric, self.metric_values_range[0], self.metric_values[0]),
                    "%s(%s:%s)" % (self.analyzed_metric, self.metric_values[0], self.metric_values[1]),
                    "%s(%s:%s)" % (self.analyzed_metric, self.metric_values[1], self.metric_values_range[1])]
                return metric_values
        except:
            # Deliberate best-effort: any formatting failure falls back to
            # the raw metric values below.
            import logging
            logging.exception(__name__)
        return self.metric_values

    def to_dict(self, fields2show=None):
        """API serialization: replace the raw JSON-blob fields with decoded
        results and add computed status/class-name/level entries."""
        base_dict = super(InsightsAnalysis, self).to_dict()
        base_dict.pop('_results')
        base_dict.pop('_timerange_results')
        base_dict['results'] = self.results
        base_dict['timerange_results'] = self.timerange_results
        base_dict['status'] = self.status_progress
        base_dict['metric_values'] = self.compute_class_names()
        base_dict['metric_values_range'] = self.metric_values_range
        base_dict['level'] = self.get_timerange_level()
        return base_dict

    def get_timerange_level(self):
        """Guess the timeslot level from the filters' from/to dates.

        Returns None (and logs a warning) when the dates are missing/unparsable.
        """
        try:
            return guess_timeslot_level(parse_datetime(self.filters['from']), parse_datetime(self.filters['to']))
        except:
            LOGGER.warn('Unknown period to determine the timerange level')

    def get_user(self):
        return User.objects.get(self.user)

    def initialize_timeslot_counts(self):
        """Build a zeroed {class_idx: {timeslot_ms: 0}} matrix over
        NUM_TIMERANGE_SLOTS slots and set ``self.time_increment`` (hours per
        slot, derived from the cached from/to dates)."""
        time_results = {}
        # NOTE(review): uses ``.days`` only, so sub-day remainders of the
        # range are dropped from the increment — confirm intended.
        self.time_increment = (self._cached_to_date - self._cached_from_date).days * 24 / float(self.NUM_TIMERANGE_SLOTS)
        # class index -1 (IDX_UNKNOWN) is included alongside the real classes
        for class_idx in range(-1, self.get_num_classes()):
            time_results[class_idx] = dict()
            for slot_idx in xrange(self.NUM_TIMERANGE_SLOTS):
                timeslot = datetime_to_timestamp_ms(self._cached_from_date +
                                                    timedelta(hours=self.time_increment * slot_idx))
                time_results[class_idx][timeslot] = 0
        return time_results

    def get_num_classes(self):
        # Numeric metrics: len+1 buckets between thresholds; otherwise one
        # extra class beyond that (presumably for unknown/skip — confirm).
        if self.metric_type == self.NUMERIC_METRIC:
            return len(self.metric_values) + 1
        else:
            return len(self.metric_values) + 2

    def get_timeslot_index(self, item):
        """Return the timeslot timestamp (ms) for *item* based on its
        ``created_at``, clamped to the last slot."""
        for idx in xrange(self.NUM_TIMERANGE_SLOTS):
            if hasattr(item, 'created_at') and utc(item.created_at) > self._cached_from_date + timedelta(
                    hours=self.time_increment * idx):
                continue
            else:
                break
        return datetime_to_timestamp_ms(self._cached_from_date + timedelta(hours=self.time_increment * idx))

    def process(self):
        """Dispatch the analysis to the handler for the selected application."""
        if self.application is None:
            self.application = self.get_user().account.selected_app
        if self.application == "Journey Analytics":
            # process_journeys_analysis.ignore(self)
            process_journeys_analysis(self)
        elif self.application == "Predictive Matching":
            # process_predictive_analysis.ignore(self)
            process_predictive_analysis(self)

    def save(self, **kw):
        """Custom save: update when an id exists, insert otherwise.

        Defaults ``upsert`` to False unless the caller overrides it.
        """
        if 'upsert' not in kw:
            kw['upsert'] = False
        if self.id:
            self.objects.update(self.data, **kw)
        else:
            self.id = self.objects.insert(self.data, **kw)

    def start(self):
        # dummy strptime call to pre-import the parser and dodge a known
        # threading bug
        # (https://bugs.launchpad.net/openobject-server/+bug/947231/comments/8)
        datetime.strptime('2011-01-01', '%Y-%m-%d')
        self.process()

    def stop(self):
        self.progress = PROGRESS_STOPPED
        self.save()

    def restart(self):
        # Reset progress to "queued" and run again.
        self.progress = 0
        self.save()
        self.start()

    def terminate(self):
        self.progress = PROGRESS_ERROR
        self.status_message = 'Process had been terminated.'
        self.save()

    @property
    def timerange_results(self):
        # Decoded view of the persisted JSON blob; {} when absent.
        if self._timerange_results:
            return json.loads(self._timerange_results)
        return {}

    @property
    def results(self):
        # Just in case we need some post-processing done
        if self._results:
            return json.loads(self._results)
        return {}
class WorkTimeMixin(object):
    """Mixin providing timezone-aware "off time" schedule arithmetic.

    ``off_time_schedule`` holds TimeMark entries (day-of-week, periodic
    date, static date); the mixin can subtract scheduled off-time from a
    plain date difference.
    """

    time_zone = fields.StringField(db_field='tzn', default='UTC')
    off_time_schedule = fields.ListField(TimeMarkField, db_field='otse')

    @property
    def tz(self):
        """pytz timezone for ``time_zone``; falls back to UTC on unknown names."""
        tz = pytz.UTC
        if self.time_zone:
            try:
                tz = pytz.timezone(self.time_zone)
            except pytz.UnknownTimeZoneError:
                pass
        return tz

    def filter_time_marks(self, mark_type, group=False):
        """Return marks of *mark_type*; when ``group`` is True, return them
        grouped by ``tm.value`` in a defaultdict(list)."""
        result = [tm for tm in self.off_time_schedule if tm.mark_type == mark_type]
        if not group:
            return result
        grouped = defaultdict(list)
        for tm in result:
            grouped[tm.value].append(tm)
        return grouped

    @staticmethod
    def as_tz(d, tz):
        """Convert *d* to timezone *tz*: aware datetimes are converted,
        naive ones are assumed to be UTC (via ``tz.fromutc``)."""
        if d.tzinfo:
            d = d.astimezone(tz)
        else:
            d = tz.fromutc(d)
        return tz.normalize(d)

    def generate_time_line(self, d1, d2):
        """Yield (timestamp, edge) pairs for every off-time interval
        overlapping [d1, d2], where edge is +1 at an interval start and -1
        at its end.  Timestamps are localized and clamped to the range.
        """
        start = self.as_tz(d1, self.tz)
        end = self.as_tz(d2, self.tz)
        localize = lambda tp: self.tz.localize(tp)

        def to_bounds(dt):
            # Clamp dt into [start, end].
            if dt < start:
                return start
            if dt > end:
                return end
            return dt

        # Day-of-week marks keyed by weekday index.
        dows_group = self.filter_time_marks(TimeMark.DAY_OF_WEEK, group=True)
        dows_group = {DOW.index(day_of_week): tms for day_of_week, tms in dows_group.items()}
        # Date-based marks restricted to the requested range.
        periodic_dates = [tm for tm in self.filter_time_marks(TimeMark.PERIODIC_DATE)
                          if tm.date(year=start.year) >= start.date() and tm.date(year=end.year) <= end.date()]
        static_dates = [tm for tm in self.filter_time_marks(TimeMark.STATIC_DATE)
                        if start.date() <= tm.date() <= end.date()]

        def _gen(from_, to_):
            # Walk the range one day at a time, yielding (tp1, tp2) spans.
            # NOTE(review): periodic/static marks are yielded on EVERY day of
            # the walk, not only on their own date — confirm this
            # double-emission is intended (overlaps may be absorbed by the
            # edge-sum merge in generate_intervals).
            while from_ < to_:
                dow = from_.weekday()
                if dow in dows_group:
                    for tm in dows_group[dow]:
                        yield tm.time_points(from_.date())
                for tm in periodic_dates:
                    yield tm.time_points(year=from_.year)
                for tm in static_dates:
                    yield tm.time_points()
                from_ = from_ + timedelta(days=1)

        for (tp1, tp2) in _gen(start, end):
            yield to_bounds(localize(tp1)), 1
            yield to_bounds(localize(tp2)), -1

    def generate_intervals(self, d1, d2):
        """Merge the edge timeline into disjoint off-time intervals and yield
        each interval's duration (sweep over sorted edges; an interval closes
        whenever the running edge sum returns to zero)."""
        edge_sum = 0
        start = None
        for (tp, edge_mark) in sorted(
                self.generate_time_line(d1, d2), key=itemgetter(0)):
            if edge_sum == 0:
                start = tp
            edge_sum += edge_mark
            if edge_sum == 0:
                interval = dates_diff(start, tp)
                yield interval

    def schedule_aware_dates_diff(self, d1, d2):
        """Difference between d1 and d2 minus all scheduled off-time."""
        return dates_diff(d1, d2) - sum(self.generate_intervals(d1, d2))