def wrapper(*args, **kw):
    data = dict()
    data.update(request.view_args)
    data.update(request.args)
    channel_id = data.get(ch_key)
    try:
        channel = get_doc_or_error(Channel, channel_id)
        kw['channel'] = channel
        return view_func(*args, **kw)
    except Exception as e:
        from solariat_bottle.settings import LOGGER
        LOGGER.error(e)
        return jsonify(ok=False, error="No channel exists for id=%s" % channel_id)

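# A minimal usage sketch for the decorator that produces `wrapper` above,
# assuming a hypothetical `channel_required(ch_key)` factory that closes over
# `view_func` and `ch_key`; the route and view names are illustrative.
@app.route('/channels/<channel_id>/summary')
@channel_required(ch_key='channel_id')
def channel_summary(channel=None):
    # `channel` is injected by the wrapper after a successful lookup
    return jsonify(ok=True, title=channel.title)
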
def test_multi_agent_scenario(self):
    sc = self.make_service_channel("SC1", ['brand1', 'brand2'])
    dispatch_channel_1 = self.make_dispatch_channel("D1", "brand1")
    dispatch_channel_2 = self.make_dispatch_channel("D2", "brand2")
    #try:
    #    ch = sc.get_outbound_channel(self.user)
    #    self.assertFalse(True, "This should not be possible: %s" % (ch.title if ch else "NONE"))
    #except AppException, exc:
    #    pass

    # Now, remove access from one of the channels, and confirm it works
    dispatch_channel_2.del_perm(self.user)
    LOGGER.debug("SC IS %s", sc)
    self.assertEqual(sc.get_outbound_channel(self.user), dispatch_channel_1)

def get(self, id_=None, **filters):
    limit = filters.pop('limit')
    offset = filters.pop('offset')
    if id_:
        filters['id'] = id_
        try:
            return self.manager.get_by_user(self.user, **filters)
        except self.model.DoesNotExist:
            LOGGER.exception(__name__)
            raise ResourceDoesNotExist("Not found")
    else:
        filters['account_id'] = self.user.account.id
        res = self.manager.find_by_user(
            self.user, **filters).limit(limit).skip(offset)[:]
        return res

def do_put(self, path, version=None, **kw):
    "Emulate PUT request"
    path = get_api_url(path, version=self._get_version(version)) + '?token=%s' % self.auth_token
    base_url = kw.pop('base_url', 'https://localhost')
    data = json.dumps(kw)
    LOGGER.debug("Performing PUT to %s with %s" % (path, data))
    return self._handle_http_response(
        self.client.put(path,
                        data=data,
                        base_url=base_url,
                        content_type='application/json'))

def execute_pipeline(self, match_query_base, match_query_filters, limit):
    # Edge case handling
    if limit == 0:
        return []
    pipeline = make_pipeline(match_query_base, match_query_filters, limit)
    res = self.coll.aggregate(pipeline)
    if res['ok']:
        fix_for_neg_value(res['result'], ['topic_count', 'term_count'], pipeline)
        return res['result']
    else:
        LOGGER.warning(
            'ChannelHotTopics pipeline %s failed with result %s',
            pipeline, res)
        return []

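# A minimal sketch of the aggregation pipeline shape consumed above, assuming
# make_pipeline() builds a standard MongoDB $match/$group/$sort/$limit chain;
# the stages and field values here are illustrative, not the actual output.
example_pipeline = [
    {'$match': {'channel_num': 42, 'time_slot': 123456}},
    {'$group': {'_id': '$topic', 'topic_count': {'$sum': '$topic_count'}}},
    {'$sort': {'topic_count': -1}},
    {'$limit': 10},
]
# With a pre-3.0 pymongo, aggregate() returns a dict of the form
# {'ok': 1.0, 'result': [...]}, which matches the res['ok'] check above.
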
def trends_find_topics(time_slot, channel_or_tag):
    channel_num = get_channel_num(channel_or_tag)
    t0 = datetime.now()
    records = ChannelHotTopics.objects.coll.find({
        F('channel_num'): channel_num,
        F('time_slot'): time_slot,
        # F('gc_counter'): MARKED_TO_KEEP
    })
    LOGGER.info(
        "purging Q:: channel: %s; collection: ChannelHotTopics; func: %s; timedelta: %s" %
        (channel_or_tag.title, inspect.stack()[0][3], datetime.now() - t0))
    topics = [x["tc"] for x in records]
    LOGGER.info("FIND TOPICS RES: %s %s", len(topics), decode_timeslot(time_slot))
    return topics

def execute_search(self, query, limit):
    results = super(DbBasedSE1, self).execute_search(query, limit)
    valid_results = []
    for entry in results:
        try:
            FAQ.objects.get(entry['id'])
            valid_results.append(entry)
        except FAQ.DoesNotExist:
            LOGGER.warning("Removed stale FAQ entry from ES: " + str(entry['id']))
            self.collection.delete(str(entry['id']))
    # Removing normalization because it introduces strange biases into the data
    # for or against ES. It implicitly limits the impact of ES on the search.
    #max_score = max([r['relevance'] for r in valid_results])
    #for result in valid_results:
    #    result['relevance'] = result['relevance'] / max_score
    return valid_results

def verify_application_key(self, app_key):
    """
    For an input application key, validate that we have a valid application
    token in place.

    :param app_key: A string representing an application key
    :return: An `ApplicationToken` object if a valid one exists for this key;
             raises `AuthError` otherwise
    """
    try:
        app_token = self.get(app_key=app_key)
        if app_token.status != ApplicationToken.STATUS_VALID:
            LOGGER.warning("App key %s is no longer valid" % app_key)
            raise AuthError("App key %s is no longer valid" % app_key)
        return app_token
    except ApplicationToken.DoesNotExist:
        LOGGER.warning("Trying to use invalid api key %s" % app_key)
        raise AuthError("Trying to use invalid api key %s" % app_key)

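# A minimal usage sketch, assuming this method lives on the ApplicationToken
# manager and that AuthError is the failure signal callers catch; the
# surrounding request-handling names are illustrative.
def authenticate_request(app_key):
    try:
        token = ApplicationToken.objects.verify_application_key(app_key)
    except AuthError:
        return None  # caller treats None as "not authenticated"
    return token
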
def add_dataset(self, user, name, data_loader):
    '''Create the Dataset and grant user permissions.'''
    start = time.time()
    try:
        schema = data_loader.read_schema()
    except AppException:
        raise
    LOGGER.info('Analysis of input data took: %s', time.time() - start)
    dataset = Dataset.create_by_user(user, self.parent.id, name)
    dataset.update(schema=schema)
    finish_data_load.async(user, dataset, data_loader)
    return dataset

def send_new_channel_creation(staff_user, admin_user, channel, link):
    """ Send a warning email to a staff user once a user with a trial account
    creates a new channel """
    msg = Message(
        subject="A channel has been created for a trial account you created",
        sender=_get_sender(),
        recipients=[staff_user.email] + get_var('ONBOARDING_ADMIN_LIST'))

    handles = channel.usernames
    thandle = 'Unknown'
    for handle in handles:
        if handle.startswith('@'):
            thandle = handle

    # Get the sender name, with safe defaults for missing values
    staff_user_name = staff_user.first_name if staff_user.first_name is not None else 'Unknown'
    admin_user_name = admin_user.first_name if admin_user.first_name is not None else 'Unknown'
    admin_email = admin_user.email if admin_user.email is not None else 'Unknown'
    ckeywords = ' '.join(channel.keywords) if channel.keywords else 'None defined yet'
    cskipwords = ' '.join(channel.skipwords) if channel.skipwords else 'None defined yet'

    msg.html = render_template("mail/new_channel_creation.html",
                               staff_user_name=staff_user_name,
                               admin_user_name=admin_user_name,
                               admin_user_email=admin_email,
                               keywords=ckeywords,
                               skipwords=cskipwords,
                               twitter_handle=thandle,
                               channel_link=link)
    if get_app_mode() != 'dev':
        send_mail(msg, email_description='new channel confirmation email')
    else:
        LOGGER.info(msg.html)

def upsert(self, platform, profile_data):
    ProfileCls = self.profile_class(platform)
    if isinstance(profile_data, ProfileCls):
        profile_object = profile_data
    elif isinstance(profile_data, (bytes, ObjectId)):
        profile_object = ProfileCls.objects.find_one(profile_data)
    elif isinstance(profile_data, dict):
        query, update = ProfileCls.objects.extract_upsert_data(
            ProfileCls, profile_data)
        profile_object = self.upsert_with_retry(ProfileCls, query, update)
    else:
        logger.warning("Failed to upsert UserProfile, "
                       "returning anonymous profile\nupsert("
                       "{}, {})".format(platform, profile_data))
        profile_object = ProfileCls.anonymous_profile()
    return profile_object

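# A minimal usage sketch of the polymorphic upsert above; the manager
# instance and payloads are illustrative, not actual fixtures.
profiles = UserProfileManager()  # hypothetical manager exposing upsert()

# 1) dict payload: extracted into a query/update pair and upserted
p1 = profiles.upsert('twitter', {'user_id': '123', 'user_name': 'jdoe'})
# 2) an ObjectId (or raw id bytes): looked up directly
p2 = profiles.upsert('twitter', p1.id)
# 3) an existing profile instance: returned unchanged
p3 = profiles.upsert('twitter', p1)
# 4) anything else logs a warning and falls back to the anonymous profile
p4 = profiles.upsert('twitter', 42)
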
def fetch_child_topics(channel_or_tag, time_slot, limit, parent_topic=None):
    '''Query the top topics under the given parent topic.'''
    t0 = datetime.now()
    results = ChannelHotTopics.objects.by_time_span(
        channel=channel_or_tag,
        parent_topic=parent_topic,
        from_ts=time_slot,
        intentions=INTENTIONS_FOR_FETCHING,
        limit=limit)
    LOGGER.info(
        "purging Q:: channel: %s; collection: ChannelHotTopics; func: %s; timedelta: %s" %
        (channel_or_tag.title, inspect.stack()[0][3], datetime.now() - t0))
    return [r['topic'] for r in results]

def get_twitter_post_users(post):
    """Returns conversation parties from the DM or public tweet"""
    user_ids, user_screen_names, recipient_screen_names = [], [], []

    if 'twitter' not in post:
        # compatibility for tests not passing a fully formatted tweet
        # to get_tracked_channels
        if 'user_profile' in post and isinstance(post['user_profile'], dict):
            user_ids.append(post['user_profile']['user_id'])
            user_screen_names.append(post['user_profile']['user_name'])
        return user_ids, user_screen_names, recipient_screen_names

    tweet_json = post['twitter']
    is_direct_message = 'recipient' in tweet_json and 'sender' in tweet_json
    try:
        if is_direct_message:
            user_ids.append(tweet_json['sender']['id_str'])
            user_screen_names.append(tweet_json['sender']['screen_name'])
            recipient_screen_names.append(tweet_json['recipient']['screen_name'])
        else:
            # all mentions
            if 'entities' in tweet_json and 'user_mentions' in tweet_json['entities']:
                for user_data in tweet_json['entities']['user_mentions']:
                    recipient_screen_names.append(user_data['screen_name'])
            # sender
            if 'user' in tweet_json:
                user_ids.append(tweet_json['user']['id_str'])
                user_screen_names.append(tweet_json['user']['screen_name'])
            elif 'user_profile' in post:
                user_ids.append(post['user_profile']['user_id'])
                user_screen_names.append(post['user_profile']['user_name'])
            # recipient
            if 'in_reply_to_screen_name' in tweet_json and tweet_json['in_reply_to_screen_name']:
                recipient_screen_names.append(tweet_json['in_reply_to_screen_name'])
    except KeyError:
        LOGGER.warning("Malformed post data {}".format(post), exc_info=True)

    all_str = lambda iterable: all(isinstance(x, basestring) for x in iterable)
    assert all_str(user_ids), user_ids
    assert all_str(user_screen_names), user_screen_names
    assert all_str(recipient_screen_names), recipient_screen_names
    return user_ids, user_screen_names, recipient_screen_names

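# A minimal sketch of the two payload shapes handled above; field values are
# illustrative. For a public tweet:
public_tweet = {'twitter': {
    'user': {'id_str': '100', 'screen_name': 'alice'},
    'entities': {'user_mentions': [{'screen_name': 'bob'}]},
    'in_reply_to_screen_name': 'bob',
}}
# get_twitter_post_users(public_tweet) -> (['100'], ['alice'], ['bob', 'bob'])
# ('bob' appears twice: once as a mention, once as the reply target)

# For a direct message:
dm = {'twitter': {
    'sender': {'id_str': '100', 'screen_name': 'alice'},
    'recipient': {'screen_name': 'bob'},
}}
# get_twitter_post_users(dm) -> (['100'], ['alice'], ['bob'])
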
def restore_password():
    "Send email with link for password reset"

    def get_signed_link(auth_token, email):
        """Return link to password restore form with auth_token parameter"""
        return "%susers/%s/password?auth_token=%s" % (
            request.host_url, email, auth_token.digest)

    def send_restore_link(email):
        "Create AuthToken and send restore link"
        try:
            user = User.objects.get(email=email)
        except User.DoesNotExist:
            flash("%s is not registered" % email)
            app.logger.warning(
                "request password restore for non registered %s", email)
            return False
        auth_token = AuthToken.objects.create_for_restore(user=user)
        message = Message(
            subject="Password restore link for Genesys Social Engagement",
            body="Please use the following link %s to reset your password"
                 " for Genesys Social Engagement/Social Analytics" %
                 get_signed_link(auth_token, email),
            recipients=[email])
        app.logger.debug("sent restore link to %s", email)
        app.logger.debug(MAIL_SENDER.send(message))
        return True

    if request.method == 'POST':
        email = request.form.get('email')
        if not email:
            flash("No email provided")
        else:
            try:
                if send_restore_link(email):
                    flash("Please, check your mail box.")
                    return redirect('/login')
            except Exception as ex:
                LOGGER.error(ex)
                flash("Failed to send restore email. Email communication problems.")
    return redirect('/login')

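# For illustration: with request.host_url == 'https://app.example.com/' and a
# hypothetical token digest of '3f2a9c', get_signed_link() would yield:
#   https://app.example.com/users/jdoe@example.com/password?auth_token=3f2a9c
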
def __init__(self, user, kwargs):
    super(KafkaFeedApiPostCreator, self).__init__(user, kwargs)

    class Options(dict):
        __getattr__ = dict.__getitem__

    self.options = Options(username=user,
                           password=kwargs['password'],
                           url=kwargs['url'],
                           retries=kwargs.get('retries', 3))
    if not self.options.password:
        err_msg = "Configuration Error: password and url are required"
        LOGGER.error("%s %s" % (err_msg, self.options))
        raise RuntimeError(err_msg)
    self.user_agent = kwargs.pop('user_agent', 'FeedApi-PostCreator')
    self.sleep_timeout = 30

def share_post(self, post, user, dry_run=False):
    # self.sync_contacts(post.user_profile)
    from solariat_bottle.tasks.twitter import tw_share_post

    post_content = post.plaintext_content
    status_id = post.native_id
    if dry_run is False and not get_var('ON_TEST') and get_var('APP_MODE') == 'prod':
        tw_share_post.ignore(self, status_id=status_id,
                             screen_name=post.user_profile.user_name)
    else:
        create_outbound_post(user, self, "RT: %s" % post_content, post)
    LOGGER.debug("Retweet '%s' using %s", post_content, self)

def load(self):
    try:
        import solariat_bottle.jobs_config as cfg
    except ImportError:
        LOGGER.error('Jobs configuration file "jobs_config.py" is not found.')
        raise

    self.kafka_broker = cfg.KAFKA_BROKER
    self.consumers_group = cfg.CONSUMERS_GROUP
    self.supported_topics = cfg.TOPICS
    self.cluster_config = cfg.CLUSTER_CONFIG
    if get_app_mode() == 'test':
        self.transport = 'serial'
    else:
        default_transport = 'kafka'
        self.transport = getattr(cfg, 'TRANSPORT', default_transport)

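# A minimal sketch of what solariat_bottle/jobs_config.py is expected to
# provide, based solely on the attributes read above; the values are
# placeholders, not a real deployment configuration.
#
# --- jobs_config.py ---
KAFKA_BROKER = 'localhost:9092'
CONSUMERS_GROUP = 'jobs-consumers'
TOPICS = ['jobs_topic']
CLUSTER_CONFIG = {'zookeeper': 'localhost:2181'}
TRANSPORT = 'kafka'  # optional; defaults to 'kafka' outside test mode
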
def wrapper(*args, **kwargs):
    #LOGGER.debug("API Request Args: {} | Params: {}".format(args, kwargs))
    assert args
    assert isinstance(args[0], BaseAPIView)
    view = args[0]  # Method decorator
    args = args[1:]
    params = _get_request_data()
    params.update(kwargs)  # Pass URL variables to the view function
    start_execution = datetime.utcnow()
    # Execute API method
    try:
        # Assert authentication
        LOGGER.debug("Started executing API call: %s.%s(%s, %s) " % (
            view.__class__.__name__, func.__name__, args, kwargs))
        if allow_basic_auth is True:
            user = _get_user()
            if user is None:
                user = authenticate_api_user(params)
            if user is None:
                raise api_exc.AuthorizationError(
                    "User is not authenticated. Parameters for call: " + str(params))
        else:
            user = authenticate_api_user(params)
        # Set user in thread local storage
        set_user(user)
        resp = func(view, user, *args, **params)
        elapsed = datetime.utcnow() - start_execution
        log = LOGGER.info if elapsed.total_seconds() >= 2 else LOGGER.debug
        log("API call: %s.%s(%s,%s) finished after %s Parameters: %s" % (
            view.__class__.__name__, func.__name__,
            str(args)[:500], str(kwargs)[:500], elapsed, str(params)[:500]))
    # auth token expiration and other auth errors
    except api_exc.AuthorizationError as exc:
        LOGGER.info(exc)
        return view.format_api_error_response(exc, msg=str(exc),
                                              description=exc.description)

def handle_bulk_insert(self, user, inserted, failed, total, bulk,
                       failed_cnt, sync_errors, op_name='sync'):
    if inserted:
        try:
            result = bulk.execute()
            LOGGER.info("Successfully executed a batch insert " + str(result))
        except BulkWriteError as bwe:
            from pprint import pformat
            msg = 'Error executing bulk insert: ' + pformat(bwe.details, indent=4)
            LOGGER.info(msg)
            for err in bwe.details[MONGO_BULK_WRITE_ERRORS]:
                failed_cnt += 1
                self._put_sync_error(sync_errors, 'Db Error',
                                     err[MONGO_BULK_DATA_KEY],
                                     err[MONGO_BULK_ERROR_KEY])
            if user:
                TaskMessage.objects.create_error(user, msg)

    if failed:
        total_fail = (total - failed_cnt == 0)
        log_method = LOGGER.error if total_fail else LOGGER.info
        msg = 'Failed to %s %s from %s items.\nlast %s errors:' % (
            op_name, failed_cnt, total, len(failed))
        log_method(msg)
        for fail in failed:
            # No exception is active here, so format_exc() would be empty;
            # log the stored exception object instead.
            ex = fail.pop('ex')
            log_method('%s\n%s', fail, repr(ex))
        if user:
            TaskMessage.objects.create_error(user, msg)
    return failed_cnt

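# A minimal sketch of how a caller might prepare the `bulk` and `failed`
# arguments above, assuming the pre-3.0 pymongo bulk API (where
# MONGO_BULK_WRITE_ERRORS maps to the 'writeErrors' key of bwe.details);
# `transform` and the item source are hypothetical.
bulk = coll.initialize_unordered_bulk_op()
inserted, failed = 0, []
for item in items:
    try:
        doc = transform(item)  # hypothetical per-item conversion
        bulk.insert(doc)
        inserted += 1
    except Exception as ex:
        failed.append({'item': item, 'ex': ex})
failed_cnt = self.handle_bulk_insert(user, inserted, failed, len(items),
                                     bulk, len(failed), sync_errors=[])
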
def send_message(self, dry_run, creative, post, user, direct_message=None):
    # self.sync_contacts(post.user_profile)
    from solariat_bottle.tasks.twitter import tw_normal_reply, tw_direct_reply

    is_dm = False  # Assume this is not a DM
    if direct_message is not None:
        # If we specifically passed the fact that we want a direct message,
        # use a DM; otherwise decide based on post type
        is_dm = direct_message
    else:
        if post.message_type == 'direct':
            is_dm = True

    if not is_dm:
        status = "@%s %s" % (post.user_profile.user_name, creative)
    else:
        status = creative

    if len(status) > 140:
        msg = ('Sorry, you have exceeded your 140 character limit by %d characters. '
               'Please correct your reply and try again.') % (len(status) - 140)
        raise AppException(msg)

    status_id = post.native_id
    # Update the engagement history
    post.user_profile.update_history(self)

    LOGGER.debug("For current message, direct message flag is %s", is_dm)
    if not dry_run and not get_var('ON_TEST'):
        if is_dm:
            tw_direct_reply.ignore(self, status=status,
                                   screen_name=post.user_profile.user_name)
        else:
            tw_normal_reply.ignore(self, status=status, status_id=status_id, post=post)
    else:
        create_outbound_post(user, self, creative, post)
    LOGGER.debug("Sent '%s' to %s using %s",
                 creative, post.user_profile.user_name, self.title)

def fb_put_comment(channel, object_id, message):
    """put comment to some object"""
    # func = lambda: get_facebook_api(channel).put_comment(object_id, message.encode('utf-8', 'replace'))
    # return __try_execute_safely(func, 5, 3600)
    from solariat_bottle.settings import LOGGER
    from solariat_bottle.tasks.exceptions import FacebookCommunicationException
    from solariat_bottle.db.post.facebook import FacebookPost

    try:
        fb_post = FacebookPost.objects.get(_native_id=str(object_id))
    except FacebookPost.DoesNotExist:
        LOGGER.warning("No mapping post for native_id=%s was found. "
                       "Defaulting to posting comment as user." % object_id)
    else:
        try:
            return fb_comment_by_page(fb_post, object_id, message)
        except (FacebookCommunicationException,
                facebook_driver.facebook.GraphAPIError) as exc:
            # GraphAPIError: (#1705) There was an error posting to this wall
            if '#1705' in str(exc):
                LOGGER.info("Failed sending comment to post %s with error %s"
                            % (object_id, str(exc)))
                if fb_post.is_second_level_reply:
                    try:
                        object_id = fb_post.wrapped_data['parent_id']
                    except KeyError:
                        LOGGER.error("Can't find parent for comment %s" % fb_post)
                        raise exc
                    LOGGER.info("Sending comment to parent %s of initial post %s %s"
                                % (object_id, fb_post, fb_post.native_id))
                    return fb_comment_by_page(fb_post, object_id, message)
            raise exc

    try:
        return get_facebook_api(channel).put_comment(
            object_id, force_bytes(message, 'utf-8', 'replace'))
    except Exception as ex:
        LOGGER.error("Failure posting comment to facebook. Exc: %s, Channel: %s, "
                     "Object_id: %s, Message: %s" % (ex, channel, object_id, message))
        raise FacebookCommunicationException(ex.message)

def __request(self, since, until, limit, target='me', driver=None):
    from solariat_bottle.settings import LOGGER

    params = {}
    if since:
        params['since'] = since
    if until:
        params['until'] = until
    if limit:
        params['limit'] = limit
    driver = driver if driver is not None else self.driver
    try:
        return driver.request(target, params)
    except Exception as ex:
        LOGGER.error("Facebook request from channel: %s to target: %s failed with error %s"
                     % ((self.channel.id, self.channel.title), target, str(ex)))
        raise

def trends_mark_to_keep(time_slot, channel_or_tag, topics):
    channel_ts_val = ChannelTopicTrends.make_channel_ts(channel_or_tag, time_slot)
    t0 = datetime.now()
    res = ChannelTopicTrends.objects.coll.update(
        {
            FT("channel_ts"): to_binary(channel_ts_val),
            FT('topic'): {"$in": topics + ["__ALL__"]}
        },
        {'$set': {FT('gc_counter'): MARKED_TO_KEEP}},
        multi=True)
    LOGGER.info(
        "purging Q:: channel: %s; collection: ChannelTopicTrends; func: %s; timedelta: %s" %
        (channel_or_tag.title, inspect.stack()[0][3], datetime.now() - t0))
    return res

def do_get(self, path, version=None, **kw):
    "Emulate GET request"
    kw['token'] = self.auth_token
    path = get_api_url(path, version=self._get_version(version=version))
    if '?' in path:
        path += '&'
    else:
        path += '?'
    base_url = kw.pop('base_url', 'https://localhost')
    path += urllib.urlencode(kw)
    LOGGER.debug("Performing GET with %s" % path)
    return self._handle_http_response(
        self.client.get(path, base_url=base_url))

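# A minimal usage sketch of the do_get/do_put helpers above inside a test
# case; the endpoint path and payload are illustrative, not actual routes.
resp = self.do_get('channels', version='v2.0', limit=10, offset=0)
resp = self.do_put('channels/%s' % channel_id, title='Renamed channel')
# Both helpers append the auth token, serialize the remaining kwargs (query
# string for GET, JSON body for PUT), and funnel the raw test-client
# response through self._handle_http_response().
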
def compute_csdl(self):
    """
    For the given subscription/channel, compute and compile the datasift CSDL
    we are going to use to create the actual datasift description.

    :returns A compiled hash of the CSDL computed for the given subscription/channel
    """
    csdl_data = get_csdl_data(
        [self.channel.inbound_channel, self.channel.outbound_channel])
    lang_code = get_lang_code(self.language)
    if lang_code not in LANG_MAP:
        lang_code = None
    csdl_string = generate_csdl(*csdl_data, language=lang_code)
    datasift_response = datasift_compile(csdl_string)
    LOGGER.info(u"%s.compute_csdl: %s" % (
        self.__class__.__name__,
        log_csdl(csdl_data, csdl_string, datasift_response)))
    return datasift_response

def datasift_to_post_dict(data):
    """ Converts a Datasift data structure (coming from the Datasift Stream API)
        to a post-fields dict (for passing to a <PostCreator> or
        db.post.utils.factory_by_user)
    """
    base_url = 'https://twitter.com/%s/statuses/%s'

    def _get_user_profile(author, klout):
        result = parse_user_profile(author)
        if klout:
            result['klout_score'] = klout['score']
        return result

    if 'text' in data['twitter'] or 'retweet' in data['twitter']:
        post_fields = {'twitter': {'_wrapped_data': json.dumps(data)}}
        is_retweet = 'retweet' in data['twitter']
        post_fields['twitter'].update(_source=TweetSource.DATASIFT_STREAM,
                                      _is_retweet=is_retweet)
        if is_retweet:
            author = data['twitter']['retweet']['user']
            content = data['twitter']['retweet']['text']
        elif 'user' in data['twitter']:
            author = data['twitter']['user']
            content = data['twitter']['text']
        else:
            # Without an author we cannot build the post fields; bail out
            # early instead of hitting an unbound `author` below
            LOGGER.error(u'format not supported: %s' % data)
            return None

        # datasift does not always return a 'language' object
        if 'language' in data:
            post_fields['lang'] = data['language']
        elif 'lang' in data['twitter']:
            post_fields['lang'] = data['twitter']['lang']
        elif 'lang' in author:
            post_fields['lang'] = author['lang']

        post_fields['user_profile'] = _get_user_profile(author, data.get('klout', None))
        post_fields['content'] = content
        post_fields['url'] = base_url % (author['screen_name'],
                                         str(data['twitter']['id']))
        # twitter data used to link a post to possible thread
        post_fields['twitter'].update(TweetParser()(data['twitter']))
        return post_fields

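# A minimal sketch of the Datasift payload shape consumed above and the
# resulting post-fields dict; the values are illustrative, not real
# stream data.
datasift_item = {
    'twitter': {
        'id': '654321',
        'text': 'I need a new laptop',
        'lang': 'en',
        'user': {'id_str': '100', 'screen_name': 'alice'},
    },
    'klout': {'score': 42},
}
post_fields = datasift_to_post_dict(datasift_item)
# post_fields now carries 'content', 'lang', 'url', 'user_profile' (with
# klout_score folded in) and a 'twitter' sub-dict wrapping the raw payload.
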
def import_data(self, user, data_loader, only_event_type=None):
    from solariat_bottle.db.dynamic_event import EventType

    event_types = EventType.objects.find_by_user(user, platform=self.platform)
    EVENT_TYPE_MAP = {et.name: et for et in event_types
                      if et.schema or et.discovered_schema}
    # total = data_loader.total()

    # TODO: we could speed loading up a lot with a QueueDataLoader:
    # Thread.run: mail_event_type.import_data(QueueDataLoader(mail_event_Q))
    # while iterating over the real data_loader and pushing to one of the
    # queues depending on data_type. Only makes sense for streamed json load.
    imported_data_by_type = defaultdict(list)
    for data_type, json_data in data_loader.load_data():
        if data_type not in EVENT_TYPE_MAP:
            continue
        imported_data_by_type[data_type].append(json_data)

    # TODO: control fails
    exc = None
    for data_type, all_data in imported_data_by_type.iteritems():
        if only_event_type and only_event_type.name != data_type:
            continue
        event_data_loader = ListDataLoader(all_data)
        event_type = EVENT_TYPE_MAP.get(data_type)
        event_type.channel_id = self.id
        try:
            event_type.import_data(user, event_data_loader)
        except Exception as ex:
            exc = ex
            LOGGER.error('Failed to import to event_type: %s, error: %s',
                         event_type, ex, exc_info=True)
    # raise the last exception, if any
    if exc:
        raise exc

def __init__(self, *args, **kwargs):
    super(FeedApiPostCreator, self).__init__(*args, **kwargs)

    class Options(dict):
        __getattr__ = dict.__getitem__

    self.options = Options(username=self.user.email,
                           password=kwargs.get('password'),
                           url=kwargs.get('url'))
    if not self.options.password:
        err_msg = "Configuration Error: password and url are required"
        LOGGER.error("%s %s" % (err_msg, self.options))
        raise RuntimeError(err_msg)
    self.user_agent = kwargs.pop('user_agent', 'FeedApi-PostCreator')
    self.max_workers = kwargs.pop('post_creator_senders', 4) or 4
    self.feed_queue = Queue()
    self.feed_api_threads = []

def send_confimation_email(user, on_boarded_customer, keywords, usernames, skipwords):
    """ Send confirmation email to a user once the signup process is completed """
    msg = Message(
        subject="Customer just signed up! - Genesys Social Analytics",
        sender=_get_sender(),
        recipients=[user.email] + get_var('ONBOARDING_ADMIN_LIST'))
    msg.html = render_template("mail/confirmation_email.html",
                               user_name=user.first_name,
                               customer_email=on_boarded_customer.email,
                               keywords=keywords,
                               usernames=usernames,
                               skipwords=skipwords)
    if get_app_mode() != 'dev':
        send_mail(msg, email_description='signup notification email')
    else:
        LOGGER.info(msg.html)

def subscribe_to_app(server, callback_url):
    while not server.started:
        LOGGER.warning("Server not started, going to sleep")
        time.sleep(5)

    LOGGER.info("Subscribing to app")
    # Now subscribe to our app on facebook
    G = facebook_driver.GraphAPI(version='2.2')
    app_access_token = FACEBOOK_APP_ID + "|" + FACEBOOK_APP_SECRET
    path = FACEBOOK_APP_ID + "/subscriptions"
    post_args = {
        'access_token': app_access_token,
        'callback_url': callback_url,
        'fields': 'feed',
        'object': 'page',
        'verify_token': 'token'
    }
    subs = G.request(G.version + "/" + path, post_args=post_args)
    if subs:
        LOGGER.info("Subscription response was: %s", subs)