def test_constructor(self):
    """Verify a directly-constructed ``SentimentResponse`` round-trips."""
    from google.cloud.language.api_responses import SentimentResponse
    from google.cloud.language.sentence import Sentence
    from google.cloud.language.sentiment import Sentiment

    # Build the response from the canned fixtures on the test case.
    response = SentimentResponse(
        language='en',
        sentences=[Sentence.from_api_repr(self.SENTENCE_DICT)],
        sentiment=Sentiment.from_api_repr(self.SENTIMENT_DICT),
    )

    # Shared helper performs the field-by-field assertions.
    self._verify_sentiment_response(response)
def analyze_sentiment(self):
    """Analyze the sentiment in the current document.

    .. _analyzeSentiment: https://cloud.google.com/natural-language/\
                          reference/rest/v1/documents/analyzeSentiment

    See `analyzeSentiment`_.

    :rtype: :class:`.Sentiment`
    :returns: The sentiment of the current document.
    """
    # POST the serialized document and unwrap the sentiment payload.
    request = {'document': self._to_dict()}
    response = self.client._connection.api_request(
        method='POST', path='analyzeSentiment', data=request)
    return Sentiment.from_api_repr(response['documentSentiment'])
def from_api_repr(cls, payload):
    """Return a sentiment response from a JSON representation.

    :type payload: dict
    :param payload: A dictionary representing the response.

    :rtype: `~.language.sentiment.Sentiment`
    :returns: A ``Sentiment`` object.
    """
    return cls(
        language=payload.get('language'),
        # 'sentences' may be absent; default to an empty tuple so the
        # comprehension still yields an empty list.
        sentences=[Sentence.from_api_repr(sentence)
                   for sentence in payload.get('sentences', ())],
        sentiment=Sentiment.from_api_repr(payload['documentSentiment']),
    )
def from_api_repr(cls, payload):
    """Convert an Entity from the JSON API into an :class:`Entity`.

    :type payload: dict
    :param payload: The value from the backend.

    :rtype: :class:`Entity`
    :returns: The entity parsed from the API representation.
    """
    name = payload['name']
    entity_type = payload['type']
    metadata = payload['metadata']
    salience = payload['salience']
    mentions = [Mention.from_api_repr(val) for val in payload['mentions']]

    # Entity-level sentiment is only present when it was requested
    # (e.g. via analyzeEntitySentiment); treat a missing/falsy value
    # as "no sentiment".
    sentiment = None
    if payload.get('sentiment'):
        sentiment = Sentiment.from_api_repr(payload['sentiment'])

    return cls(name, entity_type, metadata, salience, mentions, sentiment)
def from_api_repr(cls, payload):
    """Convert a sentence from the JSON API into a :class:`Sentence`.

    :type payload: dict
    :param payload: The value from the backend.

    :rtype: :class:`Sentence`
    :returns: The sentence parsed from the API representation.
    """
    text_span = payload['text']

    # EAFP: the 'sentiment' key is only present when sentence-level
    # sentiment was requested; fall back to ``None`` when absent.
    try:
        sentiment = Sentiment.from_api_repr(payload['sentiment'])
    except KeyError:
        sentiment = None

    return cls(text_span['content'], text_span['beginOffset'],
               sentiment=sentiment)
def from_api_repr(cls, payload):
    """Convert a sentence from the JSON API into a :class:`Sentence`.

    :type payload: dict
    :param payload: The value from the backend.

    :rtype: :class:`Sentence`
    :returns: The sentence parsed from the API representation.
    """
    text_span = payload['text']

    # The sentence may or may not have a sentiment; only attempt the
    # typecast if one is present.
    sentiment = None
    if payload.get('sentiment') is not None:
        sentiment = Sentiment.from_api_repr(payload['sentiment'])

    # Return a Sentence object.
    return cls(text_span['content'], text_span['beginOffset'],
               sentiment=sentiment)
def annotate_text(self, include_syntax=True, include_entities=True,
                  include_sentiment=True):
    """Advanced natural language API: document syntax and other features.

    Includes the full functionality of :meth:`analyze_entities` and
    :meth:`analyze_sentiment`, enabled by the flags
    ``include_entities`` and ``include_sentiment`` respectively.

    In addition ``include_syntax`` adds a new feature that analyzes
    the document for semantic and syntactic information.

    .. note::

        This API is intended for users who are familiar with machine
        learning and need in-depth text features to build upon.

    .. _annotateText: https://cloud.google.com/natural-language/\
                      reference/rest/v1/documents/annotateText

    See `annotateText`_.

    :type include_syntax: bool
    :param include_syntax: (Optional) Flag to enable syntax analysis
                           of the current document.

    :type include_entities: bool
    :param include_entities: (Optional) Flag to enable entity extraction
                             from the current document.

    :type include_sentiment: bool
    :param include_sentiment: (Optional) Flag to enable sentiment
                              analysis of the current document.

    :rtype: :class:`Annotations`
    :returns: A tuple of each of the four values returned from the API:
              sentences, tokens, sentiment and entities.
    """
    # Translate the boolean flags into the API's feature switches.
    features = {}
    if include_syntax:
        features['extractSyntax'] = True
    if include_entities:
        features['extractEntities'] = True
    if include_sentiment:
        features['extractDocumentSentiment'] = True

    data = {
        'document': self._to_dict(),
        'features': features,
        'encodingType': self.encoding,
    }
    api_response = self.client._connection.api_request(
        method='POST', path='annotateText', data=data)

    sentences = [
        Sentence.from_api_repr(sentence)
        for sentence in api_response['sentences']
    ]
    tokens = [
        Token.from_api_repr(token)
        for token in api_response['tokens']
    ]

    # Document sentiment is only present when it was requested.
    sentiment_info = api_response.get('documentSentiment')
    if sentiment_info is None:
        sentiment = None
    else:
        sentiment = Sentiment.from_api_repr(sentiment_info)

    entities = [
        Entity.from_api_repr(entity)
        for entity in api_response['entities']
    ]

    annotations = Annotations(
        sentences=sentences,
        tokens=tokens,
        sentiment=sentiment,
        entities=entities,
    )
    return annotations
def annotate_text(self, include_syntax=True, include_entities=True,
                  include_sentiment=True):
    """Advanced natural language API: document syntax and other features.

    Includes the full functionality of :meth:`analyze_entities` and
    :meth:`analyze_sentiment`, enabled by the flags
    ``include_entities`` and ``include_sentiment`` respectively.

    In addition ``include_syntax`` adds a new feature that analyzes
    the document for semantic and syntactic information.

    .. note::

        This API is intended for users who are familiar with machine
        learning and need in-depth text features to build upon.

    .. _annotateText: https://cloud.google.com/natural-language/\
                      reference/rest/v1/documents/annotateText

    See `annotateText`_.

    :type include_syntax: bool
    :param include_syntax: (Optional) Flag to enable syntax analysis
                           of the current document.

    :type include_entities: bool
    :param include_entities: (Optional) Flag to enable entity extraction
                             from the current document.

    :type include_sentiment: bool
    :param include_sentiment: (Optional) Flag to enable sentiment
                              analysis of the current document.

    :rtype: :class:`Annotations`
    :returns: A tuple of each of the four values returned from the API:
              sentences, tokens, sentiment and entities.
    """
    # Translate the boolean flags into the API's feature switches.
    features = {}
    if include_syntax:
        features['extractSyntax'] = True
    if include_entities:
        features['extractEntities'] = True
    if include_sentiment:
        features['extractDocumentSentiment'] = True

    data = {
        'document': self._to_dict(),
        'features': features,
        'encodingType': self.encoding,
    }
    api_response = self.client._connection.api_request(
        method='POST', path='annotateText', data=data)

    sentences = [Sentence.from_api_repr(sentence)
                 for sentence in api_response['sentences']]
    tokens = [Token.from_api_repr(token)
              for token in api_response['tokens']]

    # Document sentiment is only present when it was requested.
    sentiment_info = api_response.get('documentSentiment')
    if sentiment_info is None:
        sentiment = None
    else:
        sentiment = Sentiment.from_api_repr(sentiment_info)

    entities = [Entity.from_api_repr(entity)
                for entity in api_response['entities']]

    annotations = Annotations(
        sentences=sentences,
        tokens=tokens,
        sentiment=sentiment,
        entities=entities,
    )
    return annotations
def test_analyze_entity_sentiment(self):
    """Exercise ``analyze_entity_sentiment`` against a mocked v1beta2 API."""
    from google.cloud.language.document import Encoding
    from google.cloud.language.entity import EntityType
    from google.cloud.language.sentiment import Sentiment

    # Fixture values for the two entities the fake backend returns.
    name1 = 'R-O-C-K'
    name2 = 'USA'
    content = name1 + ' in the ' + name2
    wiki2 = 'http://en.wikipedia.org/wiki/United_States'
    salience1 = 0.91391456
    salience2 = 0.086085409
    expected_sentiment = Sentiment(score=0.15, magnitude=42)

    # Canned API response payload, matching the REST representation.
    response = {
        'entities': [
            {
                'name': name1,
                'type': EntityType.OTHER,
                'metadata': {},
                'salience': salience1,
                'mentions': [{
                    'text': {
                        'content': name1,
                        'beginOffset': -1
                    },
                    'type': 'TYPE_UNKNOWN',
                }],
                'sentiment': {
                    'score': 0.15,
                    'magnitude': 42,
                }
            },
            {
                'name': name2,
                'type': EntityType.LOCATION,
                'metadata': {
                    'wikipedia_url': wiki2
                },
                'salience': salience2,
                'mentions': [
                    {
                        'text': {
                            'content': name2,
                            'beginOffset': -1,
                        },
                        'type': 'PROPER',
                    },
                ],
                'sentiment': {
                    'score': 0.15,
                    'magnitude': 42,
                }
            },
        ],
        'language': 'en-US',
    }

    client = make_mock_client(response, api_version='v1beta2')
    document = self._make_one(client, content)

    entity_response = document.analyze_entity_sentiment()

    # Both entities should come back, parsed with their sentiment.
    self.assertEqual(len(entity_response.entities), 2)
    first, second = entity_response.entities
    self._verify_entity(first, name1, EntityType.OTHER, None,
                        salience1, expected_sentiment)
    self._verify_entity(second, name2, EntityType.LOCATION, wiki2,
                        salience2, expected_sentiment)

    # Verify the request.
    expected = self._expected_data(
        content, encoding_type=Encoding.get_default())
    client._connection.api_request.assert_called_once_with(
        path='analyzeEntitySentiment', method='POST', data=expected)