def test_analyze_entities(self, mock_create_stub):
        """analyze_entities must build the right request and relay the stub reply."""
        # Fake out the transport layer.
        stub = mock.Mock()
        mock_create_stub.return_value = stub

        client = language_service_client.LanguageServiceClient()

        # Inputs for the call under test.
        document = language_service_pb2.Document()
        encoding_type = enums.EncodingType.NONE

        # Canned reply returned by the stubbed RPC.
        language = 'language-1613589672'
        expected_response = language_service_pb2.AnalyzeEntitiesResponse(
            language=language)
        stub.AnalyzeEntities.return_value = expected_response

        response = client.analyze_entities(document, encoding_type)
        self.assertEqual(expected_response, response)

        # The RPC is invoked exactly once, with two positional args and
        # a single 'metadata' keyword.
        stub.AnalyzeEntities.assert_called_once()
        args, kwargs = stub.AnalyzeEntities.call_args
        self.assertEqual(len(args), 2)
        self.assertEqual(len(kwargs), 1)
        self.assertIn('metadata', kwargs)

        expected_request = language_service_pb2.AnalyzeEntitiesRequest(
            document=document, encoding_type=encoding_type)
        self.assertEqual(expected_request, args[0])
# Example 2 (score: 0)
def entity_sentiment_text(text):
    """Detects entity sentiment in the provided text."""
    client = language_service_client.LanguageServiceClient()
    doc = language_service_pb2.Document()

    # Normalize to unicode first so the re-encode below is well defined.
    if isinstance(text, six.binary_type):
        text = text.decode('utf-8')

    doc.content = text.encode('utf-8')
    doc.type = enums.Document.Type.PLAIN_TEXT

    response = client.analyze_entity_sentiment(doc, enums.EncodingType.UTF8)

    # Dump every entity and each of its mentions to stdout.
    for found in response.entities:
        print('Mentions: ')
        print(u'Name: "{}"'.format(found.name))
        for m in found.mentions:
            print(u'  Begin Offset : {}'.format(m.text.begin_offset))
            print(u'  Content : {}'.format(m.text.content))
            print(u'  Magnitude : {}'.format(m.sentiment.magnitude))
            print(u'  Sentiment : {}'.format(m.sentiment.score))
            print(u'  Type : {}'.format(m.type))
        print(u'Salience: {}'.format(found.salience))
        print(u'Sentiment: {}\n'.format(found.sentiment))
# Example 3 (score: 0)
def get_entity_sentiment(text):
    '''
    Detects entities and sentiment about them from the provided text.
    Input: Body of text to analyze
    Return: List of entities sorted on salience (relevance to text body)
    '''
    client = language_service_client.LanguageServiceClient()
    doc = language_service_pb2.Document()

    doc.content = text.encode('utf-8')
    doc.type = enums.Document.Type.PLAIN_TEXT

    # Narrow Python builds (sys.maxunicode == 65535) index by UTF-16 units.
    encoding = (enums.EncodingType.UTF16
                if sys.maxunicode == 65535
                else enums.EncodingType.UTF32)

    result = client.analyze_entity_sentiment(doc, encoding)

    # Keep only entities with a non-negligible sentiment magnitude.
    picked = [
        {
            "type": ent.type,
            "name": ent.name,
            "salience": ent.salience,
            "sent_score": ent.sentiment.score,
            "sent_mag": ent.sentiment.magnitude,
        }
        for ent in result.entities
        if abs(ent.sentiment.magnitude - 0.0) > 0.001
    ]
    return sorted(picked, key=itemgetter('salience'), reverse=True)
# Example 4 (score: 0)
def analyze_df(content, creds):
    """Run a sentiment analysis request on the given text.

    Args:
        content: The text to analyze.
        creds: Google credentials used to build the API client.

    Returns:
        A list of dicts with keys 'sentence', 'sentiment_score' and
        'sentiment_magnitude', one per sentence found by the API. On
        failure the partial (possibly empty) list is returned; errors
        are reported and swallowed — best-effort by design.
    """
    sentences = []
    try:
        client = language_service_client.LanguageServiceClient(
            credentials=creds)

        document = language_service_pb2.Document(
            content=content,
            language="EN",
            type=enums.Document.Type.PLAIN_TEXT)
        annotations = client.analyze_sentiment(document=document)
        for sentence in annotations.sentences:
            try:
                sentences.append({
                    'sentence': sentence.text.content,
                    'sentiment_score': sentence.sentiment.score,
                    'sentiment_magnitude': sentence.sentiment.magnitude,
                })
            except Exception as ex:
                # Skip a malformed sentence rather than abort the review.
                # NOTE: original used Python-2 print statements, a syntax
                # error under Python 3; converted to the print function.
                print("ignoring line", ex)
        return sentences
    except Exception as ex:
        # API/auth failures degrade to whatever was collected so far.
        print("ignoring review", ex)
        return sentences
# Example 5 (score: 0)
    def test_annotate_text(self, mock_create_stub):
        """annotate_text must build the right request and relay the stub reply."""
        # Fake out the transport layer.
        stub = mock.Mock()
        mock_create_stub.return_value = stub

        client = language_service_client.LanguageServiceClient()

        # Inputs for the call under test.
        document = language_service_pb2.Document()
        features = language_service_pb2.AnnotateTextRequest.Features()

        # Canned reply returned by the stubbed RPC.
        language = 'language-1613589672'
        expected_response = language_service_pb2.AnnotateTextResponse(
            language=language)
        stub.AnnotateText.return_value = expected_response

        response = client.annotate_text(document, features)
        self.assertEqual(expected_response, response)

        # The RPC is invoked exactly once, with two positional args and
        # a single 'metadata' keyword.
        stub.AnnotateText.assert_called_once()
        args, kwargs = stub.AnnotateText.call_args
        self.assertEqual(len(args), 2)
        self.assertEqual(len(kwargs), 1)
        self.assertIn('metadata', kwargs)

        expected_request = language_service_pb2.AnnotateTextRequest(
            document=document, features=features)
        self.assertEqual(expected_request, args[0])
    def test_analyze_entities(self, mock_create_stub):
        """analyze_entities should forward the request and return the reply."""
        # Mock gRPC layer
        grpc_stub = mock.Mock(spec=language_service_pb2.LanguageServiceStub)
        mock_create_stub.return_value = grpc_stub

        client = language_service_client.LanguageServiceClient()

        # Mock request
        document = language_service_pb2.Document()
        encoding_type = enums.EncodingType.NONE

        # Mock response. Protobuf message constructors are keyword-only;
        # passing `language` positionally raises TypeError.
        language = 'language-1613589672'
        expected_response = language_service_pb2.AnalyzeEntitiesResponse(
            language=language)
        grpc_stub.AnalyzeEntities.return_value = expected_response

        response = client.analyze_entities(document, encoding_type)
        self.assertEqual(expected_response, response)

        grpc_stub.AnalyzeEntities.assert_called_once()
        # call_args[0] is the positional-argument tuple; the request
        # message is its first element (the old code bound the tuple
        # itself, so request.document would have raised AttributeError).
        request = grpc_stub.AnalyzeEntities.call_args[0][0]

        self.assertEqual(document, request.document)
        self.assertEqual(encoding_type, request.encoding_type)
    def test_analyze_sentiment_exception(self, mock_create_stub):
        """A transport failure must surface to the caller as a GaxError."""
        stub = mock.Mock()
        mock_create_stub.return_value = stub

        client = language_service_client.LanguageServiceClient()

        document = language_service_pb2.Document()

        # Force the underlying RPC to blow up.
        stub.AnalyzeSentiment.side_effect = CustomException()

        self.assertRaises(errors.GaxError, client.analyze_sentiment, document)
    def test_analyze_syntax_exception(self, mock_create_stub):
        """A transport failure must surface to the caller as a GaxError."""
        stub = mock.Mock()
        mock_create_stub.return_value = stub

        client = language_service_client.LanguageServiceClient()

        document = language_service_pb2.Document()
        encoding_type = enums.EncodingType.NONE

        # Force the underlying RPC to blow up.
        stub.AnalyzeSyntax.side_effect = CustomException()

        self.assertRaises(errors.GaxError, client.analyze_syntax, document,
                          encoding_type)
    def test_annotate_text_exception(self, mock_create_stub):
        """A transport failure must surface to the caller as a GaxError."""
        stub = mock.Mock(spec=language_service_pb2.LanguageServiceStub)
        mock_create_stub.return_value = stub

        client = language_service_client.LanguageServiceClient()

        document = language_service_pb2.Document()
        features = language_service_pb2.AnnotateTextRequest.Features()
        encoding_type = enums.EncodingType.NONE

        # Force the underlying RPC to blow up.
        stub.AnnotateText.side_effect = CustomException()

        self.assertRaises(errors.GaxError, client.annotate_text, document,
                          features, encoding_type)
# Example 10 (score: 0)
def entity_sentiment_file(gcs_uri):
    """Detects entity sentiment in a Google Cloud Storage file."""
    client = language_service_client.LanguageServiceClient()
    doc = language_service_pb2.Document()

    # Point the API at the GCS object instead of inline content.
    doc.gcs_content_uri = gcs_uri
    doc.type = enums.Document.Type.PLAIN_TEXT

    response = client.analyze_entity_sentiment(doc, enums.EncodingType.UTF8)

    # Dump every entity and each of its mentions to stdout.
    for found in response.entities:
        print('Name: "{}"'.format(found.name))
        for m in found.mentions:
            print('  Begin Offset : {}'.format(m.text.begin_offset))
            print('  Content : {}'.format(m.text.content))
            print('  Magnitude : {}'.format(m.sentiment.magnitude))
            print('  Sentiment : {}'.format(m.sentiment.score))
            print('  Type : {}'.format(m.type))
        print('Salience: {}'.format(found.salience))
        print('Sentiment: {}\n'.format(found.sentiment))
    def test_analyze_sentiment(self, mock_create_stub):
        """analyze_sentiment should forward the document and return the reply."""
        # Mock gRPC layer
        grpc_stub = mock.Mock(spec=language_service_pb2.LanguageServiceStub)
        mock_create_stub.return_value = grpc_stub

        client = language_service_client.LanguageServiceClient()

        # Mock request
        document = language_service_pb2.Document()

        # Mock response. Protobuf message constructors are keyword-only;
        # passing `language` positionally raises TypeError.
        language = 'language-1613589672'
        expected_response = language_service_pb2.AnalyzeSentimentResponse(
            language=language)
        grpc_stub.AnalyzeSentiment.return_value = expected_response

        response = client.analyze_sentiment(document)
        self.assertEqual(expected_response, response)

        grpc_stub.AnalyzeSentiment.assert_called_once()
        # call_args[0] is the positional-argument tuple; the request
        # message is its first element (the old code bound the tuple
        # itself, so request.document would have raised AttributeError).
        request = grpc_stub.AnalyzeSentiment.call_args[0][0]

        self.assertEqual(document, request.document)
# Example 12 (score: 0)
def get_creds():
    """Load and validate service-account credentials from ./google_keys/.

    Every ``*.json`` key file under ``./google_keys/`` is loaded, scoped to
    the cloud-platform scope, and verified with a throwaway
    ``analyze_sentiment`` call. Keys that fail to authenticate are reported
    and skipped.

    Returns:
        A list of validated, scoped credential objects.
    """
    credential_files = []
    for _dirpath, _dirnames, filenames in walk('./google_keys/'):
        for filename in filenames:
            if filename.endswith('.json'):
                credential_files.append(filename)

    credentials_list = []
    for cf in credential_files:
        credentials = service_account.Credentials.from_service_account_file(
            'google_keys/' + cf)
        scoped_credentials = credentials.with_scopes(
            ['https://www.googleapis.com/auth/cloud-platform'])
        try:
            client = language_service_client.LanguageServiceClient(
                credentials=scoped_credentials)
            document = language_service_pb2.Document(
                content="Working good", type=enums.Document.Type.PLAIN_TEXT)
            # Trivial request purely to prove the key authenticates.
            client.analyze_sentiment(document=document)
            credentials_list.append(scoped_credentials)
        except Exception:
            # NOTE: original used a Python-2 print statement, a syntax
            # error under Python 3; converted to the print function.
            print("Failed to authenticate Key! ", cf)
    return credentials_list