Example 1
    def setUp(self):
        #self.serializer = semantria.XmlSerializer()
        self.serializer = semantria.JsonSerializer()
        self.session = semantria.Session(consumerKey, consumerSecret, self.serializer, use_compression=True)

        self.session.Request += onRequest
        self.session.Response += onResponse
        self.session.Error += onError
        self.session.DocsAutoResponse += onDocsAutoResponse
        self.session.CollsAutoResponse += onCollsAutoResponse
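
The callbacks wired up in setUp above are assumed to be module-level handlers; a minimal sketch of their expected (sender, result) signature, following the pattern used in the later examples in this collection:

def onRequest(sender, result):
    pass  # e.g. print(result) to inspect the outgoing request

def onResponse(sender, result):
    pass  # e.g. print(result) to inspect the raw response

def onError(sender, result):
    print("ERROR:", result)

def onDocsAutoResponse(sender, result):
    print("AUTORESPONSE:", len(result), result)

def onCollsAutoResponse(sender, result):
    print("AUTORESPONSE:", len(result), result)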
Example 2
def analyze2(tweets):
    consumerKey = "7bba1e0b-3a0a-4c27-823d-0a06ab8d27f4"
    consumerSecret = "335156f6-a161-490c-a9c2-203ec44c0cbd"

    def onRequest(sender, result):
        # print(result)
        pass

    def onResponse(sender, result):
        # print(result)
        pass

    def onError(sender, result):
        # print(result)
        pass

    def onDocsAutoResponse(sender, result):
        # print(result)
        pass

    def onCollsAutoResponse(sender, result):
        # print(result)
        pass

    serializer = semantria.JsonSerializer()
    session = semantria.Session(consumerKey, consumerSecret, serializer)
    # print(session.getConfigurations())
    session.Error += onError
    analyzedTweets = []

    for tweet in tweets:
        doc = {"id": str(uuid.uuid1()).replace("-", ""), "text": tweet[1]}
        status = session.queueDocument(doc)
        time.sleep(0.2)
        status = session.getProcessedDocuments()
        if isinstance(status, list):
            for item in status:
                # print(item)
                analyzedTweet = AnalyzedTweet(tweet[0], tweet[2], tweet[3],
                                              item["sentiment_polarity"], 1,
                                              1, 1)
                if analyzedTweet.location:
                    analyzedTweets.append(analyzedTweet)
                # print(analyzedTweet)
    print(len(analyzedTweets))
    return analyzedTweets
Example 3
def analyse(txtInput):
    serializer = semantria.JsonSerializer()
    session = semantria.Session(key, secret, serializer, use_compression=True)
    doc = {"id": str(txtInput).replace("-", ""), "text": txtInput}
    status = session.queueDocument(doc)
    if status == 202:
        print("\"", doc["id"], "\" document queued successfully.", "\r\n")

    results = []

    while len(results) != 1:
        print("Retrieving your processed results...", "\r\n")
        time.sleep(0.1)
        # get processed documents
        status = session.getProcessedDocuments()
        results.extend(status)
    for data in results:
        response = data["sentiment_score"]
    return response

    # for data in results:
    #     # print document sentiment score
    #     print("Document ", data["id"], " Sentiment score: ", data["sentiment_score"], "\r\n")

    #     # print document themes
    #     if "themes" in data:
    #         print("Document themes:", "\r\n")
    #         for theme in data["themes"]:
    #             print("     ", theme["title"], " (sentiment: ", theme["sentiment_score"], ")", "\r\n")

    #     # print document entities
    #     if "entities" in data:
    #         print("Entities:", "\r\n")
    #         for entity in data["entities"]:
    #             print("\t", entity["title"], " : ", entity["entity_type"]," (sentiment: ", entity["sentiment_score"], ")", "\r\n")

    # return str(results)


#####################################################################################

#print (str(analyse("my work is irrelevant me. F**k life!")))
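
The while loop in analyse above spins until exactly one result arrives; a bounded variant (a sketch, not part of the original, reusing only the queueDocument, getProcessedDocuments, and time.sleep calls already shown in this snippet) gives up after a fixed number of attempts instead of waiting forever:

def analyse_bounded(txtInput, max_attempts=50, delay=0.5):
    serializer = semantria.JsonSerializer()
    session = semantria.Session(key, secret, serializer, use_compression=True)
    doc = {"id": str(txtInput).replace("-", ""), "text": txtInput}
    session.queueDocument(doc)

    for _ in range(max_attempts):
        processed = session.getProcessedDocuments()
        if isinstance(processed, list) and processed:
            # return the first processed document's sentiment score
            return processed[0]["sentiment_score"]
        time.sleep(delay)
    return None  # nothing was processed within the allotted time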
Example 4
def get_res():
    serializer = semantria.JsonSerializer()
    session = semantria.Session("59e4e96b-f19b-48b5-910a-a2b5d9d2bfc7",
                                "0cee133a-889e-4d1f-9d99-2749677bcfdd",
                                serializer,
                                use_compression=True)

    for fi in os.listdir(data_dir):
        print(fi)
        data = load_data((data_dir + '/%s' % fi))
        # initialTexts.append(data)
        initialTexts[int(fi)] = data

    del initialTexts[0]
    for i, text in enumerate(initialTexts):
        doc = {"id": str(i + 1), "text": text}
        status = session.queueDocument(doc)
        if status == 202:
            print("\"", doc["id"], "\" document queued successfully.", "\r\n")

    length = len(initialTexts)
    results = []

    while len(results) < length:
        print("Retrieving your processed results...", "\r\n")
        time.sleep(2)
        # get processed documents
        status = session.getProcessedDocuments()
        results.extend(status)

    for data in results:
        if "entities" in data:
            print("Entities:", "\r\n")
            if len(data["entities"]) == 20:

                with open('n_semantria_labels/%s' % data["id"] + 'limit',
                          'w') as fb:
                    json.dump(data["entities"], fb)
            else:
                with open('n_semantria_labels/%s' % data["id"], 'w') as fb:
                    json.dump(data["entities"], fb)
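
A hypothetical way to read one of the label files written above back into memory (the file name is illustrative; the entity fields match those printed in the other examples in this collection):

with open('n_semantria_labels/1') as fb:
    entities = json.load(fb)
for entity in entities:
    print(entity["title"], entity["entity_type"], entity["sentiment_score"])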
Example 5
if __name__ == "__main__":
    print("Semantria Auto-response feature demo.")

    docs = []
    print("Reading documents from file...")
    with open('source.txt', encoding='utf-8') as f:
        for line in f:
            docs.append(line)

    if len(docs) < 1:
        print("Source file isn't available or blank.")
        sys.exit(1)

    # Initializes Semantria Session
    session = semantria.Session(consumerKey, consumerSecret, use_compression=True)
    session.Error += onError
    session.DocsAutoResponse += autoresponse_handler

    # Remembers primary configuration to set it back after the test.
    configurations = session.getConfigurations()
    primary_configuration = None
    autoresponse_configuration = None

    for c in configurations:
        if c['is_primary']:
            primary_configuration = c

        if c['name'] == 'AutoResponseTest':
            autoresponse_configuration = c
Example 6
from __future__ import print_function
import semantria
import uuid
import time

serializer = semantria.JsonSerializer()

session = semantria.Session("98b2c1f2-318f-4de6-a0f3-27bffd811737",
                            "a90aa5cb-d515-4548-985c-735454a8a9a7",
                            serializer,
                            use_compression=True)

initialTexts = [
    "Lisa - there's 2 Skinny cow coupons available $5 skinny cow ice cream coupons on special k boxes and Printable FPC from facebook - a teeny tiny cup of ice cream. I printed off 2 (1 from my account and 1 from dh's). I couldn't find them instore and i'm not going to walmart before the 19th. Oh well sounds like i'm not missing much ...lol",
    "In Lake Louise - a guided walk for the family with Great Divide Nature Tours rent a canoe on Lake Louise or Moraine Lake  go for a hike to the Lake Agnes Tea House. In between Lake Louise and Banff - visit Marble Canyon or Johnson Canyon or both for family friendly short walks. In Banff  a picnic at Johnson Lake rent a boat at Lake Minnewanka  hike up Tunnel Mountain  walk to the Bow Falls and the Fairmont Banff Springs Hotel  visit the Banff Park Museum. The \"must-do\" in Banff is a visit to the Banff Gondola and some time spent on Banff Avenue - think candy shops and ice cream.",
    "On this day in 1786 - In New York City  commercial ice cream was manufactured for the first time."
]

for text in initialTexts:
    doc = {"id": str(uuid.uuid4()).replace("-", ""), "text": text}

    status = session.queueDocument(doc)
    if status == 202:
        print("\"", doc["id"], "\" document queued successfully.", "\r\n")

length = len(initialTexts)
results = []

while len(results) < length:
    print("Retrieving your processed results...", "\r\n")
    # get processed documents
Example 7

def onDocsAutoResponse(sender, result):
    print "\n", "AUTORESPONSE: ", len(result), result


def onCollsAutoResponse(sender, result):
    print "\n", "AUTORESPONSE: ", len(result), result


print "Semantria service demo.", "\r\n"

# Creates JSON serializer instance
serializer = semantria.JsonSerializer()
# Initializes new session with the serializer object and the keys.
session = semantria.Session(consumerKey, consumerSecret, serializer)

# Initialize session callback handlers
#session.Request += onRequest
#session.Response += onResponse
session.Error += onError
#session.DocsAutoResponse += onDocsAutoResponse
#session.CollsAutoResponse += onCollsAutoResponse

for text in initialTexts:
    # Creates a sample document that needs to be processed by Semantria
    # Unique document ID
    # Source text that needs to be processed
    doc = {"id": str(uuid.uuid1()).replace("-", ""), "text": text}
    # Queues document for processing on Semantria service
    status = session.queueDocument(doc)
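    # (sketch, not in the original snippet) the other examples in this
    # collection check the returned HTTP status right away:
    if status == 202:
        print "\"", doc["id"], "\" document queued successfully.", "\r\n"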
Example 8
def endpoint_name():
    print("endpoint hit!")
    rawtext = request.get_json()['text']

    # process the text using the Semantria API

    # Creates JSON serializer instance
    serializer = semantria.JsonSerializer()
    # Initializes new session with the serializer object and the keys.
    session = semantria.Session(consumerKey,
                                consumerSecret,
                                serializer,
                                use_compression=True)
    subscription = session.getSubscription()
    initialTexts = []
    results = []
    tracker = {}
    documents = []

    n = 975
    textchunks = [rawtext[i:i + n] for i in range(0, len(rawtext), n)]
    for text in textchunks:
        # Creates a sample document that needs to be processed by Semantria
        # Unique document ID
        # Source text that needs to be processed
        doc_id = str(uuid.uuid4())
        documents.append({'id': doc_id, 'text': text})
        tracker[doc_id] = TASK_STATUS_QUEUED

        res = session.queueBatch(documents)
        if res in [200, 202]:
            print("{0} documents queued successfully.".format(len(documents)))
            documents = []

    if len(documents):
        res = session.queueBatch(documents)
        if res not in [200, 202]:
            print("Unexpected error!")
            sys.exit(1)
        print("{0} documents queued successfully.".format(len(documents)))

    print("")

    # fix this too
    while len(list(filter(lambda x: x == TASK_STATUS_QUEUED,
                          tracker.values()))):
        time.sleep(0.5)
        print("Retrieving your processed results...")

        response = session.getProcessedDocuments()
        for item in response:
            if item['id'] in tracker:
                tracker[item['id']] = item['status']
                results.append(item)

    print("")

    # print and populate json to return it
    resultDict = {}

    for data in results:
        dataDict = {}

        # Printing of document sentiment score
        print("Document {0} / Sentiment score: {1}".format(
            data['id'], data['sentiment_score']))

        # Printing of document themes
        if "themes" in data:
            print("Document themes:")
            for theme in data["themes"]:
                print("\t {0} (sentiment: {1})".format(
                    theme['title'], theme['sentiment_score']))

        # Printing of document entities
        if "entities" in data:
            print("Entities:")
            dataDict["entities"] = data["entities"]
            for entity in data["entities"]:
                print("\t {0}: {1} (sentiment: {2})".format(
                    entity['title'], entity['entity_type'],
                    entity['sentiment_score']))

        # Printing the summary
        if "summary" in data:
            print("Summary:")
            dataDict["summary"] = data["summary"]
            print(data["summary"])

        if "relations" in data:
            print("Relationships:")
            dataDict["relationships"] = data["relations"]
            for relation in data["relations"]:
                print("\t {0}: {1}".format(relation['type'],
                                           relation['extra']))

        resultDict[data['id']] = dataDict
        print("")

    print("Done!")

    return jsonify(resultDict)
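
A minimal client-side call for the endpoint above (a sketch; the route path and port are assumptions, since the Flask route decorator is not shown in this excerpt):

import requests

resp = requests.post("http://localhost:5000/endpoint_name",  # hypothetical route
                     json={"text": "Semantria turns unstructured text into insight."})
print(resp.status_code)
print(resp.json())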
Example 9
    def parse_sentiment(self, input_texts, expected_lang):
        SentimentProvider.parse_sentiment(self, input_texts, expected_lang)

        if len(input_texts) > 100:
            raise SatException("Too many inputs. Input documents are limited to 100 per API call!")

        # Parse messages from json file
        docs_less140 = []
        docs_more140 = []
        id_map = {}
        for comment in input_texts:
            # generate unique id
            comment_id = str(uuid.uuid4()).replace("-", "")
            while comment_id in id_map:
                comment_id = str(uuid.uuid4()).replace("-", "")

            # Map the generated id to the original id of the comment
            id_map[comment_id] = comment["id"]

            # clean the text of any url
            comment["text"] = re.sub(r'https?://www\.[a-z\.0-9]+', '', comment["text"])
            comment["text"] = re.sub(r'www\.[a-z\.0-9]+', '', comment["text"])

            # add the comment to the appropriate list depending on whether it is longer or shorter than 140 characters
            if len(comment["text"]) > 140:
                docs_more140.append({"id": comment_id, "text": comment["text"]})
            else:
                docs_less140.append({"id": comment_id, "text": comment["text"]})

        # Initialise the JSON serialiser and create the semantria Session
        serializer = semantria.JsonSerializer()
        session = semantria.Session(semantria_key, semantria_secret, serializer, use_compression=True)

        # Use Configuration for specific language
        print("Setting Language: " + expected_lang)

        if expected_lang != "German":
            raise SatException("Only 'German' is supported!")

        lang_id_less140 = german_conf_twitter_active
        lang_id_more140 = german_conf

        # Send messages as batch to semantria
        if len(docs_more140) > 0:
            session.queueBatch(docs_more140, lang_id_more140)
        if len(docs_less140) > 0:
            session.queueBatch(docs_less140, lang_id_less140)

        # Retrieve results
        length_more140 = len(docs_more140)
        results_more140 = []
        length_less140 = len(docs_less140)
        results_less140 = []

        while (len(results_more140) < length_more140) or (len(results_less140) < length_less140):
            print("Retrieving processed results...", "\r\n")
            time.sleep(2)
            # get processed documents
            status_more140 = session.getProcessedDocuments(lang_id_more140)
            # keep only the documents we queued ourselves; removing items while
            # iterating over the same list would skip entries
            status_more140 = [data for data in status_more140 if data["id"] in id_map]
            for data in status_more140:
                data["id"] = id_map[data["id"]]
            print("Added " + str(len(status_more140)) + " entries to results_more140")
            results_more140.extend(status_more140)

            status_less140 = session.getProcessedDocuments(lang_id_less140)
            status_less140 = [data for data in status_less140 if data["id"] in id_map]
            for data in status_less140:
                data["id"] = id_map[data["id"]]
            print("Added " + str(len(status_less140)) + " entries to results_less140")
            results_less140.extend(status_less140)

        results = results_more140 + results_less140
        responses = []
        for result in results:
            responses.append(SentimentResponse(result['id'], result['sentiment_score'], None))
        return responses
Example 10
from __future__ import print_function
import semantria
import uuid
import time
import requests

serializer = semantria.JsonSerializer()

session = semantria.Session("59e4e96b-f19b-48b5-910a-a2b5d9d2bfc7",
                            "0cee133a-889e-4d1f-9d99-2749677bcfdd",
                            serializer,
                            use_compression=True)

initialTexts = [
    """BASEBALL - DODGERS WIN FIFTH STRAIGHT .
MONTREAL 1996-08-28
Hideo Nomo allowed a run in seven innings for his fifth win in seven road starts and Greg Gagne capped a three-run fourth with a two-run homer as the Los Angeles Dodgers claimed a 5-1 victory the Montreal Expos on Tuesday .
With their fifth straight win , the Dodgers moved a half-game ahead of the Expos at the top of the wild card hunt behind Nomo ( 13-10 ) , who allowed six hits and walked four with six strikeouts .
In San Francisco , Mike Williams allowed two runs in 7-1/3 innings and Benito Santiago and Ruben Amaro had RBI hits in the first inning as the Philadelphia Phillies edged the San Francisco Giants 3-2 .
Williams ( 5-12 ) , who snapped a personal three-game losing streak , allowed five hits , walked two and struck out five .
It was also Williams ' first win in three career decisions against San Francisco .
In Pittsburgh , Al Martin's run-scoring single snapped a fifth-inning tie and Denny Neagle outdueled John Smoltz as the Pittsburgh Pirates edged the Atlanta Braves 3-2 .
The Braves led 2-1 entering the fifth , but the Pirates pushed across two runs against Smoltz ( 20-7 ) .
Neagle ( 14-6 ) beat the Braves for the third time this season , allowing two runs and six hits in eight innings .
In St Louis , Gary Sheffield and Devon White each drove in two runs and Mark Hutton scattered four hits over six innings to lead the Florida Marlins past the St. Louis Cardinals 6-3 .
White added a solo homer , his 11th , off reliever Mark Petkovsek with one out in the fifth , giving the Marlins a 6-0 lead .
In New York , Steve Finley's three-run homer capped a four-run eighth inning and gave the San Diego Padres a 4-3 victory over New York , spoiling Bobby Valentine's debut as Mets ' manager .
The rally made a winner out of reliever Willie Blair
Tony Gwynn and Wally Joyner had two hits apiece , helping the Padres to their third straight win .
First-place San Diego has won seven of its last eight games and improved to 34-20 against NL East opponents .
In Houston , Tony Eusebio's eighth-inning sacrifice fly capped a comeback from a five-run deficit that gave the Houston Astros a 6-5 victory over the Chicago Cubs .
Example 11
def getDocumentThemes(textSubmitted):
    print("Semantria Detailed mode demo ...")
    print("")

    # the consumer key and secret
    key = "NONE"
    secret = "NONE"

    # Task statuses
    TASK_STATUS_UNDEFINED = 'UNDEFINED'
    TASK_STATUS_FAILED = 'FAILED'
    TASK_STATUS_QUEUED = 'QUEUED'
    TASK_STATUS_PROCESSED = 'PROCESSED'

    # Creates JSON serializer instance
    serializer = semantria.JsonSerializer()
    # Initializes new session with the serializer object and the keys.
    session = semantria.Session(key, secret, serializer, use_compression=True)

    # Initialize session callback handlers
    # session.Request += onRequest
    # session.Response += onResponse
    session.Error += onError
    # session.DocsAutoResponse += onDocsAutoResponse
    # session.CollsAutoResponse += onCollsAutoResponse

    subscription = session.getSubscription()

    initialTexts = []
    results = []
    tracker = {}
    documents = []

    doc_id = str(uuid.uuid4())
    documents.append({'id': doc_id, 'text': textSubmitted})
    tracker[doc_id] = TASK_STATUS_QUEUED

    res = session.queueBatch(documents)

    if res in [200, 202]:
        print("{0} documents queued successfully.".format(len(documents)))
        documents = []

    if len(documents):
        res = session.queueBatch(documents)
        if res not in [200, 202]:
            print("Unexpected error!")
            sys.exit(1)
        print("{0} documents queued successfully.".format(len(documents)))

    print("")

    while len(list(filter(lambda x: x == TASK_STATUS_QUEUED,
                          tracker.values()))):
        time.sleep(0.5)
        print("Retrieving your processed results...")

        response = session.getProcessedDocuments()
        for item in response:
            if item['id'] in tracker:
                tracker[item['id']] = item['status']
                results.append(item)

    print("")

    #print(textSubmitted)

    for data in results:
        # Printing of document sentiment score
        print("Document {0} / Sentiment score: {1}".format(
            data['id'], data['sentiment_score']))

        print(data)
        if "auto_categories" in data:
            for auto_categories in data["auto_categories"]:
                if "categories" in auto_categories:
                    for categories in auto_categories["categories"]:
                        if categories["sentiment_score"] == data["sentiment_score"]:
                            return categories["title"]

        return "Nothing was found"
Example 12
    def __init__(self, consumer_key, consumer_secret):
        self.name = 'semantria'
        serializer = semantria.JsonSerializer()
        self.session = semantria.Session(consumer_key, consumer_secret,
                                         serializer)
        self.session.Error += onError
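
A hypothetical analyze method for this wrapper, built only from session calls shown elsewhere in this collection (queueDocument, getProcessedDocuments) and assuming uuid and time are imported at module level; this is a sketch, not part of the original class:

    def analyze(self, text, max_attempts=50):
        # Queue one document, then poll until its result comes back or we give up.
        doc = {"id": str(uuid.uuid4()).replace("-", ""), "text": text}
        self.session.queueDocument(doc)
        for _ in range(max_attempts):
            processed = self.session.getProcessedDocuments()
            if isinstance(processed, list):
                for item in processed:
                    if item["id"] == doc["id"]:
                        return item["sentiment_score"]
            time.sleep(0.5)
        return None  # not processed within the allotted time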
Example 13
# -*- coding: utf-8 -*-

import semantria
import uuid
import time

from ... import config

if "semantria" in config.engines:
    key = config.engines["semantria"]["key"]
    secret = config.engines["semantria"]["secret"]
    labels = config.engines["semantria"]["labels"]
    langs = config.engines["semantria"]["langs"]

    serializer = semantria.JsonSerializer()
    session = semantria.Session(key, secret, serializer, use_compression=True)


def convert_label(label):
    if label in labels:
        return labels[label]
    else:
        print "semantria:", label
        return label


def convert_lang(lang):
    if lang in langs:
        return langs[lang]
    else:
        return lang
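
For reference, a hypothetical shape of the config.engines entry that the snippet above reads; the four key names come from the code, while every value here is a placeholder:

engines = {
    "semantria": {
        "key": "YOUR_CONSUMER_KEY",
        "secret": "YOUR_CONSUMER_SECRET",
        # maps labels returned by Semantria to this project's label names
        "labels": {"positive": "pos", "negative": "neg", "neutral": "neu"},
        # maps language identifiers to this project's language names
        "langs": {"de": "German", "en": "English"},
    }
}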
Example 14
        for line in f:
            if len(line) < 3:
                continue

            job_id = job_ids[random.randint(0, unique_jobid_count - 1)]

            jobs[job_id] += 1
            documents[job_id].append({
                'id': str(uuid.uuid4()),
                'text': line,
                'job_id': job_id
            })

    # Initializes Semantria Session
    session = semantria.Session(SEMANTRIA_KEY,
                                SEMANTRIA_SECRET,
                                use_compression=True)
    session.Error += onError

    if data_sending_mode == 0:
        for job_id, docs in documents.items():
            for document in docs:
                session.queueDocument(document)
            print("{0} documents queued for {1} job ID".format(
                len(docs), job_id))
    elif data_sending_mode == 1:
        for job_id, docs in documents.items():
            if session.queueBatch(docs) is not None:
                print("{0} documents queued for {1} job ID".format(
                    len(docs), job_id))
    else:
Example 15
import semantria
import uuid
import time
import csv
import MySQLdb
import re
from geopy.geocoders import Nominatim
import matplotlib.pyplot as plt


db = MySQLdb.connect(host="localhost", user="******", passwd="avk287",
                     db="avk287")
cursor = db.cursor()


serializer = semantria.JsonSerializer()
session = semantria.Session("e1eef79b-f58f-4e79-830c-9203ae09a473", "4a2891a0-2df0-4672-b3e9-5392f2b07256", serializer, use_compression=True)
geolocator = Nominatim()


def plot1(result_q1):

    xaxis = []
    for x in range(0, len(result_q1)):
        xaxis.append(x)
    yaxis = []
    label = []
    for item in result_q1:
        label.append(item[0])
        yaxis.append(item[1])

    plt.bar(xaxis, yaxis, align='center')
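    # (sketch, not in the original snippet) typical matplotlib calls to finish
    # the figure: label each bar with its category name, then render it
    plt.xticks(xaxis, label, rotation=45)
    plt.ylabel('count')
    plt.tight_layout()
    plt.show()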
Example 16
    print("\n", "ERROR: ", result)


def onDocsAutoResponse(sender, result):
    print("\n", "AUTORESPONSE: ", len(result), result)


def onCollectionsAutoResponse(sender, result):
    print("\n", "AUTORESPONSE: ", len(result), result)


print("Semantria Detailed mode demo ...")
print("")

# Initializes new session with the serializer object and the keys.
session = semantria.Session(consumerKey, consumerSecret)

# Initialize session callback handlers
# session.Request += onRequest
# session.Response += onResponse
session.Error += onError
# session.DocsAutoResponse += onDocsAutoResponse
# session.CollectionsAutoResponse += onCollectionsAutoResponse

subscription = session.getSubscription()

initialTexts = []

print("Reading collection from file...")
# io.open is used here so the file is read the same way under Python 2 and Python 3
with io.open('source.txt', encoding='utf-8') as f:
Example 17
    titleList.append(goodTitle)
    #please = type(title)
    # title = title[2:-1]
    #title = title.strip()

# API Key/Secret
# Set the environment vars before calling this program
# or edit this file and put your key and secret here.
#"8087b301-f1ae-452a-849b-cd48fb881436"
consumerKey = os.getenv('SEMANTRIA_KEY')
#"a2c7b881-5e1a-4c7d-9c5e-92249962a586"#
consumerSecret = os.getenv('SEMANTRIA_SECRET')

# Initializes new session with the serializer object and the keys.
session = semantria.Session(
    "8087b301-f1ae-452a-849b-cd48fb881436",
    "a2c7b881-5e1a-4c7d-9c5e-92249962a586")  #consumerKey, consumerSecret)

subscription = session.getSubscription()

initialTexts = []
for i in range(0, len(fileSynopses)):
    initialTexts.append(fileSynopses[i])

#some sample text - only one document
# initialTexts = [
# "Set in a lonely city on a rainy night, the film takes place in a bicycle shop  that is closed for the night. In the corner of the shop sleeps Red, a red unicycle who languishes in the 'clearance corner', waiting to be purchased. As the camera zooms on him, the sound of rain falling turns into a drumroll, and we go into the dream-sequence. In his dream, Red is being ridden by a circus clown  as part of a juggling act. The clown enters the ring, accompanied by a fanfare, expecting a huge applause, but instead receives only a few scattered claps from different parts of the  audience. Nevertheless, Lumpy starts juggling three balls whilst riding Red, occasionally dropping them as he does. However, Red slides out from underneath Lumpy  and spikes the balls back to him with his bike pedals. The confused clown ponders this for only a second before continuing on with his act. At this point, Red is forced to catch another ball which Lumpy unintentionally throws across the ring. Lumpy continues to ride in the air while juggling the other two balls while Red bounces the green ball up and down. Eventually Lumpy comes to a sudden realization, and looks between his legs, only to discover he's been riding on nothing before he falls to the ground . Red catches the other two balls and begins juggling all three of them, and then balances them on top of each other, after which he receives an uproarous applause. But then the sound of clapping turns into the sound of rain, and Red awakens, left to face bleak reality. Depressed, he returns to the corner where he was previously resting, and goes back to sleep. The short ends with the final image of the neon sign for 'Eben's Bikes'.",
# "The Stooges play three sets of identical triplets, born one year apart. All nine brothers lose track of each other after World War II, unaware that they are all living in the same city. One set  is single, one  is married, and the other  is engaged. Trouble brews when the engaged set of brothers decided to celebrate at a local nightclub. Before they arrive, the unmarried set show up, followed by the fiancees of their brothers. The ladies start hugging and kissing the unsuspecting brothers. Within minutes, the wives of the married brothers show up, thinking their husbands are cheating on them. Hilarity ensues when the nightclub waiter  walks in and sees all nine brothers simultaneously.",
# "Ben Jones  and Marion 'Howdy' Lewis  are two easygoing, modern-day cowboys who make a meager living breaking wild horses. Their frequent employer is Jim Ed Love , a shrewd businessman who always gets the better of them. After they bring him a string of tamed horses and spend the winter rounding up stray cows, he talks them into taking a nondescript roan horse in lieu of some of their wages. Ben finds  that the horse is unrideable. Rather than turning it into soap or dog food, he comes up with the bright idea of taking it to a rodeo and betting other cowhands they cannot ride it, thereby doubling their earnings. Along the way, the duo stop to help two none-too-bright strippers, Mary  and Sister , with their car, which has broken down. Not knowing much about cars, they give them a ride to the nearest garage, but end up getting to know them better  and taking them along to the rodeo. Everything goes as planned; nobody is able to stay on the horse. Then the animal suddenly collapses and Ben spends all the money they've won for veterinary help—and a new stable to replace the one destroyed by the roan when he recovers. In the end, Ben and Howdy end up right back where they started, with only the roan to show for their efforts.",
# "Preetam , a struggling cartoonist, meets Anita  at a tennis match, where she is watching her favorite tennis star. Anita, a wealthy and westernized heiress is controlled by her feminist aunt, Sita Devi . Sita is suspicious of men, and cultivates her attitudes in Anita. However, to receive her fortune, her father's will decrees that Anita must marry within one month of turning 21. Sita Devi doesn't agree with this, and tries to set Anita up with a sham marriage which will soon lead to divorce, thereby giving her both freedom and a fortune. Sita hires Preetam to marry Anita, but doesn't know that the pair have already met. Preetam is kept from Anita after their marriage, but he kidnaps her and takes her to the traditional house of his brother. While at the house, Anita befriends Preetam's sister-in-law, and begins to see the merit in becoming a traditional Indian wife. Preetam is worried that he has lost Anita, and expedites their divorce by providing false, incrimiating evidence to the court. Preetam then leaves mumbai, heartbroken. Anita now recognizes her feelings for Preetam and rushes to meet him at the airport. In the end, the couple is reunited.",
# "Beth Cappadora  and her husband Pat  experience a parent's worst fear when their son Ben vanishes in a crowded hotel lobby during Beth's high school reunion. The ensuing frantic search is unsuccessful, and Beth goes through a sustained nervous breakdown. Unable to cope with her devastation, Beth unintentionally neglects her other children, Vincent ([[Jonathan Jackson  and Kerry . After nine years, the family has seemingly accepted that Ben has gone forever, when a familiar-looking boy  turns up at their house, introduces himself as Sam and offers to mow their lawn. Beth is convinced that Sam is actually her son, and begins an investigation that culminates in the discovery that Ben was kidnapped at the ill-fated high school reunion years ago, by a mentally unstable woman who was a high school classmate of Beth's. This woman brought up Ben as her own child, until she committed suicide. The attempted re-integration of Ben back into the Cappadora family produces painful results for all involved. Eventually the family decides that what's best for Ben is to return him to his adoptive father, but one night Vincent finds him playing basketball outside. Ben reveals that he remembered something from before his abduction, playing with Vincent and Vincent finding him, causing him to feel safe. Vincent, who has carried guilt for letting go of Ben at the reunion is forgiven by Ben who decides to return to living with his real family, but first plays a game of basketball with his brother with their parents secretly watching from their bedroom window.",