Example #1
def get_bot_response():
    user_request = request.args.get('msg')  # Fetching input from the user
    user_request = user_request.lower()
    if len(user_request.split(" ")) > 1:
        check_search = user_request.split(" ")[0]
        if check_search == 'google':
            user_request = user_request.replace("google","")
            user_request = user_request.translate(
                {ord(c): "" for c in "!@#$%^&*()[]{};:,./<>?\\|`~-=_+"})
            check_query = user_request.split(" ")[1]
            check_text = user_request.split(" ")[1:3]
            if check_text == check_wikipedia1 or check_text == check_wikipedia2:
                response = scrape_data(user_request, "wikipedia")
            elif check_text == check_wikihow:
                response = scrape_data(user_request, "wikihow")
            elif check_query == "nearby":
                response = scrape_data(user_request, "nearby")
            else:
                response = scrape_data(user_request, "")
                
        else:
            response = chatbot_response(user_request)                

    else:
        response = chatbot_response(user_request)
    
    return response
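
This route relies on a project-level scrape_data(query, source) helper plus a chatbot_response() fallback and the check_wikipedia1/check_wikipedia2/check_wikihow constants, none of which are part of the snippet. A minimal stand-in that only mirrors the query/source interface used above (the bodies are placeholders, not the original implementation):

def scrape_data(query, source):
    # Placeholder dispatcher: the real project scrapes the named source.
    query = query.strip()
    if source == "wikipedia":
        return f"(wikipedia summary for: {query})"
    if source == "wikihow":
        return f"(wikihow steps for: {query})"
    if source == "nearby":
        return f"(nearby places for: {query})"
    return f"(google results for: {query})"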
Example #2
def gather_data(data_params):
    with open('config/chromedriver.json') as fh:
        chromedriver_path = json.load(fh)['chromedriver_path']

    print("Scraping data...")
    scrape_data(chromedriver_path, data_params['all_links_pickle_path'],
                data_params['fbworkouts_path'], data_params['comments_path'])
    print("Scraping done.")

    print("Querying Youtube API...")
    get_youtube(data_params['fbworkouts_path'],
                data_params['youtube_csv_path'])
    print("Querying done.")
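
gather_data() needs two inputs that the snippet does not show: a small JSON config holding the chromedriver location and a data_params dict with four output paths. A sketch of both, with placeholder locations:

import json
import os

# Config file read by gather_data(); only the "chromedriver_path" key is used.
os.makedirs("config", exist_ok=True)
with open("config/chromedriver.json", "w") as fh:
    json.dump({"chromedriver_path": "/usr/local/bin/chromedriver"}, fh)

# The four keys gather_data() accesses; the file locations are placeholders.
data_params = {
    "all_links_pickle_path": "data/all_links.pickle",
    "fbworkouts_path": "data/fbworkouts.csv",
    "comments_path": "data/comments.csv",
    "youtube_csv_path": "data/youtube.csv",
}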
Example #3
def scrape():

    # Run the scrape function
    mars_data = scrape.scrape_data()

    # Update the Mongo database using update and upsert=True
    mongo.db.mars_data.update({}, mars_data, upsert=True)
    return redirect("/")
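
Collection.update() as used here was deprecated in PyMongo 3 and removed in PyMongo 4. On a current driver, the same full-document upsert (reusing the example's mongo handle and mars_data dict) would be:

# PyMongo 4+ equivalent of the deprecated update(..., upsert=True) call above.
mongo.db.mars_data.replace_one({}, mars_data, upsert=True)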
Example #4
def scrape():
	try:
		mdata = db.latest
		data = scrape_data()
		mdata.delete_many({"planet":"mars"})
		mdata.insert_one(data)
	except Exception as e:
		print(e)

	return redirect("/", code=302)
Example #5
def get_bot_response():
    user_request = request.args.get('msg')  # Fetching input from the user
    user_request = user_request.lower()
    if len(user_request.split(" ")) > 1:
        check_search = user_request.split(" ")[0]
        if check_search == 'google':
            user_request = user_request.replace("google", "")
            user_request = user_request.translate(
                {ord(c): "" for c in "!@#$%^&*()[]{};:,./<>?\\|`~-=_+"})
            check_query = user_request.split(" ")[1]
            check_text = user_request.split(" ")[1:3]
            if check_text == check_wikipedia1 or check_text == check_wikipedia2:
                response = scrape_data(user_request, "wikipedia")
            elif check_text == check_wikihow:
                response = scrape_data(user_request, "wikihow")
            elif check_query == "nearby":
                response = scrape_data(user_request, "nearby")
            else:
                response = scrape_data(user_request, "")
        else:
            if user_request == 'how are you?' or user_request == 'how are you':
                response = 'Fine , Good to see you again'
            elif user_request == 'what is your name' or user_request == 'who are you':
                response = ' I am farmbot, I help in agriculture related problems'
            else:
                user_request = user_request.translate(
                    {ord(c): "" for c in "!@#$%^&*()[]{};:,./<>?\\|`~-=_+"})
                check_query = user_request.split(" ")[1]
                check_text = user_request.split(" ")[1:3]
                if check_text == check_wikipedia1 or check_text == check_wikipedia2:
                    response = scrape_data(user_request, "wikipedia")
                elif check_text == check_wikihow:
                    response = scrape_data(user_request, "wikihow")
                elif check_query == "nearby":
                    response = scrape_data(user_request, "nearby")
                else:
                    response = scrape_data(user_request, "")

    else:
        if user_request == 'hi':
            response = 'Hello , Thanks for your greeting'
        elif user_request == 'bye' or user_request == 'thankyou':
            response = 'bye, Thank you'
        elif user_request == 'hello':
            response = 'Good to see you again'
        else:
            response = 'hey'
    print(response)
    speechtotext(response)
    return response
Example #6
import argparse
from scrape import scrape_data

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", required=True, help="Airbnb listing id")
    args = parser.parse_args()
    
    x = scrape_data(args.i)
    for a, b in enumerate(x):
        print(a, b, ' : ', x[b])
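
Run it as, for example, python scrape_cli.py -i 12345678 (the script name and listing id are placeholders). The loop assumes scrape_data() returns a dict-like mapping and prints each key's position, the key, and its value.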
Example #7
def scrape():

    (article_title, article_teaser, featured_image_url, table_html,
     hemisphere_image_urls) = scrape_data()

    mongo.db.article.update_one(
        {},
        {"$set": {"title": article_title, "teaser": article_teaser}},
        upsert=True)
    mongo.db.featured_image.update_one(
        {},
        {"$set": {"url": featured_image_url}},
        upsert=True)
    mongo.db.facts.update_one(
        {},
        {"$set": {"table_html": table_html}},
        upsert=True)

    mongo.db.hemisphere.drop()
    for h in hemisphere_image_urls:
        mongo.db.hemisphere.insert_one({
            "title": h["title"],
            "img_url": h["img_url"]
        })

    mongo.db.last_update.update_one(
        {},
        {"$set": {"last_updated": dt.now().strftime("%m/%d/%Y %H:%M")}},
        upsert=True)

    # Redirect back to home page
    return redirect("/")
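
The route depends on names defined elsewhere in the app; a sketch of the setup it presumably expects, with the database URI as a placeholder:

from datetime import datetime as dt

from flask import Flask, redirect
from flask_pymongo import PyMongo

app = Flask(__name__)
app.config["MONGO_URI"] = "mongodb://localhost:27017/mars_app"  # placeholder URI
mongo = PyMongo(app)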
Example #8
def get_bot_response():
    user_request = request.args.get('msg')  # Fetching input from the user
    user_request = user_request.lower()
    if len(user_request.split(" ")) > 1:
        check_search = user_request.split(" ")[0]
        if check_search == 'google':
            user_request = user_request.replace("google", "")
            user_request = user_request.translate(
                {ord(c): "" for c in "!@#$%^&*()[]{};:,./<>?\\|`~-=_+"})
            check_query = user_request.split(" ")[1]
            check_text = user_request.split(" ")[1:3]
            if check_text == check_wikipedia1 or check_text == check_wikipedia2:
                response = scrape_data(user_request, "wikipedia")
            elif check_text == check_wikihow:
                response = scrape_data(user_request, "wikihow")
            elif check_query == "nearby":
                response = scrape_data(user_request, "nearby")
            else:
                response = scrape_data(user_request, "")
        else:
            if user_request == 'how are you?' or user_request == 'how are you':
                response = 'Fine , Good to see you again'
            elif user_request == 'what is your name' or user_request == 'who are you':
                response = ' I am farmbot, I help in agriculture related problems'
            elif user_request == 'what is agriculture' or user_request == 'define agriculture':
                response = 'Agriculture is the science or practice of farming, including cultivation of the soil for the growing of crops and the rearing of animals to provide food, wool, and other products.'
            elif user_request == 'what is organic gardening' or user_request == 'what is organic farming':
                response = 'Organic gardening is more than simply avoiding synthetic pesticides and fertilizers. It is about observing nature’s processes, and emulating them in your garden as best you can. And the most important way to do that is to understand the makeup of your soil and to give it what it needs. If anything could be called a ‘rule’ in organic gardening, it’s this: feed the soil, not the plant'
            elif user_request == 'what is the reason behind the tomatoes leaves having spots' or user_request == 'what to do if my tomato leaves have spots':
                response = 'Tiny spots may be due to spider mites, which tend to gather on the undersides of leaves, as do tiny pale aphids. A good spray from the hose (including both sides of the leaves) often will take care of such an infestation. If that doesn’t work, try an insecticidal soap (avoid home-grown soap solutions, which can strip the protective coating from leaves). Sometimes brown spots indicate a minor fungus infection. To deter fungus problems, avoid overwatering and stake plants so they get good air circulation'
            elif user_request == 'what are all the watering methods for onion' or user_request == 'proper watering technique for onion':
                response = 'Watering once a week usually is enough in the spring. But you may need to water more often during dry, windy weather. Water onions slowly and deeply to help grow strong, healthy roots.'
            elif user_request == 'why is jasmine plant not flowering' or user_request == 'why is my plant not flowering':
                response = 'Though your plant looks healthy with lustrous green foliage, it may not bloom flowers. It is due to the lack of feeding the plants with water and fertilizers rich in nitrogen and phosphorus. So apply the fertilizers that jasmine plants love to absorb nutrients from them and bloom flowers quickly. Prune the jasmine plant when it stops blooming fragrant flowers'
            elif user_request == 'how to cultivate crop faster' or user_request == 'how can crop yields be increased':
                response = '1. Plant Early, Plant Effectively 2. Practice Seasonal Soil Rotation 3. Know The Yield Potential 4. Always Scout Your Fields 5. Ensure Proper Water Drainage 6. Utilize Fertilizers 7. Test Your Soil 8. Weed Early and Often.'
            else:
                user_request = user_request.translate(
                    {ord(c): "" for c in "!@#$%^&*()[]{};:,./<>?\\|`~-=_+"})
                check_query = user_request.split(" ")[1]
                check_text = user_request.split(" ")[1:3]
                if check_text == check_wikipedia1 or check_text == check_wikipedia2:
                    response = scrape_data(user_request, "wikipedia")
                elif check_text == check_wikihow:
                    response = scrape_data(user_request, "wikihow")
                elif check_query == "nearby":
                    response = scrape_data(user_request, "nearby")
                else:
                    response = scrape_data(user_request, "")

    else:
        if user_request == 'hi':
            response = 'Hello , Thanks for your greeting'
        elif user_request == 'bye' or user_request == 'thankyou':
            response = 'bye, Thank you'
        elif user_request == 'hello':
            response = 'Good to see you again'
        else:
            response = '...'

    return response
Example #9
import scrape
import word_cloud

driver = scrape.initialize_browser(headless=True)
scrape.scrape_data(driver, "YourQuoraID")

word_cloud.generate_word_cloud()
Example #10
import sqlite3

import scrape


def init_file(c):
    c.execute("""CREATE TABLE fencers (
    id int,
    name TEXT,
    victories TEXT,
    victories_over_matches TEXT,
    touches_scored TEXT,
    touches_received TEXT,
    indicator TEXT,
    match_scores TEXT,
    match_against TEXT
);""")
    c.execute("""CREATE TABLE games (
    id int,
    p1 TEXT,
    p2 TEXT,
    score TEXT,
    round TEXT,
    winner TEXT
);""")


for event, event_url in scrape.get_events():
    conn = sqlite3.connect(f"dbs/{event}.db")
    c = conn.cursor()
    init_file(c)
    for (pools, tableaus) in scrape.scrape_data(event_url):
        c.executemany('INSERT INTO fencers VALUES (?,?,?,?,?,?,?,?,?)', pools)
        c.executemany('INSERT INTO games VALUES (?,?,?,?,?,?)', tableaus)

    conn.commit()
    conn.close()
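
Once an event database has been written, the rows can be read back with plain sqlite3; the file name below is a placeholder for one of the per-event databases created above:

import sqlite3

conn = sqlite3.connect("dbs/some_event.db")
c = conn.cursor()
for name, victories in c.execute("SELECT name, victories FROM fencers"):
    print(name, victories)
conn.close()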
Example #11
def render_video():
    command = [
        "ffmpeg -y -i {0}/solfilmen.mp4 -i {1}/intro.png -i {1}/up.png -i {1}/down.png"
        .format(SRC_DIR, OUT_DIR),
        "-filter_complex \"[0:v][1:v] overlay=enable='between(t,0,6)' [tmp];",
        "[tmp][2:v] overlay=enable='between(t,9,33)' [tmp2]; [tmp2][3:v] overlay=enable='between(t,34,53)'\"",
        "{}/output.mp4".format(OUT_DIR)
    ]

    os.system(" ".join(command))


# create output folder
if not os.path.exists(OUT_DIR):
    os.mkdir(OUT_DIR)

# generate images with dynamic data used to render the video
generate_images(scrape.scrape_data(today))

# render the video with ffmpeg cli
render_video()

# upload the video to s3
uploader.upload_video(
    "{}/output.mp4".format(OUT_DIR),
    S3_BUCKET,
    "date-{}.mp4".format(DATE),
)

print("Done")