import MySQLdb


def startParser(count, link, channel):
    # Reuse one connection for the whole crawl instead of reconnecting on
    # every page.
    db = MySQLdb.connect('127.0.0.1', 'root', 'root', 'dailymotion')
    cursor = db.cursor()

    for i in range(count, 101):
        try:
            parsePage(connection.getPageSource(link + "/" + str(i)), channel)

            # Record the next page to crawl; parameterized instead of built
            # with string formatting.
            next_link = link + "/" + str(i + 1)
            sql = ("INSERT INTO track_table(`channel_name`,`next_link`) VALUES (%s,%s) "
                   "ON DUPLICATE KEY UPDATE next_link=%s")
            print(sql)
            cursor.execute(sql, (channel, next_link, next_link))
            db.commit()
        except Exception as x:
            # On failure, remember where the crawl stopped and flag the channel.
            print(x)
            cursor.execute(
                "INSERT INTO track_table(`channel_name`) VALUES (%s) "
                "ON DUPLICATE KEY UPDATE next_link_nc=%s, next_link='completed', status=10",
                (channel, link + "/" + str(count)))
            db.commit()

    # All pages processed: mark the channel as completed.
    cursor.execute(
        "INSERT INTO track_table(`channel_name`,`next_link`) VALUES (%s,'completed') "
        "ON DUPLICATE KEY UPDATE next_link='completed'",
        (channel,))
    db.commit()
    db.close()
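The upserts above only work if `track_table` has a unique key on `channel_name`; a minimal sketch of such a schema, with column names taken from the queries and types and the key being assumptions:

import MySQLdb

db = MySQLdb.connect('127.0.0.1', 'root', 'root', 'dailymotion')
cursor = db.cursor()
# Only the column names appear in the queries above; the types and the
# unique key here are guesses.
cursor.execute("""
    CREATE TABLE IF NOT EXISTS track_table (
        `channel_name` VARCHAR(255) NOT NULL,
        `next_link`    VARCHAR(512) DEFAULT NULL,
        `next_link_nc` VARCHAR(512) DEFAULT NULL,
        `status`       INT DEFAULT 0,
        UNIQUE KEY (`channel_name`)
    )
""")
db.commit()
db.close()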
def main():
    # Walk the first 100 pages of Dailymotion's popular-channels listing.
    for i in range(1, 101):
        soup = connection.getPageSource("http://www.dailymotion.com/users/popular/channel/all/" + str(i))
        getContent(soup)
Example #3
def main():
    # Crawl Dailymotion's browse page and hand it to the content parser.
    soup = connection.getPageSource("http://www.dailymotion.com/in/browse")
    getContent(soup)
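connection.getPageSource and getContent are defined elsewhere in this project; a minimal sketch of what the page-fetching helper might look like, assuming it returns a BeautifulSoup tree of the page HTML (requests and bs4 are stand-ins for whatever the original uses):

import requests
from bs4 import BeautifulSoup


def getPageSource(url):
    # Fetch the URL and parse the HTML; raises for HTTP error statuses.
    response = requests.get(url, timeout=30)
    response.raise_for_status()
    return BeautifulSoup(response.text, "html.parser")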