Example #1 — queue a YouTube track for playback in Kodi and record it in the database
# db and kodi come from the surrounding project and are not shown here.
def queue(id, title):
    # Only queue songs that are not already in the database.
    if not db.is_exist(id):
        player = kodi()
        db.insert_data({"youtube_id": id, "title": title, "status": "idle"})
        player.GUI.ShowNotification(title=title, message="Successfully Queued", displaytime=20000)
        return "Song Successfully Queued"
    else:
        return "Song is already queued"
Example #2 — crawl a Naver cafe article and store its text, tags, and metadata in the database
# requests and BeautifulSoup are third-party imports; db, login, tag, and
# getObjectUrl come from the surrounding project and are not shown here.
import requests
from bs4 import BeautifulSoup as bs

def crawlingArticle(id, user):
    try:
        if len(id) > 5:
            # Fetch the article page using the authenticated session cookies.
            r = requests.get(getObjectUrl(id), cookies=login.cookies)
            soup = bs(r.text, 'html.parser')
            content = soup.find("div", class_="tbody m-tcol-c").find_all("p")[4].text
            print(content)
            # Store the article once; skip ids that are already in the database.
            if not db.is_exist(db.Data, id):
                db.session.add(db.Data(id=id,
                                       user=user,
                                       datetime=soup.find("td", class_="m-tcol-c date").text,
                                       title=soup.find("span", class_="b m-tcol-c").text,
                                       content=content,
                                       morpheme=str(tag.get_tags(content)),
                                       category=soup.find_all("td", class_="m-tcol-c")[1].text))
                db.session.commit()
            print(soup.find("td", class_="m-tcol-c date").text)      # date
            print(soup.find("span", class_="b m-tcol-c").text)       # title
            print(content)                                           # content
            print(soup.find_all("td", class_="m-tcol-c")[1].text)    # category

    except Exception as e:
        print(e)
Example #3 — build Naver cafe article URLs and crawl every article id from a pickled list
# requests, BeautifulSoup, and pickle are third-party/standard-library imports;
# db, login, tag, and list_file come from the surrounding project and are not shown here.
import pickle

import requests
from bs4 import BeautifulSoup as bs

def getObjectUrl(i):
    # Build the Naver cafe article URL for the given article id.
    url = 'http://cafe.naver.com/ArticleRead.nhn?clubid=10050146&page=3&menuid=353&boardtype=L&referrerAllArticles=false&clubid=10050146&articleid='
    return url + str(i)

if __name__ == '__main__':
    # Load the pickled list of article ids and crawl each article.
    with open(list_file, 'rb') as f:
        arr = pickle.load(f)
        for id in arr:
            try:
                if len(id) > 5:
                    print(id)
                    r = requests.get(getObjectUrl(id), cookies=login.cookies)
                    soup = bs(r.text, 'html.parser')
                    content = soup.find("div", class_="tbody m-tcol-c").find_all("p")[4].text
                    # Store the article once; skip ids that are already in the database.
                    if not db.is_exist(db.Data, id):
                        db.session.add(db.Data(id=id,
                                               datetime=soup.find("td", class_="m-tcol-c date").text,
                                               title=soup.find("span", class_="b m-tcol-c").text,
                                               content=content,
                                               morpheme=str(tag.get_tags(content)),
                                               category=soup.find_all("td", class_="m-tcol-c")[1].text))
                        db.session.commit()
                    print(soup.find("td", class_="m-tcol-c date").text)      # date
                    print(soup.find("span", class_="b m-tcol-c").text)       # title
                    print(content)                                           # content
                    print(soup.find_all("td", class_="m-tcol-c")[1].text)    # category

            except Exception as e:
                print(e)