Example 1
def Search(self):
    # Read the query image path typed into the GUI
    gotfilename = self.filename.get()
    # Show the query image (mpimg/plt are the usual matplotlib.image /
    # matplotlib.pyplot aliases)
    im = mpimg.imread(gotfilename)
    plt.title("query")
    plt.imshow(im)
    plt.show()
    # Path of the image database to search against
    database = self.databasepath.get()
    print(gotfilename)
    print(database)
    test.search(gotfilename, database)
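
This method assumes a GUI class with two entry fields. A minimal sketch of that context, assuming Tkinter StringVars and treating test.search as the project's own image-search helper (the class name and widget layout are illustrative, not from the original):

import tkinter as tk
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import test  # assumed: the project's own image-search module


class SearchApp:
    def __init__(self, root):
        # StringVars backing the two entry fields that Search() reads
        self.filename = tk.StringVar()
        self.databasepath = tk.StringVar()
        tk.Entry(root, textvariable=self.filename).pack()
        tk.Entry(root, textvariable=self.databasepath).pack()
        tk.Button(root, text="Search", command=self.Search).pack()

    def Search(self):
        # body as in Example 1: display the query image, then search
        test.search(self.filename.get(), self.databasepath.get())


root = tk.Tk()
SearchApp(root)
root.mainloop()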
Example 2
def run(self, dispatcher: CollectingDispatcher, tracker: Tracker,
        domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
    # Search on the raw text of the user's latest message
    results = search(tracker.latest_message.get('text'))
    # Join the results into a single newline-separated reply
    res = ""
    for i in results:
        res += i + "\n"
    dispatcher.utter_message(text=res)
    return []
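
In rasa_sdk, a run() like this belongs to an Action subclass that also declares a name(). A minimal sketch of the surrounding class, assuming the same search helper as the other examples (the action name is an assumption):

from typing import Any, Dict, List, Text

from rasa_sdk import Action, Tracker
from rasa_sdk.executor import CollectingDispatcher

from test import search  # assumed: same search helper used across these examples


class ActionSearch(Action):
    def name(self) -> Text:
        # Must match the action name registered in the domain file
        return "action_search"

    def run(self, dispatcher: CollectingDispatcher, tracker: Tracker,
            domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
        results = search(tracker.latest_message.get('text'))
        dispatcher.utter_message(text="\n".join(results))
        return []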
Example 3
def get_google_results(title_name):
    result = None  # avoids an UnboundLocalError if the import or search fails
    try:
        from test import search
        # Append "+researchgate" to bias results toward ResearchGate pages
        search_article = title_name + "+researchgate"
        # stop=1 yields at most one URL, so `result` holds the top hit
        for j in search(search_article, tld="co.in", num=10, stop=1, pause=2):
            result = j
    except ImportError:
        print("No module named 'google' found")
    return result
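
The tld/num/stop/pause keywords (and the error message in the except branch) match the googlesearch module from the google package on PyPI; a standalone usage sketch, assuming that package:

from googlesearch import search  # pip install google

# Print the top 5 result URLs, pausing 2 seconds between HTTP requests
for url in search("protein fractionation researchgate",
                  tld="co.in", num=10, stop=5, pause=2):
    print(url)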
Example 4
    def run(self, dispatcher: CollectingDispatcher, tracker: Tracker,
            domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
        # Confidence the NLU model assigned to the classified intent
        confidence = tracker.latest_message['intent'].get('confidence')
        print("confidence", confidence)
        print("intent", tracker.latest_message['intent'].get('name'))
        if confidence < 0.85:
            # Low confidence: fall back to a web search on the raw message text
            results = search(tracker.latest_message.get('text'))
            response = ""
            for i in results:
                response += i + "\n"
        else:
            response = "In what location?"

        dispatcher.utter_message(text=response)
        return []
Example 5
	def __init__(self):
		wx.Frame.__init__(self, parent=None, title="成绩查询", size=(900, 560))  # "成绩查询" = "grade lookup"

		# 20x7 grid with one column per weekday; centre every cell
		grid = wx.grid.Grid(self, pos=(10, 0), size=(750, 500))
		grid.CreateGrid(20, 7)
		for i in range(20):
			for j in range(7):
				grid.SetCellAlignment(i, j, wx.ALIGN_CENTER, wx.ALIGN_CENTER)

		# Column labels: Monday through Sunday
		days = ("星期一", "星期二", "星期三", "星期四", "星期五", "星期六", "星期日")
		for col, day in enumerate(days):
			grid.SetColLabelValue(col, day)

		# Uniform column widths and row heights
		for col in range(7):
			grid.SetColSize(col, 100)
		for row in range(5):
			grid.SetRowSize(row, 90)

		grid.SetCellTextColour("NAVY")

		# Fill the first five rows with search results and mirror each
		# cell's text into its own file under txt/
		data = test.search()
		for i in range(5):
			for j in range(7):
				grid.SetCellValue(i, j, data[i * 7 + j])
				with open('txt/%s.txt' % (i * 7 + j), mode='w', encoding='utf8') as file_handle:
					file_handle.writelines(data[i * 7 + j] + "\n")
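
Running this frame needs a wx.App and an explicit import of the wx.grid submodule, neither of which appears in the excerpt. A minimal bootstrap sketch (the class name ScoreFrame is an assumption):

import wx
import wx.grid  # wx.grid.Grid is not reachable from a bare `import wx`
import test     # assumed: project module whose search() returns 35 cell strings


class ScoreFrame(wx.Frame):
    def __init__(self):
        # the grid-building __init__ from Example 5 goes here
        wx.Frame.__init__(self, parent=None, title="成绩查询", size=(900, 560))


if __name__ == '__main__':
    app = wx.App()
    frame = ScoreFrame()
    frame.Show()
    app.MainLoop()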
Example 6
import pandas as pd
from test import search  # same google-search helper as in Example 3


def google_search_article_links():
    links = []
    # Article titles are read from the last column of article_title.csv
    article_data = pd.read_csv("article_title.csv")
    for value in article_data.iloc[:, -1]:
        # Bias each query toward ResearchGate pages
        search_article = value + "+researchgate"
        # stop=1 yields at most one URL per title
        for j in search(search_article, tld="co.in", num=10, stop=1, pause=2):
            links.append(j)
    return links


result_links = google_search_article_links()
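
For a quick test, article_title.csv only needs titles in its last column; a hypothetical one-row input file could be generated like this (the title is taken from the publication URL in the next example):

import pandas as pd

# Hypothetical input file: one article title per row, titles in the last column
pd.DataFrame({"title": [
    "A robust fractionation method for protein subcellular localization "
    "studies in Escherichia coli",
]}).to_csv("article_title.csv", index=False)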
Example 7
import requests

article_url = 'https://www.researchgate.net/publication/332449458_A_robust_fractionation_method_for_protein_subcellular_localization_studies_in_Escherichia_coli'


def get_proxy():
    # Ask the proxy service for a fresh address and parse it out of the response
    proxy = requests.get('http://34.213.20.241/?Key=PROFEZA').text.split(
        '\\')[1][1:]
    return proxy


try:
    from test import search
except ImportError:
    print("No module named 'google' found")

# Look up the author's page; `author_url_1` is assumed to be defined elsewhere
for j in search(author_url_1, tld="co.in", num=10, stop=1, pause=2):
    result = j
    print(j)


def scrape_author_details(author_url_1):
    # Retry up to 100 times, fetching a fresh proxy after every failure;
    # note: this fetches the module-level `result` found above, not author_url_1
    for attempt in range(100):
        try:
            proxies = {'https': 'https://' + get_proxy()}
            repo = requests.get(result, proxies=proxies, timeout=10)
            print('Our work is done.')
            break
        except Exception:
            continue
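
The retry loop above leans on module-level state (result); the same fetch-through-proxy idea as a self-contained sketch, reusing get_proxy and article_url from the example (the function name and defaults are illustrative):

import requests


def fetch_via_proxy(url, max_attempts=100):
    # Try up to max_attempts proxies; return the first successful response
    for _ in range(max_attempts):
        try:
            proxies = {'https': 'https://' + get_proxy()}
            return requests.get(url, proxies=proxies, timeout=10)
        except requests.RequestException:
            continue  # this proxy failed or timed out; try the next one
    return None


response = fetch_via_proxy(article_url)
if response is not None:
    print(response.status_code)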