def run_Scrapper_0(self, value, companyname, date):
    """Run one full search-and-scrape cycle for *companyname* on *date*.

    Selects one of five pre-built session objects held on ``self``,
    drives the Factiva search form through Selenium, migrates the
    authenticated state to ``requests``, pages through every result,
    and saves the parsed articles to a DataFrame.

    Parameters
    ----------
    value : int
        Session selector: 1 -> ``self.obj``, 2 -> ``self.obj1``,
        3 -> ``self.obj2``, 4 -> ``self.obj3``, 5 -> ``self.obj4``.
    companyname : str
        Company name entered in the search and used when saving results.
    date : str
        Date (range) passed to the Searcher and used when saving results.

    Raises
    ------
    ValueError
        If *value* is not in 1..5.  (Previously an out-of-range value
        crashed with ``AttributeError: 'NoneType' object has no
        attribute 'Post_Factiva_Home_Request'``.)
    """
    # Lazy attribute-name dispatch instead of a five-branch if/elif chain.
    # getattr is only called for the selected entry, so unselected
    # self.objN attributes are never touched (same as the original chain).
    attr_for_value = {1: "obj", 2: "obj1", 3: "obj2", 4: "obj3", 5: "obj4"}
    try:
        obj_local = getattr(self, attr_for_value[value])
    except KeyError:
        raise ValueError(
            f"value must be an int in 1..5, got {value!r}"
        ) from None

    obj_local.Post_Factiva_Home_Request()

    # Fill in and submit the search form via Selenium.
    searcher = Searcher(
        company_name=companyname, date=date, driver=obj_local.driver
    )
    searcher.Select_CustomDateRange()
    searcher.Enter_FromDate()
    searcher.Enter_ToDate()
    searcher.Enter_Company()
    searcher.SubmitSearch()

    # Hand the authenticated Selenium state over to the requests session,
    # then walk every result page.
    scrapper = Scrapper(driver=obj_local.driver, session=obj_local.session)
    scrapper.MigrateSeleniumtoRequests()
    page = 1
    while True:
        scrapper.GetAllArticles()
        scrapper.Parse_Articles()
        # NOTE(review): Navigate_NextPage appears to return truthy when
        # there is no further page — confirm against its definition.
        if scrapper.Navigate_NextPage(count=page):
            break
        page += 1

    scrapper.Save_ToDataFrame(companyname=companyname, date=date)