def __init__(self, log_database, log_collection, execution_id):
    """Record the logging destination and construct the storage helpers.

    :param log_database:   MongoDB database name used for log output
    :param log_collection: MongoDB collection name used for log output
    :param execution_id:   id tying all log entries to one pipeline run
    """
    self.execution_id = execution_id
    self.log_database = log_database
    self.log_collection = log_collection
    # Helper objects for Azure blob storage, MongoDB access and DB logging.
    self.az_blob_mgt = AzureBlobManagement()
    self.mongoDBObject = MongodbOperation()
    self.log_db_writer = App_LoggerDB(execution_id=execution_id)
def __init__(self, execution_id):
    """Initialise training-log destinations and the storage/log helpers.

    :param execution_id: id tying all log entries to one training run
    """
    self.execution_id = execution_id
    # Training logs always go to this fixed database/collection pair.
    self.log_database = "strength_training_log"
    self.log_collection = "stg-training_main_log"
    # Helper objects for DB logging, MongoDB access and Azure blob storage.
    self.log_db_writer = App_LoggerDB(execution_id=execution_id)
    self.mongoDBObject = MongodbOperation()
    self.az_blob_mgt = AzureBlobManagement()
def __init__(self, path, execution_id):
    """Set up prediction raw-data validation paths and storage helpers.

    :param path:         directory containing the incoming batch files
    :param execution_id: id tying all log entries to one prediction run
    """
    self.Batch_Directory = path
    self.execution_id = execution_id
    # Schema definition for prediction files is stored in MongoDB.
    self.collection_name = "strength_schema_prediction"
    self.database_name = "Wafer-sys"
    self.logger_db_writer = App_LoggerDB(execution_id=execution_id)
    self.mongdb = MongodbOperation()
    self.az_blob_mgt = AzureBlobManagement()
    # Azure blob containers receiving validated good/bad prediction files.
    self.good_directory_path = "good-raw-file-prediction-validated"
    self.bad_directory_path = "bad-raw-file-prediction-validated"
def __init__(self, log_database, log_collection, execution_id):
    """Prepare logging/storage helpers and the candidate classifiers.

    :param log_database:   MongoDB database name used for log output
    :param log_collection: MongoDB collection name used for log output
    :param execution_id:   id tying all log entries to one pipeline run
    """
    self.execution_id = execution_id
    self.log_database = log_database
    self.log_collection = log_collection
    # Helper objects for DB logging, Azure blob storage and MongoDB access.
    self.log_db_writer = App_LoggerDB(execution_id=execution_id)
    self.az_blob_mgt = AzureBlobManagement()
    self.mongoDBObject = MongodbOperation()
    # The two classifier candidates this class tunes and compares.
    self.clf = RandomForestClassifier()
    self.xgb = XGBClassifier(objective='binary:logistic')
def __init__(self, log_database, log_collection, execution_id):
    """Prepare logging/storage helpers and the candidate regression models.

    :param log_database:   MongoDB database name used for log output
    :param log_collection: MongoDB collection name used for log output
    :param execution_id:   id tying all log entries to one pipeline run
    """
    self.execution_id = execution_id
    self.log_database = log_database
    self.log_collection = log_collection
    # Helper objects for DB logging, Azure blob storage and MongoDB access.
    self.log_db_writer = App_LoggerDB(execution_id=execution_id)
    self.az_blob_mgt = AzureBlobManagement()
    self.mongoDBObject = MongodbOperation()
    # Candidate estimators evaluated by this class.
    self.linearReg = LinearRegression()
    self.RandomForestReg = RandomForestRegressor()
    self.DecisionTreeReg = DecisionTreeRegressor()
    self.XGBoostReg = XGBRegressor()
    self.AdaboostReg = AdaBoostRegressor()
    # NOTE(review): an SVC (a classifier) is also kept here — presumably
    # legacy; confirm whether it is ever used alongside the regressors.
    self.svm = SVC()
def index():
    """Flask view: on POST, scrape Flipkart reviews for the searched
    product, persist each review to MongoDB, and render the results page;
    on GET, render the search form.

    Returns a rendered template, or the plain string 'something is wrong'
    if any step of the scrape/persist pipeline raises.
    """
    if request.method == 'POST':
        # Search term from the form, with spaces stripped so it can be
        # used directly in the Flipkart query URL and as a collection name.
        searchString = request.form['content'].replace(" ", "")
        print(searchString)
        try:
            print("entered try Block")
            mongodb = MongodbOperation()
            a = mongodb.getDataBaseClientObject()
            # NOTE(review): the cached-results lookup is stubbed out;
            # reviews is forced to -1 so the scrape branch always runs.
            reviews = -1
            print("entered if block")
            if reviews > 0:
                # Cached results exist (currently unreachable — see stub above).
                return render_template('results.html', reviews=reviews)
            else:
                print('entered else block')
                # Search Flipkart for the product and parse the results page.
                flipkart_url = "https://www.flipkart.com/search?q=" + searchString
                uClient = uReq(flipkart_url)
                flipkartPage = uClient.read()
                uClient.close()
                flipkart_html = bs(flipkartPage, "html.parser")
                bigboxes = flipkart_html.findAll(
                    "div", {"class": "_1AtVbE col-12-12"})
                # The first 3 boxes are banners/filters, not products.
                del bigboxes[0:3]
                # Take the first product hit and fetch its detail page.
                box = bigboxes[0]
                productLink = "https://www.flipkart.com" + box.div.div.div.a['href']
                prodRes = requests.get(productLink)
                prod_html = bs(prodRes.text, "html.parser")
                commentboxes = prod_html.find_all('div', {'class': "_16PBlm"})
                # One collection per search term, inside crawlerDB.
                a = mongodb.getDataBaseClientObject()
                database1 = mongodb.createDatabase(a, "crawlerDB")
                table1 = mongodb.createCollectionInDatabase(
                    database1, searchString)
                reviews = []
                # Each comment box may be missing any field; fall back to a
                # placeholder string.  Bare `except:` clauses were narrowed
                # to `except Exception` so SystemExit/KeyboardInterrupt are
                # not swallowed.
                for commentbox in commentboxes:
                    try:
                        name = commentbox.div.div.find_all(
                            'p', {'class': '_2sc7ZR _2V5EHH'})[0].text
                    except Exception:
                        name = 'No Name'
                    try:
                        rating = commentbox.div.div.div.div.text
                    except Exception:
                        rating = 'No Rating'
                    try:
                        commentHead = commentbox.div.div.div.p.text
                    except Exception:
                        commentHead = 'No Comment Heading'
                    try:
                        comtag = commentbox.div.div.find_all(
                            'div', {'class': ''})
                        custComment = comtag[0].div.text
                    except Exception:
                        custComment = 'No Customer Comment'
                    mydict = {
                        "Product": searchString,
                        "Name": name,
                        "Rating": rating,
                        "CommentHead": commentHead,
                        "Comment": custComment
                    }
                    # Persist the review and keep it for rendering.
                    x = mongodb.createOneRecord(searchString, mydict)
                    reviews.append(mydict)
                return render_template('results.html', reviews=reviews)
        except Exception as e:
            # Top-level boundary: report failure without a traceback page.
            print(e)
            return 'something is wrong'
    else:
        return render_template('index.html')
#client = pymongo.MongoClient("mongodb+srv://{0}:{1}@cluster0.tpqna.mongodb.net/Projectdb?retryWrites=true&w=majority") client = pymongo.MongoClient("mongodb+srv://test:[email protected]/Projectdb?retryWrites=true&w=majority") db = client.get_database('Wafer-sys') mydb = client['Wafer-sys'] mycol = mydb["schema-training"] #records = db.new1_db #a=records.count_documents({}) #print(a) """ for testing the class function""" a=MongodbOperation() #c=a.getDataBaseClientObject() #d=a.createDatabase(c,'newentry') #e=a.createCollectionInDatabase(d,'values') record={"Sensor - 3":" float "} record1={"name":"sherwyn"} record3={"name":"leena","rollno": 3223,"dept": "plsql"} record4={"name":"sangetha","rollno": 13223,"dept": "12plsql"} #data1 = [{"code":"2","sum":"10"},{"local":"20"}] #df = pd.DataFrame(data1) #g=a.checkExistingCollection("values", d) #print(g) #d=a.checkDatabase(c,'Projectdb')
def __init__(self, execution_id):
    """Wire up storage/logging helpers and prediction validation paths.

    :param execution_id: id tying all log entries to one prediction run
    """
    # Helper objects for Azure blob storage, MongoDB access and DB logging.
    self.az_blob_mgt = AzureBlobManagement()
    self.mongodb = MongodbOperation()
    self.logger_db_writer = App_LoggerDB(execution_id=execution_id)
    # Blob containers holding validated good/bad prediction raw files.
    self.good_file_path = "good-raw-file-prediction-validated"
    self.bad_file_path = "bad-raw-file-prediction-validated"
def __init__(self, execution_id):
    """Construct the MongoDB and Azure blob helpers for this run.

    :param execution_id: id tying all activity to one pipeline run
    """
    self.mongoDBObject = MongodbOperation()
    self.azureBlobObject = AzureBlobManagement()
    self.execution_id = execution_id
    # (Fixed: removed a redundant trailing `pass` — it is only needed for
    # an otherwise-empty body and was dead code after real statements.)