Example #1
def staff_hr_post(user_id=None, post_type=None):
    if post_type == 'hire_employee':
        employee_name = request.form['employee_name']
        department = int(request.form['employee_department'])
        employee_password = request.form['employee_password']
        employee_idcard = request.form['employee_idcard']
        employee_phone = request.form['employee_phone']
        employee_address = request.form['employee_address']
        employee_position = request.form['employee_position']
        employee_note = request.form['employee_note']
        employee_salary = int(request.form['employee_salary'])
        DatabaseConnection.exec_add_an_employee(
            employee_name, department, employee_password, employee_idcard,
            employee_phone, employee_address, employee_position, employee_note,
            employee_salary)
        return redirect(
            url_for('staff_hr_get',
                    user_id=user_id,
                    get_type='hr_show_staff_info'))
    if post_type == 'fire_employee':
        fired_staff_list = request.form.getlist('items[]')
        print('fired_staff_list:', fired_staff_list)

        transaction.fire_staffs(fired_staff_list)
        return redirect(
            url_for('staff_hr_get',
                    user_id=user_id,
                    get_type='hr_show_staff_info'))
Example #2
def process_complain(user_id_string, complain_type_string, complain_content):
    user_id = int(user_id_string)
    complain_type = int(complain_type_string)
    print('kkk')
    DatabaseConnection.exec_add_new_complain_to_complain_list(
        user_id, complain_type, complain_content)
    print('mmm')
Example #3
def sendEmail(allotedTiming, date):
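    # allotedTiming is assumed to map each time-slot key to a list of recipient
    # e-mail addresses; the position of the key (0-3) selects the shift window below.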
    for index, x in enumerate(allotedTiming):
        for y in allotedTiming.get(x):
            # Each slot index is assumed to cover a consecutive four-hour shift:
            # 6-10 AM, 10 AM-2 PM, 2-6 PM and 6-10 PM.
            if index == 0:
                inTime = '6:00 AM'
                outTime = '10:00 AM'
            elif index == 1:
                inTime = '10:00 AM'
                outTime = '2:00 PM'
            elif index == 2:
                inTime = '2:00 PM'
                outTime = '6:00 PM'
            else:
                inTime = '6:00 PM'
                outTime = '10:00 PM'

            mailingAddress = y
            name, shopId = crud.getOwnerName(mailingAddress)
            modifiedDate = crud.modifyDate(date)
            sendNotification.sendMail(mailingAddress, modifiedDate, index + 1,
                                      inTime, outTime, name, shopId)
        # y ends
    # x ends
    print('All notifications sent successfully.')
Example #4
    def __init__(self):
        # Get the data from the database
        conn = dc.connect()
        housing_data = dc.download_housing_data(conn)
        conn.close()

        price_data = housing_data[
            'price']  # Choose the price data as the target
        housing_data.drop(
            ['price'], axis=1,
            inplace=True)  # Drop the price data from the main feature set
        # Split the dataset into a training set and a test set.
        self.X = housing_data
        self.y = price_data
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(
            self.X, self.y, test_size=0.10, random_state=102)

        # Training the data with Linear Regression gave lower accuracy
        # (mean absolute error: 127497.25764150245, r2 score: 0.6914914635024427),
        # so a RandomForestRegressor is used instead for a more accurate prediction.
        # regressor = LinearRegression()
        # regressor.fit(X_train, y_train)
        # y_predict = regressor.predict(X_test)

        # Use a RandomForestRegressor() to create a prediction
        self.regressor = RandomForestRegressor(random_state=102)
        self.regressor.fit(self.X_train, self.y_train)
        self.y_predict = self.regressor.predict(self.X_test)

        # # Check the accuracy of the prediction with the mean absolute error and the r2 scores
        self.maerr = mean_absolute_error(self.y_test, self.y_predict)
        self.r2 = r2_score(self.y_test, self.y_predict)
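The comments above contrast a LinearRegression baseline with the RandomForestRegressor; a minimal, self-contained sketch of that comparison is shown below, using synthetic make_regression data as a stand-in for the housing table (an assumption made purely for illustration, since the database is not available here).

from sklearn.datasets import make_regression
from sklearn.ensemble import RandomForestRegressor
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_absolute_error, r2_score
from sklearn.model_selection import train_test_split

# Synthetic stand-in for the housing feature matrix and price target.
X, y = make_regression(n_samples=2000, n_features=12, noise=25.0, random_state=102)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.10, random_state=102)

for model in (LinearRegression(), RandomForestRegressor(random_state=102)):
    model.fit(X_train, y_train)
    y_pred = model.predict(X_test)
    print(type(model).__name__,
          "MAE:", mean_absolute_error(y_test, y_pred),
          "r2:", r2_score(y_test, y_pred))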
Example #5
def supplier_info_back_edit(supplier_id=None, info_type=None):
    if info_type == "Information":
        password = request.form['password']
        contect = request.form['contect']
        phone = request.form['phone']
        email = request.form['email']
        address = request.form['address']
        note = request.form['note']
        supplier_id = int(supplier_id)
        DatabaseConnection.exec_change_the_information_supplier(
            supplier_id, password, contect, phone, email, address, note)
        return redirect(
            url_for('supplier_info_back',
                    supplier_id=supplier_id,
                    info_type=info_type))
    elif info_type == "After_sale_service":
        complaint_id_list = [
            int(each) for each in request.form.getlist('complains[]')
        ]
        print(complaint_id_list)
        DatabaseConnection.exec_delete_complain_in_complain_list(
            complaint_id_list)
        return redirect(
            url_for('supplier_info_back',
                    supplier_id=supplier_id,
                    info_type=info_type))
Example #6
def staff_pd_info_back(user_id=None, info_type=None):

    if info_type == "Information":
        staff_pd_tuple = DatabaseConnection.exec_fetch_all_information_from_staffacc(
            user_id)
        return render_template('staff_pd_information.html',
                               user_id=user_id,
                               info_type=info_type,
                               staff_pd_tuple=staff_pd_tuple)
    elif info_type == "Purchase_history":
        staff_pd_tuple = DatabaseConnection.exec_fetch_staff_all_purchase_lists(
            user_id)
        return render_template('staff_pd_purchase_history.html',
                               user_id=user_id,
                               info_type=info_type,
                               staff_pd_tuple=staff_pd_tuple)
    elif info_type == "Supplier_list":
        staff_pd_tuple = DatabaseConnection.exec_fetch_all_suppliers_from_supplieracc(
        )
        return render_template('staff_pd_supplier_list.html',
                               user_id=user_id,
                               info_type=info_type,
                               staff_pd_tuple=staff_pd_tuple)
    elif info_type == "Add_purchase_list":
        return render_template('staff_pd_add_purchase_list.html',
                               user_id=user_id)
def do_day(url):
    try:
        response = requests.get(url, timeout=(
            30, 30
        ))  # timeout is in seconds, first is connecting, second is reading
        response.raise_for_status()
        if response.status_code == 200:
            soup = BeautifulSoup(response.content, 'html.parser')
            # Check if day has no calendar items
            for it in soup.find_all(class_="tribe-events-notices"):
                if (it.find("ul").find("li").contents[0] ==
                        'Não há eventos agendados para '):
                    print("Day has no items")
                    return None

            day_string = soup.find(id="tribe-bar-date")["value"]

            out_going_to_database = []

            for it in soup.find_all(class_="type-tribe_events"):
                item_to_append = PSParsing.parse_event(it, day_string)
                out_going_to_database.append(item_to_append)

            DatabaseConnection.send_calendar_items(out_going_to_database, "PS")

    except Exception as err:
        print(f'Error occurred: {err}')
        DatabaseConnection.send_parsing_error(
            url, DatabaseConnection.entity_dictionary["PS"], err)
        Log.log("PS", err, True)
def inside_nodes_page(node):
    dictionary_to_return = {}
    try:
        response = requests.get(node.find(class_="mec-color-hover")["href"],
                                timeout=(30, 30))
        response.raise_for_status()
        if response.status_code == 200:
            soup = BeautifulSoup(response.content, 'html.parser')

            if len(soup.find_all(class_="mec-single-event-time")) > 0:
                dictionary_to_return["Hour"] = soup.find(
                    class_="mec-single-event-time").find(
                        class_="mec-events-abbr").contents[0]
            if len(soup.find_all(class_="mec-single-event-location")) > 0:
                place = ""
                if len(soup.find_all(class_="author fn org")) > 0:
                    place += soup.find(class_="author fn org").contents[0]
                if len(soup.find_all(class_="mec-address")) > 0:
                    if (place != ""): place += ", "
                    if (len(soup.find(class_="mec-address").contents) > 0):
                        place += soup.find(class_="mec-address").contents[0]
                dictionary_to_return["Place"] = place

    except Exception as err:
        print(f'Error occurred: {err}')
        DatabaseConnection.send_parsing_error(
            'https://www.psd.pt/atualidade-agenda/',
            DatabaseConnection.entity_dictionary["PSD"], err)

    return dictionary_to_return
def submit_err():
    """submit the error reports"""
    report = report_error_text.get("1.0", tk.END)
    de = data_error_var.get()
    ue = UI_error_var.get()
    fe = function_error_var.get()
    dc.submit_error_report(report, de, ue, fe, dc.connect())
    messagebox.showinfo("info", "thank you for reporting errors")
Example #10
def supplier(supplier_id=None):
    orders_num = len(
        DatabaseConnection.exec_fetch_supplier_all_purchase_lists(supplier_id))
    complains_num = len(DatabaseConnection.exec_fetch_all_complaints())
    return render_template("supplier.html",
                           supplier_id=supplier_id,
                           orders_num=orders_num,
                           complains_num=complains_num)
Example #11
def run_test_crawl(crawler_iterator):
    for caption in crawler_iterator:
        print(caption.get_qm_id(), "--", caption.meme.meme_name)
        image_source = caption.image_source
        print("url:", caption.get_url())
        print("image_source:", image_source)
        print(DatabaseConnection.insertCrawlData(caption.get_url(), caption.get_filename(),
                                                 caption.get_rating(), caption.get_views()))
Example #12
def staff_pd_get(user_id=None):
    purchlists_num = len(
        DatabaseConnection.exec_fetch_staff_all_purchase_lists(user_id))
    suppliers_num = len(
        DatabaseConnection.exec_fetch_all_suppliers_from_supplieracc())
    return render_template("staff_purchase_department.html",
                           user_id=user_id,
                           purchlists_num=purchlists_num,
                           suppliers_num=suppliers_num)
Example #13
def money_management():
    start = time.perf_counter()
    DatabaseConnection.database.connect_database()
    accounts = DatabaseConnection.exec_fetch_all_accounts()
    types = DatabaseConnection.exec_fetch_all_types()
    spenders = DatabaseConnection.exec_fetch_all_spenders()
    DatabaseConnection.database.disconnect_database()
    end = time.perf_counter()
    print("Load Time: " + str(end - start) + "s")
    return render_template('moneymanagement.html', account_list=accounts, type_list=types, spender_list=spenders)
    def read_file(self):
        try:
            self.logger.info("Opening log file")
            log = open(config.logname, "r")
        except IOError:
            self.logger.error("Unable to open file right now. Please check that you "
                              "have permission to read this file and that the log file path is correct")
            return

        self.logger.info("Started analysis of log {} at {}".format(log.name, datetime.datetime.now()))
        connection = DatabaseConnection.init_db_connection()
        lines = log.readlines()

        lines = self.__crop_logfile(lines)
        self.logger.info("Log records to analyse: {}.".format(len(lines)))
        for item in lines:
            self.__parse_message(item)

        if not log.closed:
            log.close()
            self.logger.info(
                """Log analysis completed at {}. File was successfully closed.""".format(datetime.datetime.now()))
        self.__insert_data(connection)
        self.__clear_data_lists()

        if connection:
            connection.close()
Example #15
    def test_download_housing_data_from_database(self):
        df = dc.download_housing_data(connection)

        self.assertTrue(isinstance(df,
                                   pandas.DataFrame))  # Must be a DataFrame
        self.assertGreaterEqual(
            len(df), 15000)  # Must have around the amount of data in the table
Example #16
	def __init__(self):
		self.__username = u""
		self.__idealname = u""
		self.__namelist = []
		self.__countlist = []
		self.__DBinstance = DatabaseConnection.DatabaseConnection()
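		# The three tables below hold the Hangul initial (chosung), medial (jungsung)
		# and final (jongsung) jamo; the parallel *_count lists appear to store a
		# per-jamo stroke count used for name scoring (an assumption, not stated here).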
		self.__chosung_list= [
		u'ㄱ', u'ㄲ', u'ㄴ', u'ㄷ',
		u'ㄸ', u'ㄹ', u'ㅁ', u'ㅂ',
		u'ㅃ', u'ㅅ', u'ㅆ', u'ㅇ',
		u'ㅈ', u'ㅉ', u'ㅊ', u'ㅋ',
		u'ㅌ', u'ㅍ', u'ㅎ']
		self.__jungsung_list = [
		u'ㅏ', u'ㅐ', u'ㅑ', u'ㅒ',
		u'ㅓ', u'ㅔ', u'ㅕ', u'ㅖ',
		u'ㅗ', u'ㅘ', u'ㅙ', u'ㅚ',
		u'ㅛ', u'ㅜ', u'ㅝ', u'ㅞ',
		u'ㅟ', u'ㅠ', u'ㅡ', u'ㅢ',
		u'ㅣ']
		self.__jongsung_list = [
		u'', u'ㄱ', u'ㄲ', u'ㄳ',
		u'ㄴ', u'ㄵ', u'ㄶ', u'ㄷ',
		u'ㄹ', u'ㄺ', u'ㄻ', u'ㄼ',
		u'ㄽ', u'ㄾ', u'ㄿ', u'ㅀ',
		u'ㅁ', u'ㅂ', u'ㅄ', u'ㅅ',
		u'ㅆ', u'ㅇ', u'ㅈ', u'ㅊ',
		u'ㅋ', u'ㅌ', u'ㅍ', u'ㅎ']
		self.__chosung_count= [2,4,2,3,6,5,4,4,8,2,4,1,3,6,4,3,4,4,3]
		self.__jungsung_count = [2,3,3,4,2,3,3,4,2,4,5,3,3,2,4,5,3,3,1,2,1]
		self.__jongsung_count = [0,2,4,4,2,5,3,3,5,7,9,9,7,9,9,8,4,4,6,2,4,1,3,4,3,4,4,3]
Example #17
def login():
    if request.method == 'GET':
        return render_template('login.html')
    if request.method == 'POST':
        if "username" in request.form and "password" in request.form:
            username = request.form['username']
            password = request.form['password']
            DatabaseConnection.database.connect_database()
            userid, userpermission = DatabaseConnection.exec_user_login(
                username, password)
            DatabaseConnection.database.disconnect_database()
        else:
            username = None
            userid = None
            userpermission = None
        if userid is not None:
            if userpermission == 1:
                login_user(User(id=userid, username=username))
                flash('Logged in successfully.')
                return redirect(url_for('dashboard'))
            else:
                flash('Permission denied.')
                return render_template('login.html')
        else:
            flash('Wrong username or password!')
            return render_template('login.html')
Example #18
def main():
    #pass, user, db, host
    db = DBC.DatabaseFacade('ooad', 'plant', 'OOADProject', '127.0.0.1')
    #need to add an appropriate check for setting up the database
    #db.SetUp()
    #db.AddPlantRecords()
    #return

    #This holds all of the threads
    threads = []
    #conditional variable to protect the database
    conditionalvar = threading.Condition()
    #a sentinel for running the app
    running = True
    #conditional variable to protect the sentinel
    isrun = threading.Condition()

    #create the web app as a thread
    t = threading.Thread(target=setUpWebApp,
                         args=(db, conditionalvar, isrun, running))
    threads.append(t)
    t.start()

    #create a thread that does data input
    d = threading.Thread(target=setUpDataService,
                         args=(db, conditionalvar, isrun, running))
    threads.append(d)
    d.start()

    #create a thread that listens to the user
    u = threading.Thread(target=userListner,
                         args=(db, conditionalvar, isrun, running))
    threads.append(u)
    u.start()
Example #19
    def put(self, email, name, shop_type, shop_name, address, phone, mon, tue,
            wed, thu, fri, sat, sun, b1, b2, b3, b4):
        email = email.strip().lower()
        name = name.strip().lower()
        shop_type = shop_type.strip().lower()
        shop_name = shop_name.strip().lower()
        address = address.strip().lower()
        phone = phone.strip()
        mon = mon.strip()
        tue = tue.strip()
        wed = wed.strip()
        thu = thu.strip()
        fri = fri.strip()
        sat = sat.strip()
        sun = sun.strip()
        b1 = b1.strip()
        b2 = b2.strip()
        b3 = b3.strip()
        b4 = b4.strip()

        # check phone
        if validate.validatePhone(phone) != 'True':
            return asJson(validate.validatePhone(phone))

        # update
        return asJson(
            batch.updateOwner(email, name, shop_type, shop_name, address,
                              phone, mon, tue, wed, thu, fri, sat, sun, b1, b2,
                              b3, b4))
    def __show_anomalies():
        connection = DatabaseConnection.init_db_connection()
        cursor = connection.cursor()
        cursor.execute("select username, level_id, fail_count, success_count, "
                       "value, dateOccurred_from, dateOccurred_to "
                       "from anomaly join userData on userData.id = anomaly.data_id "
                       "join user on user.id = userData.user_id "
                       "where type=1")
        output = cursor.fetchall()

        cursor.execute("select ip_address, level_id, fail_count, success_count, "
                       "value, dateOccurred_from, dateOccurred_to "
                       "from anomaly join addressData on addressData.id = anomaly.data_id "
                       "join address on address.id = addressData.ip_address_id "
                       "where type=2")
        output += cursor.fetchall()
        print('_' * 109)
        print("|{:15} | {:6} | {:10} | {:14} | {:12} | {:16} | {:16}|"
              .format("User or Ip", "Level", "Fail rate", "Success rate", "Metric value", "Date from", "Date to"))
        for item in output:
            if item[5]:
                print("|{:15} | {:6} | {:10} | {:14} | {:12} | {:16} | {:16}|"
                      .format(item[0], item[1], item[2], item[3], item[4],
                              item[5].strftime("%Y-%m-%d %H:%M"), item[6].strftime("%Y-%m-%d %H:%M")))
            else:
                print("|{:15} | {:6} | {:10} | {:14} | {:12} | {:16} | {:16}|"
                      .format(item[0], item[1], item[2], item[3], item[4],
                              item[5], item[6]))
        print('_' * 109)
Example #21
    def login(self):

        # Connect to the database
        connection = DatabaseConnection.connectdb()
        valid = 'false'

        try:
            with connection.cursor() as cursor:
                sql = "SELECT * FROM users WHERE username = %s"
                try:
                    cursor.execute(sql, (self.username,))
                    result = cursor.fetchall()
                    for row in result:
                        if row['username'] == self.username:
                            if row['password'] == self.password:
                                print("password is matching")
                                valid = 'true'
                                return json.dumps({
                                    'valid': valid,
                                    'userid': row['userId']
                                })
                            else:
                                print("invalid password")
                                return json.dumps({'valid': valid})
                        else:
                            print("user not exist")
                            return json.dumps({'valid': valid})

                except Exception:
                    print("Oops! Something went wrong")
                    return json.dumps({'valid': valid})

            connection.commit()
        except Exception:
            print('Connection to db failed')
Example #22
    def get(self, email, tense):
        """
        :param tense: 'past', 'present' or 'future'
        """
        tense = tense.strip().lower()
        email = email.strip().lower()

        return batch.getBatchAllotmentDetails(email, tense)
Example #23
    def put(self, of, email):
        """
        :param of: 'owner' or 'user'
        """
        of = of.strip().lower()
        email = email.strip().lower()

        return asJson(batch.deactivateAccount(of, email))
def html_page_parser(argv):
    # get the HTML source code of the page
    html = urllib2.urlopen(argv).read().decode("utf-8")
    soup = BeautifulSoup(html, "html5lib")
    tags = soup.find_all('img')
    # title=soup.find_all('title')
    title = soup.title.string
    imgLinks = list()
    caption = list()
    if len(tags) == 0:
        print("This website doesn't contain any image in its content!")
        exit(0)
    elif len(tags) > 0:
        for element in tags:
            try:
                src = extractImageSrc(str(element))[0]
                alt = extractImageSrc(str(element))[1]
            except IndexError:
                continue  # skip elements whose src/alt cannot be extracted
            if src.endswith((".jpg", ".png", ".jpeg", ".gif", ".svg")):
                if len(alt) < 1:
                    alt = title
                if src.startswith("http") or src.startswith("www."):
                    imgLinks.append(src)
                    caption.append(alt)
                elif src.startswith("//upload."):
                    imgLinks.append("https:" + src)
                    caption.append(alt)
                else:
                    caption.append(alt)
                    imgLinks.append(checkAddress(argv[1]) + src)

    current_directory_path = os.path.dirname(os.path.realpath(__file__))
    current_directory_path = os.path.join(current_directory_path, 'result')
    global GlobalContor
    for i in imgLinks:
        filename = "HtmlImg" + str(GlobalContor) + getImgExtension(i)
        filepath = os.path.join('result', filename)
        GlobalContor += 1
        try:
            urllib2.urlretrieve(i, filepath)
        except Exception:
            pass  # skip images that cannot be downloaded

    images_list = get_images(current_directory_path)

    finalResult = []
    contor = 0

    for i in images_list:
        result_json = ImageSerialization.get_info_from_image(i)
        result_json['caption'] = caption[contor]
        finalResult.append(result_json)
        contor += 1

    db = DatabaseConnection.DatabaseConnection()
    db.insert_entry(finalResult)
def query_saved_data():
    data_table.delete(0, tk.END)
    data = dc.download_saved_data(dc.connect())
    for index, row in data.iterrows():
        data_row = (f"bedrooms: {row['bedrooms']} "
                    f"bathrooms: {row['bathrooms']} "
                    f"sqft_living: {row['sqft_living']} "
                    f"floors: {row['floors']} "
                    f"waterfront: {row['waterfront']} "
                    f"view: {row['view']} "
                    f"grade: {row['grade']} "
                    f"sqft_basement: {row['sqft_basement']} "
                    f"yr_built: {row['yr_built']} "
                    f"yr_renovated: {row['yr_renovated']} "
                    f"lat: {row['lat']} "
                    f"long: {row['long']} "
                    f"price: ${row['price']} ")
        data_table.insert(tk.END, data_row)
Example #26
class Discovery:
    """Purpose is to automatically discover Fog devices through a client push system"""

    #Threaded function. TODO: add a handler to kill this thread correctly on SIGKILL.
    def start_rest(db):
        rest = RestService(db)

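    # Note: the two statements below execute while the class body itself is being
    # evaluated, so the REST service thread starts as a side effect of defining Discovery.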
    db = DatabaseConnection()
    threading.Thread(target=start_rest, args=(db,)).start()
Example #27
    def __init__(self):
        self.__mode = u""
        self.__limitnum = 0
        self.__indexnum = 0
        self.__ideallist = []
        self.__DBinstance = DatabaseConnection.DatabaseConnection()
        self.__dirname = str(id(self))
        self.__imgname = id(self)
        os.mkdir("/home/ec2-user/database_project/static/" + self.__dirname, 0o777)
Example #28
def staff_login():
    username = request.form['username']
    password = request.form['password']

    if DatabaseConnection.exec_staff_login(username, password):
        return redirect(url_for('staffone', username=username))

    flash("The account does not exist, please retype it!")
    return redirect('/staff_login')
Example #29
def process_purchase(user_id_string, id_list_string, number_list_string):
    # TODO: functions without additional notes have no return value
    user_id = int(user_id_string)
    id_list = list(map(eval, id_list_string))
    number_list = list(map(eval, number_list_string))
    # print id_list
    # print number_list
    shop_id = DatabaseConnection.exec_add_new_shopping_record(user_id)
    # order_list = zip(id_list, number_list)
    # print order_list
    number2_list = []
    for id in id_list:
        number2_list.append(3)

    print(number2_list)
    order_list = list(zip(id_list, number2_list))
    print(order_list)
    DatabaseConnection.exec_add_new_shopping_list(shop_id, order_list)
    DatabaseConnection.exec_change_the_credit_customer(user_id, 10)
    def save():
        """
        Function command for the save button that collects the data to be sent to the database.
        Mirrors some of the functionality of predict to ensure that the validation occurs at each stage and
        that no bad data gets submitted.
        """
        lat_valid = str(lat_valid_var.get())
        long_valid = str(long_valid_var.get())

        # Only performs the prediction if the lat and long are valid
        if lat_valid == 'valid' and long_valid == 'valid':
            bedrooms = float(bedrooms_entry.get())
            bathrooms = float(bathrooms_entry.get())
            sqft_living = float(sqft_living_entry.get())
            floors = float(floors_picker.get())
            waterfront = float(waterfront_picker.get())
            view = float(view_entry.get())
            grade = float(grade_picker.get())
            sqft_basement = float(sqft_basement_entry.get())
            yr_built = int(yr_built_picker.get())
            yr_renovated = int(yr_renovated_picker.get())
            lat = float(lat_entry.get())
            long = float(long_entry.get())
            fields_list = [
                bedrooms, bathrooms, sqft_living, floors, waterfront, view,
                grade, sqft_basement, yr_built, yr_renovated, lat, long
            ]

            price = regressor.predict_house_price(fields_list)

            price_var = "House Price: ${:,.2f}".format(price)

            price_predicted_var.set(price_var)
            if price:
                dc.insert_data_into_saved(bedrooms, bathrooms, sqft_living,
                                          floors, waterfront, view, grade,
                                          sqft_basement, yr_built,
                                          yr_renovated, lat, long, price)
                messagebox.showinfo("alert", "price was saved")

        else:
            not_valid = 'All fields must be valid for a prediction'
            price_predicted_var.set(not_valid)
def upload_to_db_post_processed_data():
    """
    Calls the database upload command after the features are filtered then the DataFrame is
    uploaded to the database.

    Chosen features: price, bedrooms, bathrooms, sqft_living, floors, waterfront, view, grade,
    sqft_basement, yr_built, yr_renovated, lat, long

    Not chosen: id, date, sqft_lot, condition, sqft_above, zipcode, sqft_living15, sqft_lot15
    These were not chosen because they either have a low correlation with the price or are
    redundant with another feature.
    """
    house_features = [
        'price', 'bedrooms', 'bathrooms', 'sqft_living', 'floors',
        'waterfront', 'view', 'grade', 'sqft_basement', 'yr_built',
        'yr_renovated', 'lat', 'long'
    ]
    house_data = house_data_raw[house_features]
    dc.upload_processed_data(house_data, conn)
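A hedged sketch of how the feature choice described in the docstring above could be sanity-checked: rank the raw table's numeric columns by the absolute value of their correlation with price. house_data_raw is assumed to be the same DataFrame referenced above; the helper name is hypothetical and for illustration only.

def show_price_correlations(house_data_raw):
    # Absolute Pearson correlation of every numeric column with the price target,
    # strongest first; 'price' itself appears at the top with a value of 1.0.
    correlations = house_data_raw.select_dtypes('number').corr()['price'].abs()
    print(correlations.sort_values(ascending=False))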
Example #32
    def test_register_user_into_db(self):
        username_test = 'test1111XXX'
        password_test = 'test1111AAA'

        cursor = connection.cursor()  # Connect to database
        dc.register_user(username_test, password_test,
                         connection)  # Register a test user

        test_params = [(username_test, password_test)
                       ]  # Format the test data for comparison
        # Obtain the data of the test user from the database and save into a variable
        data_from_db = cursor.execute(
            f"SELECT username, password FROM users WHERE username = '{username_test}'"
        ).fetchall()
        # Remove the test data from the db
        cursor.execute(f"DELETE FROM users WHERE username = '{username_test}'")
        connection.commit()  # Commit changes to database

        # Compare the saved original data to the database obtained data
        self.assertEqual(test_params, data_from_db)
Example #33
def customer_login():
    username = request.form['username']
    password = request.form['password']

    (is_valid,
     user_tuple) = DatabaseConnection.exec_customer_login(username, password)
    if is_valid:
        user_id = user_tuple[0][0]
        return redirect(url_for('customer', user_id=user_id))

    flash("The account does not exist, check it again.")
    return redirect('/customer_login')
    def __stats_insert_data(self):

        connection = DatabaseConnection.init_db_connection()
        cursor = connection.cursor()

        cursor.execute("SELECT count(*) from user")
        result = cursor.fetchone()
        self.stats_label1.set_text("Total number of users logged during running of application: {}"
                                   .format(int(result[0])))

        cursor.execute("SELECT count(*) from address")
        result = cursor.fetchone()
        self.stats_label2.set_text("Total number of IP addresses used for login to system: {}"
                                   .format(int(result[0])))

        cursor.execute("SELECT SUM(success_count) from userData")
        result = cursor.fetchone()
        self.stats_label3.set_text("Total number of successful logins to system: {}"
                                   .format(int(result[0])))

        cursor.execute("SELECT SUM(fail_count) from userData")
        result = cursor.fetchone()
        self.stats_label4.set_text("Total number of failed logins to system: {}"
                                   .format(int(result[0])))

        cursor.execute("SELECT count(*) from anomaly")
        result = cursor.fetchone()
        self.stats_label5.set_text("Total number of detected anomalies: {}"
                                   .format(int(result[0])))

        cursor.execute("SELECT sum(success_count+fail_count) from userData")
        result = cursor.fetchone()
        self.stats_label6.set_text("Total number of analysed log records: {}"
                                   .format(int(result[0])))

        cursor.execute("SELECT count(*) from blockingAccount where status='blocked'")
        result = cursor.fetchone()
        self.stats_label7.set_text("Number of blocked user accounts: {}"
                                   .format(int(result[0])))

        cursor.execute("SELECT count(*) from blockingAddress where status='blocked'")
        result = cursor.fetchone()
        self.stats_label8.set_text("Number of blocked IP addresses: {}"
                                   .format(int(result[0])))
    def __show_address_data():
        connection = DatabaseConnection.init_db_connection()
        cursor = connection.cursor()
        cursor.execute("select ip_address, fail_count, success_count, "
                       "dateOccurred_from, dateOccurred_to from addressData "
                       "join address on addressData.ip_address_id = address.id")
        output = cursor.fetchall()
        print('_' * 84)
        print("|{:15} | {:10} | {:14} | {:16} | {:16}|"
              .format("Ip address", "Fail rate", "Success rate", "Date from", "Date to"))

        for item in output:
            if item[3]:
                print("|{:15} | {:10} | {:14} | {:16} | {:16}|"
                      .format(item[0], item[1], item[2], item[3].strftime("%Y-%m-%d %H:%M"),
                              item[4].strftime("%Y-%m-%d %H:%M")))
            else:
                print("|{:15} | {:10} | {:14} | {:16} | {:16}|"
                      .format(item[0], item[1], item[2], item[3], item[4]))
        print('_' * 84)
    def create_anomaly_model(self, level):
        store = Gtk.ListStore(str, str, str, str, str, str)
        connection = DatabaseConnection.init_db_connection()
        cursor = connection.cursor()
        cursor.execute("select username, fail_count, success_count, "
                       "value, dateOccurred_from, dateOccurred_to "
                       "from anomaly join userData on userData.id = anomaly.data_id "
                       "join user on user.id = userData.user_id "
                       "where type=1 and level_id = {}".format(level))
        output = cursor.fetchall()

        cursor.execute("select ip_address, fail_count, success_count, "
                       "value, dateOccurred_from, dateOccurred_to "
                       "from anomaly join addressData on addressData.id = anomaly.data_id "
                       "join address on address.id = addressData.ip_address_id "
                       "where type=2 and level_id = {}".format(level))
        output += cursor.fetchall()
        for item in output:
            store.append([str(item[0]), str(item[1]), str(item[2]), str(item[3]), str(item[4]), str(item[5])])
        return store
    def __start_unblocking_thread(self):
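        # Daemon loop: every 15 seconds, release any accounts or IP addresses whose
        # scheduled unblock time has already passed.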
        self.logger.info("Starting user and IP address unblocking daemon.")
        cursor = DatabaseConnection.init_db_connection().cursor()
        user_blocker = UserBlocker()
        address_blocker = AddressBlocker()
        while True:
            # self.logger.info("Executing unblocking iteration")
            cursor.execute("Select blockingAccount.id, username from blockingAccount "
                           "join user on user.id = blockingAccount.user_id "
                           "where date_unblocked < NOW() and status='blocked'")
            output = cursor.fetchall()
            for item in output:
                user_blocker.unblock_user(item[1], item[0])

            cursor.execute("Select blockingAddress.id, ip_address from blockingAddress "
                           "join address on address.id = blockingAddress.ip_address_id "
                           "where date_unblocked < NOW() and status='blocked'")
            output = cursor.fetchall()
            for item in output:
                address_blocker.unblock_address(item[1], item[0])

            time.sleep(15)
    def compute_metrics(self):
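        # Two passes over unprocessed login data (per-user, then per-IP address): each
        # record is marked as processed and turned into an Anomaly from its success/fail
        # counts; when the anomaly is valid it is stored or updated, an alert is sent and,
        # at level 3, the offending user or address is blocked (if blocking is enabled).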
        self.__logger.info("Search for anomalies started.")
        __user_blocker = None
        __address_blocker = None
        connection = DatabaseConnection.init_db_connection()
        c = connection.cursor()
        c.execute("SELECT userData.id, fail_count, success_count, username "
                  "from userData "
                  "join user on user.id = userData.user_id "
                  "where metric_set=0")
        user_data_to_analyse = c.fetchall()
        self.__logger.info("User records to recompute: {}".format(c.rowcount))

        for record in user_data_to_analyse:
            c.execute("UPDATE userData set metric_set=1 where id={}".format(int(record[0])))
            fail_count = record[1]
            success_count = record[2]
            anomaly = Anomaly(success_count, fail_count)

            if anomaly.is_valid:
                self.__logger.warn("Anomaly detected. Checking if existing anomaly should be updated, or new created.")
                c.execute("SELECT id from anomaly where data_id = {} and type=1".format(int(record[0])))
                existing_anomaly = c.fetchone()
                if existing_anomaly:
                    self.__logger.info("Updating anomaly.")
                    self.__update_anomaly(existing_anomaly[0], anomaly, c)
                else:
                    self.__logger.info("Inserting new anomaly.")
                    self.__insert_anomaly(record[0], anomaly, c, 1)
                self.__send_alert(anomaly, record[3])
                self.__logger.info("New anomaly data stored. Alert was sent according to level of anomaly")

                if anomaly.level == 3 and config.user_blocking_enabled:
                    if not __user_blocker:
                        __user_blocker = UserBlocker()
                    __user_blocker.block_user(record[3])

        c.execute("SELECT addressData.id, fail_count, success_count, ip_address "
                  "from addressData "
                  "join address on address.id = addressData.ip_address_id "
                  "where metric_set=0")
        ip_data_to_analyse = c.fetchall()
        self.__logger.info("Ip records to recompute: {}".format(c.rowcount))

        for record in ip_data_to_analyse:
            c.execute("UPDATE addressData set metric_set=1 where id={}".format(int(record[0])))
            fail_count = record[1]
            success_count = record[2]
            anomaly = Anomaly(success_count, fail_count)

            if anomaly.is_valid:
                self.__logger.info("Anomaly detected. Checking if existing anomaly should be updated, or new created.")
                c.execute("SELECT id from anomaly where data_id = {} and type=2".format(int(record[0])))
                existing_anomaly = c.fetchone()
                if existing_anomaly:
                    self.__logger.info("Updating anomaly.")
                    self.__update_anomaly(existing_anomaly[0], anomaly, c)
                else:
                    self.__logger.info("Inserting new anomaly.")
                    self.__insert_anomaly(record[0], anomaly, c, 2)
                self.__send_alert(anomaly, record[3])
                self.__logger.info("New anomaly data stored. Alert was sent according to level of anomaly")

                if anomaly.level == 3 and config.address_blocking_enabled:
                    if not __address_blocker:
                        __address_blocker = AddressBlocker()
                    __address_blocker.block_address(record[3])
    def __define_statistics_window(self):
        self.stats_window = Gtk.Window()
        self.stats_window.set_size_request(800, 860)
        background_color = Gdk.color_parse('#bfbfbf')
        self.stats_window.modify_bg(Gtk.StateType.NORMAL, background_color)
        self.stats_window.connect("destroy", self.__on_close_stats)
        self.stats_window.set_resizable(False)
        self.stats_window.set_title("Statistics information")

        self.close_btn = Gtk.Button("Close")
        self.close_btn.set_size_request(150, 40)
        self.close_btn.set_tooltip_text("Close this window.")
        self.close_btn.connect("clicked", self.__on_close_stats)

        self.stats_label1 = Gtk.Label()
        self.stats_label2 = Gtk.Label()
        self.stats_label3 = Gtk.Label()
        self.stats_label4 = Gtk.Label()
        self.stats_label5 = Gtk.Label()
        self.stats_label6 = Gtk.Label()
        self.stats_label7 = Gtk.Label()
        self.stats_label8 = Gtk.Label()

        graph_container1 = Gtk.VBox(False, 8)
        scroll_window1 = Gtk.ScrolledWindow()
        graph_container1.pack_start(scroll_window1, True, True, 0)
        graph_container1.set_size_request(800, 220)
        figure1 = plt.figure(figsize=[0.7, 0.7])
        axis1 = figure1.add_subplot(111)
        connection = DatabaseConnection.init_db_connection()
        cursor = connection.cursor()
        cursor.execute("SELECT count(*) from anomaly where level_id = 3")
        third_lvl_count = cursor.fetchone()
        cursor.execute("SELECT count(*) from anomaly where level_id = 2")
        second_lvl_count = cursor.fetchone()
        cursor.execute("SELECT count(*) from anomaly where level_id = 1")
        first_lvl_count = cursor.fetchone()

        labels = 'Critical level anomalies: {}'.format(int(third_lvl_count[0])),\
                 'Medium level anomalies: {}'.format(int(second_lvl_count[0])), \
                 'Low level anomalies: {}'.format(int(first_lvl_count[0]))
        sizes = [int(third_lvl_count[0]), int(second_lvl_count[0]), int(first_lvl_count[0])]
        colors = ['red', 'orange', 'yellow']
        explode = (0.03, 0.03, 0.03)
        axis1.pie(sizes, explode=explode, labels=labels, colors=colors, shadow=True, startangle=10)
        axis1.set_title("Graphical view of detected anomalies")
        axis1.axis('equal')
        axis1.plot()
        canvas2 = FigureCanvas(figure1)
        scroll_window1.add_with_viewport(canvas2)

        graph_container2 = Gtk.VBox(False, 8)
        scroll_window2 = Gtk.ScrolledWindow()
        graph_container2.pack_start(scroll_window2, True, True, 0)
        graph_container2.set_size_request(800, 400)
        figure2 = plt.figure(figsize=[0.6, 0.6])
        axis2 = figure2.add_subplot(211)
        axis2.set_title("Graphical view of logging process in time.\n Red = Failed logins. Green = Successful logins.")
        cursor.execute(" select concat(concat(dateOccurred_from, ' - '), time_format(dateOccurred_to,'%H:%i'))"
                       " as Time, sum(success_count), sum(fail_count) from userData where dateOccurred_from is not NULL "
                       " group by dateOccurred_from order by dateOccurred_from ")
        output = cursor.fetchall()
        dates = [(r[0]) for r in output]
        success_values = [int(r[1]) for r in output]
        fail_values = [int(r[2]) for r in output]

        x = range(len(dates))
        # use number instead of dates in case of too many x values
        if len(x) < 30:
            axis2.set_xticks(x)
            axis2.set_xticklabels(dates, rotation=50)
        axis2.set_ylabel("Number of login procedures", rotation='vertical')
        axis2.set_xlabel("Date and time", rotation='horizontal')
        axis2.plot(x, success_values, "yo-")
        axis2.plot(x, fail_values, "r.-")
        canvas2 = FigureCanvas(figure2)
        scroll_window2.add_with_viewport(canvas2)

        location = Gtk.Fixed()
        location.put(self.close_btn, 630, 810)
        location.put(self.stats_label1, 10, 20)
        location.put(self.stats_label2, 10, 40)
        location.put(self.stats_label3, 10, 60)
        location.put(self.stats_label4, 10, 80)
        location.put(self.stats_label5, 10, 100)
        location.put(self.stats_label6, 10, 120)
        location.put(self.stats_label7, 10, 140)
        location.put(self.stats_label8, 10, 160)
        location.put(graph_container1, 10, 190)
        location.put(graph_container2, 30, 410)
        self.stats_window.add(location)
    def __init__(self):
        self.logger = CustomLogger.setup_logger(config.outlog)
        DatabaseConnection.check_database()
        self.start_unblocking()
    def __init__(self):
        self.__logger = logging.getLogger('SecurityMetricIDS')
        self.__logger.info("Initializing address blocking utility.")
        self.__dev_null = open(os.devnull, 'w')
        self.cursor = DatabaseConnection.init_db_connection().cursor()