Example #1
    def test_singleton_property(self):
        default_fname = "logs/log01.log"
        fname1 = "logs/file1.log"
        fname2 = "logs/file2.log"
        fname3 = "logs/file3.log"

        log1 = Logger()

        log2 = Logger()

        log3 = Logger()

        # singleton logger will have only 1 instance;
        # since no filename has been set yet, every object
        # reports the default filename, i.e. 'default_fname'
        self.assertEqual(log1.filename, default_fname)
        self.assertEqual(log2.filename, default_fname)
        self.assertEqual(log3.filename, default_fname)

        log1.filename = fname1
        log2.filename = fname2
        log3.filename = fname3

        # singleton logger will have only 1 instance
        # each object's filename will be the last
        # filename set i.e. 'fname3'.
        self.assertEqual(log1.filename, fname3)
        self.assertEqual(log2.filename, fname3)
        self.assertEqual(log3.filename, fname3)

        # due to the singleton property, all objects are the same instance.
        self.assertEqual(log1, log2)
        self.assertEqual(log3, log2)
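A minimal sketch of a singleton Logger that would satisfy this test. The default filename "logs/log01.log" is taken from the test itself; the class layout and attribute names are assumptions, not the actual logger_class implementation.

class Logger:
    _instance = None

    def __new__(cls, filename="logs/log01.log"):
        # create the single shared instance on the first call, reuse it afterwards
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            cls._instance.filename = filename
        return cls._instance

Because every call returns the same object, assigning fname3 last leaves all three handles reporting fname3, and the final equality assertions compare one and the same instance.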
Example #2
    def test_info_logger(self):
        message = "test_logger is working?"
        # make sure logs directory is created
        if not os.path.isdir("logs"):
            os.makedirs("logs")
            print("directory created")
        log = Logger()
        log.info(message)

        with open(log.filename, "r") as log_file:
            log_lines = log_file.readlines()

        # remove the file so the test can be rerun
        os.remove(log.filename)

        # get last line
        last_log_line = log_lines[-1].rstrip("\n")

        self.assertEqual(last_log_line, "[INFO] " + message)
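The assertion above fixes the record format as a plain "[INFO] " prefix followed by the message. A minimal sketch of a write path consistent with that format, assuming the filename handling from the singleton sketch above (the class name FileLogger and its layout are hypothetical, not the real logger_class):

import os

class FileLogger:
    def __init__(self, filename="logs/log01.log"):
        self.filename = filename
        # make sure the log directory exists before the first write
        log_dir = os.path.dirname(self.filename)
        if log_dir:
            os.makedirs(log_dir, exist_ok=True)

    def info(self, message):
        # append one "[INFO] <message>" line to the log file
        with open(self.filename, "a") as log_file:
            log_file.write("[INFO] " + message + "\n")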
Example #3
def send_alert(message):
    m_log = Logger(logger_name='ssh_logger', filename=LogPath)
    headers = {'Content-Type':'application/x-www-form-urlencoded'}
    msg = AlertContent + message
    payload = {'text': AlertTitle, 'desp': msg}
    try:
        resp = requests.post(ADDR, data=payload, headers=headers)
    except requests.exceptions.RequestException as e:
        m_log.info("send log failed: %s" %payload)
        m_log.info("send failed reason: %s" %e)
Example #4
from logger_class import Logger

logger_object = Logger("/var/log/class_logger.log")

logger_object.info("This is an info message")

Example #5
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from logger_class import Logger

logger_object = Logger("var/log/class_logger.log")

logger_object.error("This is an error message")

Example #6
def extract_data():
    # load id data
    file = open("movie.txt", "r")
    id_list = file.readlines()
    for i in range(len(id_list)):
        id_list[i] = id_list[i].strip("\n")

    # setup
    logger = Logger("file").getLogger()

    with MyDb("localhost", "root", "", 3306) as db, DoubanAPI() as D:
        # create database "movie_info", comment this block if database already created
        try:
            create_db(db)
        except Exception as e:
            logger.error(e)
            raise Exception("Database creation failed!")

        # connect to "movie_info"
        db.connect_to_db("movie_info")

        # begin extraction and insertion
        for id in id_list:
            time.sleep(3)  # simulate user behaviour
            # extraction
            try:
                D.search(id)
                logger.info("Search success for id: " + str(id))
            except Exception as e:  # error caused by ip ban
                logger.error(e)
                raise Exception("Ip banned!")
            try:
                info = D.info()
            except Exception as e:  # error caused by ip ban
                logger.error(e)
                raise Exception("Ip banned!")

            # insert into entity 'movie'
            query = "INSERT INTO `movie` VALUES (%s, %s, %s, %s, %s)"
            param = (id, ",".join(info["上映日期"]), info["rating"],
                     info["rating_count"], ",".join(info["rating_weight"]))
            try:
                db.execute(query, param)
            except Exception as e:  # error caused by duplicate id in txt
                logger.error(e)
                logger.info("Failure in id: " + str(id))
                continue

            # insert into relations
            query = "INSERT INTO `director_movie` VALUES (%s, %s)"
            for d in info["director_id"]:
                try:
                    param = (d, id)
                    db.execute(query, param)
                except Exception as e:
                    logger.error(e)
                    continue
            query = "INSERT INTO `actor_movie` VALUES (%s, %s)"
            for a in info["actor_id"]:
                try:
                    param = (a, id)
                    db.execute(query, param)
                except Exception as e:
                    logger.error(e)
                    continue
            query = "INSERT INTO `writer_movie` VALUES (%s, %s)"
            for w in info["screenwriter_id"]:
                try:
                    param = (w, id)
                    db.execute(query, param)
                except Exception as e:
                    logger.error(e)
                    continue
            query = "INSERT INTO `actor_movie` VALUES (%s, %s)"
            for a in info["actor_id"]:
                try:
                    param = (a, id)
                    db.execute(query, param)
                except Exception as e:
                    logger.error(e)
                    continue
            query = "INSERT INTO `genre_movie` VALUES (%s, %s)"
            for g in info["类型"]:
                try:
                    param = (g, id)
                    db.execute(query, param)
                except Exception as e:
                    logger.error(e)
                    continue
            query = "INSERT INTO `region_movie` VALUES (%s, %s)"
            for r in info["制片国家/地区"]:
                try:
                    param = (r, id)
                    db.execute(query, param)
                except Exception as e:
                    logger.error(e)
                    continue
            query = "INSERT INTO `language_movie` VALUES (%s, %s)"
            for l in info["语言"]:
                try:
                    param = (l, id)
                    db.execute(query, param)
                except Exception as e:
                    logger.error(e)
                    continue
            # if success
            logger.info("Success in id: " + str(id))
        logger.info("Finish insertion")

        time.sleep(3000)  # wait until the insertions have finished
        filter_data(db)
Example #7
#!/usr/bin/env python
#-*- coding: utf-8 -*-

from logger_class import Logger

m_log = Logger(logger_name='watcher', filename='catche.log')

m_log.info('test1')
m_log.warn('test warn')
m_log.debug('test debug')
Example #8
from logger_class import Logger

logger_object = Logger("filename.log")
logger_object.info("This is an info message")
Example #9
def region_info(db):
    header = ("region", "movie_count", "avg_rating")
    query = "SELECT region, COUNT(movie_ID) AS movie_count, AVG(rating) AS avg_rating FROM `region_movie` JOIN movie " \
            "WHERE movie_ID = ID GROUP BY region "
    db.execute_to_csv(query, None, "./data/region_avg.csv", header)
    header = ("movie_id", "region_rating")
    query = "SELECT ID, AVG(avg_r) AS avg_rating FROM movie JOIN region_movie JOIN (SELECT region, COUNT(movie_ID) AS " \
            "movie_count, AVG(rating) AS avg_r FROM `region_movie` JOIN movie WHERE movie_ID = ID GROUP BY region) AS " \
            "T WHERE ID = region_movie.movie_ID AND region_movie.region = T.region GROUP BY ID "
    db.execute_to_csv(query, None, "./data/region_pred_avg.csv", header)
    query = "SELECT ID, rating FROM `movie` WHERE ID NOT IN (SELECT movie_ID FROM region_movie)"
    db.execute_to_csv(query, None, "./data/region_empty.csv", header)


if __name__ == '__main__':
    logger = Logger("file").getLogger()
    make_dir()
    with MyDb("localhost", "root", "", 3306, "movie_info") as db:
        filter_data(db)
        try:
            get_movie(db)
            actor_info(db)
            director_info(db)
            writer_info(db)
            genre_info(db)
            language_info(db)
            region_info(db)
        except Exception as e:
            logger.error(e)
Example #10
def test_combined():

    a = 0.15
    v = 0.08
    urc.movel_wait(pose0, a=a, v=v)
    rx_move(1200)
    time.sleep(2)

    gs = GelSight(
        IP=IP,
        corners=corners,
        tracking_setting=tracking_setting,
        output_sz=(400, 300),
        id="right",
    )
    gs.start()
    c = input()

    rx_move(760)
    grc.follow_gripper_pos = 1
    time.sleep(0.5)

    depth_queue = []

    cnt = 0
    dt = 0.05
    pos_x = 0.5

    tm_key = time.time()
    logger = Logger()
    noise_acc = 0.
    flag_record = False
    tm = 0
    start_tm = time.time()

    vel = [0.00, 0.008, 0, 0, 0, 0]

    while True:
        img = gs.stream.image

        # get pose image
        pose_img = gs.pc.pose_img
        # pose_img = gs.pc.frame_large
        if pose_img is None:
            continue

        # depth_current = gs.pc.depth.max()
        # depth_queue.append(depth_current)
        #
        # if len(depth_queue) > 2:
        #     depth_queue = depth_queue[1:]
        #
        # if depth_current == np.max(depth_queue):
        pose = gs.pc.pose
        cv2.imshow("pose", pose_img)

        if gs.pc.inContact:

            # if cnt % 4 < 2:
            #     # grc.follow_gripper_pos = 1
            #     rx_move(810)
            # else:
            a = 0.02
            v = 0.02

            fixpoint_x = pose0[0]
            fixpoint_y = pose0[1] - 0.133
            pixel_size = 0.2e-3
            ur_pose = urc.getl_rt()
            ur_xy = np.array(ur_pose[:2])

            x = 0.1 - pose[0] - 0.5 * (1 - 2 * pose[1]) * np.tan(pose[2])
            alpha = np.arctan((ur_xy[0] - fixpoint_x) /
                              (ur_xy[1] - fixpoint_y)) * np.cos(np.pi * ang / 180)

            print("x: ", x, "; input: ", x * pixel_size)

            # K = np.array([6528.5, 0.79235, 2.18017]) #10 degrees
            # K = np.array([7012, 8.865, 6.435]) #30 degrees
            # K = np.array([1383, 3.682, 3.417])
            K = np.array([862689, 42.704, 37.518])

            state = np.array([[x * pixel_size], [pose[2]], [alpha]])
            phi = -K.dot(state)

            # noise = random.random() * 0.07 - 0.02
            # a = 0.8
            # noise_acc = a * noise_acc + (1 - a) * noise
            # phi += noise_acc

            target_ur_dir = phi + alpha
            limit_phi = np.pi / 3
            target_ur_dir = max(-limit_phi, min(target_ur_dir, limit_phi))
            if abs(target_ur_dir) == limit_phi:
                print("reached phi limit")
            v_norm = 0.02
            vel = np.array([
                v_norm * sin(target_ur_dir) * cos(np.pi * ang / 180),
                v_norm * cos(target_ur_dir),
                v_norm * sin(target_ur_dir) * sin(np.pi * -ang / 180), 0, 0, 0
            ])

            # if x < -0.2:
            #     print("regrasp")
            #     rx_regrasp()

            if ur_pose[0] < -0.7 - .1693:
                vel[0] = max(vel[0], 0.)
                print("reached x limit")
            if ur_pose[0] > -0.4 - .1693:
                vel[0] = min(vel[0], 0.)
                print("reached x limit")
            if ur_pose[2] < .15:
                vel[2] = 0.
                print("reached z limit")
            if ur_pose[1] > .34:
                print("end of workspace")
                print("log saved: ", logger.save_logs())
                gs.pc.inContact = False
                vel[0] = min(vel[0], 0.)
                vel[1] = 0.

            # print("sliding vel ", vel[0], "posx ", pos_x)

            vel = np.array(vel)
            urc.speedl(vel, a=a, t=dt * 2)

            time.sleep(dt)

        else:
            print("no pose estimate")
            print("log saved: ", logger.save_logs())
            break

        # # get tracking image
        # tracking_img = gs.tc.tracking_img
        # if tracking_img is None:
        #     continue

        # slip_index_realtime = gs.tc.slip_index_realtime
        # print("slip_index_realtime", slip_index_realtime)

        # cv2.imshow("marker", tracking_img[:, ::-1])
        # cv2.imshow("diff", gs.tc.diff_raw[:, ::-1] / 255)

        # if urc.getl_rt()[0] < -.45:
        #     break

        # cnt += 1

        c = cv2.waitKey(1) & 0xFF
        if c == ord("q"):
            break

        ##################################################################
        # Record data
        # 'gelsight_url'  : self.gelsight_url,
        # 'fabric_pose'   : self.fabric_pose,
        # 'ur_velocity'   : self.ur_velocity,
        # 'ur_pose'       : self.ur_pose,
        # 'slip_index'    : self.slip_index,
        # 'x'             : self.x,
        # 'y'             : self.y,
        # 'theta'         : self.theta,
        # 'phi'           : self.phi,
        # 'dt'            : self.dt

        if gs.pc.inContact:
            # print("LOGGING")
            # logger.gelsight = gs.pc.diff
            # logger.cable_pose = pose
            logger.ur_velocity = vel
            logger.ur_pose = urc.getl_rt()

            v = np.array([logger.ur_velocity[0], logger.ur_velocity[1]])
            alpha = asin(v[1] / np.sum(v**2)**0.5)

            logger.x = pose[0]
            logger.y = pose[1]
            logger.theta = pose[2]
            # logger.phi = alpha - logger.theta

            logger.dt = time.time() - tm
            tm = time.time()

            logger.add()
Example #11
"""
Works Cited
豆瓣电影. https://movie.douban.com/. Accessed Dec. 2020.
"""

from DoubanAPI import DoubanAPI
from logger_class import Logger


def extract_id():
    data_size = 3000
    out = "movie.txt"
    with DoubanAPI() as D:
        D.find_id(False, "电影", 0, (2010, 2019), out, data_size)


if __name__ == '__main__':
    logger = Logger().getLogger()
    try:
        extract_id()
    except Exception as e:
        logger.error(e)
Example #12
from flask.views import MethodView

from flask import request, make_response, render_template, send_file
from logger_class import Logger
import io

logger = Logger()


class WebDavProtocol(MethodView):
    """This class handles WebDav protocol, but does not implement any FS logic"""
    def __init__(self):
        # self.files = __class__.files
        pass

    def propfind(self, path=None, pathname=None):

        self.files['depth'] = request.headers['Depth']

        if not pathname:
            return make_response(
                render_template('propfind_file_generated.xml',
                                values=self.get_file(pathname)), 207)

        elif pathname and self.find_file(pathname):
            self.files['only_files'] = True
            return make_response(
                render_template('propfind_one_file.xml',
                                values=self.find_file(pathname)), 207)

        return make_response('', 404)
Example #13
# -*-coding:utf8-*-

from logger_class import Logger

logger_object = Logger("logfile/log1.log")
logger_object.info('This is an info message')



Example #14
from webdavprotocol import WebDavProtocol
from testwebdavprotocol import TestWebDavProtocol

from flask import Flask, request, make_response

import os
from logger_class import Logger

# Enables request logging to log file
debug = True

# Logger
logger = Logger()
logger.flush()

# Kludge: needed for path_join in jinja_handler. Temporary.
host = '192.168.88.56'
#host = '192.168.1.120'

# List of allowed methods
ALLOWED_METHODS = ['GET', 'PUT', 'PROPFIND', 'PROPPATCH', 'MKCOL', 'DELETE',
                   'COPY', 'MOVE', 'OPTIONS']

app = Flask(__name__)

webdav_view = TestWebDavProtocol.as_view('webdav')
app.add_url_rule('/webdav', defaults={'path': ''}, view_func=webdav_view, methods=ALLOWED_METHODS)
app.add_url_rule('/webdav/<path:pathname>', view_func=webdav_view, methods=ALLOWED_METHODS)

@app.before_request
def logging():
Example #15
from logger_class import Logger

logger_object = Logger('resouces/info.log')
logger_object.info('hello, this is a test')
Example #16
def test_combined():

    a = 0.15
    v = 0.08
    urc.movel_wait(pose0, a=a, v=v)
    rx_move(1200)
    time.sleep(2)

    gs = GelSight(
        IP=IP,
        corners=corners,
        tracking_setting=tracking_setting,
        output_sz=(400, 300),
        id="right",
    )
    gs.start()
    c = input()

    rx_move(800)
    grc.follow_gripper_pos = 1
    time.sleep(0.5)

    depth_queue = []

    cnt = 0
    dt = 0.05
    pos_x = 0.5

    tm_key = time.time()
    logger = Logger()
    noise_acc = 0.
    flag_record = False
    tm = 0
    start_tm = time.time()

    vel = [0.00, 0.008, 0, 0, 0, 0]

    while True:
        img = gs.stream.image

        # get pose image
        pose_img = gs.pc.pose_img
        # pose_img = gs.pc.frame_large
        if pose_img is None:
            continue

        # depth_current = gs.pc.depth.max()
        # depth_queue.append(depth_current)
        #
        # if len(depth_queue) > 2:
        #     depth_queue = depth_queue[1:]
        #
        # if depth_current == np.max(depth_queue):
        pose = gs.pc.pose
        cv2.imshow("pose", pose_img)

        if gs.pc.inContact:

            # if cnt % 4 < 2:
            #     # grc.follow_gripper_pos = 1
            #     rx_move(810)
            # else:
            a = 0.02
            v = 0.02
            kp = .03
            # kp_rot = .2

            # pos_x = (2*pose[0] + (1 - pose[1])*np.tan(pose[2]))/2
            pos_x = (pose[0] + (1 - pose[1]) * np.tan(pose[2]))
            # pos_x = pose[0]
            # e = (pos_x-0.5)*kp

            # vel = [0, (pos_x-0.3)*kp, -0.008, 0, 0, 0]
            # vel = [0, (pos_x-0.6)*kp, -0.008, kp_rot*gs.pc.pose[2], 0, 0]
            # vel = [0, e*np.cos(th) - dy, -e*np.sin(th) - dz, kp_rot*gs.pc.pose[2], 0, 0]

            noise = random.random() * 0.03 - 0.015
            a = 0.8
            noise_acc = a * noise_acc + (1 - a) * noise
            print(pos_x)

            vel = [(pos_x - 0.10) * kp + noise_acc, 0.008,
                   -(pos_x - 0.10) * kp * .2, 0, 0, 0]
            vel = np.array(vel)

            # grc.follow_gripper_pos = .885
            # grc.follow_gripper_pos = .88
            # rx_move(830)
            # urc.speedl([(pose[0]-0.2)*kp, 0.008, 0, 0, 0, 0], a=a, t=dt*2)

            ur_pose = urc.getl_rt()
            if ur_pose[0] < -0.7:
                vel[0] = max(vel[0], 0.)
            if ur_pose[0] > -0.3:
                vel[0] = min(vel[0], 0.)
            if ur_pose[2] < .08:
                vel[2] = 0.
            if ur_pose[1] > .34:
                print("end of workspace")
                print("log saved: ", logger.save_logs())
                gs.pc.inContact = False
                vel[0] = min(vel[0], 0.)
                vel[1] = 0.

            print("sliding vel ", vel[0], "posx ", pos_x)

            vel = np.array(vel)
            urc.speedl(vel, a=a, t=dt * 2)

            time.sleep(dt)

        else:
            print("no pose estimate")
            print("log saved: ", logger.save_logs())
            break

        # # get tracking image
        # tracking_img = gs.tc.tracking_img
        # if tracking_img is None:
        #     continue

        # slip_index_realtime = gs.tc.slip_index_realtime
        # print("slip_index_realtime", slip_index_realtime)

        # cv2.imshow("marker", tracking_img[:, ::-1])
        # cv2.imshow("diff", gs.tc.diff_raw[:, ::-1] / 255)

        # if urc.getl_rt()[0] < -.45:
        #     break

        # cnt += 1

        c = cv2.waitKey(1) & 0xFF
        if c == ord("q"):
            break

        ##################################################################
        # Record data
        # 'gelsight_url'  : self.gelsight_url,
        # 'fabric_pose'   : self.fabric_pose,
        # 'ur_velocity'   : self.ur_velocity,
        # 'ur_pose'       : self.ur_pose,
        # 'slip_index'    : self.slip_index,
        # 'x'             : self.x,
        # 'y'             : self.y,
        # 'theta'         : self.theta,
        # 'phi'           : self.phi,
        # 'dt'            : self.dt

        if gs.pc.inContact:
            # print("LOGGING")
            # logger.gelsight = gs.pc.diff
            # logger.cable_pose = pose
            logger.ur_velocity = vel
            logger.ur_pose = urc.getl_rt()

            v = np.array([logger.ur_velocity[0], logger.ur_velocity[1]])
            alpha = asin(v[1] / np.sum(v**2)**0.5)

            logger.x = pose[0]
            logger.y = pose[1]
            logger.theta = pose[2]
            # logger.phi = alpha - logger.theta

            logger.dt = time.time() - tm
            tm = time.time()

            logger.add()