Example #1
def main():

    # Get config
    Config.init_config()

    # Init custom logger
    CustomLogger.set_global_log_level(Config.get('log_level'))
    log = CustomLogger(__name__)

    log.info('')
    log.info('#######################################')
    log.info('#                                     #')
    log.info('#     Catch-up TV & More simulator    #')
    log.info('#                                     #')
    log.info('#######################################')
    log.info('')

    # Customize sys.path to use custom modules
    customize_sys_path()

    if Config.get('test_modules'):
        return test_modules(log)
    else:
        exploration_loop(log)

        if Config.get('print_all_explored_items'):
            print('\n* All explored items:\n')
            for s in Route.explored_routes_l:
                print(s)

        RuntimeErrorCQ.print_encountered_warnings()
        ret_val = RuntimeErrorCQ.print_encountered_errors()
        return ret_val
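Note: CustomLogger itself is not shown on this page; the examples only exercise its interface. A minimal stand-in that would satisfy the calls in Example #1 (a class-level set_global_log_level() plus per-module instances with an info() method) might look like the sketch below, which simply wraps the standard logging module; this is an assumption for illustration, not the real implementation.

import logging

class CustomLogger:
    # Hypothetical sketch: delegate everything to the standard logging module.
    _global_level = logging.INFO

    @classmethod
    def set_global_log_level(cls, level):
        # Accept either a level name ('DEBUG') or a numeric level (assumed behaviour)
        cls._global_level = logging.getLevelName(level) if isinstance(level, str) else level
        logging.basicConfig(level=cls._global_level)

    def __init__(self, name):
        self._logger = logging.getLogger(name)

    def info(self, msg):
        self._logger.info(msg)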
Example #2
    def __init__(self):

        # start node
        rospy.init_node('ds18b20_device_server',
                        log_level=rospy.DEBUG,
                        anonymous=True)
        # get roslaunch params and reinit part of params
        self._device_name = rospy.get_param('~ds18b20_device_name')
        self._logname = 'ds18b20_' + self._device_name[3:]  # rospy.get_param('~ds18b20_log_name', 'ds18b20')
        self._lost_data_marker = rospy.get_param('~ds18b20_lost_data_marker',
                                                 -65536)
        # self._log_node_name = rospy.get_param('~ds18b20_log_node_name', 'ds18b20_log_node')
        # self._temp_pub_name = rospy.get_param('~ds18b20_temp_pub_name', 'ds18b20_1_temp_pub')
        self._temp_pub_name = 'ds18b20_' + self._device_name[3:] + '_data_pub'

        self._timeout = rospy.get_param('~ds18b20_timeout', 1)
        self._measure_interval = rospy.get_param('~ds18b20_measure_interval')

        self._temp_pub = rospy.Publisher(self._temp_pub_name,
                                         Temperature,
                                         queue_size=10)

        # logger
        self._logger = CustomLogger(name=self._logname)
        self._logger.info("ds18b20_device_server init : {}".format(
            self._device_name))
        # print("we here init")

        # and go to infinite loop
        self._loop()
Example #3
    def __init__(self):
        # hardcoded constants
        self._success_response = "success: "
        self._error_response = "error: "


        # start node
        rospy.init_node('sba5_device_server', log_level=rospy.DEBUG)

        # get roslaunch params and reinit part of params
        self._calibration_time = int(rospy.get_param('~sba5_calibration_time'))
        self._logname = rospy.get_param('~sba5_log_name', 'sba5')
        self._log_node_name = rospy.get_param('~sba5_log_node_name', 'sba5_log_node')
        self._port = rospy.get_param('~sba5_port', '/dev/serial/by-id/usb-FTDI_FT230X_Basic_UART_DN03WQZS-if00-port0')
        self._baudrate = int(rospy.get_param('~sba5_baudrate', 19200))
        self._timeout = float(rospy.get_param('~sba5_timeout', 0.2))
        self._service_name = rospy.get_param('~sba5_service_name', 'sba5_device')


        # create log topic publisher
        self._log_pub = rospy.Publisher(self._log_node_name, String, queue_size=10)

        # logger
        self._logger = CustomLogger(name=self._logname, logpub=self._log_pub)
        self._logger.debug("sba5_device_server init")

        # self._logger.info("sba5_device_server cal time is {}, type is {}".format(
        #     self._calibration_time, type(self._calibration_time)
        # ))

        # service
        self._service = rospy.Service(self._service_name, SBA5Device, self.handle_request)
        self._loop()
Example #4
    def __init__(self):
        # hardcoded constants
        self._success_response = "success: "
        self._error_response = "error: "

        # start node
        rospy.init_node('exp_server', log_level=rospy.DEBUG)

        # get roslaunch params

        # names for self topics
        self._logname = rospy.get_param('~exp_log_name', 'exp_system')
        self._log_node_name = rospy.get_param('~exp_log_node_name', 'exp_log_node')
        self._service_name = rospy.get_param('~exp_service_name', 'exp_system')
        self._exp_pub_name = rospy.get_param('~exp_pub_name', 'exp_pub')

        # mode of work
        # no matter which method is chosen we will parse params for all of them from .launch file
        self._mode = rospy.get_param('~exp_mode_name', 'table')
        # self._stand_type = rospy.get_param('~exp_lamp_type', 'exp')
        # self._height_of_lamps = rospy.get_param('~exp_lamp_h', 25)

        self._search_table = rospy.get_param('exp_search_table')
        self._exp_description = rospy.get_param('mysql_data_saver_experiment_description')
        self._exp_id = self._exp_description["experiment_number"]
        self._db_params = rospy.get_param('mysql_db_params')
        self._db_name = "experiment" + str(self._exp_id)
        # self._exp_config_path = rospy.get_param('~exp_config_path', 'test.xml')
        # todo add here parsing params of gradient search and other smart methods

        # create log topic publisher
        self._log_pub = rospy.Publisher(self._log_node_name, String, queue_size=10)

        # logger
        self._logger = CustomLogger(name=self._logname, logpub=self._log_pub)
        self._logger.info("exp_server init")

        # create exp_db if it does not exist yet
        self._create_exp_db()

        # create search method handler object
        # it must not contain any ros api, because i hate ros
        if self._mode == 'table':
            self._search_handler = TableSearchHandler(
                self._search_table, self._db_params, self._exp_description, self._logger)
        elif self._mode == 'ordered_table':
            self._search_handler = OrderedTableSearchHandler(
                self._search_table, self._db_params, self._exp_description, self._logger)

        # todo add other search modes
        # now we have to calculate current search point
        # self._search_handler will calculate and store it
        # until we get a request from the control system

        self._search_handler.calculate_next_point()

        # service
        self._service = rospy.Service(self._service_name, ExpSystem, self._handle_request)

        self._loop()
Example #5
    def __init__(self):
        # hardcoded constants
        self._success_response = "success"
        self._error_response = "error: "


        # start node
        rospy.init_node('led_device_server', anonymous=True, log_level=rospy.DEBUG)

        # get roslaunch params and reinit part of params
        self._logname = rospy.get_param('~led_log_name', 'LED')
        self._log_node_name = rospy.get_param('~led_log_node_name', 'led_log_node')
        self._port = rospy.get_param('~led_port', '/dev/serial/by-id/usb-Silicon_Labs_CP2102_USB_to_UART_Bridge_Controller_0001-if00-port0')
        self._baudrate = rospy.get_param('~led_baudrate', 19200)
        self._timeout = rospy.get_param('~led_timeout', 10)
        self._service_name = rospy.get_param('~led_service_name')
        self._max_red_current = rospy.get_param('~led_max_red_current', 250)
        self._min_red_current = rospy.get_param('~led_min_red_current', 10)
        self._max_white_current = rospy.get_param('~led_max_white_current', 250)
        self._min_white_current = rospy.get_param('~led_min_white_current', 10)

        # create log topic publisher
        self._log_pub = rospy.Publisher(self._log_node_name, String, queue_size=10)

        # logger
        self._logger = CustomLogger(name=self._logname, logpub=self._log_pub)
        self._logger.debug("led_device_server init")

        # service
        self._service = rospy.Service(self._service_name, LedDevice, self.handle_request)
        self._loop()
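The ROS examples above (and most below) build the logger as CustomLogger(name=..., logpub=...), where logpub is an optional rospy.Publisher on a String topic, and then call debug()/info()/error() on it. A minimal sketch of that interface, inferred from the usage only (mirroring every message to the log topic is an assumption, not the real class):

from std_msgs.msg import String

class CustomLogger:
    # Hypothetical ROS-flavored sketch: print each message and mirror it
    # to the optional log topic publisher.
    def __init__(self, name, logpub=None):
        self._name = name
        self._logpub = logpub

    def _emit(self, level, msg):
        line = "{} [{}] {}".format(self._name, level, msg)
        print(line)
        if self._logpub is not None:
            self._logpub.publish(String(data=line))

    def debug(self, msg):
        self._emit("DEBUG", msg)

    def info(self, msg):
        self._emit("INFO", msg)

    def error(self, msg):
        self._emit("ERROR", msg)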
Example #6
def performance():
    my_logger = CustomLogger('main_db_logging.yaml').logger
    my_config_file = 'main_db_settings.ini'
    batch_sizes = [5000, 10000, 20000, 50000]
    df = pd.DataFrame(columns=['batch_size', 'time'])
    for i, batch_size in enumerate(batch_sizes):
        db = DatabaseConnector(logger=my_logger, batch_size=batch_size, config_file=my_config_file)
        run_time = db.run()
        df.loc[i, 'batch_size'] = db.batch_size
        df.loc[i, 'time'] = str(run_time).split('.')[0]
    print(df)
Example #7
    def __init__(self):
        print("========== start init")

        # start node
        rospy.init_node('mysql_data_plotter_server',
                        log_level=rospy.DEBUG,
                        anonymous=True)

        # self logging
        self._logname = rospy.get_param('~mysql_data_plotter_log_name',
                                        'mysql_data_plotter')
        self._log_node_name = rospy.get_param(
            '~mysql_data_plotter_log_node_name', 'mysql_data_plotter_log_node')

        # create log topic publisher
        self._log_pub = rospy.Publisher(self._log_node_name,
                                        String,
                                        queue_size=10)

        # logger
        self._logger = CustomLogger(name=self._logname, logpub=self._log_pub)
        self._logger.debug("mysql_data_plotter_server init")

        # by default we will use the current IP on the tun0 interface
        # if there is no tun0 at all, it must be treated as a critical error

        self.ipv4 = os.popen('ip addr show tun0').read().split(
            "inet ")[1].split("/")[0]

        self.web_addr = self.ipv4
        self.web_port = 8090  # on every rpi
        self.list_of_hdf = list()
        self.metastring = "experiment metadata: \n"

        # TODO load description from experiments table and add to metastring
        # self.list_of_datasets =

        print("========== start get list of datasets")

        # TODO: fix - everything below is for testing only
        self.list_of_tables = rospy.get_param('~mysql_data_plotter_raw_topics')
        # self.list_of_tables = [  # TODO must be loaded from .launch
        #     {'name': '/bmp180_1_temp_pub', 'id': 1, 'type': 'temperature', 'dtype': 'float64', 'units': 'C', 'status': 'raw_data'},
        #     {'name': '/bmp180_1_pressure_pub', 'id': 2, 'type': 'pressure', 'dtype': 'float64', 'units': 'kPa', 'status': 'raw_data'},
        #     {'name': '/raw_co2_pub', 'type': 'co2', 'id': 3, 'dtype': 'float64', 'units': 'ppmv', 'status': 'raw_data'},
        #     {'name': '/si7021_1_hum_pub', 'id': 4, 'type': 'humidity', 'dtype': 'float64', 'units': 'percents', 'status': 'raw_data'},
        #     {'name': '/si7021_1_temp_pub', 'id': 5, 'type': 'temperature', 'dtype': 'float64', 'units': 'C', 'status': 'raw_data'}
        # ]

        print("========== end of init")
Example #8
def main():
    # performance()
    my_logger = CustomLogger('main_db_logging.yaml').logger
    my_config_file = 'main_db_settings.ini'
    my_db = DatabaseConnector(logger=my_logger, config_file=my_config_file)
    my_db.run()
    if my_db.number_of_rows <= 10:
        my_db.open_db()
        print('-' * 100)
        print(my_db.get_max_db_id())
        print('-' * 100)
        print(my_db.get_data())
        print('-' * 100)
        my_db.close_db()
Example #9
    def __init__(self, product, service, zuul_url, zuul_route):
        self._PRODUCT = product
        self._SERVICE = service
        self._BASE_PATH = os.path.expanduser('~') + "/Documents/GRP"
        now = datetime.now()
        log_dir = "/var/log/grp/regression-test-automation/%s/%s/%s" % \
                  (self._PRODUCT, self._SERVICE, '{0:%Y-%m-%d__%H.%M.%S}'.format(now))
        self.log = CustomLogger(log_dir).get_logger()
        self.log.info("Script starts executing at : {0:%Y-%m-%d %H:%M:%S}\n".format(now))
        pcfs = service.split('/')
        if len(pcfs) != 4:
            sys.exit("Invalid service : %s\nPlease enter a valid service." % service)
        self._COMPONENT = pcfs[0]
        self._FEATURE = pcfs[1]
        self._VERSION = pcfs[2]
        self._SERVICE_NAME = pcfs[3]
        self._ZUUL_URL = zuul_url
        self._ZUUL_ROUTE = zuul_route
Example #10
    def __init__(self):

        # start node
        rospy.init_node('si7021_device_server', log_level=rospy.DEBUG)
        # get roslaunch params and reinit part of params
        self._logname = rospy.get_param('~si7021_log_name', 'si7021')
        self._lost_data_marker = rospy.get_param('~si7021_lost_data_marker',
                                                 -65536)
        self._log_node_name = rospy.get_param('~si7021_log_node_name',
                                              'si7021_log_node')
        self._hum_pub_name = rospy.get_param('~si7021_hum_pub_name',
                                             'si7021_1_hum_pub')
        self._temp_pub_name = rospy.get_param('~si7021_temp_pub_name',
                                              'si7021_1_temp_pub')

        self._timeout = rospy.get_param('~si7021_timeout', 1)
        self._measure_interval = rospy.get_param('~si7021_measure_interval')

        # create log topic publisher
        self._log_pub = rospy.Publisher(self._log_node_name,
                                        String,
                                        queue_size=10)

        self._hum_pub = rospy.Publisher(self._hum_pub_name,
                                        Temperature,
                                        queue_size=10)
        self._temp_pub = rospy.Publisher(self._temp_pub_name,
                                         Temperature,
                                         queue_size=10)

        # logger
        self._logger = CustomLogger(name=self._logname, logpub=self._log_pub)
        self._logger.debug("si7021_device_server init")
        # print("we here init")

        # and go to infinite loop
        # self._service = rospy.Service(self._service_name, SBA5Device, self.handle_request)
        self._loop()
Example #11
    def __init__(self):
        # hardcoded constants
        self._success_response = "success: "
        self._error_response = "error: "

        # start node
        rospy.init_node('k30_device_server', log_level=rospy.DEBUG)
        self._logname = rospy.get_param('~k30_log_name', 'k30')
        self._log_node_name = rospy.get_param('~k30_log_node_name',
                                              'k30_log_node')
        self._port = rospy.get_param(
            '~k30_port', '/dev/serial/by-id/usb-Silicon_Labs_CP2102_USB_to_'
            'UART_Bridge_Controller_0003-if00-port0')
        self._baudrate = rospy.get_param('~k30_baudrate', 9600)
        self._timeout = rospy.get_param('~k30_timeout', 1)
        self._service_name = rospy.get_param('~k30_service_name', 'k30_device')
        self._calibration_time = rospy.get_param('~k30_calibration_time', 25)
        self._lost_data_marker = rospy.get_param('~k30_lost_data_marker',
                                                 -65536)

        # create log topic publisher
        self._log_pub = rospy.Publisher(self._log_node_name,
                                        String,
                                        queue_size=10)

        # logger
        self._logger = CustomLogger(name=self._logname, logpub=self._log_pub)
        self._logger.debug("k30_device_server init")

        # device
        self._device = K30(devname=self._port,
                           baudrate=self._baudrate,
                           timeout=self._timeout)

        # service
        self._service = rospy.Service(self._service_name, K30Device,
                                      self.handle_request)
        self._loop()
Example #12
    def __init__(self):
        # hardcoded constants
        self._success_response = "success: "
        self._error_response = "error: "

        # start node
        rospy.init_node('watchdog_server',
                        anonymous=True,
                        log_level=rospy.DEBUG)

        # get roslaunch params

        # names for self topics
        self._logname = rospy.get_param('~watchdog_log_name', 'data_saver')
        self._log_node_name = rospy.get_param('~watchdog_log_node_name',
                                              'watchdog')
        self._service_name = rospy.get_param('~watchdog_service_name',
                                             'watchdog')

        # create log topic publisher
        self._log_pub = rospy.Publisher(self._log_node_name,
                                        String,
                                        queue_size=10)

        # start logger
        self._logger = CustomLogger(name=self._logname, logpub=self._log_pub)
        self._logger.debug("data_saver server init")

        self.dead_flag = False

        # service
        self._service = rospy.Service(self._service_name, Watchdog,
                                      self._handle_request)

        self._logger.debug("end of init, go to loop")

        self._loop()
Example #13
    def __init__(self):

        # start node
        rospy.init_node('scales_device_server', log_level=rospy.DEBUG)
        # get roslaunch params and reinit part of params
        self._logname = rospy.get_param('scales_log_name', 'scales')
        self._lost_data_marker = rospy.get_param('scales_lost_data_marker',
                                                 -65536)
        self._log_node_name = rospy.get_param('scales_log_node_name',
                                              'scales_log_node')
        self._data_pub_name = rospy.get_param('scales_data_pub_name',
                                              'scales_data_pub')
        self._port = rospy.get_param(
            'scales_port',
            '/dev/serial/by-id/usb-USB_Vir_USB_Virtual_COM-if00')
        self._baudrate = rospy.get_param('scales_baudrate', 9600)
        self._timeout = rospy.get_param('scales_timeout', 1)
        self._measure_interval = rospy.get_param('scales_measure_interval', 5)

        # create log topic publisher
        self._log_pub = rospy.Publisher(self._log_node_name,
                                        String,
                                        queue_size=10)

        self._data_pub = rospy.Publisher(self._data_pub_name,
                                         Temperature,
                                         queue_size=10)

        # logger
        self._logger = CustomLogger(name=self._logname, logpub=self._log_pub)
        self._logger.debug("scales_device_server init")
        # print("we here init")

        # and go to infinite loop
        # self._service = rospy.Service(self._service_name, SBA5Device, self.handle_request)
        self._loop()
Example #14
# -*- coding: utf-8 -*-

from config import Config
from custom_logger import CustomLogger
from runtime_error import RuntimeErrorCQ

log_ = CustomLogger(__name__)

if Config.get("disable_xbmc_mock_log"):
    log_.set_log_level("ERROR")

LOGDEBUG = 0
LOGINFO = 1
LOGNOTICE = 2
LOGWARNING = 3
LOGERROR = 4
LOGSEVERE = 5
LOGFATAL = 6
LOGNONE = 7

levelno_dict = {
    0: "debug",
    1: "info",
    2: "notice",
    3: "warning",
    4: "error",
    5: "severe",
    6: "fatal",
    7: "none",
}
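Building on the snippet above, levelno_dict maps Kodi's integer log levels to method names on the module logger. A small shim like the following (hypothetical; the fallback to info() for levels such as "notice" or "severe" is an assumption) shows how an xbmc.log-style call could be routed through it:

def log(msg, level=LOGDEBUG):
    # Translate the Kodi-style integer level into a CustomLogger method name,
    # falling back to info() for names the logger may not implement (assumption).
    method = getattr(log_, levelno_dict.get(level, "info"), log_.info)
    method(msg)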
Example #15
import logging
import posixpath

from custom_logger import CustomLogger
from extensions_paths import EXAMPLES
from file_system_util import CreateURLsFromPaths
from future import Future
from render_servlet import RenderServlet
from special_paths import SITE_VERIFICATION_FILE
from timer import Timer

_SUPPORTED_TARGETS = {
    'examples': (EXAMPLES, 'extensions/examples'),
}

_log = CustomLogger('render_refresher')


class _SingletonRenderServletDelegate(RenderServlet.Delegate):
    def __init__(self, server_instance):
        self._server_instance = server_instance

    def CreateServerInstance(self):
        return self._server_instance


def _RequestEachItem(title, items, request_callback):
    '''Runs a task |request_callback| named |title| for each item in |items|.
    |request_callback| must take an item and return a servlet response.
    Returns true if every item was successfully run, false if any return a
    non-200 response or raise an exception.
    '''
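The body of _RequestEachItem is cut off above. A possible implementation, reconstructed only from its docstring and using the _log defined earlier in this example (the response.status attribute and the log wording are assumptions, not the original code):

def _RequestEachItemSketch(title, items, request_callback):
    success = True
    for i, item in enumerate(items):
        try:
            response = request_callback(item)
            if response.status != 200:
                _log.error('%s: %s (%s of %s) returned status %s' %
                           (title, item, i + 1, len(items), response.status))
                success = False
        except Exception as e:
            _log.error('%s: %s (%s of %s) raised %s' %
                       (title, item, i + 1, len(items), e))
            success = False
    return success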
Example #16
    def __init__(self):
        # hardcoded constants
        self._success_response = "success: "
        self._error_response = "error: "
        self._logsep = ";"

        # start node
        rospy.init_node('control_server', log_level=rospy.DEBUG)

        # get roslaunch params

        # names for self topics
        self._logname = rospy.get_param('~control_log_name', 'control_system')
        self._log_node_name = rospy.get_param('~control_log_node_name', 'control_log_node')
        self._service_name = rospy.get_param('~control_service_name', 'control_system')
        self._raw_co2_pub_name = rospy.get_param('~control_raw_co2_pub_name', 'raw_co2_pub')
        self._operator_pub_name = rospy.get_param('~control_operator_pub_name', 'operator_pub')

        # devices services names
        # self._relay_service_name = rospy.get_param('~control_relay_service_name', 'relay_device')
        self._led_service_name = rospy.get_param('~control_led_service_name', 'led_device')
        self._sba5_service_name = rospy.get_param('~control_sba5_service_name', 'sba5_device')  # sba5 is the default co2 sensor
        self._exp_service_name = rospy.get_param('~control_exp_service_name', 'exp_system')

        # experiment params
        self._LSM_exp_red = int(rospy.get_param('~control_LSM_exp_red', 250))  # mA old = 142
        # here we need 306 mA, but our devices can work only with Ir < 250 mA
        self._LSM_exp_white = int(rospy.get_param('~control_LSM_exp_white', 114))  # mA  old = 76
        self._LSM_control_red = int(rospy.get_param('~control_LSM_control_red', 237))  # mA
        self._LSM_control_white = int(rospy.get_param('~control_LSM_control_white', 122))  # mA
        self._full_experiment_loop_time = int(rospy.get_param('~control_full_experiment_loop_time', 900.0)) # sec
        self._isolated_measure_time = int(rospy.get_param('~control_isolated_measure_time', 480.0))  # sec
        self._n2_calibration_time = int(rospy.get_param('~control_n2_calibration_time', 90.0))  # depends on
        self._air_valves_open_time = int(rospy.get_param('~control_air_valves_open_time', 15.0))  # sec
        self._co2_measure_time = int(rospy.get_param('~control_co2_measure_time', 1.0))  # sec
        self._ventilation_time = self._full_experiment_loop_time - self._isolated_measure_time - \
            self._n2_calibration_time - self._air_valves_open_time
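        # with the defaults above this is 900 - 480 - 90 - 15 = 315 seconds of ventilation per loop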

        # flags of current control regime
        #self._mode = rospy.get_param('~control_start_mode', 'life_support')
        self._mode = rospy.get_param('~control_start_mode', 'experiment')
        self._mode_flag = 'co2'
        # mode can be :
        # experiment
        # life_support
        # test
        # search_experiment
        # ...
        #self._sba5_measure_allowed = False  # by default
        # self._at_work = rospy.get_param('~control_start_at_work', True)
        # start node and loop immediately after launch or

        # rpi_gpio device stub
        # TODO add this params to .launch file
        self._gpio_handler = RelayHandler(
            n2_valve=5,
            air_pump_1=6,
            air_pump_2=12,
            cooler_1=13,
            air_valve_1=19,
            air_valve_2=26,
            ndir_pump=16,
            empty=20
        )


        # params of search experiment
        self._co2_search_time_start = 0
        self._co2_search_time_stop = 0
        self._current_search_point_id = 0
        self._led_delay_time = 0  # for setting up leds in selected time in vent period
        # by default:
        self._current_red = self._LSM_exp_red
        self._current_white = self._LSM_exp_white
        self._current_search_is_finished = 1  # by default it is not finished



        # create log topic publisher
        self._log_pub = rospy.Publisher(self._log_node_name, String, queue_size=10)
        # create data topic publisher
        self._co2_pub = rospy.Publisher(self._raw_co2_pub_name, Temperature, queue_size=10)
        # create operator-topic to send important msgs to operator
        # self._operator_pub = rospy.Publisher(self._operator_pub_name, String, queue_size=10)

        # logger
        self._logger = CustomLogger(name=self._logname, logpub=self._log_pub)
        self._logger.info("control_server init")

        # create Locks
        self._co2_measure_allowed_event = Event()

        # serial errors handling
        # self._last_serial_error_time = 0
        # self._delay_after_serial_error = 300



        self._logger.info("control_server service creation")

        # service
        self._service = rospy.Service(self._service_name, ControlSystem, self._handle_request)

        self._logger.info("check if we are in experiment mode")


        # self._serial_error_counter = 0
        # self._serial_error_max = 5
        # subscribe to rosout_agg to know if there is fatal error and we need to do smth
        # self._system_log_sub = rospy.Subscriber(
        #     name='/rosout_agg', data_class=Log,
        #     callback=self._log_callback,
        #     queue_size=5)



        # reinit sba5
        if self._mode == 'full_experiment':
            self._logger.info("update sba5 and allow sba5 measures")
            self._update_sba5_params()

            # allow measures of sba5
            self._co2_measure_allowed_event.set()

            # self._default_red = 142
            # self._default_white = 76

            self._current_red = self._LSM_exp_red
            self._current_white = self._LSM_exp_white

            # create timers for async periodic tasks using internal ros mechanics
            # Create a ROS Timer for reading data
            rospy.Timer(rospy.Duration(1), self._get_sba5_measure)  # 1 Hz
            # create ros timer for main loop

        if self._mode == 'life_support_dual':
            self._dual_led_service_name = rospy.get_param('~control_dual_led_service_name')
            self._current_red = self._LSM_control_red
            self._current_white = self._LSM_control_white

        if self._mode == 'k30_experiment':
            self._k30_service_name = rospy.get_param('~k30_service_name', 'k30_device')
            self._k30_calibration_time = int(rospy.get_param('~k30_calibration_time', '25'))
            self._current_red = self._LSM_control_red
            self._current_white = self._LSM_control_white

            rospy.Timer(rospy.Duration(2), self._get_k30_data)  # 0.5 Hz


        self._logger.info("go to loop")
        self._loop()
Example #17
    def __init__(self):
        # hardcoded constants
        self._success_response = "success: "
        self._error_response = "error: "

        # start node
        rospy.init_node('mysql_data_saver_server', log_level=rospy.INFO)

        # get roslaunch params

        self._description = rospy.get_param('mysql_data_saver_experiment_description')  # do not add ~ !!
        self._db_name = "experiment" + str(self._description["experiment_number"])

        # names for self topics
        self._logname = rospy.get_param('~mysql_data_saver_log_name', 'mysql_data_saver')
        self._lost_data_marker = rospy.get_param('~mysql_data_saver_lost_data_marker',
                                                 -65536)
        self._log_node_name = rospy.get_param('~mysql_data_saver_log_node_name', 'mysql_data_saver_log')
        # self._service_name = rospy.get_param('~mysql_data_saver_service_name', 'mysql_data_saver')
        self._db_params = rospy.get_param('mysql_db_params')
        self._system_log_sub = rospy.Subscriber(
            name='/rosout_agg', data_class=Log,
            callback=self._log_callback,
            queue_size=50)
        # create log topic publisher
        self._log_pub = rospy.Publisher(self._log_node_name, String, queue_size=10)

        # start logger
        self._logger = CustomLogger(name=self._logname, logpub=self._log_pub)
        self._logger.info("mysql_data_saver_server init")

        default_id = self.generate_experiment_id()
        # self._experiment_id = rospy.get_param('~mysql_data_saver_experiment_id', default_id)
        # experiment id is a string like YYYY_MM_DD_TYPE_NUMB
        # where YYYY_MM_DD is the date of start
        # TYPE is a short type of experiment (like TEST, TABLE, SEARCH etc)
        # NUMB is a unique number of the experiment

        # TODO get params of sql database

        # TODO really get

        self.con = pymysql.connect(host=self._db_params["host"],
                              user=self._db_params["user"],
                              password=self._db_params["password"],
                              # db='experiment',
                              charset='utf8mb4',
                              cursorclass=pymysql.cursors.DictCursor)

        self.con.close()  # check if db available



        self._raw_topics = rospy.get_param('mysql_data_saver_raw_topics')
        # self._exp_topics = rospy.get_param('~mysql_data_saver_exp_topics')



        # list to keep subscribers (for what?)
        self._subscribers_list = list()

        # check if tables in db exist, or update them
        self._create_database()

        # TODO insert into sensors table all sensors from .launch if not exists

        #  TODO and insert into experiment table current experiment if not exists
        # sub to all topics from parsed string
        self._logger.debug("creating raw subs")
        for topic in self._raw_topics:
            # for now we assume that all raw_data topics have type "sensor_msgs/Temperature"
            # this must be guaranteed by the architecture of the whole system!
            # so we use "sensor_msgs/Temperature" as the default here
            s = rospy.Subscriber(name=topic['name'], data_class=Temperature,
                                 callback=self._raw_data_callback, callback_args=topic,
                                 queue_size=2)
            print(s)
            self._subscribers_list.append(s)

        self._logger.debug("end of init, go to loop")

        self._loop()
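The _raw_data_callback referenced above is not shown on this page. A hypothetical sketch of what such a callback could do with the per-topic metadata and the pymysql connection pattern used in this example (the raw_data table and its columns are invented names for illustration):

    def _raw_data_callback(self, msg, topic):
        # Hypothetical: store one sensor_msgs/Temperature reading in the per-experiment DB
        con = pymysql.connect(host=self._db_params["host"],
                              user=self._db_params["user"],
                              password=self._db_params["password"],
                              db=self._db_name,
                              charset='utf8mb4',
                              cursorclass=pymysql.cursors.DictCursor)
        try:
            with con.cursor() as cur:
                # table and column names below are assumptions, not from the original code
                cur.execute(
                    "INSERT INTO raw_data (sensor_id, value) VALUES (%s, %s)",
                    (topic['id'], msg.temperature))
            con.commit()
        finally:
            con.close()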
Example #18
from branch_utility import BranchUtility
from compiled_file_system import CompiledFileSystem
from custom_logger import CustomLogger
from data_source_registry import CreateDataSource
from environment import GetAppVersion
from file_system import IsFileSystemThrottledError
from future import Future
from gcs_file_system_provider import CloudStorageFileSystemProvider
from github_file_system_provider import GithubFileSystemProvider
from host_file_system_provider import HostFileSystemProvider
from object_store_creator import ObjectStoreCreator
from refresh_tracker import RefreshTracker
from server_instance import ServerInstance
from servlet import Servlet, Request, Response
from timer import Timer, TimerClosure

_log = CustomLogger('refresh')


class RefreshServlet(Servlet):
    '''Servlet which refreshes a single data source.'''
    def __init__(self, request, delegate_for_test=None):
        Servlet.__init__(self, request)
        self._delegate = delegate_for_test or RefreshServlet.Delegate()

    class Delegate(object):
        '''RefreshServlet's runtime dependencies. Override for testing.'''
        def CreateBranchUtility(self, object_store_creator):
            return BranchUtility.Create(object_store_creator)
Example #19
from branch_utility import BranchUtility
from compiled_file_system import CompiledFileSystem
from custom_logger import CustomLogger
from data_source_registry import CreateDataSources
from environment import GetAppVersion
from gcs_file_system_provider import CloudStorageFileSystemProvider
from github_file_system_provider import GithubFileSystemProvider
from host_file_system_provider import HostFileSystemProvider
from object_store_creator import ObjectStoreCreator
from refresh_tracker import RefreshTracker
from render_refresher import RenderRefresher
from server_instance import ServerInstance
from servlet import Servlet, Request, Response
from timer import Timer

_log = CustomLogger('cron')


class CronServlet(Servlet):
    '''Servlet which runs a cron job.'''
    def __init__(self, request, delegate_for_test=None):
        Servlet.__init__(self, request)
        self._delegate = delegate_for_test or CronServlet.Delegate()

    class Delegate(object):
        '''CronServlet's runtime dependencies. Override for testing.'''
        def CreateBranchUtility(self, object_store_creator):
            return BranchUtility.Create(object_store_creator)