def __init__(self, inference_db_path, preprocessor):
     self._inference_db_path = inference_db_path
     self._time_created = datetime.now()
     self._logger = Logger(f'inferencing_logs_{self._time_created.date()}_{self._time_created.strftime("%H%M%S")}.log')
     self._db_handler = SqliteDbHandler(self._logger, self._inference_db_path, 'inferencing_db')
     self._model_utils = ModelUtils(self._logger)
     self._preprocessor = preprocessor
Example #2
    def __init__(self, username, password):
        # READ CONFIGURATION FROM config FILE
        self.__CONFIG = "../config/config.config"

        # PARSE DATA FROM DICTIONARY
        self.__HOST = ''
        self.__DATABASE = ''
        self.__PORT = ''
        self.__get_config()

        # CREATE CONNECTION
        try:
            self.__CONNECTION = psycopg2.connect(host=self.__HOST,
                                                 port=self.__PORT,
                                                 database=self.__DATABASE,
                                                 user=username,
                                                 password=password)
        except Exception as e:
            print(f"Got exception trying to connect:\n {e}")
            raise

        # CREATE CURSOR (only reached if the connection succeeded)
        self.__CURSOR = self.__CONNECTION.cursor()

        #LOGGER
        self.__LOGGER = Logger("database")
        self.__LOGGER.log_info(
            f"Established new connection to database by {username}.")
Example #3
 def __init__(self,
              *filename,
              file_name: str = "networkDeserialization.txt"):
     Logger.__init__(self, file_name)
     self.f.write("Network Deserialization Data")
     self.pipe_list: {Pipe} = self.deserialize_pipe_list(*filename)
     self.valve_list: {Valve} = self.deserialize_valve_list(*filename)
     self.short_pipe_list: {ShortPipe} = self.deserialize_short_pipe_list(*filename)
     self.resistor_list: {Resistor} = self.deserialize_resistor_list(*filename)
     self.compressor_station_list: {CompressorStation} = self.deserialize_compressor_station_list(*filename)
     self.node_list: {Innode} = self.deserialize_innode_list(*filename)
     self.source_list: {Source} = self.deserialize_source_list(*filename)
     self.sink_list: {Sink} = self.deserialize_sink_list(*filename)
     self.control_valve_list: {ControlValve} = self.deserialize_control_valve_list(*filename)
     self.energy_consumption_measurement = self.deserialize_energy_consumption_measurement(
         *filename)
     self.power_measurement = self.deserialize_power_measurement(*filename)
     CompressorStation.set_energy_power_measurement(
         self.energy_consumption_measurement, self.power_measurement)
     Source.read_constant_data(self.source_list)
     self.f.close()
Example #4
    def __init__(self,
                 connecter=None,
                 db_backup='',
                 new_dbname='',
                 logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        if db_backup and os.path.isfile(db_backup):
            self.db_backup = db_backup
        else:
            self.logger.stop_exe(Messenger.NO_BKP_TO_RESTORE)

        if new_dbname:
            self.new_dbname = new_dbname
        else:
            self.logger.stop_exe(Messenger.NO_DBNAME_TO_RESTORE)

        message = Messenger.DB_RESTORER_VARS.format(
            server=self.connecter.server,
            user=self.connecter.user,
            port=self.connecter.port,
            db_backup=self.db_backup,
            new_dbname=self.new_dbname)
        self.logger.debug(Messenger.DB_RESTORER_VARS_INTRO)
        self.logger.debug(message)
Example #5
    def test_automated(self):
        tasks_list = []
        end_tasks_list = []
        n = 2
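        # Queue all "start" tasks first, then a shuffled mix of "end" and "poll" tasks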
        for i in range(n):
            tasks_list.append("start " + str(i))
            end_tasks_list.append("end " + str(i))
            end_tasks_list.append("poll")

        random.shuffle(end_tasks_list)

        tasks_list = tasks_list + end_tasks_list

        result = []
        class_tasks = []
        logger_client = Logger()
        index = 0
        for i in range(len(tasks_list)):
            if tasks_list[i] == "poll":
                result.append(str(index))
                index += 1
                class_tasks.append(logger_client.poll())
            elif 'end' in tasks_list[i]:
                result.append(None)
                id = tasks_list[i].split(" ")[1]
                class_tasks.append(logger_client.end(id))
            else:
                id = tasks_list[i].split(" ")[1]
                result.append(None)
                class_tasks.append(logger_client.start(id, int(id)))

        loop = asyncio.get_event_loop()
        x = loop.run_until_complete(asyncio.gather(*class_tasks))
        self.assertListEqual(x, result)
Example #6
    def __init__(self, mode):
        """
        :param mode: Either load from file or from web address
        """
        self.__logger = Logger(name='emoji_logger')
        self.data = self.load()
Example #7
 def test_happy_case_end(self):
     logger_client = Logger()
     tasks = [logger_client.start(1, 1), logger_client.start(2, 2), logger_client.end(1), logger_client.poll()]
     loop = asyncio.get_event_loop()
     loop.run_until_complete(asyncio.gather(*tasks))
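     # Process 1 was ended, so only one process (id 2) should remain tracked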
     self.assertEqual(len(logger_client._Logger__process_lookup.keys()), 1)
     self.assertEqual(len(logger_client._Logger__heap), 1)
Example #8
    def show_pg_warnings(pg_dbs=[], bkped_dbs=[], logger=None):
        '''
        Target:
            - compare two lists with databases. This function will be used to
              show which PostgreSQL databases do not have a backup in a specified
              directory and which databases have a backup but are not stored in
              PostgreSQL.
        Parameters:
            - pg_dbs: list of PostgreSQL databases.
            - bkped_dbs: list of databases which have a backup.
            - logger: a logger to show and log some messages.
        '''
        if not logger:
            logger = Logger()

        for dbname in pg_dbs:
            if dbname not in bkped_dbs:  # PostgreSQL without backup
                message = Messenger.NO_BACKUP_FOR_POSTGRESQL_DB.format(
                    dbname=dbname)
                logger.highlight('warning', message, 'purple', effect='bold')

        for dbname in bkped_dbs:
            # Backup of a nonexistent PostgreSQL database
            if dbname not in pg_dbs:
                message = Messenger.NO_POSTGRESQL_DB_FOR_BACKUP.format(
                    dbname=dbname)
                logger.highlight('warning', message, 'purple', effect='bold')
Example #9
class BinaryKnob(object):
    value = 0
    pins = []
    logger = None

    def __init__(self, pins):
        self.logger = Logger()
        self.pins = pins
        add_event_detection(self.pins[0], self.__handle_change, True)
        add_event_detection(self.pins[1], self.__handle_change, True)
        add_event_detection(self.pins[2], self.__handle_change, True)
        self.__handle_change(self.pins[0])  # read the initial knob state (a bare reference would be a no-op)

    def get_value(self):
        self.__handle_change(1)
        return self.value

    def __handle_change(self, pin):
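        # Read the three knob pins and pack them into a 3-bit value (1-8)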
        a = GPIO.input(self.pins[0])
        b = GPIO.input(self.pins[1])
        c = GPIO.input(self.pins[2])
        self.value = 1
        if a:
            self.value += 1
        if b:
            self.value += 2
        if c:
            self.value += 4
        self.logger.log("Selected %s" % self.value)
Example #10
    def __init__(self):

        self.image_type = 'OA'

        self.dataset = ProcessData(train_ratio=0.9,
                                   process_raw_data=False,
                                   do_augment=False,
                                   add_augment=True,
                                   do_flip=True,
                                   do_blur=True,
                                   do_deform=True,
                                   do_crop=True,
                                   image_type=self.image_type,
                                   get_scale_center=False,
                                   single_sample=True)

        self.model = cnn_toy_model.cnn_skipC_model(criterion=nn.MSELoss(),
                                                   optimizer=torch.optim.Adam,
                                                   learning_rate=0.001,
                                                   weight_decay=0)

        if torch.cuda.is_available():
            torch.cuda.current_device()
            self.model.cuda()

        self.logger = Logger(model=self.model,
                             project_root_dir=self.dataset.project_root_dir,
                             image_type=self.image_type)
        self.epochs = 2
Example #11
 def __init__(self, pins):
     self.logger = Logger()
     self.pins = pins
     add_event_detection(self.pins[0], self.__handle_change, True)
     add_event_detection(self.pins[1], self.__handle_change, True)
     add_event_detection(self.pins[2], self.__handle_change, True)
     self.__handle_change(self.pins[0])  # read the initial knob state (a bare reference would be a no-op)
Example #12
    def __init__(self, networkDeserialization: NetworkDeserialization,
                 scenario: Scenario):
        Network.__init__(self, networkDeserialization, scenario)
        Logger.__init__(self, "optimization.txt")
        self.model = Model("Minimization of Network Flow Operation Costs")
        element_q, node_q, node_p = self.define_model_variables()

        # self.define_model_variables()
        for pipe in networkDeserialization.pipe_list.values():
            m = Model("approximation")
            variable_dict = {}
            pipe.p_e = m.addVar(lb=0.0, ub=0.0, vtype='C')
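            # lb == ub == 0.0 pins p_e to zero for this toy approximation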
            conti_function: ContiFunction = ContiFunction(pipe, m)
            conti_function._model_definition(pipe, variable_dict)
            m.setObjective(4 * variable_dict[pipe.id] + 3, "minimize")
            m.optimize()
            # conti_functon._model_definition(pipe, variable_dict)

            # conti: ContiFunction = ContiFunction(pipe, m)
            # print(conti.optimize())
            print("Conti Function Approximation p_min = {}, p_max = {}".format(
                pipe.from_node.pressure_min, pipe.from_node.pressure_max))
            # friction:FrictionFunctionApproximation = FrictionFunctionApproximation(pipe.from_node.pressure_min, pipe.from_node.pressure_max,
            #                                                                        pipe.from_node.flow_min, pipe.from_node.flow_max,
            #                                                                        pipe, m)
            # print(friction.optimize())
            # print("Friction Function Approximation p_min ={} p_max ={} q_min ={} q_max ={}".format(pipe.from_node.pressure_min, pipe.from_node.pressure_max, pipe.from_node.flow_min, pipe.from_node.flow_max))
            # impact: ImpactFunctionApproximation = ImpactFunctionApproximation(pipe.to_node.pressure_min, pipe.to_node.pressure_max, pipe.to_node.flow_min, pipe.to_node.flow_max, pipe, m)
            # print(impact.optimize())
            # print("ImpactFunctionApproximation p_min ={}, p_max ={} q_min ={}, q_max ={}".format(pipe.to_node.pressure_min, pipe.to_node.pressure_max, pipe.to_node.flow_min, pipe.to_node.flow_max))
            # gas_consumption: GasFlowCompressorConsumptionApproximation = GasFlowCompressorConsumptionApproximation(4000000, 6000000,4000000, 6000000,
            #                                                                                                        1000.0, 1000.0, model=m, compressor=pipe)
            # print(gas_consumption.optimize())
            # print("GasFlowCompressorConsumption  p_min ={}, p_max ={}, p_v_min = {} p_v_max = {} q_min ={}, q_max ={}".format(pipe.from_node.pressure_min, pipe.from_node.pressure_max, pipe.to_node.pressure_min, pipe.to_node.pressure_max, pipe.to_node.flow_min, pipe.to_node.flow_max))
        self.model.optimize()
Example #13
class DB2Connector:
    __connection = None
    __logger = None

    def __init__(self):
        self.__logger = Logger()

    def getConnection(self, db_detail):
        try:
            if self.__connection is None:
                conn = psycopg2.connect(user=db_detail.get_user(),
                                        password=db_detail.get_password(),
                                        host=db_detail.get_host(),
                                        port=db_detail.get_port(),
                                        database=db_detail.get_database())
                self.__logger.log(
                    "Info", "Connection established successfully--------")
                self.__connection = conn
            else:
                return self.__connection
        except Exception as e:
            self.__logger.log(
                "Info", "Can't connect to postgres database {}".format(e))
            #sys.exit(1)
        return self.__connection
Example #14
 def __init__(self):
     """
     control variables
     """
     super(Sniffer, self).__init__()
     self.sock = None
     self.log = Logger("/dev/stdout")
Example #15
class Scraper:
	def __init__(self, output_path):
		self.output_path = output_path
		self.logger = Logger(out_path=self.output_path)
		self.all_urls = set()
		self.result = []

	def collect_urls(self):
		try:
			specialities = get_speciality_data()
			state_code_data = get_state_code_data()
			for speciality_key, code in specialities.items():
				for item in state_code_data:
					furl = con_scraper.SPECIALITY_URL.format(code, item[0], item[1])
					self.all_urls.add(furl)
			print(len(self.all_urls))
		except Exception as ex:
			traceback.print_exc()
			raise ex

	def collect_data(self):
		res = open("all_result_2.csv","a")
		req_obj = get_new_tr_obj()
		cookies = parseCookieFile(cookiefile=os.path.join(os.getcwd(), "static_input", "cookies.txt" ))
		try:
			for url in self.all_urls:
				# req_obj = get_new_tr_obj()
				try:
					req = req_obj.get(url, timeout=30)
					if req.status_code == 200:
						try:
							soup = BeautifulSoup(req.content, "html.parser")
							trs = soup.find_all("tr")
							for row in range(1,len(trs)):
								try:
									each_data = trs[row].find_all("td")
									specialty_name = each_data[1].text.strip() if each_data[1] else None
									location = each_data[2].text.strip().replace(","," ") if  len(each_data)>2 and each_data[2] else None
									doc_name = each_data[0].text
									doc_url = each_data[0].find("a")
									doc_url = doc_url["href"] if doc_url else ""
									actual_doc_url = con_scraper.BASE_URL + doc_url
								except Exception as ex:
									traceback.print_exc()
								else:
									if actual_doc_url != "https://doctorfinder.ama-assn.org/doctorfinder/":
										self.logger.info( "{}, {}".format(specialty_name.strip(), doc_name.strip().replace("\n","")))
										fdata = "|".join([ str(specialty_name).strip().replace("\n",""), str(location).strip().replace("\n",""), str(doc_name).strip().replace("\n",""), actual_doc_url.strip() ])+"\n"
										# self.result.append(fdata)
										print( "{}, {}".format(specialty_name.strip(), doc_name.strip().replace("\n","")))

										res.write(fdata)
						except Exception as ex:
							traceback.print_exc()
							# raise ex
				except Exception as ex:
					pass  # ignore per-URL failures and continue with the next URL
			res.close()
		except Exception as ex:
			traceback.print_exc()
Example #16
    def on_line_received(self, json_payload):
        Logger.debug('line received: %s', json_payload, 'server')
        payload = json.loads(json_payload)

        if payload and 'type' in payload and payload['type'] == 'user_input':
            user_input = UserInputDataUnit(payload['content']).get_object()
            self.game.apply_user_input_to_player(self.user_id, user_input)
            self.update_game()
            self.send_broadcast_payload_except_self({
                'type': 'user_input',
                'content': payload['content'],
                'user': PlayerDataUnit(self.game.get_player(self.user_id)).get_pdu()
            })

            self.interval_executor.execute_if_interval_elapsed(
                self, lambda self: self.send_broadcast_payload(
                    {'type': 'game_state',
                     'content': GameDataUnit(self.game).get_pdu()}))

        else:
            Logger.error('Unknown message type: (%s)', json_payload,
                         'server_protocol')
Example #17
def main(args):
    conf = yaml.safe_load(open(args.config))
    conf.update(conf[conf['model']])

    if args.multi_gpu:
        conf['batch_size'] *= torch.cuda.device_count()

    datasets = {
        'MNIST': torchvision.datasets.MNIST,
        'CIFAR': torchvision.datasets.CIFAR10
    }
    if conf['dataset'].upper() == 'MNIST':
        conf['data_path'] = os.path.join(conf['data_path'], 'MNIST')
        size = 28
        classes = list(range(10))
        mean, std = ((0.1307,), (0.3081,))
    elif conf['dataset'].upper() == 'CIFAR':
        conf['data_path'] = os.path.join(conf['data_path'], 'CIFAR')
        size = 32
        classes = ['plane', 'car', 'bird', 'cat', 'deer',
                   'dog', 'frog', 'horse', 'ship', 'truck']
        mean, std = ((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
    else:
        raise ValueError('Dataset must be either MNIST or CIFAR!')
    transform = transforms.Compose([
        transforms.RandomCrop(size, padding=2),
        transforms.ToTensor(),
        transforms.Normalize(mean, std)
    ])
    loaders = {}
    trainset = datasets[conf['dataset'].upper()](root=conf['data_path'],
                                                 train=True, download=True,
                                                 transform=transform)
    testset = datasets[conf['dataset'].upper()](root=conf['data_path'],
                                                train=False, download=True,
                                                transform=transform)
    loaders['train'] = torch.utils.data.DataLoader(
        trainset, batch_size=conf['batch_size'], shuffle=True, num_workers=4)
    loaders['test'] = torch.utils.data.DataLoader(
        testset, batch_size=conf['batch_size'], shuffle=False, num_workers=4)
    print(9 * '#', 'Using {} dataset'.format(conf['dataset']), 9 * '#')


    # Training
    use_gpu = not args.disable_gpu and torch.cuda.is_available()
    caps_net = CapsNetTrainer(loaders,
                              conf['model'],
                              conf['lr'],
                              conf['lr_decay'],
                              conf['num_classes'],
                              conf['num_routing'],
                              conf['loss'],
                              use_gpu=use_gpu,
                              multi_gpu=args.multi_gpu)

    ensure_dir('logs')
    logger = {}
    logger['train'] = Logger('logs/{}-train'.format(conf['dataset']))
    logger['test'] = Logger('logs/{}-test'.format(conf['dataset']))
    ensure_dir(conf['save_dir'])
    caps_net.train(conf['epochs'], classes, conf['save_dir'], logger)
Example #18
 def lineReceived(self, json_payloads):
     Logger.trace('line received from network process: (%s)', json_payloads, category='client')
     json_payloads = json_payloads.replace(self.terminator, '')
     for json_payload in json_payloads.split('\n'):
         json_payload = json_payload.strip('\r')
         if json_payload != '':
             payload = json.loads(json_payload)
             self.output_queue.put(payload)
示例#19
0
 def init_logger(self):
     head = [
         "average_episode_std", "sum steps episode number",
         "total number of episodes", "Average sum of rewards per episode",
         "KL between old and new distribution", "Surrogate loss",
         "Surrogate loss prev", "ds", "entropy", "mean_advant"
     ]
     self.logger = Logger(head)
Example #20
 def __init__(self, pin, callback):
     self.logger = Logger()
     self.pin = pin
     if callback is None:
         add_event_detection(self.pin, self.__default_callback, True)
     else:
         add_event_detection(self.pin, callback, True)
     self.__handle_change  # note: bare attribute reference, not a call; likely meant to invoke the handler
Example #21
    def __init__(self,
                 connecter=None,
                 bkp_path='',
                 group='',
                 bkp_type='dump',
                 prefix='',
                 vacuum=True,
                 logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        # If backup directory is not specified, create a default one to store
        # the backups
        if bkp_path:
            self.bkp_path = bkp_path
        else:
            self.bkp_path = Default.BKP_PATH
            Dir.create_dir(self.bkp_path, self.logger)

        if group:
            self.group = group
        else:
            self.group = Default.GROUP

        if bkp_type is None:
            self.bkp_type = Default.BKP_TYPE
        elif Checker.check_compress_type(bkp_type):
            self.bkp_type = bkp_type
        else:
            self.logger.stop_exe(Msg.INVALID_BKP_TYPE)

        self.prefix = prefix

        if isinstance(vacuum, bool):
            self.vacuum = vacuum
        elif Checker.str_is_bool(vacuum):
            self.vacuum = Casting.str_to_bool(vacuum)
        else:
            self.logger.stop_exe(Msg.INVALID_VACUUM)

        msg = Msg.CL_BACKER_VARS.format(server=self.connecter.server,
                                        user=self.connecter.user,
                                        port=self.connecter.port,
                                        bkp_path=self.bkp_path,
                                        group=self.group,
                                        bkp_type=self.bkp_type,
                                        prefix=self.prefix,
                                        vacuum=self.vacuum)
        self.logger.debug(Msg.CL_BACKER_VARS_INTRO)
        self.logger.debug(msg)
Example #22
    def __init__(self, time='', command='', logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        self.time = time.strip()
        self.command = command.strip()
Example #23
 def grayscale(self, img):
     """
     Apply a grayscale filter
     :param img: the image to apply the filter
     :return: return the filtered image
     """
     logger = Logger()
     logger.log(" Applying grayscale filter...")
     return cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
Example #24
 def put_text(self, img):
     """
     Apply a put text filter
     :param img: the image to apply the filter
     :return: return the filtered image
     """
     logger = Logger()
     logger.log(" Applying FilterZeTeam filter...")
     return cv2.putText(img, 'Moise, Lucas & Kevin', (100, 50),
                        cv2.FONT_HERSHEY_SIMPLEX, 1, (50, 0, 255), 2,
                        cv2.LINE_AA)
Example #25
def train():
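    # Track the best (lowest) losses seen so far; 100 is just a large initial value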
    best_loss_cls = 100
    best_loss_reg = 100
    best_loss = 100

    pretrained_dict = torch.load('model/vgg16-397923af.pth')
    model_dict = model.base_layers.state_dict()
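    # Keep only the pretrained VGG16 weights whose names match layers in base_layers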
    pretrained_dict = {
        k: v
        for k, v in pretrained_dict.items() if k in model_dict
    }
    model_dict.update(pretrained_dict)
    model.base_layers.load_state_dict(model_dict)
    # dead code: this constructed a Faster R-CNN model and immediately discarded it
    # torchvision.models.detection.fasterrcnn_resnet50_fpn()
    model.apply(weights_init)
    model.train()
    logger = Logger('log/')
    for epoch in range(40):
        epoch_loss_cls = 0
        epoch_loss_regr = 0
        epoch_loss = 0
        epoch_batch = 0
        for batch_idx, (imgs, clss, regs) in enumerate(dataloader):
            imgs = imgs.to(device)
            clss = clss.to(device)
            regs = regs.to(device)

            pre_cls, pre_reg, pre_cls_prob = model(imgs)
            loss_cls = critetion_cls(pre_cls, clss)
            loss_reg = critetion_reg(pre_reg, regs)
            loss = loss_cls + loss_reg

            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

            epoch_loss_cls += loss_cls.item()
            epoch_loss_regr += loss_reg.item()
            epoch_loss += loss.item()
            epoch_batch += 1

            print(
                "epoch:{}, batch_idx:{}, loss_cls:{:.4f}, loss_reg:{:.4f}, loss:{:.4f}, avg_loss:{:.4f}"
                .format(epoch, batch_idx, loss_cls, loss_reg, loss,
                        epoch_loss / epoch_batch))

            if loss_cls.item() < best_loss_cls or loss_reg.item() < best_loss_reg or loss.item() < best_loss:
                best_loss_cls = loss_cls.item()
                best_loss_reg = loss_reg.item()
                best_loss = loss.item()
                torch.save(model, 'model/ctpn.pth')
        logger.scalar_summary('loss_cls', epoch_loss_cls / epoch_batch, epoch)
        logger.scalar_summary('loss_reg', epoch_loss_regr / epoch_batch, epoch)
        logger.scalar_summary('loss', epoch_loss, epoch)
        logger.scalar_summary('avgloss', epoch_loss / epoch_batch, epoch)

    print("--------------END----------------")
Example #26
    def __init__(self, bkp_path='', prefix='', min_n_bkps=1, exp_days=365,
                 max_size=5000, logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if bkp_path and os.path.isdir(bkp_path):
            self.bkp_path = bkp_path
        else:
            self.logger.stop_exe(Messenger.DIR_DOES_NOT_EXIST)

        if prefix is None:
            self.prefix = Default.PREFIX
        else:
            self.prefix = prefix

        if min_n_bkps is None:
            self.min_n_bkps = Default.MIN_N_BKPS
        elif isinstance(min_n_bkps, int):
            self.min_n_bkps = min_n_bkps
        elif Checker.str_is_int(min_n_bkps):
            self.min_n_bkps = Casting.str_to_int(min_n_bkps)
        else:
            self.logger.stop_exe(Messenger.INVALID_MIN_BKPS)

        if exp_days is None:
            self.exp_days = Default.EXP_DAYS
        elif isinstance(exp_days, int) and exp_days >= -1:
            self.exp_days = exp_days
        elif Checker.str_is_valid_exp_days(exp_days):
            self.exp_days = Casting.str_to_int(exp_days)
        else:
            self.logger.stop_exe(Messenger.INVALID_OBS_DAYS)

        if max_size is None:
            self.max_size = Default.MAX_SIZE
        elif Checker.str_is_valid_max_size(max_size):
            self.max_size = max_size
        else:
            self.logger.stop_exe(Messenger.INVALID_MAX_TSIZE)

        # Split a string with size and unit of measure into a dictionary
        self.max_size = Casting.str_to_max_size(self.max_size)
        # Get the equivalence in Bytes of the specified unit of measure
        self.equivalence = Casting.get_equivalence(self.max_size['unit'])
        # Get the specified size in Bytes
        self.max_size_bytes = self.max_size['size'] * self.equivalence

        message = Messenger.CL_TRIMMER_VARS.format(
            bkp_path=self.bkp_path, prefix=self.prefix,
            min_n_bkps=self.min_n_bkps, exp_days=self.exp_days,
            max_size=self.max_size)
        self.logger.debug(Messenger.CL_TRIMMER_VARS_INTRO)
        self.logger.debug(message)
Example #27
    def __init__(self):
        GPIO.setup(self.binary_output_pins, GPIO.OUT)
        GPIO.setup(self.mux_enable_output_pins, GPIO.OUT)

        self.logger = Logger()
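        # Clear all four latch outputs so every box starts closed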
        self.__set_latch(self.binary_output_pins[0], 0)
        self.__set_latch(self.binary_output_pins[1], 0)
        self.__set_latch(self.binary_output_pins[2], 0)
        self.__set_latch(self.binary_output_pins[3], 0)
        self.close_boxes()
Example #28
 def __init__(self, file_path, schema_path, process):
     self._file_path = file_path
     self._schema_path = schema_path
     self._time_created = datetime.now()
     self._logger = Logger(
         f'validation_{process}_logs_{self._time_created.date()}_{self._time_created.strftime("%H%M%S")}.log'
     )
     self._path_to_good_files = None
     self._path_to_bad_files = None
     self.process = process
Example #29
	def __init__(self, working_dir, log_dir, compare_file, parameter_format, logger=None):
		"""
		parameter_format specifies what parameters from which files are needed.
		[file, name, min, max, step]
		"""
		if logger is None:
			self.log = Logger()
		else:
			self.log = logger
		self.error = False
		self.working_dir = working_dir
		self.log_dir = log_dir
		self.kiva_path = '../ext'
		self.kiva_name = 'kiva_0D'
		self.kiva_parameter_files = ['itape17', 'itape5', 'itapeERC', 'itapeRs']
		self.parameter_format = parameter_format
		self.log.debug("Parameter Format: %s" % self.parameter_format)
		# this will later hold all kiva_parameter_files that
		# need to be loaded as ITapeFile objects
		self.itapes = {}
		self.compare_file = compare_file
		# Check if working directory exists and that it is empty
		self.working_dir_indicator = os.path.join(self.working_dir, 'kivagen.working.dir')
		if not os.path.isdir(self.working_dir):
			self.log.error("'%s' does not exist." % self.working_dir)
			self.error = True
			self.working_dir = None
		elif os.listdir(self.working_dir):	# not empty
			if not os.path.isfile(self.working_dir_indicator):
				self.log.error("'%s' does not seem to be a working directory." % self.working_dir)
				self.error = True
				self.working_dir = None
				return
		else:
			open(self.working_dir_indicator, 'a').close()
		# Check that log dir exists
		if not os.path.isdir(self.log_dir):
			self.log.error("'%s' does not exist." % self.log_dir)
			self.error = True
			self.log_dir = None
		# Check if input files exist
		input_files = [self.compare_file]
		input_files.append(os.path.join(self.kiva_path, self.kiva_name))
		for parameter_file in self.kiva_parameter_files:
			input_files.append(os.path.join(self.kiva_path, parameter_file))
		for input_file in input_files:
			if not os.path.isfile(input_file):
				self.log.error("'%s' not found." % input_file)
				self.error = True
		# Parse compare file
		self.compare_values = self._readCompareFile(self.compare_file)
		# self.log.debug("self.compare_values = %s" % self.compare_values)
		# Load Parameters
		self._loadParameters()
Example #30
 def dilate_file(self, img, intensity):
     """
     Apply a dilation filter to the image
     :param img: The image path
     :param intensity: The intensity of the filter
     :return:
     """
     logger = Logger()
     kernel = np.ones((20, 20), np.uint8)
     logger.log(" Applying dilation filter...")
     return cv2.dilate(img, kernel, iterations=intensity)
Example #31
 def parse(self, data: dict) -> dict:
     """
     general dict parser after parsing environments
     """
     _dict = data
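     # Repeated passes let environment references nested inside other values resolve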
     for _ in range(5):
         self._parse_dict(_dict)
     for failed in self.get_variables(str(_dict)):
         Logger.get().debug('[JsParser]', 'failed to find env value of "',
                            failed, '"')
     return _dict
Example #32
def send_to_server_recursive_threaded_loop(connection_handler):
    Logger.trace('loop from network process sender thread', category='client')

    while not connection_handler.input_queue.empty():
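        # Drain everything queued by the game thread and send each payload over the wire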
        payload = connection_handler.input_queue.get()
        Logger.debug('fetch input_queue from network process sender thread', payload, category='client')
        connection_handler.sendLine(json.dumps(payload))

    time.sleep(0.01)  # 10 ms
    # schedule the next iteration of this sender loop on a worker thread
    threads.deferToThread(send_to_server_recursive_threaded_loop, connection_handler)
Example #33
 def __init__(self, lighting, demo_mode=False):
     self.logger = Logger()
     # self.demo_mode = demo_mode
     # For now, let's leave it as only counting one coin.
     self.demo_mode = True
     self.lighting = lighting
     GPIO.setup(self.coin_counter_pins, GPIO.OUT)
     GPIO.setup(self.coin_input_pin, GPIO.IN)
     GPIO.setup(self.coin_counter_input_pin, GPIO.IN)
     self.__set_coin_count(0)
     self.start_waiting_for_coin()
Example #34
 def forbid_root(logger=None):
     '''
     Target:
         - stop the execution of the program if this is being run by "root".
     '''
     if not logger:
         logger = Logger()
     try:
         if getuser() == 'root':  # Get system username
             raise Exception()
     except Exception as e:
         logger.debug('Error in function "forbid_root": {}.'.format(
             str(e)))
         logger.stop_exe(Messenger.ROOT_NOT_ALLOWED)
Example #35
    def __init__(self, connecter=None, db_backup='', new_dbname='',
                 logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        if db_backup and os.path.isfile(db_backup):
            self.db_backup = db_backup
        else:
            self.logger.stop_exe(Messenger.NO_BKP_TO_RESTORE)

        if new_dbname:
            self.new_dbname = new_dbname
        else:
            self.logger.stop_exe(Messenger.NO_DBNAME_TO_RESTORE)

        message = Messenger.DB_RESTORER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, db_backup=self.db_backup,
            new_dbname=self.new_dbname)
        self.logger.debug(Messenger.DB_RESTORER_VARS_INTRO)
        self.logger.debug(message)
Example #36
 def __init__(self, pin, callback):
     self.logger = Logger()
     self.pin = pin
     if callback is None:
         add_event_detection(self.pin, self.__default_callback, True)
     else:
         add_event_detection(self.pin, callback, True)
     self.__handle_change  # note: bare attribute reference, not a call; likely meant to invoke the handler
Example #37
    def create_dir(path, logger=None):
        '''
        Target:
            - create the specified directory if it does not exist, stopping
              the execution of the program if it cannot be created.
        Parameters:
            - path: directory to create.
            - logger: a logger to show and log some messages.
        '''
        if not logger:
            logger = Logger()

        try:
            if not os.path.exists(path):  # If path does not exist...
                os.makedirs(path)  # Create it
        except Exception as e:
            logger.debug('Error in function "create_dir": {}.'.format(
                str(e)))
            logger.stop_exe(Messenger.USER_NOT_ALLOWED_TO_CHDIR)
Example #38
 def __init__(self, lighting, demo_mode=False):
     self.logger = Logger()
     self.demo_mode = demo_mode
     self.lighting = lighting
     GPIO.setup(self.coin_counter_pins, GPIO.OUT)
     GPIO.setup(self.coin_input_pin, GPIO.IN)
     GPIO.setup(self.coin_counter_input_pin, GPIO.IN)
     self.__set_coin_count(0)
     self.start_waiting_for_coin()
Example #39
    def on_line_received(self, json_payload):
        Logger.debug('line received: %s', json_payload, 'server')
        payload = json.loads(json_payload)

        if payload and 'type' in payload and payload['type'] == 'user_input':
            user_input = UserInputDataUnit(payload['content']).get_object()
            self.game.apply_user_input_to_player(self.user_id, user_input)
            self.update_game()
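            # Echo the raw input to every other client, tagged with the sender's current state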
            self.send_broadcast_payload_except_self({
                'type': 'user_input',
                'content': payload['content'],
                'user': PlayerDataUnit(self.game.get_player(self.user_id)).get_pdu()
            })

            self.interval_executor.execute_if_interval_elapsed(self, lambda self: self.send_broadcast_payload(
                {'type': 'game_state', 'content': GameDataUnit(self.game).get_pdu()}))

        else:
            Logger.error('Unknown message type: (%s)', json_payload, 'server_protocol')
Example #40
    def __init__(self):
        GPIO.setup(self.binary_output_pins, GPIO.OUT)
        GPIO.setup(self.mux_enable_output_pins, GPIO.OUT)

        self.logger = Logger()
        self.__set_latch(self.binary_output_pins[0], 0)
        self.__set_latch(self.binary_output_pins[1], 0)
        self.__set_latch(self.binary_output_pins[2], 0)
        self.__set_latch(self.binary_output_pins[3], 0)
        self.close_boxes()
Example #41
    def db_filter_exclude(dbs_list, ex_dbs=[], ex_regex='', logger=None):
        '''
        Target:
            - filter a list of databases to remove only the specified ones,
              taking into account the received parameters.
        Parameters:
            - dbs_list: list to filter.
            - ex_dbs: list with the databases' names to exclude.
            - ex_regex: regular expression which indicates the databases' names
              to exclude.
            - logger: a logger to show and log some messages.
        Return:
            - a filtered list (subset of "dbs_list"), resulting from applying
              the exclusion conditions "ex_dbs" and "ex_regex".
        '''
        if not logger:
            logger = Logger()

        # Copy the list so items can be removed without conflict errors
        dbs_filtered = dbs_list[:]

        if '*' in ex_dbs:  # If exclude all...
            return []  # Return an empty list

        if ex_regex:
            for db in dbs_list:
                dbname = db['datname']
                # If database's name is in the exclusion list or matches the
                # regular expression...
                if dbname in ex_dbs or re.match(ex_regex, dbname):
                    # Remove database from the list
                    dbs_filtered.remove(db)
                    logger.debug('Excluded database: {}'.format(dbname))
        else:
            for db in dbs_list:
                dbname = db['datname']
                # If database's name is in the exclusion list...
                if dbname in ex_dbs:
                    # Remove database from the list
                    dbs_filtered.remove(db)
                    logger.debug('Excluded database: {}'.format(dbname))

        return dbs_filtered
Example #42
class Button(object):
    value = 0
    pin = None
    logger = None

    def __init__(self, pin, callback):
        self.logger = Logger()
        self.pin = pin
        if callback is None:
            add_event_detection(self.pin, self.__default_callback, True)
        else:
            add_event_detection(self.pin, callback, True)
        self.__handle_change  # note: bare attribute reference, not a call; likely meant to invoke the handler

    def __default_callback(self):
        self.logger.log("Button: at pin %s callback with default callback" % self.pin)

    def get_value(self):
        self.__handle_change(1)
        return self.value
Example #43
 def __init__(self, lighting, demo_mode=False):
     self.logger = Logger()
     # self.demo_mode = demo_mode
     # For now, let's leave it as only counting one coin.
     self.demo_mode = True
     self.lighting = lighting
     GPIO.setup(self.coin_counter_pins, GPIO.OUT)
     GPIO.setup(self.coin_input_pin, GPIO.IN)
     GPIO.setup(self.coin_counter_input_pin, GPIO.IN)
     self.__set_coin_count(0)
     self.start_waiting_for_coin()
Example #44
    def db_filter_include(dbs_list, in_dbs=[], in_regex='', logger=None):
        '''
        Target:
            - filter a list of databases to get only the specified ones, taking
              into account the received parameters.
        Parameters:
            - dbs_list: list to filter.
            - in_dbs: list with the databases' names to include.
            - in_regex: regular expression which indicates the databases' names
              to include.
            - logger: a logger to show and log some messages.
        Return:
            - a filtered list (subset of "dbs_list"), resulting from applying
              the inclusion conditions "in_dbs" and "in_regex".
        '''
        if not logger:
            logger = Logger()

        dbs_filtered = []

        if '*' in in_dbs:  # If include all...
            return dbs_list  # Return the whole list

        if in_regex:
            for db in dbs_list:
                dbname = db['datname']
                # If database's name is in the inclusion list or matches the
                # regular expression...
                if dbname in in_dbs or re.match(in_regex, dbname):
                    dbs_filtered.append(db)  # Add database to the list
                    logger.debug('Included database: {}'.format(dbname))
        else:
            for db in dbs_list:
                dbname = db['datname']
                # If database's name is in the inclusion list...
                if dbname in in_dbs:
                    dbs_filtered.append(db)  # Add database to the list
                    logger.debug('Included database: {}'.format(dbname))

        return dbs_filtered
Example #45
    def __init__(self, level=1, username='', email='', password='',
                 to_infos=[], cc_infos=[], bcc_infos=[], server_tag='',
                 external_ip='', op_type='', logger=None):

        if logger:
            self.logger = logger
        else:
            from logger.logger import Logger
            self.logger = Logger()

        if isinstance(level, int) and level in Default.MAIL_LEVELS:
            self.level = level
        elif Checker.str_is_int(level):
            self.level = Casting.str_to_int(level)
        else:
            self.level = Default.MAIL_LEVEL

        self.from_info['email'] = email
        if not Checker.str_is_valid_mail(email):
            message = Messenger.INVALID_FROM_MAIL.format(
                email=email)
            self.logger.highlight('warning', message, 'yellow')

        self.from_info['name'] = username
        if username == '':
            message = Messenger.INVALID_FROM_USERNAME
            self.logger.highlight('warning', message, 'yellow')

        self.from_info['pwd'] = password
        if password == '':
            message = Messenger.INVALID_FROM_PASSWORD
            self.logger.highlight('warning', message, 'yellow')

        to_infos = Casting.str_to_list(to_infos)
        self.to_infos = self.get_mail_infos(to_infos)

        cc_infos = Casting.str_to_list(cc_infos)
        self.cc_infos = self.get_mail_infos(cc_infos)

        bcc_infos = Casting.str_to_list(bcc_infos)
        self.bcc_infos = self.get_mail_infos(bcc_infos)

        if op_type in self.OP_TYPES.keys():
            self.op_type = op_type
        else:
            self.op_type = 'u'

        self.server_tag = server_tag
        self.external_ip = external_ip
Example #46
 def __init__(self):
     GPIO.cleanup()
     GPIO.setmode(GPIO.BOARD)
     self.logger = Logger()
     self.__init_pins()
     self.box_controller = BinaryBoxController()
     self.printer = Printer()
     LightSystemManager.setup()
     # deprecated by the lighting system manager
     self.lighting = LightingController()
     self.adventure_knob_a = BinaryKnob(self.box_select_pins_a)
     self.adventure_knob_b = BinaryKnob(self.box_select_pins_b)
     self.coin_machine = CoinMachine(self.lighting, self.demo_mode)
     self.server = api.run.ServerController()
     self.api = api.run
Example #47
def get_user_info(user_id, token, logfilename):
    logger_obj = Logger(log_name=' USER INFO THREAD', log_filename=logfilename)
    status, user_info_str, info = request_data(_4SQ_USER_URL % (user_id, token))

    if (status == 500) or (status == 404):
        logger_obj.put_message('error', str(status) + ' - %s' % user_id)
        return status, None, 0

    elif status == 200:
        logger_obj.put_message('debug', '200 - %s' % user_id)
        user_info = dict()
        try:
            user_info = json.loads(user_info_str)['response']['user']
            user_info['current_time'] = str(int(time.mktime(datetime.datetime.now().timetuple())))
            return status, user_info, 1
        except (KeyError, ValueError):  # malformed JSON or missing fields
            return status, user_info, 1
    else:
        logger_obj.put_message('critical', 'Other error - %s' % user_id)
        logger_obj.put_message('critical', status)
        return status, None, 1 
Example #48
    def __init__(self, connecter=None, bkp_path='', group='',
                 bkp_type='dump', prefix='', vacuum=True, logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        # If backup directory is not specified, create a default one to store
        # the backups
        if bkp_path:
            self.bkp_path = bkp_path
        else:
            self.bkp_path = Default.BKP_PATH
            Dir.create_dir(self.bkp_path, self.logger)

        if group:
            self.group = group
        else:
            self.group = Default.GROUP

        if bkp_type is None:
            self.bkp_type = Default.BKP_TYPE
        elif Checker.check_compress_type(bkp_type):
            self.bkp_type = bkp_type
        else:
            self.logger.stop_exe(Msg.INVALID_BKP_TYPE)

        self.prefix = prefix

        if isinstance(vacuum, bool):
            self.vacuum = vacuum
        elif Checker.str_is_bool(vacuum):
            self.vacuum = Casting.str_to_bool(vacuum)
        else:
            self.logger.stop_exe(Msg.INVALID_VACUUM)

        msg = Msg.CL_BACKER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, bkp_path=self.bkp_path, group=self.group,
            bkp_type=self.bkp_type, prefix=self.prefix, vacuum=self.vacuum)
        self.logger.debug(Msg.CL_BACKER_VARS_INTRO)
        self.logger.debug(msg)
Example #49
    def __init__(self, pdu_or_object):
        self.pdu = {}

        Logger.debug("PDU init (%s:%s)", (type(pdu_or_object), pdu_or_object), 'network_protocol')
        if isinstance(pdu_or_object, dict):
            Logger.trace("PDU init from dict", category='network_protocol')
            self.from_dictionary(pdu_or_object)
        else:
            Logger.trace("PDU init from object", category='network_protocol')
            self.from_object(pdu_or_object)
Example #50
    def __init__(self, connecter=None, new_dbname='', original_dbname='',
                 logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        # First check whether the name of the copy already exists in PostgreSQL
        self.connecter.cursor.execute(Queries.PG_DB_EXISTS, (new_dbname, ))
        # Do not replicate if the name already exists
        result = self.connecter.cursor.fetchone()
        if result:
            msg = Msg.DB_ALREADY_EXISTS.format(dbname=new_dbname)
            self.logger.stop_exe(msg)

        if new_dbname:
            self.new_dbname = new_dbname
        else:
            self.logger.stop_exe(Msg.NO_NEW_DBNAME)

        # First check whether the name of the source exists in PostgreSQL
        self.connecter.cursor.execute(Queries.PG_DB_EXISTS,
                                      (original_dbname, ))
        result = self.connecter.cursor.fetchone()
        if not result:
            msg = Msg.DB_DOES_NOT_EXIST.format(dbname=original_dbname)
            self.logger.stop_exe(msg)

        if original_dbname:
            self.original_dbname = original_dbname
        else:
            self.logger.stop_exe(Msg.NO_ORIGINAL_DBNAME)

        msg = Msg.REPLICATOR_VARS.format(server=self.connecter.server,
                                         user=self.connecter.user,
                                         port=self.connecter.port,
                                         original_dbname=self.original_dbname,
                                         new_dbname=self.new_dbname)
        self.logger.debug(Msg.REPLICATOR_VARS_INTRO)
        self.logger.debug(msg)
Example #51
    def get_filtered_dbnames(dbs_all, in_dbs=[], ex_dbs=[], in_regex='',
                             ex_regex='', in_priority=False, logger=None):
        '''
        Target:
            - filter a list of databases' names taking into account inclusion
              and exclusion parameters and their priority.
        Parameters:
            - dbs_all: list to filter.
            - in_dbs: list with the databases' names to include.
            - ex_dbs: list with the databases' names to exclude.
            - in_regex: regular expression which indicates the databases' names
              to include.
            - ex_regex: regular expression which indicates the databases' names
              to exclude.
            - in_priority: a flag which determinates if the inclusion
              parameters must predominate over the exclusion ones.
            - logger: a logger to show and log some messages.
        Return:
            - a filtered list (subset of "dbs_all").
        '''
        if not logger:
            logger = Logger()

        bkp_list = []

        if in_priority:  # If inclusion is over exclusion
            # Apply exclusion first and then inclusion
            bkp_list = DbSelector.dbname_filter_exclude(dbs_all, ex_dbs,
                                                        ex_regex, logger)
            bkp_list = DbSelector.dbname_filter_include(bkp_list, in_dbs,
                                                        in_regex, logger)
        else:
            # Apply inclusion first and then exclusion
            bkp_list = DbSelector.dbname_filter_include(dbs_all, in_dbs,
                                                        in_regex, logger)
            bkp_list = DbSelector.dbname_filter_exclude(bkp_list, ex_dbs,
                                                        ex_regex, logger)

        logger.highlight('info', Messenger.SEARCHING_SELECTED_DBS, 'white')

        if bkp_list == []:
            logger.highlight('warning', Messenger.EMPTY_DBNAME_LIST, 'yellow',
                             effect='bold')
        else:
            for dbname in bkp_list:
                logger.info(Messenger.SELECTED_DB.format(dbname=dbname))
        return bkp_list
Example #52
    def __init__(self, connecter=None, in_dbs=[], old_role='', new_role='',
                 logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        if isinstance(in_dbs, list):
            self.in_dbs = in_dbs
        else:
            self.in_dbs = Casting.str_to_list(in_dbs)

        if old_role:
            self.old_role = old_role
        else:
            self.logger.stop_exe(Msg.NO_OLD_ROLE)

        if not new_role:
            self.logger.stop_exe(Msg.NO_NEW_ROLE)
        # First check whether the user exists in PostgreSQL or not
        self.connecter.cursor.execute(Queries.PG_USER_EXISTS, (new_role, ))
        # Do not alter database if the user does not exist
        result = self.connecter.cursor.fetchone()
        if result:
            self.new_role = new_role
        else:
            msg = Msg.USER_DOES_NOT_EXIST.format(user=new_role)
            self.logger.stop_exe(msg)

        msg = Msg.ALTERER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, in_dbs=self.in_dbs,
            old_role=self.old_role, new_role=self.new_role)
        self.logger.debug(Msg.ALTERER_VARS_INTRO)
        self.logger.debug(msg)
Example #53
    def __init__(self, connecter=None, cluster_backup='', logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        if cluster_backup and os.path.isfile(cluster_backup):
            self.cluster_backup = cluster_backup
        else:
            self.logger.stop_exe(Messenger.NO_BKP_TO_RESTORE)

        message = Messenger.CL_RESTORER_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, cluster_backup=self.cluster_backup)
        self.logger.debug(Messenger.CL_RESTORER_VARS_INTRO)
        self.logger.debug(message)
Example #54
    def __init__(self, connecter=None, dbnames=[], logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Msg.NO_CONNECTION_PARAMS)

        if isinstance(dbnames, list):
            self.dbnames = dbnames
        else:
            self.dbnames = Casting.str_to_list(dbnames)

        msg = Msg.DROPPER_VARS.format(server=self.connecter.server,
                                      user=self.connecter.user,
                                      port=self.connecter.port,
                                      dbnames=self.dbnames)
        self.logger.debug(Msg.DROPPER_VARS_INTRO)
        self.logger.debug(msg)
Example #55
    def __init__(self, connecter, target_all=False, target_user='',
                 target_dbs=[], logger=None):

        if logger:
            self.logger = logger
        else:
            self.logger = Logger()

        if connecter:
            self.connecter = connecter
        else:
            self.logger.stop_exe(Messenger.NO_CONNECTION_PARAMS)

        if target_all is None:
            self.target_all = target_all
        elif isinstance(target_all, bool):
            self.target_all = target_all
        elif Checker.str_is_bool(target_all):
            self.target_all = Casting.str_to_bool(target_all)
        else:
            self.logger.stop_exe(Messenger.INVALID_TARGET_ALL)

        self.target_user = target_user

        if target_dbs is None:
            self.target_dbs = []
        elif isinstance(target_dbs, list):
            self.target_dbs = target_dbs
        else:
            self.target_dbs = Casting.str_to_list(target_dbs)

        message = Messenger.TERMINATOR_VARS.format(
            server=self.connecter.server, user=self.connecter.user,
            port=self.connecter.port, target_all=self.target_all,
            target_user=target_user, target_dbs=self.target_dbs)
        self.logger.debug(Messenger.TERMINATOR_VARS_INTRO)
        self.logger.debug(message)