def __write_log_to_redis_and_logging_framework(self,
                                               event,
                                               write_to_redis_event_summary=False):
    # Log the event to console/syslog and persist it in Redis.
    logging_to_console_and_syslog(event)
    BriefCamParser.redis_instance.write_an_event_in_redis_db(event)
    if write_to_redis_event_summary:
        # The same event is written a second time; presumably this is meant
        # to also record it in a separate event-summary log.
        BriefCamParser.redis_instance.write_an_event_in_redis_db(event)
Example #2
 def create_local_consumer2(self):
     c = None
     redis_instance = RedisInterface(threading.current_thread().getName())
     conf = {
         'bootstrap.servers': "localhost:9092",
         'group.id': "video-file-name",
         'session.timeout.ms': 6000,
         'auto.offset.reset': 'earliest'
     }
     # Keep consuming until every enqueued job has been dequeued.
     while redis_instance.get_current_enqueue_count() != \
             redis_instance.get_current_dequeue_count():
         if not c:
             c = Consumer(conf)
             c.subscribe(["video-file-name"], on_assign=print_assignment)
         msg = c.poll(timeout=1.0)
         if msg is None or msg.error():
             continue
         logging_to_console_and_syslog(
             '%% %s [%d] at offset %d with key %s:\n' %
             (msg.topic(), msg.partition(), msg.offset(), str(msg.key())))
         logging_to_console_and_syslog("msg.value()={}".format(msg.value()))
         redis_instance.increment_dequeue_count()
         # The consumer is closed and recreated for the next message,
         # presumably so that each job is fetched on a fresh connection.
         c.close()
         c = None
         time.sleep(5)
     # Close the consumer if the loop exits while a connection is still open.
     if c:
         c.close()
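
For comparison, here is a minimal confluent-kafka poll loop with the conventional try/finally clean-up; the broker address and the video-file-name topic come from the configuration above, everything else is an illustrative sketch rather than the original implementation.

from confluent_kafka import Consumer

# Minimal sketch: consume until interrupted, then close the consumer exactly once.
consumer = Consumer({
    'bootstrap.servers': 'localhost:9092',
    'group.id': 'video-file-name',
    'auto.offset.reset': 'earliest'
})
consumer.subscribe(['video-file-name'])
try:
    while True:
        msg = consumer.poll(timeout=1.0)
        if msg is None or msg.error():
            continue
        print(msg.topic(), msg.partition(), msg.offset(), msg.value())
except KeyboardInterrupt:
    pass
finally:
    consumer.close()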
    def process_job(self, file_name):
        logging_to_console_and_syslog("**********Processing Briefcam Job {} ***********"
                                      .format(file_name))
        job_done = False
        while not job_done:
            try:
                if not self.browser_alive():
                    self.__write_log_to_redis_and_logging_framework(
                        "Browser is not alive. Reopening the browser.", True)
                    self.clean_up()

                self.__search_and_leftclick_case(file_name)

                if not self.__add_video("{}/{}".format(self.video_file_path,
                                                       file_name)):
                    self.__write_log_to_redis_and_logging_framework(
                        "Add video failed. Closing and reopening the browser.", True)
                    self.clean_up()
                    continue
                self.__make_sure_video_is_added_successfully()
                self.__delete_video_clip_from_shared_volume(file_name)
                self.go_to_main_screen()

                job_done = True
            except BriefCamServerException:
                self.clean_up()
Example #4
    def __init__(self,
                 docker_tag=None,
                 image_name=None,
                 dockerfile_directory_name=None):

        if docker_tag:
            self.docker_tag = docker_tag
        else:
            self.docker_tag = "ssriram1978"

        if image_name:
            self.image_name = image_name
        else:
            self.image_name = "unittest"

        self.docker_image_name = "{}/{}:latest".format(self.docker_tag, self.image_name)

        if dockerfile_directory_name:
            self.dirname = dockerfile_directory_name
        else:
            logging_to_console_and_syslog("Cannot proceed without knowing the path to dockerfile.")
            raise BaseException

        self.docker_instance = docker.from_env()
        self.container = None
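
A brief usage sketch, mirroring how the class is exercised in the create_docker_image test further down; the dockerfile directory path is a placeholder.

# Placeholder arguments; only the keyword names come from __init__ above.
docker_image = DockerAPIInterface(docker_tag="ssriram1978",
                                  image_name="unittest",
                                  dockerfile_directory_name="/path/to/dockerfile_dir")
docker_image.create_docker_container()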
Example #5
 def __init__(self):
     logging_to_console_and_syslog("Initializing RTSPDockerOrchestrator instance.")
     self.image_name = None
     self.environment = None
     self.bind_mount = None
     self.read_environment_variables()
     self.docker_instance = docker.from_env()
    def tearDown(self):
        for index in range(self.max_consumer_threads):
            self.consumer_threads[index].do_run = False
        time.sleep(5)
        self.delete_test_docker_container()
        for index in range(self.max_consumer_threads):
            logging_to_console_and_syslog("Trying to join thread {}."
                                          .format(self.consumer_threads[index].getName()))
            self.consumer_threads[index].join(1.0)
            if self.consumer_threads[index].is_alive():
                try:
                    logging_to_console_and_syslog("Trying to __stop thread {}."
                                                  .format(self.consumer_threads[index].getName()))
                    self.consumer_threads[index]._stop()

                except:
                    logging_to_console_and_syslog("Caught an exception while stopping thread {}"
                                                  .format(self.consumer_threads[index].getName()))
                    docker_image_name = self.dirname.split('/')[-2]
                    logging_to_console_and_syslog("Trying to stop docker image{}."
                                                  .format(docker_image_name))
                    try:
                        docker_api_interface_instance = DockerAPIInterface(image_name=docker_image_name,
                                                                           dockerfile_directory_name=self.dirname)
                        docker_api_interface_instance.stop_docker_container_by_name()
                    except:
                        logging_to_console_and_syslog("Caught an exception while stopping {}"
                                                      .format(docker_image_name))
                        print("Exception in user code:")
                        print("-" * 60)
                        traceback.print_exc(file=sys.stdout)
                        print("-" * 60)
    def process_file_name(self, filename):
        """
        Use the filename as the key in the dictionary video_file_name_size to fetch the inner dictionary value.
        Create a new dictionary value if the key is not found.
        Update the existing dictionary value if a key/value already exists in the dictionary video_file_name_size.
        :param filename:
        :return status:
        """
        status = False

        if not filename or not isinstance(filename, str):
            return status

        if filename.endswith('.mp4'):
            filesize = self.__getSize(filename)
            logging_to_console_and_syslog("Found a file name that ends with .mp4 {}, "
                                          "filesize={}"
                                          .format(filename, filesize))
            if filesize == 0:
                return status

            # video_file_name_size is presumably a defaultdict(dict), so a missing
            # file name returns an empty (falsy) dict instead of raising KeyError.
            dict_obj = self.video_file_name_size[filename]
            if not dict_obj:
                # No entry found for this file name yet:
                # create a new inner dictionary and store it in video_file_name_size.
                status = self.create_new_dict_obj(filename, filesize)
            else:
                status = self.update_existing_dict_obj(filename, filesize)

        return status
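
The key lookup above can only avoid a KeyError if video_file_name_size tolerates missing keys. A minimal sketch of that behaviour, assuming the attribute is a collections.defaultdict(dict); the helper logic and field names here are illustrative, not the original implementation.

import time
from collections import defaultdict

# defaultdict(dict) returns an empty dict for an unseen file name,
# which the caller treats as "no entry yet".
video_file_name_size = defaultdict(dict)

def record_file(filename, filesize):
    entry = video_file_name_size[filename]   # empty dict on first sighting
    if not entry:
        entry['size'] = filesize             # create a new entry
        entry['first_seen'] = time.time()
    else:
        entry['size'] = filesize             # update the existing entry
    return True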
 def read_environment_variables(self):
     """
     This method is used to read the environment variables
     defined in the OS.
     :return:
     """
     while self.redis_log_keyname is None or \
             self.total_job_dequeued_count_redis_name is None:
         time.sleep(2)
         logging_to_console_and_syslog("RedisInterface:{} "
                                       "Trying to read the "
                                       "environment variables...".format(
                                           self.thread_identifer))
         self.redis_log_keyname = os.getenv("redis_log_keyname_key",
                                            default=None)
         self.total_job_enqueued_count_redis_name = os.getenv(
             "total_job_enqueued_count_redis_name_key",
             default="total_job_enqueued_count")
         self.total_job_dequeued_count_redis_name = os.getenv(
             "total_job_dequeued_count_redis_name_key", default=None)
     logging_to_console_and_syslog(
         "RedisInterface:{} "
         "redis_log_keyname={}, "
         "total_job_enqueued_count_redis_name={}, "
         "total_job_dequeued_count_redis_name={}. ".format(
             self.thread_identifer, self.redis_log_keyname,
             self.total_job_enqueued_count_redis_name,
             self.total_job_dequeued_count_redis_name))
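
The loop above blocks until both required keys appear in the environment. A minimal sketch of what a caller would export before constructing RedisInterface; only the key names come from the code above, the values are placeholders.

import os

# Placeholder values; RedisClient's own connection settings are omitted here.
os.environ["redis_log_keyname_key"] = "briefcam_event_log"
os.environ["total_job_enqueued_count_redis_name_key"] = "total_job_enqueued_count"
os.environ["total_job_dequeued_count_redis_name_key"] = "total_job_dequeued_count"

redis_interface = RedisInterface(thread_identifer="main")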
 def validate_master_db_entries(self):
     """
     This is a book keeping function.
     This function does the following:
     1. It walks through each and every document in the couch DB and gets the document Identifier.
     2. For each document identifier, it checks if there is a corresponding container identifier in the database.
         a. For each docker container identifier in the database, it checks if there is a corresponding
            docker container that is alive.
             1. If the docker container is alive, then it just logs a message.
             2. If the docker container is not alive, then it deletes the docker identifier from database and
             start a new docker container with the passed in data as the container argument.
     """
     for key, data in self.couchdb_instance.yield_database_handle_entries():
         logging_to_console_and_syslog(
             "Found a key {} and data {} in the briefcam database.".format(
                 key, data))
         container_id_dict = self.couchdb_instance.fetch_data_from_id_to_container_entry(
             key)
         if container_id_dict:
             container_id = container_id_dict[key]
             logging_to_console_and_syslog(
                 "There is a valid container {} "
                 "for this id {} in id to container db.".format(
                     container_id, key))
             self.validate_and_restart_container_if_needed(
                 key, data, container_id)
         else:
             self.create_a_new_container_and_assign_a_job(key, data)
    def __init__(self,
                 docker_tag=None,
                 image_name=None,
                 dockerfile_path=None):
        if not dockerfile_path:
            logging_to_console_and_syslog("You need to specify the path to Dockerfile.")
            raise BaseException

        if image_name:
            self.image_name = image_name
        else:
            self.image_name = dockerfile_path.split('/')[-1]

        self.dirname = dockerfile_path
        self.docker_tag = docker_tag
        logging_to_console_and_syslog("docker_tag={},"
                                      "image_name={},"
                                      "dirname={}"
                                      .format(docker_tag,
                                              self.image_name,
                                              self.dirname))
        super().__init__(docker_tag,
                         self.image_name,
                         self.dirname)
        self.docker_image = None
 def create_producer_and_produce_jobs(self, msgq_type):
     self.producer_instance = ProducerConsumerAPI(
         is_producer=True,
         thread_identifier="Producer",
         type_of_messaging_queue=msgq_type)
     logging_to_console_and_syslog("Posting messages.")
     self.assertTrue(self.post_messages())
Example #12
    def __consumer_connect_to_broker(self):
        """
        This method tries to connect to the kafka broker.
        :return:
        """
        is_connected = self.consumer_instance is not None
        if self.consumer_instance is None:
            try:
                logging_to_console_and_syslog(
                    "Consumer:{}:Trying to connect to broker_name={}".format(
                        self.thread_identifier, self.broker_name))

                self.consumer_instance = KafkaConsumer(
                    bootstrap_servers=self.broker_name,
                    group_id="kafka-consumer")
                is_connected = True
            except Exception:
                logging_to_console_and_syslog(
                    "Consumer:{}:Exception in user code:".format(
                        self.thread_identifier))
                logging_to_console_and_syslog("-" * 60)
                traceback.print_exc(file=sys.stdout)
                logging_to_console_and_syslog("-" * 60)
                time.sleep(5)

        # Only report success when a connection is actually in place.
        if is_connected:
            logging_to_console_and_syslog("Consumer:{}:Consumer successfully "
                                          "connected to broker_name={}".format(
                                              self.thread_identifier,
                                              self.broker_name))
        return is_connected
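
The same connect-with-retry idea can be expressed as a standalone helper. A minimal sketch using kafka-python; the function name, broker address, and retry count are illustrative.

import time
from kafka import KafkaConsumer
from kafka.errors import NoBrokersAvailable

def connect_with_retries(broker="localhost:9092", group_id="kafka-consumer", retries=5):
    # Return a connected KafkaConsumer, or None if the broker stays unreachable.
    for _ in range(retries):
        try:
            return KafkaConsumer(bootstrap_servers=broker, group_id=group_id)
        except NoBrokersAvailable:
            time.sleep(5)
    return None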
    def create_test_delete_test(self, parser_type, docker_compose_yml_file):
        self.create_docker_container(docker_compose_yml_file)
        time.sleep(10)
        os.environ["data_parser_type_key"] = parser_type

        if parser_type == DataParserInterface.BriefCam:
            os.environ["case_name_key"] = "MEC-POC"
            os.environ["case_url_key"] = "http://mec-demo/synopsis/"
            os.environ["browser_name_key"] = "/opt/google/chrome/chrome"
            os.environ["browser_loc_key"] = "google-chrome"
            os.environ["login_username_key"] = "Brief"
            os.environ["login_password_key"] = "Cam"
            os.environ["image_directory"] = "image_cont"
            os.environ["max_retry_attempts_key"] = "8"
            os.environ["sleep_time_key"] = "1"
            os.environ["time_between_input_character_key"] = "0.1"
            os.environ["time_for_browser_to_open_key"] = "60"
            os.environ["total_job_done_count_redis_name_key"] = "total_job_done_count"
            os.environ["video_file_path_key"] = self.dirname

        self.data_parser_instance = DataParserInterface()
        logging_to_console_and_syslog("Validating Data Parser Instance to be not null.")
        self.assertIsNotNone(self.data_parser_instance)
        self.assertTrue(self.data_parser_instance.process_job(self.filename))
        self.delete_docker_container(docker_compose_yml_file)
Example #14
    def build(self, dockerfile_path):
        logging_to_console_and_syslog("Building docker image"
                                      " found in directory {}."
                                      .format(dockerfile_path))

        self.docker_instance = DockerBuildUTPublish(dockerfile_path=dockerfile_path)
        self.docker_instance.create_docker_container()
 def get_current_dequeue_count(self):
     count = self.redis_instance.read_key_value_from_redis_db(
         self.total_job_dequeued_count_redis_name)
     logging_to_console_and_syslog(
         "RedisInterface:{}."
         "{}={}".format(self.thread_identifer,
                        self.total_job_dequeued_count_redis_name, count))
     return count
 def __init__(self, thread_identifer=None):
     logging_to_console_and_syslog("Instantiating RedisInterface.")
     self.total_job_enqueued_count_redis_name = None
     self.total_job_dequeued_count_redis_name = None
     self.redis_log_keyname = None
     self.thread_identifer = thread_identifer
     self.read_environment_variables()
     self.redis_instance = RedisClient()
 def write_an_event_in_redis_db(self, event):
     logging_to_console_and_syslog("RedisInterface:{}."
                                   "Writing at key={}"
                                   " event={}".format(
                                       self.thread_identifer,
                                       self.redis_log_keyname, event))
     self.redis_instance.write_an_event_on_redis_db(event,
                                                    self.redis_log_keyname)
Example #18
 def load_environment_variables(self):
     while self.data_parser_type is None:
         time.sleep(1)
         self.data_parser_type = os.getenv("data_parser_type_key",
                                           default=None)
     logging_to_console_and_syslog(
         ("DataParserInterface: data_parser_type={}".format(
             self.data_parser_type)))
 def __init__(self):
     logging_to_console_and_syslog(
         "**********Initializing PyTorch Parser ***********")
     # .strip() removes the trailing newline so the values can be embedded in identifiers.
     self.hostname = os.popen("cat /etc/hostname").read().strip()
     self.cont_id = os.popen(
         "cat /proc/self/cgroup | head -n 1 | cut -d '/' -f3").read().strip()
     PyTorchParser.redis_instance = RedisInterface("BriefCam+{}".format(
         self.cont_id))
 def increment_dequeue_count(self):
     logging_to_console_and_syslog(
         "RedisInterface:{}."
         "Incrementing total_job_dequeued_count={}".format(
             self.thread_identifer,
             self.total_job_dequeued_count_redis_name))
     self.redis_instance.increment_key_in_redis_db(
         self.total_job_dequeued_count_redis_name)
Example #21
 def deploy(self):
     dirname = '/'.join(os.path.dirname(os.path.realpath(__file__)).split('/')[:-1])
     deployment_file = dirname + "/" + DockerBuildUTDeploy.deployment_file
     logging_to_console_and_syslog("Deploying {}."
                                   .format(deployment_file))
     docker_instance = DockerBuildUTPublish(dockerfile_path='/')
     docker_instance.deploy(deployment_file,
                            DockerBuildUTDeploy.stack_name)
Example #22
 def load_environment_variables(self):
     while self.video_file_path is None or \
           self.producer_consumer_type is None:
         time.sleep(1)
         self.video_file_path = os.getenv("video_file_path_key", default=None)
         self.producer_consumer_type = os.getenv("producer_consumer_queue_type_key", default=None)
     logging_to_console_and_syslog(("video_file_path={}".format(self.video_file_path)))
     logging_to_console_and_syslog(("producer_consumer_type={}".format(self.producer_consumer_type)))
Example #23
 def delivery_callback(err, msg):
     if err:
         logging_to_console_and_syslog('%% Message failed delivery: %s\n' %
                                       err)
     else:
         logging_to_console_and_syslog(
             '%% Message delivered to %s [%d] @ %s\n' %
             (msg.topic(), msg.partition(), str(msg.offset())))
 def __init__(self):
     event = "BriefCam Server click timeout occurred."
     logging_to_console_and_syslog(event)
     # The event is written twice and Esc is pressed twice, presumably to make
     # sure the record is persisted and any stacked dialogs are dismissed.
     BriefCamParser.redis_instance.write_an_event_on_redis_db(event)
     BriefCamParser.redis_instance.write_an_event_on_redis_db(event)
     pyautogui.press('esc')
     pyautogui.press('esc')
     raise BriefCamServerException
 def __tear_down_docker_stack(self):
     completedProcess = subprocess.run(["docker",
                                        "stack",
                                        "rm",
                                        TestDockerService.service_name],
                                       stdout=subprocess.PIPE)
     self.assertIsNotNone(completedProcess)
     logging_to_console_and_syslog(completedProcess.stdout.decode('utf8'))
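
Asserting only that the CompletedProcess object is not None does not confirm the command succeeded. A hedged variant that also checks the exit status; the stack name is a placeholder.

import subprocess

completed = subprocess.run(
    ["docker", "stack", "rm", "my_stack"],   # stack name is a placeholder
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE)
# A non-zero return code means `docker stack rm` failed.
assert completed.returncode == 0, completed.stderr.decode('utf8')
print(completed.stdout.decode('utf8'))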
    def create_rows_in_master_db(self):
        logging_to_console_and_syslog("Unit testing function add_entry_into_master_database()")
        self.document_id1 = self.couchdb_instance.add_entry_into_master_database(self.document1)
        self.assertIsNotNone(self.document_id1)

        logging_to_console_and_syslog("Unit testing function add_entry_into_master_database()")
        self.document_id2 = self.couchdb_instance.add_entry_into_master_database(self.document2)
        self.assertIsNotNone(self.document_id2)
 def test_run(self):
     logging_to_console_and_syslog("Validating couchdb_instance to be not null.")
     self.assertIsNotNone(self.couchdb_instance)
     self.create_rows_in_master_db()
     self.create_rows_in_id_to_container_db()
     self.validate_data_from_master_db()
     self.fetch_rows_from_master_db()
     self.fetch_rows_from_id_to_container_db()
Example #28
 def __create_docker_stack(self):
     completedProcess = subprocess.run(
         ["docker", "stack", "deploy", "-c", "docker-compose.yml",
          TestAutoScaler.service_name],
         stdout=subprocess.PIPE)
     self.assertIsNotNone(completedProcess)
     logging_to_console_and_syslog(completedProcess.stdout.decode('utf8'))
     time.sleep(30)
    def create_docker_image(self):
        logging_to_console_and_syslog("Creating DockerAPIInterface instance()")
        self.docker_image = DockerAPIInterface(docker_tag=self.docker_tag,
                                               image_name=self.image_name,
                                               dockerfile_directory_name=self.dirname)

        logging_to_console_and_syslog("Invoking create_docker_container")

        self.docker_image.create_docker_container()
 def test_run(self):
     #logging_to_console_and_syslog("Validating **************** KAFKA MSGQ *****************.")
     #self.perform_enqueue_dequeue(ProducerConsumerAPI.kafkaMsgQType)
     #self.cleanup_test_environment()
     #logging_to_console_and_syslog("Validating **************** KAFKA MSGQ + SUBSCRIPTION *****************.")
     self.perform_enqueue_dequeue(ProducerConsumerAPI.kafkaMsgQType,
                                  perform_subscription=True)
     logging_to_console_and_syslog(
         "Validating **************** RABBIT MSGQ  *****************.")