def test_move_files(self):
    """Testing function move_files."""
    # Initialize key variables
    source_filenames = {}
    target_filenames = {}

    # A nonexistent source directory must raise SystemExit
    invalid_path = "/tmp/%s.%s" % (self.random_string, self.random_string)
    with self.assertRaises(SystemExit):
        testimport.move_files(invalid_path, "/tmp")

    # A nonexistent destination directory must raise SystemExit
    invalid_path = "/tmp/%s.%s" % (self.random_string, self.random_string)
    with self.assertRaises(SystemExit):
        testimport.move_files("/tmp", invalid_path)

    # Create source and target directories for the valid-path test
    source_dir = "/tmp/%s.1" % (self.random_string)
    target_dir = "/tmp/%s.2" % (self.random_string)
    for directory in (source_dir, target_dir):
        if not os.path.exists(directory):
            os.makedirs(directory)

    # Create empty, randomly named files in the source directory
    for count in range(4):
        filename = "".join(
            random.choice(string.ascii_letters + string.digits)
            for _ in range(15))
        source_filenames[count] = "%s/%s" % (source_dir, filename)
        target_filenames[count] = "%s/%s" % (target_dir, filename)
        open(source_filenames[count], "a").close()

        # Verify the file was created
        self.assertEqual(os.path.isfile(source_filenames[count]), True)

    # Move the files from source to target
    testimport.move_files(source_dir, target_dir)

    # Files must no longer exist in source_dir
    for filename in source_filenames.values():
        self.assertEqual(os.path.isfile(filename), False)

    # Files must now exist in target_dir
    for filename in target_filenames.values():
        self.assertEqual(os.path.isfile(filename), True)

    # Clean up both directories
    shutil.rmtree(source_dir)
    shutil.rmtree(target_dir)
def make(config, verbose=False):
    """Process 'pagemaker' CLI option.

    Args:
        config: Configuration object
        verbose: Verbose output if True

    Returns:
        None

    """
    # Initialize key variables
    threads_in_pool = 10
    device_file_found = False

    # Create a scratch directory; perm_dir is the final web destination
    perm_dir = config.web_directory()
    temp_dir = tempfile.mkdtemp()

    # Delete all files in temporary directory
    jm_general.delete_files(temp_dir)

    # Spawn a pool of daemon threads, each consuming from the shared queue
    for _ in range(threads_in_pool):
        update_thread = PageMaker(THREAD_QUEUE)
        update_thread.daemon = True
        update_thread.start()

    # Queue one work item per host
    for host in config.hosts():
        # Skip if device file not found
        if os.path.isfile(config.snmp_device_file(host)) is False:
            log_message = (
                'No YAML device file for host %s found in %s. '
                'Run toolbox.py with the "poll" option first.'
                '') % (host, config.snmp_directory())
            log.log2quiet(1018, log_message)
            continue
        device_file_found = True

        # Initialize the dict on every iteration to prevent data corruption
        # across work items.
        data_dict = {}
        data_dict['host'] = host
        data_dict['config'] = config
        data_dict['verbose'] = verbose
        data_dict['temp_dir'] = temp_dir
        THREAD_QUEUE.put(data_dict)

    # Only publish results when at least one device file was processed
    if device_file_found is True:
        # Wait on the queue until everything has been processed
        THREAD_QUEUE.join()

        # PYTHON BUG. Join can occur while threads are still shutting down.
        # This can create spurious "Exception in thread (most likely raised
        # during interpreter shutdown)" errors. The "time.sleep(1)" adds a
        # delay to make sure things really terminate properly. This seems to
        # be an issue on virtual machines in Dev only.
        time.sleep(1)

        # Create index file
        write_file = ('%s/index.html') % (temp_dir)
        index_html = _index_html(config)
        with open(write_file, 'w') as file_handle:
            file_handle.write(index_html)

        # Cleanup, move temporary files to clean permanent directory.
        if os.path.isdir(perm_dir):
            jm_general.delete_files(perm_dir)
        else:
            os.makedirs(perm_dir, 0o755)
        jm_general.move_files(temp_dir, perm_dir)

    # Bug fix: always remove the mkdtemp() directory. Previously the cleanup
    # only ran when a device file was found, leaking temp_dir otherwise.
    os.rmdir(temp_dir)
def snmp(config, verbose=False):
    """Process 'poll' CLI option.

    Args:
        config: Configuration object
        verbose: Verbose output if True

    Returns:
        None

    """
    # Initialize key variables
    threads_in_pool = 10

    # perm_dir is the final destination; work happens in a scratch directory
    perm_dir = config.snmp_directory()
    temp_dir = tempfile.mkdtemp()

    # Delete all files in temporary directory
    jm_general.delete_files(temp_dir)

    # Spawn a pool of daemon threads, each consuming from the shared queue
    for _ in range(threads_in_pool):
        update_thread = PollAllSNMP(THREAD_QUEUE)
        update_thread.daemon = True
        update_thread.start()

    # Queue one work item per host. A fresh dict is built on every
    # iteration to prevent data corruption between work items.
    for host in config.hosts():
        data_dict = {
            'host': host,
            'config': config,
            'verbose': verbose,
            'temp_dir': temp_dir,
        }
        THREAD_QUEUE.put(data_dict)

    # Wait on the queue until everything has been processed
    THREAD_QUEUE.join()

    # PYTHON BUG. Join can occur while threads are still shutting down.
    # This can create spurious "Exception in thread (most likely raised
    # during interpreter shutdown)" errors. The "time.sleep(1)" adds a
    # delay to make sure things really terminate properly. This seems to
    # be an issue on virtual machines in Dev only.
    time.sleep(1)

    # Cleanup, move temporary files to clean permanent directory.
    if os.path.isdir(perm_dir):
        jm_general.delete_files(perm_dir)
    else:
        os.makedirs(perm_dir, 0o755)
    jm_general.move_files(temp_dir, perm_dir)

    # Remove the now-empty scratch directory
    os.rmdir(temp_dir)
def make(config, verbose=False):
    """Process 'pagemaker' CLI option.

    Args:
        config: Configuration object
        verbose: Verbose output if True

    Returns:
        None

    """
    # Initialize key variables
    threads_in_pool = 10
    device_file_found = False

    # Create a scratch directory; perm_dir is the final web destination
    perm_dir = config.web_directory()
    temp_dir = tempfile.mkdtemp()

    # Delete all files in temporary directory
    jm_general.delete_files(temp_dir)

    # Spawn a pool of daemon threads, each consuming from the shared queue
    for _ in range(threads_in_pool):
        update_thread = PageMaker(THREAD_QUEUE)
        update_thread.daemon = True
        update_thread.start()

    # Queue one work item per host
    for host in config.hosts():
        # Skip if device file not found
        if os.path.isfile(config.snmp_device_file(host)) is False:
            log_message = ('No YAML device file for host %s found in %s. '
                           'Run toolbox.py with the "poll" option first.'
                           '') % (host, config.snmp_directory())
            jm_general.logit(1018, log_message, False)
            continue
        device_file_found = True

        # Initialize the dict on every iteration to prevent data corruption
        # across work items.
        data_dict = {}
        data_dict['host'] = host
        data_dict['config'] = config
        data_dict['verbose'] = verbose
        data_dict['temp_dir'] = temp_dir
        THREAD_QUEUE.put(data_dict)

    # Only publish results when at least one device file was processed
    if device_file_found is True:
        # Wait on the queue until everything has been processed
        THREAD_QUEUE.join()

        # PYTHON BUG. Join can occur while threads are still shutting down.
        # This can create spurious "Exception in thread (most likely raised
        # during interpreter shutdown)" errors. The "time.sleep(1)" adds a
        # delay to make sure things really terminate properly. This seems to
        # be an issue on virtual machines in Dev only.
        time.sleep(1)

        # Create index file
        write_file = ('%s/index.html') % (temp_dir)
        index_html = _index_html(config)
        with open(write_file, 'w') as file_handle:
            file_handle.write(index_html)

        # Cleanup, move temporary files to clean permanent directory.
        if os.path.isdir(perm_dir):
            jm_general.delete_files(perm_dir)
        else:
            os.makedirs(perm_dir, 0o755)
        jm_general.move_files(temp_dir, perm_dir)

    # Bug fix: always remove the mkdtemp() directory. Previously the cleanup
    # only ran when a device file was found, leaking temp_dir otherwise.
    os.rmdir(temp_dir)
def test_move_files(self):
    """Testing function move_files."""
    # Initialize key variables
    source_filenames = {}
    target_filenames = {}

    # move_files must exit when the source directory does not exist
    invalid_path = '/tmp/%s.%s' % (self.random_string, self.random_string)
    with self.assertRaises(SystemExit):
        testimport.move_files(invalid_path, '/tmp')

    # move_files must exit when the destination directory does not exist
    invalid_path = '/tmp/%s.%s' % (self.random_string, self.random_string)
    with self.assertRaises(SystemExit):
        testimport.move_files('/tmp', invalid_path)

    # Build a source directory for the happy-path test
    source_dir = '/tmp/%s.1' % (self.random_string)
    if os.path.exists(source_dir) is False:
        os.makedirs(source_dir)

    # Build a target directory for the happy-path test
    target_dir = '/tmp/%s.2' % (self.random_string)
    if os.path.exists(target_dir) is False:
        os.makedirs(target_dir)

    # Populate the source directory with empty, randomly named files
    charset = string.ascii_letters + string.digits
    for count in range(0, 4):
        random_name = ''.join(
            random.choice(charset) for _ in range(15))
        src = '%s/%s' % (source_dir, random_name)
        dst = '%s/%s' % (target_dir, random_name)
        source_filenames[count] = src
        target_filenames[count] = dst
        with open(src, 'a'):
            pass

        # Confirm the file exists before moving
        self.assertEqual(os.path.isfile(src), True)

    # Perform the move
    testimport.move_files(source_dir, target_dir)

    # Source directory must be empty of the moved files
    for filename in source_filenames.values():
        self.assertEqual(os.path.isfile(filename), False)

    # Target directory must now contain the moved files
    for filename in target_filenames.values():
        self.assertEqual(os.path.isfile(filename), True)

    # Remove both test directories
    shutil.rmtree(source_dir)
    shutil.rmtree(target_dir)