Example #1
    def __init__(self, logfile, prefs, lock_file,
                 ignore_offset=0, first_time=0,
                 noemail=0, daemon=0):
        self.__denied_hosts = {}
        self.__prefs = prefs
        self.__lock_file = lock_file
        self.__first_time = first_time
        self.__noemail = noemail
        self.__report = Report(prefs.get("HOSTNAME_LOOKUP"), is_true(prefs['SYSLOG_REPORT']))
        self.__daemon = daemon
        self.__sync_server = prefs.get('SYNC_SERVER')
        self.__sync_upload = is_true(prefs.get("SYNC_UPLOAD"))
        self.__sync_download = is_true(prefs.get("SYNC_DOWNLOAD"))


        r = Restricted(prefs)
        self.__restricted = r.get_restricted()
        info("restricted: %s", self.__restricted)
        self.init_regex()
        
        try:
            self.file_tracker = FileTracker(self.__prefs.get('WORK_DIR'),
                                            logfile)
        except Exception as e:
            self.__lock_file.remove()
            die("Can't read: %s" % logfile, e)
Example #3
class ConnectionsGenerator():
    CONNECTIONS_FILENAME = 'connections.json'

    def __init__(self, path):
        self.path = path
        self._ros_connections = FileTracker(
            os.path.join(self.path, ConnectionsGenerator.CONNECTIONS_FILENAME))
        self._ros_connections.open()

    def __del__(self):
        if self._ros_connections.opened:
            self._ros_connections.close()

    @property
    def path(self):
        return self._path

    @path.setter
    def path(self, value):
        self._path = ConnectionsGenerator._clean_path(value)

    ## Takes a path to a directory and cleans it up for use
    #  @param path The path to clean up
    @staticmethod
    def _clean_path(path):
        path = os.path.expanduser(path)
        return os.path.abspath(path)

    ## Add a connection to the internal FileTracker object
    #  @param bag_path The path of the bag file to use as the key
    #  @param connections_info The information to use as the value
    def add_connection(self, bag_path, connections_info):
        # Use bag_path as the key in the persistent dictionary
        if not self._ros_connections.get(bag_path):
            self._ros_connections[bag_path] = {}

        self._ros_connections[bag_path].update(connections_info.topics)

    ## Update the ros connections information of a specific capture folder
    #  @param capture_folder The folder to update the connection information in
    def update_capture_ros_connections(self, capture_folder):
        clean_capture_folder = ConnectionsGenerator._clean_path(capture_folder)
        print('clean_capture_folder: {}'.format(clean_capture_folder))
        capture_files = glob.glob('{}/*.bag'.format(clean_capture_folder))
        if not capture_files:
            return

        pattern = '{}[/_]'.format(os.path.basename(clean_capture_folder))
        regex = re.compile(pattern, re.IGNORECASE)
        keys = map(
            lambda path: ConnectionsGenerator._get_file_key(path, regex),
            capture_files)

        connections_path = os.path.join(
            clean_capture_folder, ConnectionsGenerator.CONNECTIONS_FILENAME)
        if os.path.isfile(connections_path):
            with open(connections_path, 'r') as connection_file:
                ros_connections = json.load(connection_file)
        else:
            ros_connections = {}

        for new_key, old_key in keys:
            if old_key in self._ros_connections:
                ros_connections[new_key] = self._ros_connections[old_key]

        with open(connections_path, 'w') as connection_file:
            json.dump(ros_connections, connection_file)

    ## Remove unnecessary data from the persistent storage of the FileTracker
    #  @param directory The directory whose bag files should be kept
    def clean_up_persistant(self, directory):
        bags = glob.glob('{}/*.bag'.format(directory))
        keys_copy = self._ros_connections.copy().keys()
        for key in keys_copy:
            if key not in bags:
                del self._ros_connections[key]

    @staticmethod
    def _get_file_key(path, regex):
        return (path, regex.sub('', path))
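
A possible driving sketch for this class. It assumes FileTracker behaves like a persistent dictionary and that the connection metadata object exposes a .topics dict; the paths and topic names below are purely illustrative:

from types import SimpleNamespace

# Stand-in for the real connection metadata: only a .topics dict is assumed.
info = SimpleNamespace(topics={'/camera/image_raw': 'sensor_msgs/Image'})

gen = ConnectionsGenerator('~/captures')
gen.add_connection('/home/user/captures/run1/run1_0.bag', info)
gen.update_capture_ros_connections('~/captures/run1')
gen.clean_up_persistant('/home/user/captures/run1')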
Example #5
class SplitTracker(): # pylint: disable=too-many-instance-attributes

    ## The constructor for split_manager
    # @param bag_path The folder where bags are saved (a pathlib.Path)
    # @param shadow_size The duration of a shadow record in seconds
    # @param test_event When true, Split Yoinker (bag deletion) is disabled
    def __init__(self, bag_path, shadow_size, test_event):

        ## @var test_event
        # Split Yoinker (bag deletion) will be disabled when this value @n
        # returns true
        self.test_event = test_event

        ## @var bag_path
        # The full path where bags are saved
        self.bag_path = bag_path

        # Create the bag directory (e.g. /home/<user>/ddr_bags) on startup if
        # it does not exist
        self.bag_path.mkdir(exist_ok=True)

        ## @var shadow_size
        # Retrieve the duration of a shadow_record in seconds
        self.shadow_size = shadow_size

        ## @var shadow_count
        # Keeps track of how many shadows have been called
        self.shadow_count = self.init_count("shadow")

        ## @var manual_count
        # Keeps track of how many manual records have been called
        self.manual_count = self.init_count("manual")

        ## @var manual_name
        # The name of the current manual recording
        self.manual_name = ""

        ## @var event_key
        # Keeps track of all bag files to be saved with the format of: @n
        # {event: [bag1, bag2, bag3]}
        self.event_key = FileTracker(os.path.abspath(
            os.path.join(os.path.dirname(__file__), "eventKey.json")))

        ## @var file_key
        # Keeps track of all bag files in the directory with the format of: @n
        # {filename: [[shadowList], manual_record(string), @n
        # test_event(bool), file_count(int)]}
        self.file_key = FileTracker(os.path.abspath(
            os.path.join(os.path.dirname(__file__), "fileKey.json")))

        self.event_key.open()

        self.file_key.open()

        # Drop stale keys; Path.glob() yields Path objects, so compare by name
        for key in self.file_key.copy().keys():
            if key not in (path.name for path in
                           self.bag_path.glob('DDR_MAINBAGS*')):
                del self.file_key[key]

        ## @var file_list
        # A list of all files in the directory in the order they were created
        temp_list = [(key, value[3]) for key, value in self.file_key.items()]
        temp_list.sort(key=operator.itemgetter(1))
        self.file_list = [key for key, _ in temp_list]

        self.copy_queue = queue.Queue()
        for file in reversed(self.file_list):
            self.copy_queue.put(file)

        ## @var file_count
        # The number of files in the bag directory
        self.file_count = 0

        ## @var active_bag
        # Keep track of which bag files are currently active for saving purposes
        self.active_bag = ""

        ## @var manual_wait
        # Manual recordings that have been terminated are placed in this list @n
        # to ensure the next split gets saved to this recording
        self.manual_wait = []

        ## @var shadow_wait
        # Shadow recordings that have been triggered are placed in this list @n
        # to ensure the next split gets saved to this recording
        self.shadow_wait = []

        ## @var pub
        # Publishes error messages on the /ddr/error topic
        self.pub = rospy.Publisher('/ddr/error', String, queue_size=10)

        ## @var generator_lock
        #  Locks generator against concurrent calls
        self.generator_lock = threading.Lock()


    def __del__(self):
        if self.event_key.opened:
            self.event_key.close()
        if self.file_key.opened:
            self.file_key.close()


    ## Determines how many prior folders with the given base name exist. @n
    # @param folder_name The base folder name to match
    # @return The highest count found for folder_name (0 if none exist)
    def init_count(self, folder_name):
        count_list = [0]
        test = r'^(?P<name>' + re.escape(folder_name) + r')(_*)(?P<number>\d*)$'
        pattern = re.compile(test, re.IGNORECASE)
        for folder in self.bag_path.iterdir():
            if folder.is_dir():
                match = pattern.match(folder.name)
                if match:
                    if ''.join(match.groups()) == folder.name:
                        if match.group('number'):
                            count_list.append(int(match.group('number')))
                        else:
                            count_list.append(1)
        count_list.sort()
        return count_list[-1]
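
    # Illustrative behaviour (hypothetical folder layout): with directories
    # "shadow", "shadow_2" and "shadow_7" present in bag_path,
    # init_count("shadow") returns 7; with none of them present it returns 0.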


    ## Updates the file_key and event_key with a new shadow record
    def shadow_record(self):
        self.shadow_count += 1
        file_index = 0
        duration = 0
        shadow_name = "shadow" + str(self.shadow_count)
        for filename in reversed(self.file_list):
            duration += self.file_key[filename][4]
            if duration >= self.shadow_size:
                file_index = self.file_list.index(filename)
                break
        self.event_key.update({shadow_name : self.file_list[file_index:]})
        self.shadow_wait.append(shadow_name)
        for bag in self.file_list[file_index:]:
            self.file_key[bag][0].append(shadow_name)
            if self.file_key.opened:
                self.file_key.update_persistant()
        print(self.event_key)


    ## Updates the file_key and event_key with a new manual record
    # @param input_name The name of the manual recording
    def manual_record(self, input_name):
        if not self.manual_name:
            if input_name == "manual":
                self.manual_count += 1
                self.manual_name = input_name + str(self.manual_count)
            else:
                name_count = self.init_count(input_name)
                name_count += 1
                if name_count == 1:
                    self.manual_name = input_name
                else:
                    self.manual_name = input_name + "_" + str(name_count)
            self.event_key.update({self.manual_name: []})
        else:
            self.manual_wait.append(self.manual_name)
            self.manual_name = ""


    ## Uses the name of the bag file to determine what order they were @n
    # created in. Sorts the file_list based on this data
    def bag_sorter(self):
        bag_list = {}
        for bag in self.file_list:
            # grab the number before the first underscore
            num_name = int(bag.split("_")[0])
            if num_name not in bag_list:
                bag_list[num_name] = []
            # creates a dict w/ format {10: [10_idle_0.bag, 10_idle_1.bag...]}
            bag_list[num_name].append(bag)
        for key, bags in bag_list.items():
            sort_tool = []
            for bag in bags:
                name = bag[:-4] #strips .bag from filename
                if name.split("_")[-1].isdigit():
                    bag_num = name.split("_")[-1]
                else:
                    bag_num = 0
                sort_tool.append(int(bag_num))
            bag_list[key] = [x for _, x in sorted(zip(sort_tool, bags))]
        self.file_list = []
        for key in sorted(bag_list.keys()):
            self.file_list.extend(bag_list[key])
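
    # Illustrative ordering (hypothetical filenames): bag_sorter() turns
    # ["10_idle_1.bag", "2_idle.bag", "10_idle_0.bag"] into
    # ["2_idle.bag", "10_idle_0.bag", "10_idle_1.bag"]: grouped and sorted
    # numerically by the leading number, then by the trailing split index.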


    ## Returns the duration of a given file within the bag_path
    # @param filename The name of the bag file
    # @return The duration of the provided bag
    def get_duration(self, filename):
        proc = subprocess.Popen("rosbag info " + str(self.bag_path) +
                                "/" + filename +
                                " | grep duration | awk '{ print $2 }' |" \
                                " sed 's/[(s)]//g'",
                                shell=True, stdout=subprocess.PIPE)
        output = proc.stdout.read()
        try:
            duration = float(output)
        except ValueError:
            duration = 0
            errmsg = "Unable to retreive the duration of " \
                "{}. Try running rosbag reindex".format(filename)
            self.pub.publish(errmsg)
        return duration
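
    ## Sketch of a pipe-free alternative to get_duration (not part of the
    # original class): it runs `rosbag info` once and parses the duration
    # line in Python instead of chaining grep/awk/sed. Like the original, it
    # assumes a plain "duration: <seconds>s" line and falls back to 0.
    def _get_duration_py(self, filename):
        out = subprocess.run(
            ['rosbag', 'info', os.path.join(str(self.bag_path), filename)],
            stdout=subprocess.PIPE, check=False).stdout.decode()
        for line in out.splitlines():
            if line.strip().startswith('duration'):
                try:
                    # second whitespace field, stripped of '(', 's' and ')'
                    return float(line.split()[1].strip('(s)'))
                except (IndexError, ValueError):
                    break
        return 0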


    ## Updates file_key and event_key if there is no current manual_record
    # @param filename The name of the bag file
    # @param duration The duration of the bag, filename
    def _gen_update_keys(self, filename, duration):
        self.file_key.update({filename : [self.shadow_wait, self.manual_wait,
                                          self.test_event, self.file_count,
                                          duration]})
        for event in self.manual_wait:
            self.event_key[event].append(filename)
            if self.event_key.opened:
                self.event_key.update_persistant()
        self.manual_wait = []


    ## Updates file_key and event_key if there is a current manual_record
    # @param filename The name of the bag file
    # @param duration The duration of the bag, filename
    def _gen_update_keys_manual(self, filename, duration):
        if self.manual_name not in self.event_key:
            self.event_key.update({self.manual_name: [filename]})
        else:
            self.event_key[self.manual_name].append(filename)
            if self.event_key.opened:
                self.event_key.update_persistant()
        self.file_key.update({filename : [self.shadow_wait, [self.manual_name],
                                          self.test_event, self.file_count,
                                          duration]})


    ## Updates event_key if there are items in the shadow_wait list
    # @param filename The name of the bag file
    def _gen_update_keys_shadow(self, filename):
        for event in self.shadow_wait:
            if event not in self.event_key:
                self.event_key.update({event : [filename]})
            else:
                self.event_key[event].append(filename)
                if self.event_key.opened:
                    self.event_key.update_persistant()


    ## Generates and updates the file_key and file_list based on new bags @n
    # in the bag directory
    def generator(self):
        start_time = time.time()
        print("generator started")
        temp_count = 0
        for file in self.bag_path.iterdir():
            if file.is_file() and ".bag" in file.name:
                filename = file.name
                with self.generator_lock:
                    if ("DDR_MAINBAGS" not in filename and
                            filename not in self.file_key):
                        duration = self.get_duration(filename)
                        self.file_count += 1
                        temp_count += 1
                        # If manual is false, it updates just the file_key
                        # If manual is true, it updates the event_key and the
                        # file_key and adds the manual event to the
                        # manual list in the file_key
                        if not self.manual_name:
                            self._gen_update_keys(filename, duration)
                        else:
                            self._gen_update_keys_manual(filename, duration)

                        self._gen_update_keys_shadow(filename)

                        self.shadow_wait = []
                        self.file_list.append(filename)
                        self.copy_queue.put(filename)

                    # Lets manager know that there is currently an active bag
                    # recording. Used strictly for error handling
                    else:
                        self.active_bag = filename


        # Sorts the list of bag files if more than one bag file was created
        # since the last time generator was run, or if this is the first time
        # running ddr
        if temp_count > 1:
            self.bag_sorter()
            for index, bag in enumerate(self.file_list):
                self.file_key[bag][3] = index
            if self.file_key.opened:
                self.file_key.update_persistant()

        end_time = time.time()
        print(self.event_key)
        print("generator finished. Time: " + str(end_time - start_time))
Example #6
	def execute_search(self, logfile, rules_list, rtrctv):

		file_tracker = FileTracker(logfile, rtrctv)
		last_offset = file_tracker.get_offset()
		tmp_offset = last_offset
		#print("LAST OFFSET -> {}".format(last_offset))

		# there are new entries in the logfile
		if last_offset is not None:
			print("Processing log file [{}] from offset [{}]".format(logfile, last_offset))

			fp = None

			try:
				if logfile.startswith('/'):
					fp = open(logfile, "r")
				else:
					relpath = os.path.join(os.path.dirname(os.path.realpath(__file__)), logfile)
					fp = open(relpath, "r")

				fp.seek(last_offset)
			except IOError:
				print("Could not open log file [{}]; skipping this search.".format(logfile))
				return

			# Play the rules
			for rule in rules_list:
				regexyolo = re.compile(rule.getRegex())
				#print(service.getName() + ": " + str(rule.getName()))
				threshold_count = (rule.getThresholdCount()
					if rule.getThresholdCount() is not None
					else self._prefs.getGeneralPref('THRESHOLDCOUNT'))

				for line in fp:
					line = line.strip()
					r1 = regexyolo.search(line)
					if r1 is not None:
						#print "yes"
						try:
							ipaddr = r1.group(rule.getCriteriaToDistinguish())
							#print(ipaddr)
							# Skip the IP if it is whitelisted a priori in HOSTS_ALLOW
							if checkIPenabled(ipaddr) == 1:
								continue
							
							dictx = rule.getIpXoccurDict()
							ipcnt = dictx.get(ipaddr, 0) # 0 is the default value if key does not exist
							#print("ipaddr {} / {}".format(ipaddr, ipcnt))items
							rule.updateIpXoccur(ipaddr, ipcnt + 1)
						except Exception:
							# the rule's capture group may be missing for this line
							pass

				if not self.is_last(rule, rules_list):
					print("rules_list has more, seeking back to last_offset")
					fp.seek(last_offset)

				tempAction = rule.getAction()
				tempAntiaction = rule.getAntiaction()
				for element, cnt in rule.getIpXoccurDict().items():
					# IP is in HOSTS_DENY or THRESHOLD value has been exceeded
					if checkIPenabled(element) == 0 or cnt >= int(threshold_count):
						#print("Threshold count -> {}".format(threshold_count))
						#print("OccurenceCount after this read -> {}".format(cnt))
						
						# Replace CRITERIA_TO_DISTINGUISH group placeholder in Regex
						if tempAction is not None:
							replaced1 = re.sub(rule.getCriteriaToDistinguish(), str(element), tempAction)
							rule.setAction(replaced1)

							# check if such a tuple (rule, distingueur) is already in the database;
							# if it is, we do not want to apply ACTION and ANTIACTION again,
							# nor do we want to log this event to the events table, only to eventlog
							if self._prefs.getGeneralPref('DO_NOT_DUPLICATE').lower() == 'true':
								if not self.db.checkDistingueurDetectedForRule(element, rule.getRulename()):
									# Run the action, let it roll baby
									print("Executing ACTION -> {}".format(rule.getAction()))
									Utils.execute_action(rule.getAction())
								else:
									print("DO_NOT_DUPLICATE is ON -> skipping ACTION")
							else:
								# Run the action, let it roll baby
								print("Executing ACTION -> {}".format(rule.getAction()))
								Utils.execute_action(rule.getAction())

						if tempAntiaction is not None:
							replaced2 = re.sub(rule.getCriteriaToDistinguish(), str(element), tempAntiaction)
							rule.setAntiaction(replaced2)

							#print(rule.getAntiaction())

						if self._prefs.getGeneralPref('DO_NOT_DUPLICATE').lower() == 'true':
							if not self.db.checkDistingueurDetectedForRule(element, rule.getRulename()):
								# Add event to DB
								if rule.getAntiaction() is not None:
									self.db.addEvent(rule.getNameOfBelongService(), rule.getRulename(), element, time.asctime(), 
										rule.getJailtime() if rule.getJailtime() != None else self._prefs.getGeneralPref('JAILTIME'), rule.getAntiaction())
							else:
								print("DO_NOT_DUPLICATE is ON -> skipping DB store to events.")

						self.db.addEventlog(rule.getNameOfBelongService(), rule.getRulename(), element, time.asctime())

						# We imposed a sanction, now we reset the counter
						rule.updateIpXoccur(element, 0)


				print("{}: IP X OCCUR DICT -> {}".format(rule.getRulename(), rule.getIpXoccurDict()))


			last_offset = fp.tell()
			#print(last_offset)
			fp.close()

			if last_offset != tmp_offset:
				file_tracker.save_offset(last_offset)
				tmp_offset = last_offset
			else:
				print("Log file size has not changed. Nothing to do.")