def _rejectError(self, syscall, path, flags, error):
        """Print a warning that a syscall failed and invalidate the Event."""
        # Don't log failed syscalls, but inform the reader
        if debugEnabled():
            print("Info: system call %s(%s, %d) from Application %s:%d "
                  "failed with error %d, and will not be logged." %
                  (syscall, path, flags, self.actor.desktopid, self.actor.pid,
                   error),
                  file=sys.stderr)

        self.markInvalid()
    def purgeFDReferences(self):
        """Remove FD references that weren't solved yet from the FileStore."""
        count = 0
        dels = set()
        delInodes = set()
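        # Names starting with "@fdref" are placeholder entries for file
        # descriptors that were never resolved to an actual path.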
        for name in self.nameStore:
            if name.startswith("@fdref"):
                dels.add(name)
                for f in self.nameStore.get(name):
                    delInodes.add(f.inode)

        for name in dels:
            del self.nameStore[name]

        for inode in delInodes:
            count += 1
            del self.inodeStore[inode]

        if debugEnabled():
            print("Info: purged %d unresolved file descriptor references." %
                  count)
    def simulateAllEvents(self):
        """Simulate all events to instantiate Files in the FileStore."""
        if not self._sorted:
            self.sort()

        fileStore = FileStore.get()
        fileFactory = FileFactory.get()

        # First, parse for Zeitgeist designation events in order to instantiate
        # the designation cache.
        if debugEnabled():
            print("Instantiating Zeitgeist acts of designation...")
        for event in self.store:
            if event.isInvalid():
                continue

            if event.getSource() == EventSource.zeitgeist:
                # The event grants 5 minutes of designation both ways.
                self.desigcache.addItem(event,
                                        start=event.time - 5*60*1000,
                                        duration=10*60*1000)
            # The current Event is an act of designation for future Events
            # related to the same Application and Files. Save it.
            elif event.getFileFlags() & EventFileFlags.designationcache:
                self.desigcache.addItem(event)

        if debugEnabled():
            print("Done. Starting simulation...")
        # Then, dispatch each event to the appropriate handler
        for event in self.store:
            if event.isInvalid():
                continue

            # Designation events are already processed.
            if event.getFileFlags() & EventFileFlags.designationcache:
                continue

            if debugEnabled():
                print("Simulating Event %s from %s at time %s." % (
                    event.evflags, event.actor.uid(), time2Str(event.time)))

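            # Replay the actor's file-descriptor bookkeeping (openFD/closeFD),
            # presumably so that fd-based references can later be resolved.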
            for data in event.data_app:
                if data[2] == FD_OPEN:
                    event.actor.openFD(data[0], data[1], event.time)
                elif data[2] == FD_CLOSE:
                    event.actor.closeFD(data[0], event.time)

            if event.getFileFlags() & EventFileFlags.destroy:
                res = self.simulateDestroy(event, fileFactory, fileStore)

            elif event.getFileFlags() & EventFileFlags.create:
                res = self.simulateCreate(event, fileFactory, fileStore)

            elif event.getFileFlags() & EventFileFlags.overwrite:
                res = self.simulateCreate(event, fileFactory, fileStore)

                # We received a list of files that were created
                if isinstance(res, list):
                    pass
                # We received instructions to hot-patch the event list
                else:
                    raise NotImplementedError  # TODO

            elif event.getFileFlags() & (EventFileFlags.read |
                                         EventFileFlags.write):
                self.simulateAccess(event, fileFactory, fileStore)

            # Keep me last, or use elif guards: I WILL change your event flags!
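            # (simulateCopy's helpers temporarily rewrite event.evflags and
            # restore them before returning.)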
            elif event.getFileFlags() & EventFileFlags.move or \
                    event.getFileFlags() & EventFileFlags.copy:
                res = self.simulateCopy(event,
                                        fileFactory,
                                        fileStore,
                                        keepOld=event.getFileFlags() &
                                        EventFileFlags.copy)

        # Filter out invalid file descriptor references before computing stats.
        fileStore.purgeFDReferences()
    def simulateCopy(self,
                     event: Event,
                     fileFactory: FileFactory,
                     fileStore: FileStore,
                     keepOld: bool=True):
        """Simulate a file copy or move Event, based on :keepOld:."""
        newFiles = []
        ctype = 'copy' if keepOld else 'move'
        baseFlags = event.evflags

        def _delFile(event: Event, f, read: bool=False):
            event.evflags = (baseFlags |
                             EventFileFlags.write |
                             EventFileFlags.destroy)
            if read:
                event.evflags |= EventFileFlags.read
            res = self.desigcache.checkForDesignation(event, [f])
            fileFactory.deleteFile(f, event.actor, event.time, res[0][1])
            event.evflags = baseFlags

        def _addRead(event: Event, f):
            event.evflags = (baseFlags | EventFileFlags.read)
            res = self.desigcache.checkForDesignation(event, [f])
            f.addAccess(actor=event.actor,
                        flags=res[0][1],
                        time=event.time)
            event.evflags = baseFlags

        def _createCopy(event: Event, oldFile, newPath):
            # Create a file on the new path which is identical to the old File.
            event.evflags = (baseFlags |
                             EventFileFlags.write |
                             EventFileFlags.create |
                             EventFileFlags.overwrite)
            newFile = self.__doCreateFile(newPath,
                                          oldFile.ftype,
                                          event,
                                          fileFactory,
                                          fileStore)
            event.evflags = baseFlags

            # Update the files' links
            oldFile.addFollower(newFile.inode, event.time, ctype)
            newFile.setPredecessor(oldFile.inode, event.time, ctype)
            fileStore.updateFile(oldFile)
            fileStore.updateFile(newFile)

            return newFile

        # Get each file, set its starting time and type, and update the store
        subjects = list((old.path, new.path) for (old, new) in event.getData())
        for (old, new) in subjects:

            # Not legal. 'a' and 'a' are the same file.
            if old == new:
                # TODO DBG?
                continue

            # Get the old file. It must exist, or the simulation is invalid.
            oldFile = fileFactory.getFile(old, event.time)
            if not oldFile:
                raise ValueError("Attempting to move/copy from a file that "
                                 "does not exist: %s at time %d" % (
                                  old,
                                  event.time))

            if debugEnabled():
                print("Info: %s '%s' to '%s' at time %s, by actor %s." % (
                      ctype,
                      old,
                      new,
                      time2Str(event.time),
                      event.actor.uid()))

            # Check if the target is a directory, or a regular file. If it does
            # not exist, it's a regular file.
            newFile = fileFactory.getFileIfExists(new, event.time)
            newIsFolder = newFile and newFile.isFolder()

            # If the target is a directory, we will copy/move inside it.
            sourceIsFolder = oldFile.isFolder()
            targetPath = new if not newIsFolder else \
                new + "/" + oldFile.getFileName()
            # If mv/cp'ing a folder to an existing path, restrictions apply.
            if sourceIsFolder:
                # oldFile is a/, newFile is b/, targetFile is b/a/
                targetFile = fileFactory.getFileIfExists(targetPath,
                                                         event.time)
                targetIsFolder = targetFile and targetFile.isFolder()

                # cannot overwrite non-directory 'b' with directory 'a'
                # cannot overwrite non-directory 'b/a' with directory 'a'
                if targetFile and not targetIsFolder:
                    # TODO DBG?
                    continue

                # mv: cannot move 'a' to 'b/a': Directory not empty
                elif targetIsFolder and ctype == "move":
                    children = fileStore.getChildren(targetFile, event.time)
                    if len(children) == 0:
                        _delFile(event, targetFile)
                    else:
                        # TODO DBG?
                        continue

                # mv or cp would make the target directory here. Our code later
                # on in this function will create a copy of the old file, which
                # means the new folder will be made, with a creation access
                # from the actor that performs the copy event we are analysing.
                elif not targetFile and ctype == "copy":
                    pass

            # When the source is a file, just delete the new target path.
            else:
                targetFile = fileFactory.getFileIfExists(targetPath,
                                                         event.time)
                if targetFile:
                    _delFile(event, targetFile)

            # Collect the children of the source folder.
            children = fileStore.getChildren(oldFile, event.time) if \
                sourceIsFolder else []

            # Make the target file, and link the old and target files.
            newFile = _createCopy(event, oldFile, targetPath)
            if ctype == "move":
                _delFile(event, oldFile, read=True)
            else:
                _addRead(event, oldFile)

            # Move or copy the children.
            for child in children:
                childRelPath = child.path[len(oldFile.path)+1:]
                childTargetPath = targetPath + "/" + childRelPath

                # Let the Python purists hang me for that. Iterators will catch
                # appended elements on a mutable list and this is easier to
                # read than other solutions that don't modify the list while it
                # is iterated over.
                subjects.append((child.path, childTargetPath))

            newFiles.append(newFile)

        return newFiles
    def loadDb(self,
               store: ApplicationStore = None,
               checkInitialised: bool = False):
        """Load the PreloadLogger database.

        Go through the directory and create all the relevant app instances and
        events. Can be made to insert all found apps into an ApplicationStore,
        or to exit if some Application instances are not properly initialised.
        """

        count = 0  # Counter of fetched files, for stats
        actors = set()  # Apps that logged anything at all
        empties = 0  # Matching files without content (logger crash)
        invalids = 0  # Files with corrupted content (logger crash)
        nosyscalls = []  # Logs with zero syscalls logged (not a bug)
        nosyscallactors = set()  # Apps that logged zero syscalls
        instanceCount = 0  # Count of distinct app instances in the dataset
        hasErrors = False  # Whether some uninitialised apps were found
        invalidApps = set()  # List of desktop IDs that could not be init'd
        eventCount = 0

        # List all log files that match the PreloadLogger syntax
        for file in os.listdir(self.path):
            # Ignore files that don't match
            if not PreloadLoggerLoader.pattern.match(file):
                continue

            count += 1

            # Process log files that match the PreloadLogger name pattern
            try:
                f = open(self.path + "/" + file, 'rb')
            except (IOError) as e:
                print("Error: could not open file %s: %s" % (file, str(e)),
                      file=sys.stderr)
            else:
                with f:
                    if os.fstat(f.fileno()).st_size == 0:
                        print("Info: file '%s' is empty. Skipping." % file)
                        continue

                    # Parse the first line to get the identity of the app,
                    # but sometimes the header ends up on the second line
                    # in some logs... So, parse until we find a match, and
                    # remember the line index of the header
                    idx = 0
                    headerLocation = 0
                    result = None
                    for binary in f:
                        try:
                            line = binary.decode('utf-8')
                        except (UnicodeDecodeError) as e:
                            print("Error: %s has a non utf-8 line: %s " %
                                  (file, str(e)),
                                  file=sys.stderr)
                            idx += 1
                            continue
                        result = PreloadLoggerLoader.header.match(line)
                        if result:
                            headerLocation = idx
                            break
                        idx += 1

                    # Files with a missing or corrupted header are invalid
                    if result is None:
                        print("%s is missing a header" % file, file=sys.stderr)
                        invalids += 1
                        continue

                    # Parse the header line, make sure it has the right length.
                    g = result.groups()
                    if (len(g) != 3):
                        print("%s has wrong group count: " % file,
                              result.group(),
                              file=sys.stderr)
                        invalids += 1
                        continue

                    # Filter interpreters, and rewrite them to get the identity
                    # of the app they launched instead.
                    items = space.split(g[2])
                    interpreterid = None

                    # Python
                    if (pyre.match(g[0])):
                        interpreterid = g[0]
                        g = self.parsePython(g, items)
                        # print("PYTHON APP: %s" % g[2])

                    # Bash
                    if (bashre.match(g[0])):
                        interpreterid = g[0]
                        g = self.parseBash(g, items)
                        # print("BASH APP: %s" % g[2])

                    # Java
                    if (javare.match(g[0])):
                        interpreterid = g[0]
                        g = self.parseJava(g, items)
                        # print("JAVA APP: %s" % g[2])
                    # Perl
                    if (perlre.match(g[0])):
                        interpreterid = g[0]
                        g = self.parsePerl(g, items)
                        # print("PERL APP: %s" % g[2])

                    # Mono
                    if (monore.match(g[0])):
                        interpreterid = g[0]
                        g = self.parseMono(g, items)
                        # print("MONO APP: %s" % g[2])

                    # PHP
                    if (phpre.match(g[0])):
                        interpreterid = g[0]
                        g = self.parsePHP(g, items)
                        # print("PHP APP: %s" % g[2])

                    # Get first and last event to calculate the timestamps.
                    tstart = float("inf")
                    tend = 0

                    skipCache = None
                    lineIdx = 0
                    f.seek(0, 0)
                    for binary in f:
                        # Ignore the header.
                        if lineIdx == headerLocation:
                            lineIdx += 1
                            skipCache = None
                            continue

                        # Decode line.
                        try:
                            line = binary.decode('utf-8')
                        except (UnicodeDecodeError) as e:
                            print("Error: %s has a non utf-8 line: %s " %
                                  (file, str(e)),
                                  file=sys.stderr)
                            lineIdx += 1
                            skipCache = None
                            continue

                        # Previous line did not end and was skipped, merge it.
                        if skipCache:
                            line = skipCache + line
                            skipCache = None

                        # Line continues...
                        if line.endswith('\\\n'):
                            skipCache = line
                            lineIdx += 1
                            continue

                        line = line.rstrip("\n").lstrip(" ")

                        # Line is a parameter to the last system call logged
                        if line.startswith(' '):
                            lineIdx += 1
                            continue

                        # Check that line is a syntactically valid system call
                        result = PreloadLoggerLoader.syscall.match(line)
                        if result is None:
                            if debugEnabled():
                                print("%s has a corrupted line (match): %s" %
                                      (file, line),
                                      file=sys.stderr)
                            lineIdx += 1
                            continue

                        # Update the timestamp (convert to ZG millisec format)
                        h = result.groups()
                        tstart = int(h[0]) * 1000
                        break

                    # TODO, first non-header line + tail code.
                    lastLine = tail(f)
                    result = None
                    if lastLine:
                        result = PreloadLoggerLoader.syscall.match(lastLine)

                    if result is None:
                        if debugEnabled():
                            print("%s's last line is corrupted: %s" %
                                  (file, lastLine),
                                  file=sys.stderr)
                    else:
                        # Update the timestamp (convert to ZG millisec format)
                        h = result.groups()
                        tend = int(h[0]) * 1000

                    # Check if the timestamps have been set
                    if tend == 0:
                        nosyscalls.append(g)
                        nosyscallactors.add(g[0])
                        continue

                    # Sometimes, short logs have event ordering problems... We
                    # can try to ignore these problems as all events are indi-
                    # vidually timestamped anyway.
                    if tstart > tend:
                        tend, tstart = tstart, tend

                    # TODO: process deletions and remove corresponding files

                    # Make the application
                    try:
                        app = Application(desktopid=g[0],
                                          pid=int(g[1]),
                                          tstart=tstart,
                                          tend=tend,
                                          interpreterid=interpreterid)
                        app.setCommandLine(g[2])
                    except (ValueError) as e:
                        print("MISSING: %s" % g[0], file=sys.stderr)
                        hasErrors = True
                        invalidApps.add(g[0])
                        continue

                    # Ignore study artefacts!
                    if app.isStudyApp():
                        continue

                    # Add command-line event
                    event = Event(actor=app, time=tstart, cmdlineStr=g[2])
                    app.addEvent(event)

                    # Add system call events
                    skipCache = None
                    lineIdx = 0
                    currentCall = None
                    prevTimestamp = 0
                    timeDelta = 0
                    f.seek(0, 0)
                    for binary in f:
                        # Ignore the header.
                        if lineIdx == headerLocation:
                            lineIdx += 1
                            skipCache = None
                            continue

                        # Decode line.
                        try:
                            line = binary.decode('utf-8')
                        except (UnicodeDecodeError) as e:
                            print("Error: %s has a non utf-8 line: %s " %
                                  (file, str(e)),
                                  file=sys.stderr)
                            lineIdx += 1
                            skipCache = None
                            continue

                        # Previous line did not end and was skipped, merge it.
                        if skipCache:
                            line = skipCache + line
                            skipCache = None

                        # Line continues...
                        if line.endswith('\\\n'):
                            skipCache = line
                            lineIdx += 1
                            continue

                        line = line[:-1]  # Remove ending "\n"

                        # Line is a parameter to the last system call logged
                        if line.startswith(' '):
                            if currentCall:
                                currentCall = (currentCall[0],
                                               currentCall[1] + '\n' + line)
                            elif debugEnabled():
                                print("%s has a corrupted line (no call): %s" %
                                      (file, line),
                                      file=sys.stderr)
                            lineIdx += 1
                            continue

                        # Check that line is a syntactically valid system call
                        result = PreloadLoggerLoader.syscall.match(line)
                        if result is None:
                            if debugEnabled():
                                print("%s has a corrupted line (match): %s" %
                                      (file, line),
                                      file=sys.stderr)
                            lineIdx += 1
                            continue

                        # Update the timestamp (convert to ZG millisec format)
                        h = result.groups()
                        timestamp = int(h[0]) * 1000

                        # Append the system call to our syscall list. Note that
                        # we do something odd with the timestamp: because PL
                        # only logs at second precision, a lot of system calls
                        # have the same timestamp, which causes the EventStore
                        # to sort them in the wrong order. So, every time we
                        # have a timestamp identical to the previous one, we
                        # increase a counter that sorts them. This works under
                        # the assumption that there are at most 1000 events per
                        # second.
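                        # E.g. three calls all logged at second 1200 become
                        # 1200000, 1200001 and 1200002 in the event list.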
                        if timestamp == prevTimestamp:
                            timeDelta += 1
                        else:
                            timeDelta = 0

                        # Process the last system call into an Event, and clear
                        # up the syscalls list to keep RAM free!
                        if currentCall:
                            event = Event(actor=app,
                                          time=currentCall[0],
                                          syscallStr=currentCall[1])
                            app.addEvent(event)
                            eventCount += 1

                        # Create the new syscalls list.
                        currentCall = (timestamp + timeDelta, h[1])
                        prevTimestamp = timestamp

                        lineIdx += 1

                    # Don't forget the last pending system call of the log.
                    if currentCall:
                        event = Event(actor=app,
                                      time=currentCall[0],
                                      syscallStr=currentCall[1])
                        app.addEvent(event)
                        eventCount += 1

                    # Add the found process id to our list of actors, using the
                    # app identity that was resolved by the Application ctor
                    actors.add(app.desktopid)

                    if checkInitialised and not app.isInitialised():
                        print("MISSING: %s" % g[0], file=sys.stderr)
                        hasErrors = True

                    # Insert into the ApplicationStore if one is available
                    if store is not None:
                        store.insert(app)
                        instanceCount += 1

        if checkInitialised and hasErrors:
            if invalidApps:
                print("Invalid apps:", file=sys.stderr)
                for a in sorted(invalidApps):
                    print("\t%s" % a, file=sys.stderr)
            sys.exit(-1)

        # print("Apps that logged valid files:")
        # for act in sorted(actors):
        #     print(act)

        # print("\nApps that logged files without a single system call:")
        # for act in sorted(nosyscallactors):
        #     print(act)

        self.appCount = len(actors)
        self.instCount = count - empties - invalids - len(nosyscalls)
        self.eventCount = eventCount
        self.validEventRatio = 100 - 100 * (invalids + empties +
                                            len(nosyscalls)) / (count)

        print("Finished loading DB.\n%d files seen, %d valid from %d apps, "
              "%d empty files, "
              "%d logs with 0 syscalls from %d apps, "
              "%d invalid.\nIn "
              "total, %.02f%% files processed." %
              (count, self.instCount, self.appCount, empties, len(nosyscalls),
               len(nosyscallactors), invalids, self.validEventRatio))
        print("Instance count: %d" % instanceCount)
    def loadDb(self, store: ApplicationStore = None):
        """Browse the SQLite db and create all the relevant app instances."""

        # Load up our events from the Zeitgeist database
        self.cur = self.con.cursor()
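        # event_view has one row per (event, subject) pair; this fetches every
        # subject row of any event that has at least one activity:// subject.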
        self.cur.execute('SELECT * \
                          FROM event_view \
                          WHERE id IN (SELECT DISTINCT id \
                                       FROM event_view \
                                       WHERE subj_uri LIKE "activity://%")')

        # Merge all event subjects based on their event id, and find their pids
        eventsMerged = dict()
        data = self.cur.fetchone()
        while data:
            pid = 0
            if "pid://" in data[EV_SUBJ_URI]:
                m = re.search(r'(?<=pid://)\d+', data[EV_SUBJ_URI])
                pid = int(m.group(0)) if m else 0

            ev = eventsMerged.get(data[EV_ID])
            if not ev:
                ev = SqlEvent(id=data[EV_ID],
                              pid=pid,
                              timestamp=data[EV_TIMESTAMP],
                              interpretation=self.getInterpretation(
                                             data[EV_INTERPRETATION]),
                              manifestation=self.getManifestation(
                                             data[EV_MANIFESTATION]),
                              origin_uri=data[EV_EVENT_ORIGIN_URI],
                              actor_uri=data[EV_ACTOR_URI])
            elif pid and ev.pid:
                assert ev.pid == pid, ("Error: multiple events record a pid "
                                       "for event %d, and they disagree on "
                                       "the pid to record (%d != %d)." % (
                                        data[EV_ID], ev.pid, pid))
            elif pid and not ev.pid:
                ev.pid = pid

            subj = SqlEventSubject(uri=data[EV_SUBJ_URI],
                                   interpretation=self.getInterpretation(
                                                 data[EV_SUBJ_INTERPRETATION]),
                                   manifestation=self.getManifestation(
                                                 data[EV_SUBJ_MANIFESTATION]),
                                   origin_uri=data[EV_SUBJ_ORIGIN_URI],
                                   mimetype=self.getMimeType(
                                            data[EV_SUBJ_MIMETYPE]),
                                   text=data[EV_SUBJ_TEXT],
                                   storage_uri=data[EV_SUBJ_STORAGE],
                                   current_uri=data[EV_SUBJ_CURRENT_URI])
            ev.addSubject(subj)
            eventsMerged[data[EV_ID]] = ev

            data = self.cur.fetchone()

        # Now, sort the events per app PID so we can build apps
        nopids = []            # Matching events without a PID
        eventsPerPid = dict()  # Storage for our events
        count = len(eventsMerged)  # Counter of fetched events, for stats
        instanceCount = 0      # Count of distinct app instances in the dataset
        actors = set()

        for ev in eventsMerged.values():
            pid = ev.pid
            if not pid:
                nopids.append(ev)
            else:
                eventsPerPid.setdefault(pid, []).append(ev)
        del eventsMerged  # no longer needed

        # For each PID, we'll now identify the successive Application instances
        for (pkey, pevent) in eventsPerPid.items():
            pevent = sorted(pevent, key=lambda x: x.timestamp)
            currentId = ''     # currently matched Desktop Id
            currentApp = None  # currently matched Application
            apps = []          # temp storage for found Applications

            for ev in pevent:
                (evId, __) = Application.getDesktopIdFromDesktopUri(
                    ev.actor_uri)

                if evId != currentId:
                    if debugEnabled():
                        print ("New application:", evId, currentId, ev)
                    currentId = evId
                    currentApp = Application(desktopid=evId,
                                             pid=int(pkey),
                                             tstart=ev.timestamp,
                                             tend=ev.timestamp)
                    actors.add(currentApp.desktopid)
                    apps.append(currentApp)
                else:
                    currentApp.setTimeOfStart(min(ev.timestamp,
                                                  currentApp.getTimeOfStart()))
                    currentApp.setTimeOfEnd(max(ev.timestamp,
                                                currentApp.getTimeOfEnd()))
                # Ignore study artefacts!
                if not currentApp.isStudyApp():
                    event = Event(actor=currentApp,
                                  time=ev.timestamp,
                                  zgEvent=ev)
                    currentApp.addEvent(event)

            # Insert into the ApplicationStore if one was given to us
            instanceCount += len(apps)
            if store is not None:
                for app in apps:
                    # Ignore study artefacts!
                    if not app.isStudyApp():
                        store.insert(app)
                    else:
                        instanceCount -= 1  # We discount this app instance

        self.appCount = len(actors)
        self.instCount = instanceCount
        self.eventCount = count
        self.validEventRatio = 100 - 100 * len(nopids) / count

        print("Finished loading DB.\n%d events seen, %d normal, %d without a "
              "PID.\nIn total, %.02f%% events accepted." % (
               count,
               count-len(nopids),
               len(nopids),
               self.validEventRatio))
        print("Instance count: %d" % self.instCount)
    def _runAttackRound(self, attack: Attack, policy: Policy, acListInst: dict,
                        lookUps: dict, allowedCache: dict):
        """Run an attack round with a set source and time."""
        fileStore = FileStore.get()
        appStore = ApplicationStore.get()
        userConf = UserConfigLoader.get()
        userHome = userConf.getHomeDir()

        seen = set()  # Already seen targets.
        spreadTimes = dict()  # Times from which the attack can spread.

        toSpread = deque()
        toSpread.append(attack.source)
        spreadTimes[attack.source] = attack.time

        # Statistics counters.
        appSet = set()
        userAppSet = set()
        fileCount = 0
        docCount = 0

        if debugEnabled():
            tprnt("Launching attack on %s at time %s %s app memory." %
                  (attack.source if isinstance(attack.source, File) else
                   attack.source.uid(), time2Str(attack.time),
                   "with" if attack.appMemory else "without"))

        def _allowed(policy, f, acc):
            k = (policy, f, acc)
            if k not in allowedCache:
                v = (policy.fileOwnedByApp(f, acc)
                     or policy.allowedByPolicy(f, acc.actor)
                     or policy.accessAllowedByPolicy(f, acc))
                allowedCache[k] = v
                return v
            else:
                return allowedCache[k]

        # As long as there are reachable targets, loop.
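        # This is a breadth-first spread: a reached file infects apps that
        # access it later, and a reached app infects files it accesses later.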
        while toSpread:
            current = toSpread.popleft()
            currentTime = spreadTimes[current]

            # When the attack spreads to a File.
            if isinstance(current, File):
                fileCount += 1
                if current.isUserDocument(userHome):
                    docCount += 1
                if debugEnabled():
                    tprnt("File added @%d: %s" % (currentTime, current))

                # Add followers.
                for f in current.follow:
                    if f.time > currentTime:
                        follower = fileStore.getFile(f.inode)
                        if follower not in seen:
                            toSpread.append(follower)
                            seen.add(follower)
                            spreadTimes[follower] = f.time

                # Add future accesses.
                for acc in current.accesses:
                    if acc.time > currentTime and \
                            acc.actor.desktopid not in appSet and \
                            _allowed(policy, current, acc):
                        toSpread.append(acc.actor)
                        spreadTimes[acc.actor] = acc.time

            # When the attack spreads to an app instance.
            elif isinstance(current, Application):
                if debugEnabled():
                    tprnt("App added @%d: %s" % (currentTime, current.uid()))

                # Add files accessed by the app.
                for (accFile, acc) in acListInst.get(current.uid()) or []:
                    if acc.time > currentTime and \
                            accFile not in seen and \
                            _allowed(policy, accFile, acc):
                        toSpread.append(accFile)
                        seen.add(accFile)
                        spreadTimes[accFile] = acc.time

                # Add future versions of the app.
                if attack.appMemory and current.desktopid not in appSet:
                    for app in appStore.lookupDesktopId(current.desktopid):
                        if app.tstart > currentTime:
                            toSpread.append(app)
                            spreadTimes[app] = app.tstart

                # We do this last to use appSet as a cache for already seen
                # apps, so we append all future instances once and for all to
                # the spread list.
                appSet.add(current.desktopid)
                if current.isUserlandApp():
                    userAppSet.add(current.desktopid)

            else:
                print("Error: attack simulator attempting to parse an unknown"
                      " object (%s)" % type(current),
                      file=sys.stderr)

        return (appSet, userAppSet, fileCount, docCount)
def main(argv):
    __opt_inode_query = None
    __opt_post_analysis = None
    __opt_quick_pol = None

    # Parse command-line parameters
    try:
        (opts, args) = getopt.getopt(argv, "hta:cedf:o:q:sk:rpgGi:u:x", [
            "help", "attacks", "post-analysis=", "check-missing",
            "check-excluded-files", "debug", "frequency=", "inode=",
            "extensions", "related-files", "output=", "output-fs=", "score",
            "quick-pol=", "skip=", "user=", "clusters", "print-clusters",
            "graph", "graph-clusters", "disable-plotting"
        ])
    except (getopt.GetoptError):
        print(USAGE_STRING)
        sys.exit(2)
    else:
        for opt, arg in opts:
            if opt in ('-h', '--help'):
                print(USAGE_STRING + "\n\n\n\n")

                print("--attacks:\n\tSimulates attacks and reports "
                      "on proportions of infected files and apps.\n")
                print("--check-excluded-files:\n\tPrints the lists of files "
                      "accessed by apps that also wrote to excluded\n\tfiles,"
                      " then aborts execution of the program.\n")
                print("--check-missing:\n\tChecks whether some Desktop IDs "
                      "for apps in the user's directory are\n\tmissing. If so,"
                      " aborts execution of the program.\n")
                print("--clusters:\n\tPrints clusters of files with "
                      "information flows to one another. Requires\n\tthe "
                      "--score option.\n")
                print("--debug:\n\tPrints additional debug information in "
                      "various code paths to help debug\n\tthe program.\n")
                print("--disable-plotting:\n\tDo not plot cluster graphs. See "
                      "the --graph option.\n")
                print("--extensions:\n\tPrints file extensions and MIME type "
                      "associations for this user.\n")
                print("--frequency:\n\tSets the frequency used by the "
                      "frequent-itemsets algorithm in the\n\t--related-files "
                      "post-analysis. Requires the --related-files option.\n")
                print("--graph:\n\tFind communities in file/app "
                      "accesses using graph theory methods.\n")
                print("--help:\n\tPrints this help information and exits.\n")
                print("--output=<DIR>:\n\tSaves a copy of the simulated "
                      "files, and some information on events\n\trelated to "
                      "them, in a folder created at the <DIR> path.\n")
                print("--post-analysis=<DIR,DIR,DIR>:\n\t"
                      "Uses the value pointed to"
                      " by --output in order to produce graphs and\n\t"
                      "statistics.\n")
                print("--quick-pol=Policy:\n\tReplace the default policies "
                      "with this one single Policy.\n")
                print("--related-files:\n\tMines for files that are frequently"
                      " accessed together by apps. Produces\n\toutput files in"
                      " scoring mode, and an analysis output in post-analysis"
                      "\n\tmode. See also --frequency.\n")
                print("--score:\n\tCalculates the usability and security "
                      "scores of a number of file access\n\tcontrol policies"
                      ", replayed over the simulated accesses. Prints results"
                      "\n\tand saves them to the output directory.\n")
                print(
                    "--skip=<Policy,Policy,'graphs'>:\n\tSkip the scoring of "
                    "policies in the lists. If the list contains the word"
                    "\n\t'graphs', skips the general graph computation.\n")
                sys.exit()
            elif opt in ('-c', '--check-missing'):
                __setCheckMissing(True)
            elif opt in ('-e', '--check-excluded-files'):
                __setCheckExcludedFiles(True)
            elif opt in ('-x', '--extensions'):
                __setPrintExtensions(True)
            elif opt in ('-d', '--debug'):
                __setDebug(True)
            elif opt in ('-r', '--related-files'):
                __setRelatedFiles(True)
            elif opt in ('-s', '--score'):
                __setScore(True)
            elif opt in ('-p', '--print-clusters', '--clusters'):
                __setPrintClusters(True)
            elif opt in ('-g', '--graph-clusters', '--graph'):
                __setGraph(True)
            elif opt in ('-t', '--attacks'):
                __setAttacks(True)
            elif opt in ('-G', '--disable-plotting'):
                __setPlottingDisabled(True)
            elif opt in ('-f', '--frequency'):
                if not arg:
                    print(USAGE_STRING)
                    sys.exit(2)
                __setFrequency(arg[1:] if arg[0] == '=' else arg)
            elif opt in ('-o', '--output-fs', '--output'):
                if not arg:
                    print(USAGE_STRING)
                    sys.exit(2)
                __setOutputFs(arg[1:] if arg[0] == '=' else arg)
            elif opt in ('-u', '--user'):
                if not arg:
                    print(USAGE_STRING)
                    sys.exit(2)
                __setUser(arg[1:] if arg[0] == '=' else arg)
            elif opt in ('-i', '--inode'):
                if not arg:
                    print(USAGE_STRING)
                    sys.exit(2)
                try:
                    __opt_inode_query = (arg[1:] if arg[0] == '=' else arg)
                except (ValueError) as e:
                    print(USAGE_STRING)
                    sys.exit(2)
            elif opt in ('-a', '--post-analysis'):
                if not arg:
                    print(USAGE_STRING)
                    sys.exit(2)
                __opt_post_analysis = (arg[1:] if arg[0] == '=' else arg)
            elif opt in ('-q', '--quick-pol'):
                if not arg:
                    print(USAGE_STRING)
                    sys.exit(2)
                __opt_quick_pol = (arg[1:] if arg[0] == '=' else arg)
            elif opt in ('-k', '--skip'):
                if not arg:
                    print(USAGE_STRING)
                    sys.exit(2)
                __opt_skip = (arg[1:] if arg[0] == '=' else arg)
                __setSkip(__opt_skip.split(","))

    registerTimePrint()

    if __opt_post_analysis:
        if relatedFilesEnabled():
            tprnt("Starting post-analysis of related files...\n")
            engine = FrequentFileEngine()
            engine.processFrequentItemLists(__opt_post_analysis)

        else:
            tprnt("Starting post-analysis of usability/security scores...\n")
            from AnalysisEngine import AnalysisEngine
            if outputFsEnabled():
                engine = AnalysisEngine(inputDir=__opt_post_analysis,
                                        outputDir=outputFsEnabled())
            else:
                engine = AnalysisEngine(inputDir=__opt_post_analysis)
            engine.analyse()

        sys.exit(0)

    # Make the application, event and file stores
    store = ApplicationStore.get()
    evStore = EventStore.get()
    fileStore = FileStore.get()
    initMimeTypes()
    datapath = getDataPath()

    # Load up user-related variables
    userConf = UserConfigLoader.get(path=datapath + USERCONFIGNAME)

    # Load up and check the SQLite database
    sql = None
    tprnt("\nLoading the SQLite database: %s..." % (datapath + DATABASENAME))
    try:
        sql = SqlLoader(datapath + DATABASENAME)
    except ValueError as e:
        print("Failed to parse SQL: %s" % e.args[0], file=sys.stderr)
        sys.exit(-1)
    if checkMissingEnabled():
        tprnt("Checking for missing application identities...")
        sql.listMissingActors()
    sql.loadDb(store)
    sqlAppCount = sql.appCount
    sqlInstCount = sql.instCount
    sqlEvCount = sql.eventCount
    sqlValidEvCount = sql.validEventRatio
    tprnt("Loaded the SQLite database.")

    # Load up the PreloadLogger file parser
    tprnt("\nLoading the PreloadLogger logs in folder: %s..." % datapath)
    pll = PreloadLoggerLoader(datapath)
    if checkMissingEnabled():
        tprnt("Checking for missing application identities...")
        pll.listMissingActors()
    pll.loadDb(store)
    pllAppCount = pll.appCount
    pllInstCount = pll.instCount
    pllEvCount = pll.eventCount
    pllValidEvCount = pll.validEventRatio
    tprnt("Loaded the PreloadLogger logs.")

    # Resolve actor ids in all apps' events
    tprnt("\nUsing PreloadLogger Applications to resolve interpreters in "
          "Zeitgeist Applications...")
    (interpretersAdded, instancesEliminated) = store.resolveInterpreters()
    tprnt("Resolved interpreter ids in %d Applications, and removed %d "
          "instances by merging them with another as a result." %
          (interpretersAdded, instancesEliminated))

    # Update events' actor ids in the ApplicationStore, then take them and send
    # them to the EventStore. Finally, sort the EventStore by timestamp.
    tprnt("\nInserting and sorting all events...")
    store.sendEventsToStore()
    evStore.sort()
    evCount = evStore.getEventCount()
    tprnt("Sorted all %d events in the event store." % evCount)

    # Simulate the events to build a file model
    tprnt("\nSimulating all events to build a file model...")
    evStore.simulateAllEvents()
    del sql
    del pll
    evStore.sort()
    tprnt("Simulated all events. %d files initialised." % len(fileStore))

    appCount = store.getAppCount()
    userAppCount = store.getUserAppCount()
    instCount = len(store)
    userInstCount = store.getUserInstCount()
    fileCount = len(fileStore)
    docCount = fileStore.getUserDocumentCount(userConf.getSetting("HomeDir"))

    if printExtensions():
        exts = set()
        for f in fileStore:
            exts.add(f.getExtension())
        try:
            exts.remove(None)
        except (KeyError):
            pass
        tprnt("Info: the following file extensions were found:")
        for e in sorted(exts):
            print("\t%s: %s" %
                  (e, mimetypes.guess_type("f.%s" % e, strict=False)))

        if checkExcludedFilesEnabled():
            tprnt("\nPrinting files written and read by instances which wrote"
                  "to excluded directories...")
            dbgPrintExcludedEvents()
        import time as t
        t.sleep(10)

    # Manage --inode queries
    if __opt_inode_query:
        inodes = __opt_inode_query.split(",")
        for inode in sorted(int(i) for i in inodes):
            f = fileStore.getFile(inode)
            tprnt("\nInode queried: %d" % inode)
            tprnt("Corresponding file: %s\n\t(%s)" % (f.getName(), f))
        sys.exit(0)

    # Print the model as proof of concept
    if debugEnabled():
        tprnt("\nPrinting the file model...\n")
        fileStore.printFiles(showDeleted=True,
                             showCreationTime=True,
                             showDocumentsOnly=True,
                             userHome=userConf.getSetting("HomeDir"),
                             showDesignatedOnly=False)

    # Make the filesystem corresponding to the model
    if outputFsEnabled():
        tprnt("\nMaking a copy of the file model at '%s'...\n" %
              outputFsEnabled())
        fileStore.makeFiles(outputDir=outputFsEnabled(),
                            showDeleted=True,
                            showDocumentsOnly=False,
                            userHome=userConf.getSetting("HomeDir"),
                            showDesignatedOnly=False)

        with open(os.path.join(outputFsEnabled(), "statistics.txt"), "w") as f:
            msg = "SQL: %d apps; %d instances; %d events; %d%% valid\n" % \
                  (sqlAppCount, sqlInstCount, sqlEvCount, sqlValidEvCount)
            msg += "PreloadLogger: %d apps; %d instances; %d events; " \
                   "%d%% valid\n" % \
                  (pllAppCount, pllInstCount, pllEvCount, pllValidEvCount)
            msg += "Simulated: %d apps; %d instances; %d user apps; %d user" \
                   " instances; %d events; %d files; %d user documents\n" % \
                  (appCount, instCount, userAppCount, userInstCount,
                   evCount, fileCount, docCount)
            exclLists = userConf.getDefinedSecurityExclusionLists()
            for l in exclLists:
                msg += "Exclusion list '%s' defined.\n" % l
            print(msg, file=f)

    # Build a general access graph.
    if graphEnabled():
        skipList = skipEnabled()
        if skipList and 'graphs' in skipList:
            tprnt("\nGraphs in skip list, skipping global graph generation.")
        else:
            engine = GraphEngine.get()
            engine.runGraph(policy=None)

    # Policy engine. Create a policy and run a simulation to score it.
    if scoreEnabled() or attacksEnabled() or graphEnabled():
        engine = PolicyEngine()

        if __opt_quick_pol:
            policies = [__opt_quick_pol]
            polArgs = [None]
        else:
            policies = [
                CompoundLibraryPolicy,
                CustomLibraryPolicy,
                DesignationPolicy,
                DistantFolderPolicy,
                FilenamePolicy,
                FileTypePolicy,
                FolderPolicy,
                OneDistantFolderPolicy,
                OneFolderPolicy,
                OneLibraryPolicy,
                UnsecurePolicy,
                Win10Policy,
                Win8Policy,
                HSecurePolicy,
                HBalancedPolicy,
                'HSecureSbPolicy',
                'HSecureSbFaPolicy',
                'HSecureFaPolicy',
                'HBalancedSbPolicy',
                'HBalancedSbFaPolicy',
                'HBalancedFaPolicy',
                'OneDistantFolderSbPolicy',
                'OneDistantFolderSbFaPolicy',
                'OneDistantFolderFaPolicy',
                'HUsableSecuredSbPolicy',
                'HUsableSecuredSbFaPolicy',
                'HUsableSecuredFaPolicy',
                'HBalancedSecuredSbPolicy',
                'HBalancedSecuredSbFaPolicy',
                'HBalancedSecuredFaPolicy',
                'DistantFolderSbPolicy',
                'DistantFolderSbFaPolicy',
                'DistantFolderFaPolicy',
                'LibraryFolderSbPolicy',
                'LibraryFolderSbFaPolicy',
                'LibraryFolderFaPolicy',
                'FileTypeSbPolicy',
                'FileTypeSbFaPolicy',
                'FileTypeFaPolicy',
                'OneFolderSbPolicy',
                'OneFolderSbFaPolicy',
                'OneFolderFaPolicy',
                'FolderSbPolicy',
                'FolderSbFaPolicy',
                'FolderFaPolicy',
                'OneLibrarySbPolicy',
                'OneLibrarySbFaPolicy',
                'OneLibraryFaPolicy',
                'CompoundLibrarySbPolicy',
                'CompoundLibrarySbFaPolicy',
                'CompoundLibraryFaPolicy',
                'CustomLibrarySbPolicy',
                'CustomLibrarySbFaPolicy',
                'CustomLibraryFaPolicy',
            ]

            # One argument slot per policy; customise entries as needed, e.g.
            # dict(folders=["~/Downloads", "/tmp"]).
            polArgs = [None] * len(policies)

        skipList = skipEnabled()
        for (polIdx, polName) in enumerate(policies):
            pol = None
            arg = polArgs[polIdx]

            # Names with certain suffixes are dynamically generated policies.
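            # E.g. 'FolderSbPolicy' composes FolderPolicy with StickyBitPolicy,
            # and an 'SbFaPolicy' suffix also adds FutureAccessListPolicy.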
            if isinstance(polName, str):
                if polName.endswith('SbPolicy'):
                    pols = [
                        getattr(sys.modules[__name__],
                                polName[:-8] + 'Policy'), StickyBitPolicy
                    ]
                    args = [arg, dict(folders=["~", "/media", "/mnt"])]
                    pol = CompositionalPolicy(pols, args, polName)
                elif polName.endswith('SbFaPolicy'):
                    pols = [
                        getattr(sys.modules[__name__],
                                polName[:-10] + 'Policy'), StickyBitPolicy,
                        FutureAccessListPolicy
                    ]
                    args = [arg, dict(folders=["~", "/media", "/mnt"]), None]
                    pol = CompositionalPolicy(pols, args, polName)
                elif polName.endswith('FaPolicy'):
                    pols = [
                        getattr(sys.modules[__name__],
                                polName[:-8] + 'Policy'),
                        FutureAccessListPolicy
                    ]
                    args = [arg, None]
                    pol = CompositionalPolicy(pols, args, polName)
                # A normal policy, just invoke it directly.
                else:
                    polName = getattr(sys.modules[__name__], polName)

            # Existing policies, with arguments / or normal policies passed as
            # strings, including via the --quick flag.
            if not pol:
                pol = polName(**arg) if arg else polName()

            tprnt("\nRunning %s..." % pol.name)

            if skipList and pol.name in skipList:
                tprnt("%s is in skip list, skipping." % pol.name)
                continue

            engine.runPolicy(pol,
                             outputDir=outputFsEnabled(),
                             printClusters=printClustersEnabled())

            if pol.name == "FileTypePolicy" and checkMissingEnabled():
                pol.abortIfUnsupportedExtensions()

            if attacksEnabled():
                tprnt("Simulating attacks on %s..." % pol.name)
                sim = AttackSimulator(seed=0)
                sim.runAttacks(pol, outputDir=outputFsEnabled() or "/tmp/")

            del pol

    # Calculate frequently co-accessed files:
    if relatedFilesEnabled():
        engine = FrequentFileEngine()

        tprnt("\nMining for frequently co-accessed file types...")
        engine.mineFileTypes()