示例#1
0
        return node1 in self._graph and node2 in self._graph[node1]

    def find_path(self, node1, node2, path=[]):
        """ Find any path between node1 and node2 (may not be shortest) """

        path = path + [node1]
        if node1 == node2:
            return path
        if node1 not in self._graph:
            return None
        for node in self._graph[node1]:
            if node not in path:
                new_path = self.find_path(node, node2, path)
                if new_path:
                    return new_path
        return None

    def __str__(self):
        return '{}({})'.format(self.__class__.__name__, dict(self._graph))


if __name__ == "__main__":
    print("MAIN:")
    import graph
    # `pprint` was used below but never imported in this snippet, which
    # would raise NameError; import it locally (harmless if the full file
    # also imports it at the top).
    import pprint

    # Build a small undirected demo graph and pretty-print its adjacency map.
    connections = [('A', 'B'), ('B', 'C'), ('B', 'D'), ('C', 'D'), ('E', 'F'),
                   ('F', 'C')]
    g = graph.Graph(connections, directed=False)

    pretty_print = pprint.PrettyPrinter()
    pretty_print.pprint(g._graph)
from collections import deque
import graph as g

# Build the example graph used by the traversal functions below.
graph = g.Graph()
# populate_graph presumably fills the graph from the module-level
# `example_graph2` dataset — both defined in the project's `graph` module;
# verify there.
graph = g.populate_graph(graph, g.example_graph2)


def bf_traversal(tree):
    '''Breadth-first tree traversal with loops.

    Walks the tree from ``tree.root`` following each node's
    ``nearest_vertices`` and returns a list of ``(name, content)`` tuples
    in BFS discovery order (root first).
    '''
    # A set gives O(1) membership tests; the original used a list (O(n))
    # and also appended the current node's name on every pop, producing
    # duplicate entries.  Output order is unchanged: the root is recorded
    # first, then every other node when it is first discovered.
    queue = deque([tree.root])
    visited = {tree.root.name}
    contents = [(tree.root.name, tree.root.content)]
    while queue:
        current = queue.popleft()
        for neighbor in current.nearest_vertices:
            if neighbor.name not in visited:
                visited.add(neighbor.name)
                queue.append(neighbor)
                contents.append((neighbor.name, neighbor.content))
    return contents


def df_traversal(tree):
    '''Depth-first tree traversal with recursion'''
    traversed = []
    contents = []
示例#3
0
    def __init__(self,
                 graph_db=None,
                 query_dict=None,
                 display_cols=None,
                 **kwds):
        """
        A query for an instance of GraphDatabase. This class nicely wraps
        the SQLQuery class located in sage.databases.database.py to make
        the query constraints intuitive and with as many pre-definitions as
        possible. (i.e.: since it has to be a GraphDatabase, we already know
        the table structure and types; and since it is immutable, we can
        treat these as a guarantee).

        .. note::

           SQLQuery functions are available for GraphQuery. See
           sage.databases.database.py for more details.

        INPUT:


        -  ``graph_db`` - The GraphDatabase instance to apply
           the query to. (If None, then a new instance is created).

        -  ``query_dict`` - A dictionary specifying the query
           itself. Format is: 'table_name': 'tblname', 'display_cols':
           ['col1', 'col2'], 'expression':[col, operator, value] If not None,
           query_dict will take precedence over all other arguments.

        -  ``display_cols`` - A list of column names (strings)
           to display in the result when running or showing a query.

        -  ``kwds`` - The columns of the database are all
           keywords. For a database table/column structure dictionary, call
           graph_db_info. Keywords accept both single values and lists of
           length 2. The list allows the user to specify an expression other
           than equality. Valid expressions are strings, and for numeric
           values (i.e. Reals and Integers) are: '=','<','>','<=','>='. String
           values also accept 'regexp' as an expression argument. The only
           keyword exception to this format is induced_subgraphs, which
           accepts one of the following options: 1.
           ['one_of',String,...,String] Will search for graphs containing a
           subgraph isomorphic to any of the graph6 strings in the list. 2.
           ['all_of',String,...,String] Will search for graphs containing a
           subgraph isomorphic to each of the graph6 strings in the list.


        EXAMPLES::

            sage: Q = GraphQuery(display_cols=['graph6','num_vertices','degree_sequence'],num_edges=['<=',5],min_degree=1)
            sage: Q.number_of()
            35
            sage: Q.show()
            Graph6               Num Vertices         Degree Sequence
            ------------------------------------------------------------
            A_                   2                    [1, 1]
            BW                   3                    [1, 1, 2]
            CF                   4                    [1, 1, 1, 3]
            CK                   4                    [1, 1, 1, 1]
            CL                   4                    [1, 1, 2, 2]
            CN                   4                    [1, 2, 2, 3]
            D?{                  5                    [1, 1, 1, 1, 4]
            D@s                  5                    [1, 1, 1, 2, 3]
            D@{                  5                    [1, 1, 2, 2, 4]
            DBg                  5                    [1, 1, 2, 2, 2]
            DBk                  5                    [1, 1, 2, 3, 3]
            DIk                  5                    [1, 2, 2, 2, 3]
            DK[                  5                    [1, 2, 2, 2, 3]
            D_K                  5                    [1, 1, 1, 1, 2]
            D`K                  5                    [1, 1, 2, 2, 2]
            E?Bw                 6                    [1, 1, 1, 1, 1, 5]
            E?Fg                 6                    [1, 1, 1, 1, 2, 4]
            E?N?                 6                    [1, 1, 1, 1, 2, 2]
            E?NG                 6                    [1, 1, 1, 1, 3, 3]
            E@FG                 6                    [1, 1, 1, 2, 2, 3]
            E@N?                 6                    [1, 1, 2, 2, 2, 2]
            E@Q?                 6                    [1, 1, 1, 1, 1, 1]
            E@QW                 6                    [1, 1, 1, 2, 2, 3]
            E@YO                 6                    [1, 1, 2, 2, 2, 2]
            E_?w                 6                    [1, 1, 1, 1, 1, 3]
            E_Cg                 6                    [1, 1, 1, 1, 2, 2]
            E_Cw                 6                    [1, 1, 1, 2, 2, 3]
            E_Ko                 6                    [1, 1, 2, 2, 2, 2]
            F??^?                7                    [1, 1, 1, 1, 1, 2, 3]
            F?LCG                7                    [1, 1, 1, 1, 2, 2, 2]
            FK??W                7                    [1, 1, 1, 1, 1, 1, 2]
            FK?GW                7                    [1, 1, 1, 1, 2, 2, 2]
            F_?@w                7                    [1, 1, 1, 1, 1, 1, 4]
            F_?Hg                7                    [1, 1, 1, 1, 1, 2, 3]
            F_?XO                7                    [1, 1, 1, 1, 2, 2, 2]
        """
        if graph_db is None: graph_db = GraphDatabase()
        if query_dict is not None:
            # A pre-built query dictionary takes precedence over all other
            # arguments; only the two special-format columns need their
            # values converted to the database's internal representation.
            if query_dict['expression'][0] == 'degree_sequence':
                query_dict['expression'][3] = degseq_to_data(
                    query_dict['expression'][3])
            elif query_dict['expression'][0] == 'induced_subgraphs':
                query_dict['expression'][3] = subgraphs_to_data(
                    query_dict['expression'][3])
            SQLQuery.__init__(self, graph_db, query_dict)
        else:
            # construct a query from the given parameters
            SQLQuery.__init__(self, graph_db)

            #if display_cols is None:
            #    raise TypeError, 'Nonetype display_cols cannot retrieve data.'

            # master_join maps table_name -> ('graph_id', 'graph_id') for
            # every non-base table that must be INNER JOINed to graph_data
            # when building the final query string below.
            master_join = {}

            for key in kwds:
                # check validity
                if not key in valid_kwds:
                    raise KeyError('%s is not a valid key for this database.' %
                                   str(key))

                # designate a query_dict
                qdict = {
                    'display_cols': None
                }  # reserve display cols until end
                # (database.py currently concatenates
                # them including repeats)

                # set table name
                # NOTE(review): graph_data, aut_grp, degrees, misc and
                # spectrum are module-level collections — presumably the
                # column names of each table; defined elsewhere in this
                # module, verify there.
                if key in graph_data: qdict['table_name'] = 'graph_data'
                elif key in aut_grp: qdict['table_name'] = 'aut_grp'
                elif key in degrees: qdict['table_name'] = 'degrees'
                elif key in misc: qdict['table_name'] = 'misc'
                elif key in spectrum: qdict['table_name'] = 'spectrum'

                # set expression
                # A bare (non-list) value means equality, except for
                # induced_subgraphs which becomes a regexp on the
                # canonical graph6 string.
                if not isinstance(kwds[key], list):
                    if key == 'induced_subgraphs':
                        qdict['expression'] = [
                            key, 'regexp',
                            '.*%s.*' % (graph.Graph(
                                kwds[key]).canonical_label()).graph6_string()
                        ]
                    else:
                        qdict['expression'] = [key, '=', kwds[key]]
                elif key == 'degree_sequence':
                    qdict['expression'] = [key, '=', degseq_to_data(kwds[key])]
                elif key != 'induced_subgraphs':
                    qdict['expression'] = [key] + kwds[key]

                # add key parameter to query
                join_dict = {qdict['table_name']: ('graph_id', 'graph_id')}
                if key == 'induced_subgraphs' and isinstance(kwds[key], list):
                    self.intersect(subgraphs_to_query(kwds[key], graph_db),
                                   'graph_data',
                                   join_dict,
                                   in_place=True)
                else:
                    self.intersect(SQLQuery(graph_db, qdict),
                                   'graph_data',
                                   join_dict,
                                   in_place=True)

                # include search params (keys) in join clause
                # again, we exclude graph_data because it is the base table
                if qdict['table_name'] != 'graph_data':
                    master_join[qdict['table_name']] = ('graph_id', 'graph_id')

            # display columns from each table
            # (each list starts with its table name; requested columns are
            # appended after it, so tab[0] is always the table and tab[1:]
            # the columns to show)
            aut_grp_disp = ['aut_grp']
            degrees_disp = ['degrees']
            misc_disp = ['misc']
            spectrum_disp = ['spectrum']
            graph_data_disp = ['graph_data']

            disp_tables = [
                aut_grp_disp, degrees_disp, misc_disp, spectrum_disp
            ]
            # graph_data intentionally left out because it is always called

            # organize display
            if display_cols is not None:
                for col in display_cols:
                    if col in graph_data: graph_data_disp.append(col)
                    elif col in aut_grp: aut_grp_disp.append(col)
                    elif col in degrees: degrees_disp.append(col)
                    elif col in misc: misc_disp.append(col)
                    elif col in spectrum: spectrum_disp.append(col)

                # finish filling master join with display tables
                for tab in disp_tables:
                    if len(tab) > 1:
                        master_join[tab[0]] = ('graph_id', 'graph_id')

                # join clause for display tables
                join_str = 'FROM graph_data '
                for tab in master_join:
                    join_str += 'INNER JOIN %s ON graph_data.graph_id=%s.graph_id ' % (
                        tab, tab)

                # construct sql syntax substring for display cols
                # graph6 is always selected first, so skip it if requested
                # again to avoid a duplicate column.
                disp_str = 'SELECT graph_data.graph6, '
                for col in graph_data_disp[1:]:
                    if col != 'graph6': disp_str += 'graph_data.%s, ' % col
                for col in aut_grp_disp[1:]:
                    disp_str += 'aut_grp.%s, ' % col
                for col in degrees_disp[1:]:
                    disp_str += 'degrees.%s, ' % col
                for col in misc_disp[1:]:
                    disp_str += 'misc.%s, ' % col
                for col in spectrum_disp[1:]:
                    disp_str += 'spectrum.%s, ' % col
                disp_str = disp_str.rstrip(', ') + ' '

                # substitute disp_str and join_str back into self's query string
                self.__query_string__ = re.sub('SELECT.*WHERE ', disp_str + join_str + \
                                                                                'WHERE ', self.__query_string__)
                self.__query_string__ += ' ORDER BY graph_data.graph6'
示例#4
0
    def createSeccompProfile(self, tempOutputFolder, resultsFolder):
        returnCode = 0
        if os.geteuid() != 0:
            self.logger.error("This script must be run as ROOT only!")
            exit("This script must be run as ROOT only. Exiting.")
        self.logger.debug("tempOutputFolder: %s", tempOutputFolder)

        allSyscalls = set()

        muslSyscallList = list()
        glibcSyscallList = list()

        i = 0
        while i < 400:
            muslSyscallList.append("syscall(" + str(i) + ")")
            glibcSyscallList.append("syscall(" + str(i) + ")")
            glibcSyscallList.append("syscall ( " + str(i) + " )")
            glibcSyscallList.append("syscall( " + str(i) + " )")
            i += 1

        fineGrainCfgs = dict()

        glibcGraph = graph.Graph(self.logger)
        glibcGraph.createGraphFromInput(self.glibcCfgpath, ":")

        glibcWrapperListTemp = []
        if (self.strictMode):
            for func in self.glibcFuncList:
                glibcWrapperListTemp.extend(
                    glibcGraph.getSyscallFromStartNode(func))
        else:
            i = 0
            while i < 400:
                glibcWrapperListTemp.append(i)
                i += 1
        glibcWrapperList = set(glibcWrapperListTemp)
        muslGraph = graph.Graph(self.logger)
        muslGraph.createGraphFromInput(self.muslCfgpath, "->")
        muslWrapperListTemp = []
        if (self.strictMode):
            for func in self.muslFuncList:
                muslWrapperListTemp.extend(
                    muslGraph.getSyscallFromStartNode(func))
        else:
            i = 0
            while i < 400:
                muslWrapperListTemp.append(i)
                i += 1
        muslWrapperList = set(muslWrapperListTemp)

        #        self.logger.debug("glibcWrapperList: %s", str(glibcWrapperList))
        #        self.logger.debug("muslWrapperList: %s", str(muslWrapperList))

        #TODO Separate libaio-like CFGs from fine-grained CFGs
        #Go through extra CFGs such as libaio to extract lib->syscall mapping
        #for fileName in os.listdir(self.cfgFolderPath):
        #    self.logger.debug("Adding cfg: %s", fileName)
        #    glibcGraph.createGraphFromInput(self.cfgFolderPath + "/" + fileName, "->")
        #    muslGraph.createGraphFromInput(self.cfgFolderPath + "/" + fileName, "->")

        #time.sleep(10)

        exceptList = [
            "access", "arch_prctl", "brk", "close", "execve", "exit_group",
            "fcntl", "fstat", "geteuid", "lseek", "mmap", "mprotect", "munmap",
            "openat", "prlimit64", "read", "rt_sigaction", "rt_sigprocmask",
            "set_robust_list", "set_tid_address", "stat", "statfs", "write",
            "setns", "capget", "capset", "chdir", "fchown", "futex",
            "getdents64", "getpid", "getppid", "lstat", "openat", "prctl",
            "setgid", "setgroups", "setuid", "stat", "io_setup", "getdents",
            "clone", "readlinkat", "newfstatat", "getrandom", "sigaltstack",
            "getresgid", "getresuid", "setresgid", "setresuid", "alarm",
            "getsid", "getpgrp", "epoll_pwait", "vfork"
        ]

        javaExceptList = [
            "open", "getcwd", "openat", "close", "fopen", "fclose", "link",
            "unlink", "unlinkat", "mknod", "rename", "renameat", "mkdir",
            "rmdir", "readlink", "realpath", "symlink", "stat", "lstat",
            "fstat", "fstatat", "chown", "lchown", "fchown", "chmod", "fchmod",
            "utimes", "futimes", "lutimes", "readdir", "read", "write",
            "access", "getpwuid", "getgrgid", "statvfs", "clock_getres",
            "get_mempolicy", "gettid", "getcpu", "fallocate", "memfd_create",
            "fstatat64", "newfstatat"
        ]

        binaryReady = False
        libFileReady = False
        languageReady = False
        try:
            self.logger.debug("Checking cache in %s", tempOutputFolder)
            myFile = open(tempOutputFolder + "/" + C.CACHE, 'r')
            binaryReady = True
            myFile = open(tempOutputFolder + "/" + C.LIBFILENAME, 'r')
            libFileReady = True
        #    myFile = open(tempOutputFolder + "/" + C.LANGFILENAME, 'r')
        #    languageReady = True
        except OSError as e:
            self.logger.info(
                "Cache doesn't exist, must extract binaries and libraries")

        self.logger.debug("binaryReady: %s libFileReady: %s", str(binaryReady),
                          str(libFileReady))

        myContainer = container.Container(self.imagePath, self.options,
                                          self.logger, self.args)
        self.containerName = myContainer.getContainerName()

        if (not myContainer.pruneVolumes()):
            self.logger.warning(
                "Pruning volumes failed, storage may run out of space\n")
        returncode, out, err = util.runCommand("mkdir -p " + tempOutputFolder)
        if (returncode != 0):
            self.logger.error("Failed to create directory: %s with error: %s",
                              tempOutputFolder, err)
        else:
            self.logger.debug("Successfully created directory: %s",
                              tempOutputFolder)

        ttr = 10
        logSleepTime = 60
        sysdigTotalRunCount = 3
        if (binaryReady):
            sysdigTotalRunCount = 1
        sysdigRunCount = 1

        if (self.name == "softwareag-apigateway"):
            logSleepTime = 60

        if (self.name == "cirros"):
            logSleepTime = 120

        psListAll = set()

        self.logger.info("--->Starting MONITOR phase:")
        while (sysdigRunCount <= sysdigTotalRunCount):
            myMonitor = processMonitorFactory.Factory(
                self.logger,
                self.monitoringTool,
                psListFilePath=self.binLibList)
            #mySysdig = sysdig.Sysdig(self.logger)
            self.logger.debug(
                "Trying to kill and delete container which might not be running in loop... Not a problem if returns error"
            )
            str(myContainer.kill())
            str(myContainer.delete())
            self.logger.info(
                "Running sysdig multiple times. Run count: %d from total: %d",
                sysdigRunCount, sysdigTotalRunCount)
            sysdigRunCount += 1
            #sysdigResult = mySysdig.runSysdigWithDuration(logSleepTime)
            monitorResult = myMonitor.runWithDuration(logSleepTime)
            if (not monitorResult):
                self.logger.error(
                    "Running sysdig with execve failed, not continuing for container: %s",
                    self.name)
                self.logger.error(
                    "Please make sure sysdig is installed and you are running the script with root privileges. If problem consists please contact our support team."
                )
                self.errorMessage = "Running sysdig with execve failed"

            if (monitorResult and
                    myContainer.runWithoutSeccomp()):  #myContainer.run() ):
                self.status = True
                self.logger.info(
                    "Ran container sleeping for %d seconds to generate logs and extract execve system calls",
                    logSleepTime)
                time.sleep(logSleepTime)
                myMonitor.waitUntilComplete()
                originalLogs = myContainer.checkLogs()
                self.logger.debug("originalLog: %s", originalLogs)
                time.sleep(10)
                if (not myContainer.checkStatus()):
                    self.logger.warning(
                        "Container exited after running, trying to run in attached mode!"
                    )
                    self.logger.debug(str(myContainer.delete()))
                    if (not myContainer.runInAttachedMode()):
                        self.errorMessage = "Container didn't run in attached mode either, forfeiting!"
                        self.logger.error(
                            "Container didn't run in attached mode either, forfeiting!"
                        )
                        self.logger.error(
                            "There is a problem launching a container for %s. Please validate you can run the container without Confine. If so, contact our support team.",
                            self.name)
                        self.logger.debug(str(myContainer.delete()))
                        return C.NOATTACH
                    else:
                        time.sleep(10)
                        if (not myContainer.checkStatus()):
                            self.errorMessage = "Container got killed after running in attached mode as well!"
                            self.logger.error(
                                "Container got killed after running in attached mode as well, forfeiting!"
                            )
                            self.logger.error(
                                "There is a problem launching a container for %s. Please validate you can run the container without Confine. If so, contact our support team.",
                                self.name)
                            self.logger.debug(str(myContainer.kill()))
                            self.logger.debug(str(myContainer.delete()))
                            return C.CONSTOP
                self.runnable = True
                self.logger.debug(
                    "Ran container %s successfully, sleeping for %d seconds",
                    self.name, ttr)
                time.sleep(ttr)
                self.logger.debug(
                    "Finished sleeping, extracting psNames for %s", self.name)
                self.logger.debug(
                    "Starting to identify running processes and required binaries and libraries through dynamic analysis."
                )

                if (not binaryReady):
                    psList = myMonitor.extractPsNames(
                        "execve", myContainer.getContainerName(),
                        myContainer.getContainerId())

                    if (not psList):
                        self.logger.error(
                            "PS List is None from extractPsNames(). Retrying this container: %s",
                            self.name)
                        self.logger.debug(str(myContainer.kill()))
                        self.logger.debug(str(myContainer.delete()))
                        self.errorMessage = "PS List is None from extractPsNames(), error in sysdig, retrying this container"
                        return C.SYSDIGERR
                    if (len(psList) == 0):
                        self.logger.error(
                            "PS List is None from extractPsNames(). Retrying this container: %s",
                            self.name)
                        self.logger.debug(str(myContainer.kill()))
                        self.logger.debug(str(myContainer.delete()))
                        self.errorMessage = "PS List is None from extractPsNames(), error in sysdig, retrying this container"
                        return C.NOPROCESS
                    self.logger.info("len(psList) from sysdig: %d",
                                     len(psList))
                    # TODO: Do we need to do this?  Or can we just rely on copyFromContainerWithLibs below
                    psList = psList.union(myContainer.extractLibsFromProc())
                    self.logger.debug(
                        "len(psList) after extracting proc list: %d",
                        len(psList))
                    self.logger.debug("Container: %s PS List: %s", self.name,
                                      str(psList))
                    self.logger.debug(
                        "Container: %s extracted psList with %d elements",
                        self.name, len(psList))
                    self.logger.debug("Entering not binaryReady")
                    if (not util.deleteAllFilesInFolder(
                            tempOutputFolder, self.logger)):
                        self.logger.error(
                            "Failed to delete files in temporary output folder, exiting..."
                        )
                        self.errorMessage = "Failed to delete files in temporary output folder"
                        sys.exit(-1)

                    psListAll.update(psList)
                    self.logger.info(
                        "Container: %s extracted psList with %d elements",
                        self.name, len(psListAll))

        if (self.status):
            if (not binaryReady):
                self.logger.info("Container: %s PS List: %s", self.name,
                                 str(psListAll))
                self.logger.info(
                    "Starting to copy identified binaries and libraries (This can take some time...)"
                )  #Will try to copy from different paths. Some might not exist. Errors are normal.")
                if (self.extractAllBinaries):
                    psListAll.update(myContainer.extractAllBinaries())

                for binaryPath in psListAll:
                    if (binaryPath.strip() != ""):
                        myContainer.copyFromContainerWithLibs(
                            binaryPath, tempOutputFolder)
                        #if ( not myContainer.copyFromContainerWithLibs(binaryPath, tempOutputFolder) ):
                        #    self.logger.error("Problem copying files from container!")
                binaryReady = True
                myFile = open(tempOutputFolder + "/" + C.CACHE, 'w')
                myFile.write("complete")
                myFile.flush()
                myFile.close()
                self.logger.info(
                    "Finished copying identified binaries and libraries")
                self.logger.info("<---Finished MONITOR phase\n")

            self.logger.debug(str(myContainer.kill()))
            self.logger.debug(str(myContainer.delete()))

            if (binaryReady):
                self.logger.info("--->Starting Direct Syscall Extraction")
                self.logger.info("Extracting direct system call invocations")
                directSyscallSet = self.extractDirectSyscalls(tempOutputFolder)
                self.logger.info("<---Finished Direct Syscall Extraction\n")
                if (not libFileReady):
                    self.logger.info("--->Starting ANALYZE phase")
                    self.logger.info(
                        "Extracting imported functions and storing in libs.out"
                    )
                    self.extractAllImportedFunctions(tempOutputFolder,
                                                     C.LIBFILENAME)
                    self.logger.info("<---Finished ANALYZE phase\n")
                #if ( not languageReady ):
                self.extractBinaryType(tempOutputFolder)
                isMusl = self.usesMusl(tempOutputFolder)
                funcFilePath = tempOutputFolder + "/" + C.LIBFILENAME
                funcFile = open(funcFilePath, 'r')
                funcLine = funcFile.readline()
                if (not funcLine and not os.path.isfile(
                        os.path.join(self.goFolderPath,
                                     self.name + ".syscalls"))
                        and len(directSyscallSet) == 0):
                    self.logger.info(
                        "%s container can't be hardened because no functions can be extracted from binaries and no direct syscalls found",
                        self.name)
                    self.errorMessage = "container can't be hardened because no functions can be extracted from binaries and no direct syscalls found"
                    return C.NOFUNCS

                self.logger.info(
                    "--->Starting INTEGRATE phase, extracting the list required system calls"
                )
                functionStartsOriginal = set()
                functionStartsFineGrain = set()

                if (self.fineGrain):
                    #TODO Fix fine grained analysis
                    #1. Create CFG for each library
                    #2. Extract leaves from all imported functions in libs.out
                    #3. Create a list of required functions for each library
                    #4. Use fine grained version or all imported for libraries without CFG

                    libsWithCfg = set()
                    libsInLibc = set()
                    for fileName in os.listdir(self.cfgFolderPath):
                        libsWithCfg.add(fileName)

                    libsInLibc.add("libcrypt.callgraph.out")
                    libsInLibc.add("libdl.callgraph.out")
                    libsInLibc.add("libnsl.callgraph.out")
                    libsInLibc.add("libnss_compat.callgraph.out")
                    libsInLibc.add("libnss_files.callgraph.out")
                    libsInLibc.add("libnss_nis.callgraph.out")
                    libsInLibc.add("libpthread.callgraph.out")
                    libsInLibc.add("libm.callgraph.out")
                    libsInLibc.add("libresolv.callgraph.out")
                    libsInLibc.add("librt.callgraph.out")
                    libsInLibc.add("libutil.callgraph.out")
                    libsInLibc.add("libnss_dns.callgraph.out")

                    cfgAvailable = False
                    for fileName in os.listdir(tempOutputFolder):
                        self.logger.debug("fileName: %s", fileName)
                        tmpFileName = fileName
                        functionList = set()
                        if (fileName.startswith("lib")
                                and fileName != "libs.out"):
                            cfgAvailable = True
                            tmpFileName = re.sub("-.*so", ".so", fileName)
                            tmpFileName = tmpFileName[:tmpFileName.index(".so"
                                                                         )]
                            tmpFileName = tmpFileName + ".callgraph.out"
                            self.logger.debug("tmpFileName: %s", tmpFileName)
                        if (tmpFileName in libsWithCfg):
                            tmpGraph = graph.Graph(self.logger)
                            tmpGraph.createGraphFromInput(
                                self.cfgFolderPath + "/" + tmpFileName, "->")
                            funcFile.seek(0)
                            funcLine = funcFile.readline()
                            while (funcLine):
                                funcName = funcLine.strip()
                                leaves = tmpGraph.getLeavesFromStartNode(
                                    funcName, list(), list())
                                if (len(leaves) != 0
                                        and funcName not in leaves):
                                    #self.logger.debug("funcName: %s leaves: %s", funcName, str(leaves))
                                    functionList.update(set(leaves))
                                funcLine = funcFile.readline()
                        elif (tmpFileName in libsInLibc):
                            continue
                        else:
                            self.logger.info("Adding function starts for %s",
                                             fileName)
                            functionList = util.extractImportedFunctions(
                                tempOutputFolder + "/" + fileName, self.logger)
                            if (not functionList):
                                self.logger.warning(
                                    "Function extraction for file: %s failed!",
                                    fileName)
                        functionStartsFineGrain.update(set(functionList))

                funcFile.seek(0)
                funcLine = funcFile.readline()
                while (funcLine):
                    funcLine = funcLine.strip()
                    functionStartsOriginal.add(funcLine)
                    funcLine = funcFile.readline()

                funcFile.close()

                self.logger.info(
                    "Traversing libc call graph to identify required system calls"
                )
                tmpSet = set()
                allSyscallsOriginal = set()
                for function in functionStartsOriginal:
                    if (isMusl):
                        leaves = muslGraph.getLeavesFromStartNode(
                            function, muslSyscallList, list())
                    else:
                        leaves = glibcGraph.getLeavesFromStartNode(
                            function, glibcSyscallList, list())
                    #self.logger.debug("function: %s, tmpSet: %s", function, tmpSet)
                    tmpSet = tmpSet.union(leaves)
                for syscallStr in tmpSet:
                    syscallStr = syscallStr.replace("syscall( ", "syscall(")
                    syscallStr = syscallStr.replace("syscall ( ", "syscall(")
                    syscallStr = syscallStr.replace(" )", ")")
                    syscallNum = int(syscallStr[8:-1])
                    allSyscallsOriginal.add(syscallNum)

                self.logger.debug("allSyscallsOriginal: %s",
                                  str(allSyscallsOriginal))
                allSyscallsFineGrain = set()
                if (self.fineGrain):
                    tmpSet = set()
                    for function in functionStartsFineGrain:
                        #if ( function == "fork" ):
                        #    self.logger.debug("/////////////////////////////////////////FORK has been found///////////////////////////////////")
                        if (isMusl):
                            leaves = muslGraph.getLeavesFromStartNode(
                                function, muslSyscallList, list())
                        else:
                            leaves = glibcGraph.getLeavesFromStartNode(
                                function, glibcSyscallList, list())
                        tmpSet = tmpSet.union(leaves)
                    for syscallStr in tmpSet:
                        syscallStr = syscallStr.replace(
                            "syscall( ", "syscall(")
                        syscallStr = syscallStr.replace(
                            "syscall ( ", "syscall(")
                        syscallStr = syscallStr.replace(" )", ")")
                        syscallNum = int(syscallStr[8:-1])
                        allSyscallsFineGrain.add(syscallNum)

                #Check if we have go syscalls
                staticSyscallList = []
                try:
                    staticSyscallListFile = open(
                        os.path.join(self.goFolderPath,
                                     self.name + ".syscalls"), 'r')
                    syscallLine = staticSyscallListFile.readline()
                    while (syscallLine):
                        staticSyscallList.append(int(syscallLine.strip()))
                        syscallLine = staticSyscallListFile.readline()
                except Exception as e:
                    self.logger.debug(
                        "Can't extract syscalls from: %s",
                        os.path.join(
                            self.goFolderPath, self.name +
                            ".syscalls (probably not a golang developed application)"
                        ))
                self.logger.debug(
                    "After reading file: %s len(staticSyscallList): %d",
                    os.path.join(self.goFolderPath, self.name + ".syscalls"),
                    len(staticSyscallList))

                syscallMapper = syscall.Syscall(self.logger)
                syscallMap = syscallMapper.createMap(self.maptype)

                self.logger.info("Generating final system call filter list")
                blackListOriginal = []
                i = 1
                while i < 400:
                    if ((self.directSyscallCount == 0
                         and self.libcSyscallCount == 0)
                            or (isMusl and i in muslWrapperList)
                            or (i in glibcWrapperList)):
                        if (i not in directSyscallSet
                                and i not in staticSyscallList
                                and i not in allSyscallsOriginal
                                and syscallMap.get(i, None)
                                and syscallMap[i] not in exceptList):
                            if (("Java" in self.languageSet
                                 and syscallMap[i] not in javaExceptList)
                                    or ("Java" not in self.languageSet)):
                                blackListOriginal.append(syscallMap[i])
                    i += 1

                blackListFineGrain = []
                if (self.fineGrain):
                    i = 1
                    while i < 400:
                        if ((self.directSyscallCount == 0
                             and self.libcSyscallCount == 0)
                                or (isMusl and i in muslWrapperList)
                                or (i in glibcWrapperList)):
                            if (i not in directSyscallSet
                                    and i not in staticSyscallList
                                    and i not in allSyscallsFineGrain
                                    and syscallMap.get(i, None)
                                    and syscallMap[i] not in exceptList):
                                if (("Java" in self.languageSet
                                     and syscallMap[i] not in javaExceptList)
                                        or ("Java" not in self.languageSet)):
                                    blackListFineGrain.append(syscallMap[i])
                        i += 1

                self.logger.info(
                    "************************************************************************************"
                )
                self.logger.info(
                    "Container Name: %s Num of filtered syscalls (original): %s",
                    self.name, str(len(blackListOriginal)))
                self.logger.info(
                    "************************************************************************************"
                )
                self.logger.info("<---Finished INTEGRATE phase\n")

                self.blSyscallsOriginal = blackListOriginal
                self.blSyscallOriginalCount = len(blackListOriginal)

                if (self.fineGrain):
                    self.logger.info(
                        "Container Name: %s Num of filtered syscalls (fine grained): %s",
                        self.name, str(len(blackListFineGrain)))
                    self.blSyscallsFineGrain = blackListFineGrain
                    self.blSyscallFineGrainCount = len(blackListFineGrain)

                seccompProfile = seccomp.Seccomp(self.logger)
                if (self.fineGrain):
                    blackListProfile = seccompProfile.createProfile(
                        blackListFineGrain)
                else:
                    blackListProfile = seccompProfile.createProfile(
                        blackListOriginal)
                if ("/" in self.name):
                    outputPath = resultsFolder + "/" + self.name.replace(
                        "/", "-") + ".seccomp.json"
                else:
                    outputPath = resultsFolder + "/" + self.name + ".seccomp.json"
                outputFile = open(outputPath, 'w')
                outputFile.write(blackListProfile)
                outputFile.flush()
                outputFile.close()
                self.logger.info(
                    "--->Validating generated Seccomp profile: %s", outputPath)
                if (myContainer.runWithSeccompProfile(outputPath)):
                    time.sleep(logSleepTime)
                    debloatedLogs = myContainer.checkLogs()
                    if (len(originalLogs) == len(debloatedLogs)):
                        time.sleep(3)
                        if (myContainer.checkStatus()):
                            self.logger.info(
                                "************************************************************************************"
                            )
                            self.logger.info(
                                "Finished validation. Container for image: %s was hardened SUCCESSFULLY!",
                                self.name)
                            self.logger.info(
                                "************************************************************************************"
                            )
                            self.debloatStatus = True
                            returnCode = 0
                        else:
                            self.logger.warning(
                                "Container for image: %s was hardened with problems. Dies after running!",
                                self.name)
                            self.errorMessage = "Container was hardened with problems. Dies after running!"
                            returnCode = C.HSTOPS
                    else:
                        self.logger.warning(
                            "Container for image: %s was hardened with problems: len(original): %d len(seccomp): %d original: %s seccomp: %s",
                            self.name, len(originalLogs), len(debloatedLogs),
                            originalLogs, debloatedLogs)
                        self.errorMessage = "Unknown problem in hardening container!"
                        returnCode = C.HLOGLEN
                    if (self.isDependent):
                        self.logger.info(
                            "Not killing container: %s because it is a dependent for hardening another container",
                            self.name)
                    else:
                        if (not myContainer.kill() and self.debloatStatus):
                            self.logger.warning(
                                "Container can't be killed even though successfully hardened! Hardening has been unsuccessfull!"
                            )
                            self.errorMessage = "Container can't be killed even though successfully hardened! Hardening has been unsuccessfull!"
                            self.debloatStatus = False
                            returnCode = C.HNOKILL
                else:
                    self.errorMessage = "Unknown problem in hardening container!"
                    returnCode = C.HNORUN
                if (not self.isDependent):
                    self.logger.debug(str(myContainer.delete()))
        return returnCode
示例#5
0
#! /usr/bin/python
import graph

# Build the example graph: six vertices, five weighted undirected edges.
# (Rebinding `graph` from module to instance follows the original script.)
graph = graph.Graph()

vertex = {'A', 'B', 'C', 'D', 'E', 'F'}
for name in vertex:
    graph.addVertex(name)

edges = {('A', 'B', 1), ('A', 'C', 2), ('B', 'D', 3), ('B', 'E', 2),
         ('C', 'F', 2)}

for src, dst, weight in edges:
    graph.addUndirectedEdge(src, dst, weight)


#breadth first +greedy
def dijkstras(graph, edges):
    """Single-source shortest paths (Dijkstra) from an arbitrary start.

    graph: any iterable of vertex labels (e.g. the Graph built above).
    edges: iterable of (u, v, weight) triples, treated as undirected to
        match addUndirectedEdge() in the setup script.
    Returns a dict mapping every vertex to its shortest distance from the
    first vertex yielded by `graph`; unreachable vertices stay at inf.
    Vertices appearing only in `edges` but not in `graph` are ignored,
    mirroring the original's vertex-driven initialization.

    BUG FIX: the original referenced an unimported `queue` module, called
    Queue.add/.pop (the real API is put/get), and indexed a *list* by
    vertex label — it crashed on the first iteration and never relaxed an
    edge. Rewritten with a heapq-based priority queue.
    """
    import heapq  # local import: this snippet has no stdlib import block

    # set up: every vertex starts unreachable; adjacency built from edges
    distance = {}
    adjacency = {}
    for v in graph:
        distance[v] = float("inf")
        adjacency[v] = []
    for u, v, w in edges:  # undirected, mirroring addUndirectedEdge above
        adjacency[u].append((v, w))
        adjacency[v].append((u, w))

    start = next(iter(distance), None)
    if start is None:  # empty graph: nothing to do
        return distance
    distance[start] = 0

    # greedy step: always settle the nearest unsettled vertex (min-heap)
    heap = [(0, start)]
    while heap:
        dist_u, u = heapq.heappop(heap)
        if dist_u > distance[u]:
            continue  # stale heap entry; vertex already settled closer
        for v, w in adjacency[u]:
            candidate = dist_u + w
            if candidate < distance[v]:
                distance[v] = candidate
                heapq.heappush(heap, (candidate, v))
    return distance
示例#6
0
# Build the burndown data model from parsed command-line arguments.
# NOTE(review): burndowndata, plot and graph are project modules, and
# `args` is defined earlier in the file — none visible in this excerpt.
data = burndowndata.BurndownData(args)

# Configure plot parameters
# (rebinds `plot` from module to instance — existing convention here)
plot = plot.Plot(data)

title = "Sprint" + str(data.sprint_number)
title_fontsize = 'large'
plot.setTitle(title, title_fontsize)
plot.setXAxisLabel("Days")

# Reference lines: diagonal, "water line", and weekend markers
# (semantics live in the project plot module).
plot.drawDiagonal("grey")
plot.drawWaterLine("blue", ":")
plot.drawWeekendLines("grey", ":")

# Plot all graphs
graph_story_points = graph.Graph(plot.storyPoints())

# Styling for the story-points series.
y_label = "Story Points"
color = "black"
color_unplanned = "magenta"
marker = "o"
linestyle = "solid"
linewidth = 2
label_unplanned = "Unplanned Story Points"
legend_list = []

graph_story_points.draw(y_label, color, color_unplanned, marker, linestyle,
                        linewidth, label_unplanned, plot)

# Collect subplot handles — presumably consumed by legend-building code
# later in the file (not visible here).
legend_list.append(graph_story_points.subplot)
示例#7
0

# NOTE(review): OneWheelRobot, `math` and the project `graph` module are
# defined/imported earlier in the file (not visible in this excerpt).
robot = OneWheelRobot(math.pi, 3, 5)
print(robot)
print()

# Flight connections between Balkan cities (names in Macedonian Cyrillic);
# adjacency-list form: city -> list of directly reachable cities.
cities = {
    "Скопје": ["Загреб"],
    "Белград": ["Загреб", "Подгорица"],
    "Загреб": ["Скопје", "Белград", "Сараево", "Подгорица"],
    "Сараево": ["Загреб"],
    "Подгорица": ["Загреб", "Белград"],
    "Софија": []
}

g = graph.Graph(cities)
print(g, '\n')

# Printed text (Macedonian): "This is the list of isolated cities — if you
# get stuck here, you go nowhere."
print(
    'Ова е листа на изолирани градови. Значи ако загливите тука не одите никаде'
)
# `isolated_verices` (sic) is the project API's spelling — do not "fix" it.
print(g.isolated_verices(), '\n')
print()

# Add the city Ljubljana as a new vertex.
print('Ја додаваме Љубљана')
g.add_vertex("Љубљана")
print(g)
print()

# Add a flight Zagreb -> Ljubljana.
print('Додаваме лет Загреб -> Љубљана')
g.add_edge(('Загреб', 'Љубљана'))
示例#8
0
文件: group.py 项目: ZiyaoLi/SectNE
    # NOTE(review): fragment — the enclosing `def` header sits above this
    # excerpt. Builds an inverse index over n_vertices entries:
    #   0 for members of override_set, group_id + 1 for grouped vertices,
    #   -1 for vertices in neither.
    if not isinstance(override_set, set):
        # Normalize to a set (deduplicates the override ids).
        override_set = set(override_set)
    inverse_index = [-1] * n_vertices
    for i in override_set:
        inverse_index[i] = 0
    for group_id, group in enumerate(groups):
        for i in group:
            # First group wins; override entries (0) are never overwritten
            # because only still-negative slots are assigned.
            if inverse_index[i] < 0:
                inverse_index[i] = group_id + 1
    return inverse_index


def pure_override_nodes(groups, inv_index):
    """Strip override members (inv_index entry == 0) out of every group.

    groups: list of lists of vertex ids; each list is mutated in place
        (list identity is preserved for callers holding references).
    inv_index: sequence indexed by vertex id; 0 marks an override node.
    Returns None, matching the original in-place-mutation contract.
    """
    for group in groups:
        # One O(len(group)) filtering pass via slice assignment. The
        # original scanned with an index and called list.remove() per hit,
        # which rescans and shifts the tail every time — O(n^2) overall —
        # while producing exactly this surviving sequence.
        group[:] = [member for member in group if inv_index[member] != 0]


if __name__ == '__main__':
    # Demo: run Louvain community detection on the wiki links dataset.
    # NOTE(review): `graph`, `Louvain` and `pd` (pandas) come from imports
    # earlier in the file; the data path is Windows-style.
    G = graph.Graph('data\\wiki\\links.txt', sep='\t', typ=1)
    algorithm = Louvain(G)
    communities = algorithm.execute()
    for c in communities:
        print(c)
    print("--------------------")
    # Frequency table of community sizes.
    print(pd.value_counts([len(t) for t in communities]))
示例#9
0
import torch
import numpy as np
import networkx as nx
import graph
import random

# Build a 100-node 'cycle_graph' via the project graph wrapper and take
# the two-level ego network around node 1 (presumably networkx-backed,
# since nx.adjacency_matrix accepts it below — confirm in graph module).
g = graph.Graph('cycle_graph', 100, 1)
ego_network = g.two_level_ego_network(1)
# Sparse adjacency -> dense matrix -> float tensor with a leading
# batch axis of size 1.
adj = nx.adjacency_matrix(ego_network)
adj = adj.todense()  # store in matrix
adj = torch.from_numpy(np.expand_dims(adj.astype(int), axis=0))
adj = adj.type(torch.FloatTensor)
ego_network_nodes = ego_network.nodes()

#print(adj.numpy()[0,:,0])
#x = np.where(adj.numpy()[0,:,0] == 0)
#print(x)
#print(adj)
#print(ego_network_nodes)
#print( torch.zeros(1, 5 , 1, dtype=torch.float) )

# Feature tensor: batch of 1, five rows, two channels; only the first
# row's first channel is set. Purpose of the 5x2 shape is not visible
# here — TODO confirm against downstream consumer.
info = torch.zeros(1, 5, 2, dtype=torch.float)
info[0, 0, 0] = 1
print(info)
# -*- coding: utf-8 -*-
"""
Load the SCC.txt directed edge list and compute its strongly connected
components via the project graph module.

Created on Thu Apr 11 12:19:37 2019

@author: apple
"""
import graph as G
import pdb

import sys

# The SCC computation recurses deeply over ~875k vertices; the default
# recursion limit (1000) is far too small.
sys.setrecursionlimit(100000)

# Vertex ids in SCC.txt are 1-based and this count matches the dataset's
# header-less fixed size used by the original.
NUM_VERTICES = 875714

g = G.Graph('directed')
for i in range(1, NUM_VERTICES + 1):
    g.addVertex(i)

# BUG FIX: the original opened the file without a guard (leaked on any
# parse error) and wrapped split() in a pointless list() call.
with open('SCC.txt', 'r') as f:
    for line in f:
        fields = line.rstrip('\t \n').split(' ')
        g.addEdge(int(fields[0]), int(fields[1]))  # tail, head of one directed edge

leader = g.SCC()

#lead_dict = g.SCC()
示例#11
0
    # NOTE(review): fragment — this is the interior of a function whose
    # header is above this excerpt, and it is truncated mid-loop below.
    # Written for Python 2 (see the bare `print` statement).
    size = (400, 400)
    screen = pygame.display.set_mode(size)
    pygame.display.set_caption('test LIDAR data on ' + TOPICNAME)

    done = False
    clock = pygame.time.Clock()

    # Clean the raw LIDAR readings, then convert them to a point cloud
    # (both helpers live in the `neato` module — semantics not visible).
    prune(readings)
    cloud = neato.toCloud(readings)

    print 'cloud =', cloud
    displayZoom = .10
    displayRotate = 0
    threshold = 0.100

    myGraph = graph.Graph((400, 400), origin=(200, 200), scale=displayZoom)

    # Event loop: arrow keys shift the cloud by +/-20 via neato.shiftCloud
    # (presumably a translation — confirm in the neato module); the old
    # zoom handling is kept commented out.
    while not done:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                done = True
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_UP:
                    #displayZoom = displayZoom-0.02
                    #myGraph.scale=displayZoom
                    cloud = neato.shiftCloud(cloud, -20)
                if event.key == pygame.K_DOWN:
                    #displayZoom = displayZoom+0.02
                    #myGraph.scale=displayZoom
                    cloud = neato.shiftCloud(cloud, +20)
                if event.key == pygame.K_LEFT:
示例#12
0
def construct_graph(nodes, edges):
    """Build and return a graph.Graph over the given nodes and edges."""
    built = graph.Graph(nodes, edges)
    return built
示例#13
0
def empty_graph():
    """Return a brand-new, empty graph.Graph instance."""
    fresh = graph.Graph()
    return fresh
class TestGraph(unittest.TestCase):
    """Exercise graph.Graph built from the movie/actor JSON fixtures.

    BUG FIX: the original ran every assertion directly in the class body,
    i.e. at import time — failures aborted the module import, the unittest
    runner collected zero tests, and the JSON file handles were leaked.
    Each check now lives in a proper test_* method over one shared graph.
    """

    @classmethod
    def setUpClass(cls):
        # Build the graph once; the lookup tests below only read from it.
        with open('movies-fine.json') as movies_file:
            cls.movies_info = json.load(movies_file)
        with open('actors-fine.json') as actors_file:
            cls.actors_info = json.load(actors_file)
        cls.my_graph = graph.Graph(cls.actors_info, cls.movies_info)

    def test_find_movie_grossed(self):
        # Known movie returns its gross; unknown movie returns 0.
        self.assertEqual(30900000.0,
                         self.my_graph.find_movie_grossed("Paper Moon (film)"))
        self.assertEqual(0, self.my_graph.find_movie_grossed("Running Man"))

    def test_find_actor_movielist(self):
        # Known actor has 4 movies; unknown actor yields an empty list.
        self.assertEqual(
            4, len(self.my_graph.find_actor_movielist("Jackie Curtis")))
        self.assertEqual([], self.my_graph.find_actor_movielist("Obama"))

    def test_find_movie_actorlist(self):
        self.assertEqual(
            4,
            len(self.my_graph.find_movie_actorlist(
                "Panic in the Streets (film)")))
        self.assertEqual([], self.my_graph.find_movie_actorlist("SMTM6"))

    def test_find_oldest_x_actors(self):
        # Requesting more actors than exist returns an empty list.
        self.assertEqual(
            ['Eva Marie Saint', 'Carleton Carpenter', 'Sidney Poitier'],
            self.my_graph.find_oldest_x_actors(3))
        self.assertEqual([], self.my_graph.find_oldest_x_actors(888))

    def test_find_topgross_x_actors(self):
        self.assertEqual([
            'Henry_Thomas', 'Robert_MacNaughton', 'Drew_Barrymore',
            'Dee_Wallace', 'Anthony_Gonzalez_(actor)', 'Tom_Cruise'
        ], self.my_graph.find_topgross_x_actors(6))
        self.assertEqual([], self.my_graph.find_topgross_x_actors(666))

    def test_movies_in_the_year(self):
        self.assertEqual(5, len(self.my_graph.movies_in_the_year(1950)))
        self.assertEqual(0, len(self.my_graph.movies_in_the_year(1800)))

    def test_actors_in_the_year(self):
        self.assertEqual(72, len(self.my_graph.actors_in_the_year(1983)))
        self.assertEqual(0, len(self.my_graph.actors_in_the_year(1500)))

    def test_plots_and_lookups_run(self):
        # Smoke checks only — the original asserted nothing about these,
        # it just executed them and printed the lookup results.
        self.my_graph.built_age_gross_plot()
        self.my_graph.built_hub_plot()
        print(self.my_graph.get_actor_age("Ethan Hawke"))
        print(self.my_graph.find_hub_actor(1))

    def test_visualization_from_saved_data(self):
        # Rebuild the visualization from the saved data.json dump.
        # NOTE(review): this mutates the shared graph's dicts, as the
        # original did at the end of the class body — confirm the other
        # tests don't depend on running before/after this one.
        with open('data.json', encoding='utf-8') as data_file:
            test = json.load(data_file)
        self.my_graph.actors_dict = test[0]
        self.my_graph.movies_dict = test[1]
        self.my_graph.built_graph_visualization()
示例#15
0
def cross_validation(drug_drug_matrix, CV_num):
    """Run CV_num-fold cross validation of drug-drug link prediction.

    drug_drug_matrix: square 0/1 numpy matrix of known interactions,
        indexed as drug_drug_matrix[i, j] (only the upper triangle is
        scanned, so it is presumably symmetric — confirm with caller).
    CV_num: number of folds (the comment below suggests 3 or 5).
    Returns a list with one calculate_metric_score(...) result per fold.

    NOTE(review): relies on module-level names np, random, datetime,
    graph, sdne, pd, csv, loadmodel, to_categorical, DNN and
    calculate_metric_score, plus fixed file paths under ../data/ —
    none visible in this excerpt; not runnable in isolation.
    """
    # 3-folds or 5-folds cross validation
    results = []
    link_number = 0
    link_position = []
    nonLinksPosition = []

    # Collect all linked pairs; as negatives, keep only unlinked pairs
    # where both drugs have at least one known interaction.
    for i in range(0, len(drug_drug_matrix)):
        for j in range(i + 1, len(drug_drug_matrix)):
            if drug_drug_matrix[i, j] == 1:
                link_number = link_number + 1
                link_position.append([i, j])
            elif drug_drug_matrix[i, j] == 0 and np.sum(
                    drug_drug_matrix[i, :], axis=0) > 0 and np.sum(
                        drug_drug_matrix[:, j], axis=0) > 0:
                nonLinksPosition.append([i, j])

    link_position = np.array(link_position)
    print("link_position:" + str(len(link_position)))
    nonLinksPosition = np.array(nonLinksPosition)
    print("nonLinksPosition:" + str(len(nonLinksPosition)))

    # A shuffled permutation of link indices defines the folds.
    index = np.arange(0, len(link_position))
    random.shuffle(index)

    fold_num = len(link_position) // CV_num
    print(fold_num)

    for CV in range(0, CV_num):
        print('*********round:' + str(CV) + "**********\n")
        starttime = datetime.datetime.now()

        #  Build the drug-drug interaction network
        g = graph.Graph()
        g.read_edgelist('../data/dataset/drug_drug.txt')
        print(g.G.number_of_edges())

        # This fold's links are the test set; all remaining links train.
        test_index = index[(CV * fold_num):((CV + 1) * fold_num)]
        train_index = np.setdiff1d(index, test_index)

        test_index.sort()
        train_index.sort()
        print(len(test_index) + len(train_index))

        testPosition = np.array(link_position)[test_index]
        print(testPosition)
        trainPosition = np.array(link_position)[train_index]
        print(trainPosition)
        print("testPosition:" + str(len(testPosition)))
        print("trainPosition:" + str(len(trainPosition)))

        # Remove the test_links in the network
        # (edge-list node labels are 1-based strings, hence str(pos + 1)).
        for i in range(0, len(testPosition)):
            if drug_drug_matrix[testPosition[i, 0]][testPosition[i, 1]] == 1:
                g.G.remove_edge(str(testPosition[i, 0] + 1),
                                str(testPosition[i, 1] + 1))
        print(g.G.number_of_edges())

        # Obtain representation vectors by SDNE
        print("Test Begin")
        model = sdne.SDNE(
            g,
            [1000, 128],
        )
        print("Test End")

        data = pd.DataFrame(model.vectors).T
        data.to_csv('../data/embeddings/d_embeddings.csv', header=None)

        # Load pre-computed embeddings for the four other feature networks.
        model_s = loadmodel('../data/embeddings/s_embeddings.csv')
        model_t = loadmodel('../data/embeddings/t_embeddings.csv')
        model_e = loadmodel('../data/embeddings/e_embeddings.csv')
        model_p = loadmodel('../data/embeddings/p_embeddings.csv')
        # I1 collects the drug ids present in the freshly written CSV.
        I1 = []
        with open('../data/embeddings/d_embeddings.csv',
                  "rt",
                  encoding='utf-8') as csvfile1:
            reader = csv.reader(csvfile1)
            for i in reader:
                I1.append(i[0])
        I1.sort()

        # Concatenate of representation vectors generated by five drug feature networks
        # NOTE(review): 841 drugs x (5 * 128) features is hard-coded —
        # confirm against the dataset size.
        E = np.zeros((841, 640), float)
        for i in I1:
            E[int(i) - 1][0:128] = model_s[int(i) - 1]
            E[int(i) - 1][128:256] = model_t[int(i) - 1]
            E[int(i) - 1][256:384] = model_e[int(i) - 1]
            E[int(i) - 1][384:512] = model_p[int(i) - 1]
            E[int(i) - 1][512:640] = model.vectors[str(i)]

        # Training set
        # NOTE(review): the same nonLinksPosition negatives are appended to
        # BOTH the train and test sets below — confirm this is intended.
        X_train1 = []
        X_train2 = []
        Y_train = []
        trainPosition = np.concatenate(
            (np.array(trainPosition), nonLinksPosition), axis=0)

        for i in range(0, len(trainPosition)):
            X_train1.append(E[(trainPosition[i, 0])])
            X_train2.append(E[(trainPosition[i, 1])])
            Y_train.append(drug_drug_matrix[trainPosition[i, 0],
                                            trainPosition[i, 1]])

        X_train1 = np.array(X_train1)
        X_train2 = np.array(X_train2)
        Y_train = np.array(Y_train)
        Y_train = to_categorical(Y_train, 2)

        dnn = DNN()
        dnn.fit([X_train1, X_train2],
                Y_train,
                batch_size=128,
                epochs=150,
                shuffle=True,
                verbose=1)

        # Test set
        X_test1 = []
        X_test2 = []
        Y_test = []
        testPosition = np.concatenate(
            (np.array(testPosition), nonLinksPosition), axis=0)

        for i in range(0, len(testPosition)):
            X_test1.append(E[(testPosition[i, 0])])
            X_test2.append(E[(testPosition[i, 1])])
            Y_test.append(drug_drug_matrix[testPosition[i, 0],
                                           testPosition[i, 1]])

        X_test1 = np.array(X_test1)
        X_test2 = np.array(X_test2)
        y_pred_label = dnn.predict([X_test1, X_test2])

        # Class probabilities -> hard 0/1 labels.
        y_pred_label = np.argmax(y_pred_label, axis=1)
        y_pred_label = np.array(y_pred_label).tolist()

        results.append(calculate_metric_score(Y_test, y_pred_label))

        endtime = datetime.datetime.now()
        print(endtime - starttime)
    return results
 def __init__(self):
     """Set up an empty graph plus a fresh separable-sets tracker."""
     new_graph = graph.Graph()
     self.graf = new_graph
     self.sets = SeparableSets()
示例#17
0
def random_partition_graph(groups, p_in, p_out, seed=None):
    """
    Return the random partition graph with a partition of sizes.

    A partition graph is a graph of communities with sizes defined by
    s in groups. Nodes in the same group are connected with probability
    p_in and nodes of different groups are connected with probability
    p_out.

    :param groups: list of ints, [3, 4, 1] defines sizes of groups
    :param p_in: intra-group connect possibility
    :param p_out: inter-group connect possibility
    :param seed: random seed for reproducible graphs (None -> nondeterministic)
    :return: a random graph with partition

    Notes
    -----
    The partition is store as a graph attribute 'partition'.

    References
    ----------
    .. [1] Santo Fortunato 'Community Detection in Graphs' Physical Reports
       Volume 486, Issue 3-5 p. 75-174. https://arxiv.org/abs/0906.0612
    """

    if p_in > 1 or p_in < 0:
        raise errorhandler.ErrorHandler("p_in must be in [0,1]")

    if p_out > 1 or p_out < 0:
        raise errorhandler.ErrorHandler("p_out must be in [0,1]")

    # BUG FIX: `seed` was documented and accepted but never used, so runs
    # were irreproducible. Seeding the module RNG also covers the draws
    # made below and inside fast_random_graph.
    if seed is not None:
        random.seed(seed)

    size = sum(groups)
    g = graph.Graph(size, is_partition=True)

    # next_group[u] = index of the first node after u's own group, so the
    # inter-group pass below never samples pairs inside one group.
    next_group = {}
    start = 0
    for n in groups:  # connect nodes inside a group
        edges = ((u + start, v + start)
                 for u, v in fast_random_graph(n, p_in).edges)
        g.add_edges(edges)
        g.partition.append(set(range(start, start + n)))
        next_group.update(dict.fromkeys(range(start, start + n), start + n))
        start += n
    # (the original also kept an unused `group_index` counter — removed)

    # connect nodes between groups
    if p_out == 0:
        return g
    if p_out == 1:
        for n in next_group:
            targets = range(next_group[n], len(g))
            g.add_edges(zip([n] * len(targets), targets))
        return g

    # using method similar to fast_random_graph: geometric gap-skipping,
    # expected cost proportional to the number of edges, not node pairs
    lp = math.log(1.0 - p_out)
    n = len(g)

    for u in range(n - 1):
        v = next_group[u]
        while v < n:
            lr = math.log(1.0 - random.random())
            v += int(lr / lp)
            if v < n:
                g.add_edge(u, v)
                v += 1

    return g
示例#18
0
import numpy as np
import graph as G
import pdb

# Build a 500-vertex graph with 1-based vertex ids.
g = G.Graph(500)
for i in range(1, 501):
    g.addVertex(i)

# BUG FIX: the original opened the file without ever closing it on error
# paths, called line.rstrip('\n') and discarded the result (str.rstrip
# returns a new string), and tracked the header with a hand-rolled -1
# counter. `with` + enumerate replaces all three.
with open('clustering.txt', 'r') as edge_file:
    for lineno, line in enumerate(edge_file):
        if lineno == 0:
            continue  # skip the header line (the original skipped it too)
        [n1, n2, length] = line.rstrip('\n').split(' ')
        g.addEdge(int(n1), int(n2), int(length))
        # use g.edges to get the custom_arr of edges
sortedEdge = g.edges[0:g.edges.size]
sortedEdge = sorted(sortedEdge, key=lambda x: (x.length, x.end1, x.end2))
# for i in range(0, 100):
#    print(sortedEdge[i].length)
# in sortedEdge, edges are in length ascending order


def clustering(sortedEdge, k=4):
    ufs = UFS()
    spacedict = {}
    for e in sortedEdge:
示例#19
0
    The method used below is k-paths matrix exponentiation -- if A is the
    adjacency matrix for a graph with entry (i,j) = 1 denoting a directed
    edge from node i to node j, then entry (i,j) in A^k denotes the number
    of length k paths from i to j in the graph.

    If for every matrix power A^k for 1 <= k <= n, each and every diagonal
    entry in A^k is 0, then there are no paths of length 1 <= k <= n that 
    start and end at the same node (hence, no cycles). 

    A cycle that doesn't use repeated edges can have a maximum length of n 
    assuming there are no multi edges in our graph; hence our check is sufficient.
    """
    power_matrix = [[1 if i == j else 0 for j in range(graph.size)]
                    for i in range(graph.size)]
    for exp in range(graph.size):
        power_matrix = matrix_multiply(power_matrix, graph.adjacency_matrix)
        for i in range(graph.size):
            if power_matrix[i][i] != 0:
                return "Cycle exists, and node " + str(i) + " is in the cycle."
    return "No cycles."


if __name__ == "__main__":
    adj1 = [[0, 1, 1, 1], [0, 0, 1, 1], [0, 0, 0, 1], [0, 0, 0, 0]]
    adj2 = [[0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1], [0, 1, 0, 0]]
    adj3 = [[1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1]]
    g1, g2, g3 = graph.Graph(adj1), graph.Graph(adj2), graph.Graph(adj3)
    print(has_cycle(g1))
    print(has_cycle(g2))
    print(has_cycle(g3))
import graph

# Assign each person a node index in declaration order.
people = ['John', 'Sally', 'George', 'Phil', 'Rose', 'Alice']
node_numbers = {person: index for index, person in enumerate(people)}

G = graph.Graph(len(node_numbers))
for person, index in node_numbers.items():
    G.set_node_data(index, person)

# Friendship edges, added in the same order as the original script.
friendships = [
    ('John', 'Sally'),
    ('John', 'George'),
    ('John', 'Rose'),
    ('George', 'Sally'),
    ('Phil', 'Sally'),
    ('Rose', 'Alice'),
]
for a, b in friendships:
    G.add_edge(node_numbers[a], node_numbers[b])

G.print_graph()

print('Breadth First Traversal:')
G.breadth_first_traverse()
    setLayoutDebugMode(True)  # NOTE(review): tail of an if-block whose header is above this excerpt

# Window sizing: optionally forced to 1920x1080 via command-line flag;
# None lets the window module choose.
if args.fix_window_size:
    size = [1920, 1080]
else:
    size = None

# Borderless, centered SDL window hosting the editor.
App = window.ApplicationHandle(size, loop=loop)
App.setDisplayFlags(pygame.NOFRAME)
App.setDisplayFlags(window.SDLFlags.CENTER_WINDOW, "1")
App.setDisplayCaption("Overwatch Visual Scripting Editor - Untitled")

root = App.getRoot()

# Shared document model; the GraphViewer below edits/renders it.
GRAPH = graph.Graph()

# UI layers are registered in this order: window chrome, menu bar, graph
# canvas, block palette, drag-and-drop overlay. Presumably later layers
# draw on top — confirm in the ui module.
windowBar = ui.WindowBar(App.size)
root.addObject(windowBar)
menuBar = ui.MenuBar(App.size)
root.addObject(menuBar)
graphViewer = ui.GraphViewer(App.size, GRAPH)
root.addObject(graphViewer)
blockDrawer = ui.BlockDrawer(App.size)
root.addObject(blockDrawer)
dnd = ui.DnDLayer(App.size)
root.addObject(dnd)


async def handleUserEvent(event):
Example #22
0
 def __state(self, thread_id):
     """Return the state graph for *thread_id*, growing the list on demand.

     The backing list is extended lazily with fresh graph.Graph instances so
     that every index up to thread_id is populated before it is read.
     """
     shortfall = thread_id + 1 - len(self.__states)
     if shortfall > 0:
         self.__states.extend(graph.Graph() for _ in range(shortfall))
     return self.__states[thread_id]
Example #23
0
    def createFineGrainedSeccompProfile(self, tempOutputFolder, resultsFolder):
        """Derive and log a syscall blacklist from libc call-graph reachability.

        Builds call graphs from the glibc and musl CFG files, collects the
        syscall numbers reachable from the functions imported by the ELF
        files found in ``tempOutputFolder``, maps those numbers to names, and
        logs every syscall name that is neither reachable nor on the
        hard-coded exception list.

        Args:
            tempOutputFolder: Folder holding the extracted ELF binaries and
                libraries to analyze.
            resultsFolder: Not read anywhere in this method body.
                NOTE(review): confirm whether it should be used or dropped.
        """
        self.logger.debug("tempOutputFolder: %s", tempOutputFolder)

        # NOTE(review): dead assignment — allSyscalls is rebound to a fresh
        # set further down before it is ever read.
        allSyscalls = set()

        muslSyscallList = list()
        glibcSyscallList = list()

        # Candidate leaf-node labels "syscall(N)" for N in 0..399; the glibc
        # CFG output uses several spacing variants, so all are enumerated.
        i = 0
        while i < 400:
            muslSyscallList.append("syscall(" + str(i) + ")")
            glibcSyscallList.append("syscall(" + str(i) + ")")
            glibcSyscallList.append("syscall ( " + str(i) + " )")
            glibcSyscallList.append("syscall( " + str(i) + " )")
            i += 1

        # glibc call graph; its CFG file separates caller/callee with ":".
        glibcGraph = graph.Graph(self.logger)
        glibcGraph.createGraphFromInput(self.glibcCfgpath, ":")
        glibcWrapperListTemp = []
        # Strict mode: only syscalls reachable from the configured function
        # list; otherwise assume every syscall number 0..399 is reachable.
        if (self.strictMode):
            for func in self.glibcFuncList:
                glibcWrapperListTemp.extend(
                    glibcGraph.getSyscallFromStartNode(func))
        else:
            i = 0
            while i < 400:
                glibcWrapperListTemp.append(i)
                i += 1
        # NOTE(review): glibcWrapperList is computed but never read again in
        # this method.
        glibcWrapperList = set(glibcWrapperListTemp)
        # Same construction for musl; its CFG uses "->" as the separator.
        muslGraph = graph.Graph(self.logger)
        muslGraph.createGraphFromInput(self.muslCfgpath, "->")
        muslWrapperListTemp = []
        if (self.strictMode):
            for func in self.muslFuncList:
                muslWrapperListTemp.extend(
                    muslGraph.getSyscallFromStartNode(func))
        else:
            i = 0
            while i < 400:
                muslWrapperListTemp.append(i)
                i += 1
        # NOTE(review): muslWrapperList (and muslSyscallList above) are also
        # never read again in this method.
        muslWrapperList = set(muslWrapperListTemp)

        #        self.logger.debug("glibcWrapperList: %s", str(glibcWrapperList))
        #        self.logger.debug("muslWrapperList: %s", str(muslWrapperList))

        # Go through extra CFGs such as libaio to extract lib->syscall mapping;
        # each file is merged into both the glibc and the musl graph.
        for fileName in os.listdir(self.cfgFolderPath):
            self.logger.debug("Adding cfg: %s", fileName)
            glibcGraph.createGraphFromInput(
                self.cfgFolderPath + "/" + fileName, "->")
            muslGraph.createGraphFromInput(self.cfgFolderPath + "/" + fileName,
                                           "->")

        # Syscalls that must never be blacklisted even when unreachable.
        # NOTE(review): "openat" and "stat" appear twice; harmless for the
        # membership tests below, but worth deduplicating upstream.
        exceptList = [
            "access", "arch_prctl", "brk", "close", "execve", "exit_group",
            "fcntl", "fstat", "geteuid", "lseek", "mmap", "mprotect", "munmap",
            "openat", "prlimit64", "read", "rt_sigaction", "rt_sigprocmask",
            "set_robust_list", "set_tid_address", "stat", "statfs", "write",
            "setns", "capget", "capset", "chdir", "fchown", "futex",
            "getdents64", "getpid", "getppid", "lstat", "openat", "prctl",
            "setgid", "setgroups", "setuid", "stat", "io_setup", "getdents",
            "clone", "readlinkat", "newfstatat", "getrandom", "sigaltstack",
            "getresgid", "getresuid", "setresgid", "setresuid", "alarm",
            "getsid", "getpgrp", "epoll_pwait", "vfork"
        ]

        # NOTE(review): javaExceptList is never read in this method body.
        javaExceptList = [
            "open", "getcwd", "openat", "close", "fopen", "fclose", "link",
            "unlink", "unlinkat", "mknod", "rename", "renameat", "mkdir",
            "rmdir", "readlink", "realpath", "symlink", "stat", "lstat",
            "fstat", "fstatat", "chown", "lchown", "fchown", "chmod", "fchmod",
            "utimes", "futimes", "lutimes", "readdir", "read", "write",
            "access", "getpwuid", "getgrgid", "statvfs", "clock_getres",
            "get_mempolicy", "gettid", "getcpu", "fallocate", "memfd_create",
            "fstatat64", "newfstatat"
        ]

        libsWithCfg = set()
        libsInLibc = set()
        functionStarts = set()
        # Every file in the CFG folder is a library with an available CFG.
        for fileName in os.listdir(self.cfgFolderPath):
            libsWithCfg.add(fileName)

        # Libraries whose syscalls are already covered by the libc graph.
        libsInLibc.add("libcrypt.callgraph.out")
        libsInLibc.add("libdl.callgraph.out")
        libsInLibc.add("libnsl.callgraph.out")
        libsInLibc.add("libnss_compat.callgraph.out")
        libsInLibc.add("libnss_files.callgraph.out")
        libsInLibc.add("libnss_nis.callgraph.out")
        libsInLibc.add("libpthread.callgraph.out")
        libsInLibc.add("libm.callgraph.out")
        libsInLibc.add("libresolv.callgraph.out")
        libsInLibc.add("librt.callgraph.out")

        #iterate over ELF files
        #IF library which has CFG add to graph
        #ELIF binary or library without CFG add to starting nodes
        cfgAvailable = False
        for fileName in os.listdir(tempOutputFolder):
            self.logger.debug("fileName: %s", fileName)
            if (fileName.startswith("lib") and fileName != "libs.out"):
                cfgAvailable = True
                # Normalize e.g. "libfoo-1.2.so" -> "libfoo.callgraph.out".
                tmpFileName = re.sub("-.*so", ".so", fileName)
                tmpFileName = tmpFileName[:tmpFileName.index(".so")]
                tmpFileName = tmpFileName + ".callgraph.out"
                self.logger.debug("tmpFileName: %s", tmpFileName)
                if (tmpFileName in libsWithCfg):
                    glibcGraph.createGraphFromInput(
                        self.cfgFolderPath + "/" + tmpFileName, "->")
                elif (tmpFileName in libsInLibc):
                    cfgAvailable = True
                else:
                    cfgAvailable = False
            # Binaries, and libraries without a CFG, contribute their imported
            # functions as reachability start nodes instead.
            if (not fileName.startswith("lib") or not cfgAvailable):
                self.logger.info("Adding function starts for %s", fileName)
                functionList = util.extractImportedFunctions(
                    tempOutputFolder + "/" + fileName, self.logger)
                if (not functionList):
                    self.logger.warning(
                        "Function extraction for file: %s failed!", fileName)
                functionStarts.update(set(functionList))

        # Collect all "syscall(N)"-shaped leaves reachable from any start node.
        tmpSet = set()
        allSyscalls = set()
        for function in functionStarts:
            leaves = glibcGraph.getLeavesFromStartNode(function,
                                                       glibcSyscallList,
                                                       list())
            tmpSet = tmpSet.union(leaves)
        # NOTE(review): syscallList is assigned here but never used.
        syscallList = list()
        # Canonicalize the spacing variants and parse out the syscall number
        # (the slice [8:-1] strips the "syscall(" prefix and ")" suffix).
        for syscallStr in tmpSet:
            syscallStr = syscallStr.replace("syscall( ", "syscall(")
            syscallStr = syscallStr.replace("syscall ( ", "syscall(")
            syscallStr = syscallStr.replace(" )", ")")
            syscallNum = int(syscallStr[8:-1])
            allSyscalls.add(syscallNum)

        # Map syscall numbers to names for the configured map type.
        syscallMapper = syscall.Syscall(self.logger)
        syscallMap = syscallMapper.createMap(self.maptype)

        # Blacklist = every mappable syscall in 0..399 that is neither
        # reachable nor explicitly excepted.
        blackList = set()
        i = 0
        while i < 400:
            if (i not in allSyscalls and syscallMap.get(i, None)
                    and syscallMap[i] not in exceptList):
                blackList.add(syscallMap[i])
            i += 1

        self.logger.info("Results for %s:///////////////////////////////////",
                         self.name)
        self.logger.info("%s: len(blacklist): %d", self.name, len(blackList))
        self.logger.info("%s: blacklist: %s", self.name, str(blackList))
        self.logger.info(
            "//////////////////////////////////////////////////////////////////"
        )
Example #24
0
        rootLogger.info("(exeImports-libImports): %s",
                        str(exeImports - libraryImports))

        #Map to system calls
        libSyscalls = set()
        exeSyscalls = set()
        glibcSyscallList = list()

        i = 0
        while i < 400:
            glibcSyscallList.append("syscall(" + str(i) + ")")
            glibcSyscallList.append("syscall ( " + str(i) + " )")
            glibcSyscallList.append("syscall( " + str(i) + " )")
            i += 1

        glibcGraph = graph.Graph(rootLogger)
        glibcGraph.createGraphFromInput(options.glibccfgpath, ":")
        glibcWrapperListTemp = []
        i = 0
        while i < 400:
            glibcWrapperListTemp.append(i)
            i += 1
        glibcWrapperList = set(glibcWrapperListTemp)

        tmpSet = set()
        for function in libraryImports:
            leaves = glibcGraph.getLeavesFromStartNode(function,
                                                       glibcSyscallList,
                                                       list())
            tmpSet = tmpSet.union(leaves)
Example #25
0
import graph

# Build a graph containing six isolated vertices and show its vertex table.
g = graph.Graph()
for vertex_id in range(6):
    g.addVertex(vertex_id)

print(g.vertList)
#g.collectEdges()
#g.showEdge()

# Build a 5-vertex cycle and inspect vertex 1 and its connections.
g1 = graph.Graph()
g1.buildCycle(5)
print(g1.vertList)
vertex_one = g1.vertList[1]
print(vertex_one.id)
print(vertex_one.connectedTo)
print('get connections')
print(vertex_one.getConnections())
print(vertex_one.getConnectionsID())
g1.collectEdges()
print('showEdge')
g1.showEdge()
elist = g1.getEdgeList()
print('sort edges')
#print(elist)
elist.sort()
print(elist)


def showEdge(g):
    i = 0
    for v in g:
Example #26
0
    DDR_loc_2d_x = DDR_loc_2d_x, 
    DDR_loc_2d_y = DDR_loc_2d_y, 
    DDR_enable = DDR_enable,
    max_usage_ratio_2d = max_usage_ratio_2d,
    board_name = 'u280',
    coorinate_expansion_ratio = 2,
    max_width_threshold = 10000,
    NUM_PER_SLR_HORIZONTAL = 4,
    horizontal_cross_weight = 0.7,
    target_dir = None,
    relay_station_count = relay_station_count,
    relay_station_template = relay_station_template,
    constraint_edge = constraint_edge,
    constraint_marked_edge = constraint_marked_edge)

  g = graph.Graph(formator)


  ################

  if (os.path.isdir(target_dir)):
    #target_dir = f'{target_dir}_dup'
    subprocess.run(['rm', '-rf', f'{target_dir}/'])

  subprocess.run(['mkdir', f'{target_dir}/'])
  subprocess.run(['cp', '-r', tlp_path, f'{target_dir}/'])
  subprocess.run(['cp', os.path.realpath(__file__), f'{target_dir}/archived_source.txt'])
  subprocess.run(['chmod', '+w', '-R', f'{target_dir}'])
  subprocess.run(['mv', 'constraint.tcl', target_dir])
  subprocess.run(['mv', f'{top_name}_{top_name}.v', f'{target_dir}/tlpc_result/hdl'])
  subprocess.run(['rm', f'{target_dir}/tlpc_result/hdl/relay_station.v'])
Example #27
0
        
        if mode=='init':
            cutoff_val = 1.0
        if mode=='pbest':
            cutoff_val = self.alpha
        if mode=='gbest':
            cutoff_val = self.beta
            
        for i in range(len(a)-1):
            if a[i] != b[i]:
                swap_ind = np.where(b==a[i])[0][0]
                b[i], b[swap_ind] = b[swap_ind], b[i]
                if random() < cutoff_val:
                    swap_seq.append((i, swap_ind))
            
        return swap_seq
       

# Input TSP instance (XML), resolved relative to this script's location.
doc = "../Data/ulysses16.xml"

vertices, edges = helper.read_tsp(doc)
# NOTE(review): this rebinds the name `graph` from the imported module to a
# Graph instance, shadowing the module for the rest of the script.
graph = graph.Graph(vertices, edges)

# Run particle swarm optimization on the instance.
# NOTE(review): presumably (graph, particles=30, iterations=50, alpha=0.5,
# beta=0.5) — confirm the positional argument meanings against PSO.__init__.
pso = PSO(graph, 30, 50, 0.5, 0.5)


pso.iterate()



Example #28
0
File: line.py  Project: roving99/robert
            0,
            0,
        ), (0, 1)),
        Line((
            0,
            0,
        ), (1, 0)),
    ]

    pygame.init()
    clock = pygame.time.Clock()

    screen = pygame.display.set_mode(size)
    pygame.display.set_caption('Line class')
    displayZoom = 10
    myGraph = graph.Graph((300, 300), origin=(150, 150), scale=displayZoom)

    done = False

    for line in lines:
        print line.asText(), ' length:', line.length(
        ), ' grad:', line.gradient(), ' angle:', line.angle()

    for line in lines:
        for line2 in lines:
            print line.asText(), line2.asText(), 'parr:', line.parallelTo(
                line2), ' perp:', line.perpendicularTo(
                    line2), 'pdist:', line.parallelDistance(
                        line2), 'int:', line.intersect(line2)

    print lines[0].distance(lines[1])
Example #29
0
import graph
import rtls
import imitation
import threading
import time
import sys

# Keep handles to the real stdin/stdout so they can be restored below.
_in = sys.stdin
_out = sys.stdout
# Feed the simulation from a prepared input file instead of the console.
sys.stdin = open('test.txt', 'r')

if sys.stdin != _in:
	# Presumably suppresses console output produced while the graph is
	# constructed from the redirected input — TODO confirm Graph() prints.
	sys.stdout = open('trash.txt', 'w+')

g = graph.Graph()

if sys.stdin != _in:
	# Restore normal output once construction is done.
	sys.stdout = _out

# NOTE(review): "maxLenghtOfSignal" (sic) must match the attribute name the
# rtls module actually defines — the typo lives in the library.
rtls.Receiver.maxLenghtOfSignal = 200

# Three receivers placed at fixed vertices, and one transmitter starting at
# vertex 0 on edge 0 of the graph.
r = rtls.Receiver(g, g.listOfVertices[0])
r2 = rtls.Receiver(g, g.listOfVertices[2])
r3 = rtls.Receiver(g, g.listOfVertices[9])
tr = rtls.Transmitter((0, g.listOfVertices[0], g.listOfEdges[0]), g)

mainer = imitation.Miner(tr)

# Run the transmitter walk in a background thread.
thr = threading.Thread(target=mainer.start_walking)
thr.start()
Example #30
0
import graph

# Build a character -> play graph for Shakespeare's works: one edge of
# weight 1 per (character, play) pair. All strings below are graph data, so
# apparent typos are flagged but deliberately left unchanged.
matrix=graph.Graph()# the adjacency structure holding all edges
matrix.add_edge('Aaron','Titus Andronicus',1)# every edge carries weight 1
matrix.add_edge("Abbott of Westminster","Richard ii",1)
matrix.add_edge("Lord Abergavenny","Henry viii",1)
# NOTE(review): "Measure of Measure" looks like a typo for "Measure for
# Measure" (the Angelo entry below uses the correct title) — fixing it would
# change the graph, so it is left as-is here.
matrix.add_edge("Abhorson","Measure of Measure",1)
matrix.add_edge("Abraham Slender","The Merry wives of Windsor",1)
# NOTE(review): Abraham is a Romeo and Juliet character — verify this row
# against the source data.
matrix.add_edge("Abraham","Richard ii",1)
# NOTE(review): "Archilles" is presumably a misspelling of "Achilles".
matrix.add_edge("Archilles","Troilus and Cressida",1)
matrix.add_edge("Adam","As you like it",1)
matrix.add_edge("Adrian","Coriolanus",1)
matrix.add_edge("Adriana","The comedy of Errors",1)
matrix.add_edge("Aediles","Coriolanus",1)
matrix.add_edge("Aemelia","The comedy of Errors",1)
matrix.add_edge("Aemilius","Titus Andronicus",1)
matrix.add_edge("Aeneas","Troilus and Cressida",1)
matrix.add_edge("Agamemnon","Troilus and Cressida",1)
matrix.add_edge("Agrippa","Antony and Cleopatra",1)
# NOTE(review): exact duplicate of the Agamemnon edge two lines up.
matrix.add_edge("Agamemnon","Troilus and Cressida",1)
matrix.add_edge("Ajax","Troilus and Cressida",1)
matrix.add_edge("Alarbus","Titus Andronicus",1)
matrix.add_edge("The Mayor of St. Albans","Henry vi",1)
matrix.add_edge("Alcibiades","Timon of Athens",1)
matrix.add_edge("Alexander","Troilus and Cressida",1)
matrix.add_edge("Alexas","Antony and Cleopatra",1)
matrix.add_edge("Alonso","The tempest",1)
matrix.add_edge("Amiens","As you like it",1)
matrix.add_edge("Andromache","Troilus and Cressida",1)
matrix.add_edge("Angelica","Romeo and Juliet",1)
matrix.add_edge("Angelo","Measure for Measure",1)
matrix.add_edge("Angus","Macbeth",1)