Example #1
File: dialog.py  Project: petres/atomgrid
    def _openFile(self):
        fileName = QtGui.QFileDialog.getOpenFileName(self, "OpenImage", "src",
                                                     "Bitmaps (*.bmp)")

        if fileName == "":
            h.warn("No file selected.")
            return



        h.log("Loaded!")
        #self.pixmapItem = QtGui.QPixmap(fileName)
        #item = QtGui.QGraphicsPixmapItem(self.pixmapItem)
        #self.scene.addItem(item)

        item = QtGui.QStandardItem(os.path.basename(str(fileName)))
        item.imageFileName = str(fileName)
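        # Keep the image both as a grayscale array (for processing) and as a QPixmap (for display)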
        orgImg = Image.open(str(fileName), mode='r').convert()
        item.image = MImage.pil_to_array(orgImg.convert('L'))
        item.pixmap = QtGui.QPixmap(fileName)

        #item.setCheckable(True)

        self.model.appendRow(item)
        self.ui.imageListView.setModel(self.model)

        self._checkButtons()
Example #2
def preprocessor_constants(lines):
    """
    Replaces each constant usage with its defined value.

    Example:

        $const = 5
        MOV [2] $const

    Results in:

        MOV [2] 5

    :type lines: list[Line]
    """
    global automem_counter
    automem_counter = count()
    constants = {}

    for lineno, line in enumerate(lines):
        tokens = []
        iterator = iter(line.contents.split())
        is_assignment_line = False

        # Process all tokens in this line
        for token, next_token in neighborhood(iterator):
            if token[0] == '$':
                const_name = token[1:]

                if next_token == '=':
                    # Found assignment, store the associated value
                    is_assignment_line = True
                    value = next(iterator)

                    if const_name in constants:
                        warn('Redefined ${}'.format(const_name),
                             RedefinitionWarning, line)

                    if value == '[_]':
                        # Process auto increment memory
                        value = automem(line)

                    constants[const_name] = value

                else:
                    # Found usage of constant, replace with stored value
                    try:
                        tokens.append(constants[const_name])
                    except KeyError:
                        fatal_error('No such constant: ${}'.format(const_name),
                                    NoSuchConstantError, line)

            else:
                # Uninteresting token
                tokens.append(token)

        # Skip assignment lines
        if not is_assignment_line:
            debug('Constants:', constants)
            yield set_contents(line, ' '.join(tokens))
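
The generator above relies on a `neighborhood` helper that is not part of the excerpt. A minimal sketch of what it might look like, assuming it yields each token together with the one that follows it (with None after the last token) and shares the caller's iterator, so that the `next(iterator)` call in the assignment branch skips the value it just read:

    def neighborhood(iterator):
        # Pair every element with its successor; the final pair carries None.
        # Because the same iterator object is consumed, a next(iterator) call
        # made by the caller removes that value from the stream.
        try:
            prev = next(iterator)
        except StopIteration:
            return
        for item in iterator:
            yield prev, item
            prev = item
        yield prev, None

With such a helper, `$const = 5` stores `'5'` under the name `const`, and a later `MOV [2] $const` is rewritten to `MOV [2] 5`, matching the docstring example.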
Example #3
    def __init__(self, graph=None):
        """
        We initialize models with the graph
        """

        print 'Initializing Model(%s)' % self.get_name()

        self.send_cost = {}
        self.recv_cost = {}

        self.send_history = {}
        self.mm = None # MultiMessage benchmark - see NetosMachine
        self.enable_mm = False

        assert graph == None

        # Topology parser
        # --------------------------------------------------
        try:
            self.machine_topology = topology_parser.parse_machine_db(self.get_name(), 'machinedb/')
            print 'Parsing machine topology was successful, machine is %s' % \
                (topology_parser.generate_short_name(self.machine_topology))
        except:
            helpers.warn('Warning: topology parser did not find machine data')
            raise


        # Pairwise
        # --------------------------------------------------

        # Parse pairwise send and receive costs. We need this to
        # build the graph with the pairwise measurements.
        self._parse_receive_result_file()
        self._parse_send_result_file()


        # Multimessage
        # --------------------------------------------------
        mm_fname = '%s/%s/multimessage.gz' % \
                   (config.MACHINE_DATABASE, self.get_name())

        self.mm = None

        print 'Reading multimessage data: ', mm_fname
        try:
            f = gzip.open(mm_fname, 'r')
            self.mm = MultiMessage(f, self)
            f.close()
        except IOError:
            print 'No multimessage data for this machine'
        except:
            helpers.warn('Unable to read multimessage data')

        # Build graph and reset
        # --------------------------------------------------
        self.graph = self._build_graph()
        self.reset()
Example #4
    def _initStataExport(self):
        #--- check that at least one item in each Tab is selected ---
        checkLCD = self.ui.lcdNumber.value()
        if checkLCD == 0:
            f.warn("WARNING: For an Export procedure at least one item in each Tab need to be selected!!")
            return

        #--- write box selection into the class options array ---
        self.options["selection"] = self.getSelectedCats()

        #---show export option dialog---
        dialog = StataExportDialog(self)
        dialog.init(self.metaData, dict(self.options))
        dialog.exec_()
Example #5
    def _loadPreset(self):
        fileName = QtGui.QFileDialog.getOpenFileName(self, "Run Preset", Settings.presetPath, "Presets (*.preset)")

        if fileName == "":
            f.warn("No file selected.")
            return

        options = f.getPresetFromFile(fileName)

        self.worker = f.LoadDbWorker((options["name"], options["name"]), baseDialog=self, parent=self)
        self.worker.startWork()

        #self.worker.finishedTrigger.connect(lambda: self.updateTab(self.worker.metaData))
        #self.worker.finishedTrigger.connect(lambda: self.setSelectedCats(options))
        self.updateTab(self.worker.metaData)
        self.setSelectedCats(options)

        if options['fileType'] == 'EXCEL':
            self._initExcelExport()
        elif options['fileType'] == 'STATA':
            self._initStataExport()
Example #6
    def get_factor(self, sender, c_batch):
        """Determine the correction factor for the given send batch <c_batch>
        starting from core sender

        """

        c_local = 0
        c_remote = 0

        for c in c_batch:
            if self.machine.on_same_numa_node(sender, c):
                c_local += 1
            else:
                c_remote += 1
                if c_local > 0:
                    helpers.warn('mm: adding remote communication AFTER local communication')

        assert c_local>0 or c_remote>0

        while c_local >= self.cores_local:
            helpers.warn('mm matrix local exceeded %d -> %d' % (c_local, self.cores_local))
            c_local -= 1

        while c_remote >= self.cores_remote:
            helpers.warn('mm matrix remote exceeded %d -> %d' % (c_remote, self.cores_remote))
            c_remote -= 1

        # Look up the correction factor by the number of remote and local receivers in the batch
        return self.matrix[c_remote][c_local]
Example #7
from discord.ext import commands
import config
import helpers

bot = commands.AutoShardedBot(command_prefix=config.PREFIX,
                              case_insensitive=True,
                              description=config.DESCRIPTION,
                              owner_ids=config.OWNERS)

for extension in config.extensions:
    try:
        bot.load_extension(extension)
        helpers.info(f'Loaded {extension}')
    except Exception as e:
        helpers.warn(f'Failed to load {extension}: {e}')

bot.run(config.TOKEN)
Example #8
File: utilities.py  Project: Aresiel/orm
    async def test_log_levels(self, ctx):
        helpers.info("Test: Level INFO")
        helpers.warn("Test: Level WARN")
        helpers.error("Test: Level ERROR")
        helpers.critical("Test: Level CRITICAL")
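
The `helpers` logging wrappers used in these examples are not shown in the excerpts. A minimal sketch of what they could look like, assuming they are thin wrappers around Python's standard `logging` module (only the names `info`, `warn`, `error`, and `critical` come from the calls above; everything else is an assumption):

    import logging

    # Hypothetical module-level logger; a real helpers module may add colours,
    # file handlers, or Discord channel output on top of this.
    logger = logging.getLogger('helpers')

    def info(msg):
        logger.info(msg)

    def warn(msg):
        logger.warning(msg)

    def error(msg):
        logger.error(msg)

    def critical(msg):
        logger.critical(msg)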
Example #9
def simulate(args):

    machine = args.machine
    config.args.machine = args.machine
    config.args.group = args.group
    config.args.multicast = args.multicast
    config.args.hybrid = args.hybrid
    config.args.hybrid_cluster = args.hybrid_cluster

    print "machine: %s, topology: %s, hybrid: %s" % \
        (machine, args.overlay, args.hybrid)

    m_class = config.arg_machine(machine)
    m = m_class()
    assert m != None
    gr = m.get_graph()

    if args.multicast:
        print "Building a multicast"

    # --------------------------------------------------
    # Switch main action

    # XXX Cleanup required
    if True:

        # Generate model headers
        helpers.output_quroum_start(m, len(args.overlay))
        all_last_nodes = []
        all_leaf_nodes = []
        model_descriptions = []
        num_models = 0
        # Generate representation of each topology
        for _overlay in args.overlay:

            if config.args.hybrid:
                _overlay = _overlay + "-hybrid"

            # ------------------------------
            # Hybrid
            hyb_cluster = None
            shm_writers = None

            hyb_leaf_nodes = None

            if config.args.hybrid:

                print args.hybrid_cluster
                if 'socket' in args.hybrid_cluster:
                    print "Clustering: Sockets"
                    hyb_cluster = m.machine_topology['Package'].get()
                elif 'all' in args.hybrid_cluster:
                    print "Clustering: All cores"
                    hyb_cluster = [range(0, m.machine_topology['numcpus'])]
                elif 'numa' in args.hybrid_cluster:
                    print "Clustering: NUMA nodes"
                    if len(args.hybrid_cluster) > 4:
                        hyb_cluster = m.machine_topology['NUMA'].get()
                        size = float(args.hybrid_cluster[4:])
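                        # e.g. "numa2" merges pairs of NUMA nodes into one cluster,
                        # "numa0.5" splits each NUMA node into two clusters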

                        if size > 1:
                            # Merge NUMA nodes
                            if ((size % 2) != 0):
                                raise Exception(('Only support powers of two for'
                                                 ' numa node merge'))
                            if (size > (len(hyb_cluster)/2)):
                                raise Exception(('Only support values less or equal to half'
                                                 ' the numa nodes'))
                            new_cluster = []
                            for i in range(0,len(hyb_cluster), int(size)):
                                tmp = []
                                for j in range(0, int(size)):
                                    tmp += hyb_cluster[i+j]

                                new_cluster.append(tmp)
                            hyb_cluster = new_cluster
                        else:
                            # Split NUMA nodes
                            print hyb_cluster
                            new_cluster = []
                            split = int(1/size)
                            if split > (len(hyb_cluster[0])/2):
                                raise Exception(('Single core in clusters not allowed'))
                            if (len(hyb_cluster[0]) % split) != 0:
                                raise Exception(('Only support splitting numa nodes if'
                                                 ' the numa size is divisible by the number'
                                                 ' of splits'))
                            for i in range(0, len(hyb_cluster)):
                                seg_len = int(len(hyb_cluster[0])/split)
                                for j in range(1, split+1):
                                    tmp1 = hyb_cluster[i][(j-1)*seg_len:j*seg_len]
                                    new_cluster.append(tmp1)

                            hyb_cluster = new_cluster
                            print hyb_cluster
                    else:
                        hyb_cluster = m.machine_topology['NUMA'].get()
                else:
                    print "Warning: Unknown cluster argument for hybrid, using default option"
                    print "Clustering: NUMA nodes"
                    hyb_cluster = m.machine_topology['NUMA'].get()

                # Simulate a multicast tree
                args.multicast = True

                # The lowest core in each cluster acts as its shared-memory writer,
                # the highest as its leaf (last) node
                shm_writers = [ min(x) for x in hyb_cluster ]
                hyb_leaf_nodes = [ max(x) for x in hyb_cluster ]

                args.group = map(int, shm_writers)
                config.args.group = map(int, shm_writers)
                #args.group = ','.join(map(str, shm_writers))

            # type(topology) = hybrid.Hybrid | binarytree.BinaryTree -- inherited from overlay.Overlay
            (topo, evs, root, sched, topology) = \
                simulation._simulation_wrapper(_overlay, m, gr, args.multicast)
            hierarchies = topo.get_tree()

            # Dictionary for translating core IDs
            d = helpers.core_index_dict(m.graph.nodes())

            tmp = topology.get_name()
            if hyb_cluster:
                tmp += " (hybrid)"
            model_descriptions.append(tmp)

            tmp_last_node = -1
            receive_order = None
            for (label, ev) in evs:
                if label == 'atomic broadcast':
                    tmp_last_node = ev.last_node
                    receive_order = ev.node_finished_list
                print "Cost %s for tree is: %d (%d), last node is %s" % \
                    (label, ev.time, ev.time_no_ab, ev.last_node)

            assert receive_order != None

            # Output c configuration for quorum program
            helpers.output_quorum_configuration(m, hierarchies, root, sched,
                                                topology, num_models,
                                                shm_clusters=hyb_cluster,
                                                shm_writers=shm_writers)


            if config.args.hybrid:
                # Set ONE reader of the shared memory cluster as last node
                all_leaf_nodes.append(hyb_leaf_nodes)

                all_last_nodes.append(max(hyb_leaf_nodes))

            else:
                # Determine last node for this model
                all_leaf_nodes.append([d[l] for l in topo.get_leaf_nodes(sched)])

                # Determine last node for this model
                all_last_nodes.append(tmp_last_node)

                # Output final graph: we have to do this here, as the
                # final topology for the adaptive tree is not known before
                # simulating it.
                if not config.running_as_server:
                    helpers.draw_final(m, sched, topo)

            num_models += 1

        print all_leaf_nodes

        # Cut down number of leafs
        LEAFS_MAX = 10
        if len(all_leaf_nodes[0])>LEAFS_MAX:
            # Filter last nodes, only taking leaf nodes
            _l = [ x for x in receive_order if x in all_leaf_nodes[0] ]
            assert(len(_l) >= len(all_leaf_nodes[0]))
            all_leaf_nodes[0] = _l[-10:]
            helpers.warn('Cropping leaf nodes to: %s' % ','.join(map(str, all_leaf_nodes[0])))

        helpers.warn('Leaf nodes are: %s - %d' % (','.join(map(str, all_leaf_nodes[0])), len(all_leaf_nodes[0])))


        # Generate footer
        helpers.output_quorum_end(all_last_nodes, all_leaf_nodes, \
                                  model_descriptions)
        return (all_last_nodes, all_leaf_nodes, root)