Example #1
    def comparisons(self, cover, compound_length) :
        CP = predicates.CompoundPredicate

        block_index = {}
        for predicate, blocks in viewitems(cover):
            block_index[predicate] = {}
            for block_id, blocks in viewitems(blocks) :
                for id in self._blocks(blocks) :
                    block_index[predicate].setdefault(id, set()).add(block_id)

        compounder = self.Compounder(cover, block_index)
        comparison_count = {}
        simple_predicates = sorted(cover, key=str)

        for i in range(2, compound_length+1) :
            for combo in itertools.combinations(simple_predicates, i) :
                comparison_count[CP(combo)] = sum(self.pairs(ids)
                                                  for ids in 
                                                  viewvalues(compounder(combo)))
        for pred in simple_predicates :
            comparison_count[pred] = sum(self.pairs(ids)
                                         for ids
                                         in viewvalues(cover[pred]))

        return comparison_count    
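All of the snippets on this page rely on the dict-view helpers from future.utils (viewitems, viewkeys, viewvalues) for Python 2/3 compatibility. A minimal, standalone sketch of how they behave on a plain dict (the data below is made up and not taken from the example above):

from future.utils import viewitems, viewkeys, viewvalues

cover = {'pred_a': {1, 2, 3}, 'pred_b': {2, 4}}    # predicate -> record ids
for predicate, ids in viewitems(cover):            # items() on Python 3
    print(predicate, sorted(ids))
print(sorted(viewkeys(cover)))                     # keys() on Python 3
print([sorted(ids) for ids in viewvalues(cover)])  # values() on Python 3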
Example #2
    def update_category(self, category, samples_and_values):
        """Update an existing column

        Parameters
        ----------
        category : str
            The category to update
        samples_and_values : dict
            A mapping of {sample_id: value}

        Raises
        ------
        QiitaDBUnknownIDError
            If a sample_id is included in values that is not in the template
        QiitaDBColumnError
            If the column does not exist in the table. This is implicit, and
            can be thrown by the contained Samples.
        ValueError
            If one of the new values cannot be inserted in the DB due to
            different types
        """
        with TRN:
            if not set(self.keys()).issuperset(samples_and_values):
                missing = set(samples_and_values) - set(self.keys())
                table_name = self._table_name(self._id)
                raise QiitaDBUnknownIDError(missing, table_name)

            for k, v in viewitems(samples_and_values):
                sample = self[k]
                sample.setitem(category, v)

            try:
                TRN.execute()
            except ValueError as e:
                # catching error so we can check if the error is due to
                # different column type or something else

                value_types = set(type_lookup(type(value)) for value in viewvalues(samples_and_values))

                sql = """SELECT udt_name
                         FROM information_schema.columns
                         WHERE column_name = %s
                            AND table_schema = 'qiita'
                            AND (table_name = %s OR table_name = %s)"""
                TRN.add(sql, [category, self._table, self._table_name(self._id)])
                column_type = TRN.execute_fetchlast()

                if any([column_type != vt for vt in value_types]):
                    value_str = ", ".join([str(value) for value in viewvalues(samples_and_values)])
                    value_types_str = ", ".join(value_types)

                    raise ValueError(
                        'The new values being added to column: "%s" are "%s" '
                        '(types: "%s"). However, this column in the DB is of '
                        'type "%s". Please change the values in your updated '
                        "template or reprocess your template." % (category, value_str, value_types_str, column_type)
                    )

                raise e
Example #3
    def _checkData(self, data_1, data_2):
        if len(data_1) == 0:
            raise ValueError("Dictionary of records from first dataset is empty.")
        elif len(data_2) == 0:
            raise ValueError("Dictionary of records from second dataset is empty.")

        self.data_model.check(next(iter(viewvalues(data_1))))
        self.data_model.check(next(iter(viewvalues(data_2))))
Example #4
File: cpu.py Project: cea-sec/miasm
def factor_one_bit(tree):
    if isinstance(tree, set):
        return tree
    new_keys = defaultdict(lambda: defaultdict(dict))
    if len(tree) == 1:
        return tree
    for k, v in viewitems(tree):
        if k == "mn":
            new_keys[k] = v
            continue
        l, fmask, fbits, fname, flen = k
        if flen is not None or l <= 1:
            new_keys[k] = v
            continue
        cfmask = fmask >> (l - 1)
        nfmask = fmask & ((1 << (l - 1)) - 1)
        cfbits = fbits >> (l - 1)
        nfbits = fbits & ((1 << (l - 1)) - 1)
        ck = 1, cfmask, cfbits, None, flen
        nk = l - 1, nfmask, nfbits, fname, flen
        if nk in new_keys[ck]:
            raise NotImplementedError('not fully functional')
        new_keys[ck][nk] = v
    for k, v in list(viewitems(new_keys)):
        new_keys[k] = factor_one_bit(v)
    # try factor sons
    if len(new_keys) != 1:
        return new_keys
    subtree = next(iter(viewvalues(new_keys)))
    if len(subtree) != 1:
        return new_keys
    if next(iter(subtree)) == 'mn':
        return new_keys

    return new_keys
Example #5
    def log(self, input_count, batch_count, additional_values):
        logdict = OrderedDict()
        delta_t = time.time() - self.last_time
        delta_count = input_count - self.last_input_count
        self.last_time = time.time()
        self.last_input_count = input_count

        logdict['time_spent'] = delta_t
        logdict['cumulative_time_spent'] = time.time() - self.start_time
        logdict['input_count'] = delta_count
        logdict['cumulative_input_count'] = input_count
        logdict['cumulative_batch_count'] = batch_count
        if delta_t > 0:
            logdict['inputs_per_sec'] = delta_count / delta_t
        else:
            logdict['inputs_per_sec'] = 0.0

        for k in sorted(viewkeys(additional_values)):
            logdict[k] = additional_values[k]

        # Write the headers if they are not written yet
        if self.headers is None:
            self.headers = list(viewkeys(logdict))
            self.logstr(",".join(self.headers))

        self.logstr(",".join(str(v) for v in viewvalues(logdict)))

        for logger in self.external_loggers:
            try:
                logger.log(logdict)
            except Exception as e:
                logging.warning(
                    "Failed to call ExternalLogger: {}".format(e))
Example #6
def cb_arm_fix_call(mn, cur_bloc, loc_db, offsets_to_dis, *args, **kwargs):
    """
    for arm:
    MOV        LR, PC
    LDR        PC, [R5, 0x14]
    * is a subcall *

    """
    if len(cur_bloc.lines) < 2:
        return
    l1 = cur_bloc.lines[-1]
    l2 = cur_bloc.lines[-2]
    if l1.name != "LDR":
        return
    if l2.name != "MOV":
        return

    values = viewvalues(mn.pc)
    if not l1.args[0] in values:
        return
    if not l2.args[1] in values:
        return
    loc_key_cst = loc_db.get_or_create_offset_location(l1.offset + 4)
    cur_bloc.add_cst(loc_key_cst, AsmConstraint.c_next)
    offsets_to_dis.add(l1.offset + 4)
Example #7
    def aggressive_coalesce_block(self):
        """Try to coalesce phi var with their pre/post variables"""

        ircfg = self.ssa.graph

        # Run coalesce on the post phi parallel copy
        for irblock in viewvalues(ircfg.blocks):
            if not irblock_has_phi(irblock):
                continue
            parallel_copies = {}
            for dst in self.phi_destinations[irblock.loc_key]:
                parallel_copies[dst] = self.phi_new_var[dst]
            self.aggressive_coalesce_parallel_copy(parallel_copies, None)

            # Run coalesce on the pre phi parallel copy

            # Stand for the virtual parallel copies at the end of Phi's block
            # parents
            parent_to_parallel_copies = {}
            for dst in irblock[0]:
                new_var = self.phi_new_var[dst]
                for parent, src in self.phi_parent_sources[dst]:
                    parent_to_parallel_copies.setdefault(parent, {})[new_var] = src

            for parent, parallel_copies in viewitems(parent_to_parallel_copies):
                self.aggressive_coalesce_parallel_copy(parallel_copies, parent)
Example #8
    def lib_get_add_func(self, libad, imp_ord_or_name, dst_ad=None):
        if not libad in viewvalues(self.name2off):
            raise ValueError('unknown lib base!', hex(libad))

        # test if not ordinal
        # if imp_ord_or_name >0x10000:
        #    imp_ord_or_name = vm_get_str(imp_ord_or_name, 0x100)
        #    imp_ord_or_name = imp_ord_or_name[:imp_ord_or_name.find('\x00')]

        #/!\ can have multiple dst ad
        if not imp_ord_or_name in self.lib_imp2dstad[libad]:
            self.lib_imp2dstad[libad][imp_ord_or_name] = set()
        self.lib_imp2dstad[libad][imp_ord_or_name].add(dst_ad)

        if imp_ord_or_name in self.lib_imp2ad[libad]:
            return self.lib_imp2ad[libad][imp_ord_or_name]
        # log.debug('new imp %s %s' % (imp_ord_or_name, dst_ad))
        ad = self.libbase2lastad[libad]
        self.libbase2lastad[libad] += 0x10  # arbitrary
        self.lib_imp2ad[libad][imp_ord_or_name] = ad

        name_inv = dict(
            (value, key) for key, value in viewitems(self.name2off)
        )
        c_name = canon_libname_libfunc(name_inv[libad], imp_ord_or_name)
        self.fad2cname[ad] = c_name
        self.cname2addr[c_name] = ad
        self.fad2info[ad] = libad, imp_ord_or_name
        return ad
Example #9
def ShiftActivationDevices(model, activations, shifts):
    '''
    Function to enable simple model-parallelism for data_parallel_model
    models. 'shifts' is a dictionary from_gpu -> to_gpu, and activations is
    a list of activation blobs (without the gpu_x/ prefix -- use
    GetActivationBlobs()).

    Operators handling these activations are shifted to the gpu declared in
    'shifts'. Also related operators such as gradient operators will be moved.
    Appropriate copy-ops are inserted.

    This allows shifting memory usage from one gpu to another, enabling bigger
    models to be trained.
    '''
    assert set(viewvalues(shifts)).intersection(set(viewkeys(shifts))) == set()
    for from_device, to_device in viewitems(shifts):
        log.info(
            "Shifting {} activations from {} --> {}".
            format(len(activations), from_device, to_device)
        )
        _ShiftActivationDevices(model, activations, from_device, to_device)

    param_init_net, blob_to_device = core.InjectCrossDeviceCopies(model.param_init_net)
    net, _blob_to_device = core.InjectCrossDeviceCopies(model.net, blob_to_device)
    model.param_init_net = param_init_net
    model.net = net
Example #10
def generateFeatureGroups(fgiContainer, linkageGroups, matchArr, timeKey,
                          massKey, logMassKey, massScalingFactor):
    """ #TODO: docstring

    :param fgiContainer:
    :param linkageGroups:

    :returns: a list of ids of the newly generated :class:`Fgi`
    """
    #Generate feature groups from the linked features
    newFgiIds = list()
    for linkageGroup in viewvalues(linkageGroups):
        fgiId = fgiContainer._getNextFgiId()
        fgi = fiGroupFromLinkageGroup(matchArr, linkageGroup, fgiId,
                                      timeKey, massKey
                                      )
        fgiContainer.container[fgiId] = fgi
        fgi.metrics = clusterMetrics(matchArr[timeKey][linkageGroup],
                                     matchArr[logMassKey][linkageGroup],
                                     massScalingFactor=massScalingFactor
                                     )
        fgi.rt = fgi.metrics['meanTime']
        fgi.mz = fgi.metrics['meanMass']
        newFgiIds.append(fgiId)
    return newFgiIds
Example #11
    def isolate_phi_nodes_block(self):
        """
        Init structures and virtually insert parallel copy before/after each phi
        node
        """
        ircfg = self.ssa.graph
        for irblock in viewvalues(ircfg.blocks):
            if not irblock_has_phi(irblock):
                continue
            for dst, sources in viewitems(irblock[0]):
                assert sources.is_op('Phi')
                new_var = self.create_copy_var(dst)
                self.phi_new_var[dst] = new_var

                var_to_parents = get_phi_sources_parent_block(
                    self.ssa.graph,
                    irblock.loc_key,
                    sources.args
                )

                for src in sources.args:
                    parents = var_to_parents[src]
                    self.new_var_to_srcs_parents.setdefault(new_var, set()).update(parents)
                    for parent in parents:
                        self.phi_parent_sources.setdefault(dst, set()).add((parent, src))

            self.phi_destinations[irblock.loc_key] = set(irblock[0])
Example #12
def treat_element():
    "Display an element"
    global graphs, comments, sol_nb, settings, addr, ir_arch, ircfg

    try:
        graph = next(graphs)
    except StopIteration:
        comments = {}
        print("Done: %d solutions" % (sol_nb))
        return

    sol_nb += 1
    print("Get graph number %02d" % sol_nb)
    filename = os.path.join(tempfile.gettempdir(), "solution_0x%08x_%02d.dot" % (addr, sol_nb))
    print("Dump the graph to %s" % filename)
    open(filename, "w").write(graph.graph.dot())

    for node in graph.relevant_nodes:
        try:
            offset = ircfg.blocks[node.loc_key][node.line_nb].instr.offset
        except IndexError:
            print("Unable to highlight %s" % node)
            continue
        comments[offset] = comments.get(offset, []) + [node.element]
        idc.SetColor(offset, idc.CIC_ITEM, settings.color)

    if graph.has_loop:
        print('Graph has dependency loop: symbolic execution is inexact')
    else:
        print("Possible value: %s" % next(iter(viewvalues(graph.emul(ir_arch)))))

    for offset, elements in viewitems(comments):
        idc.MakeComm(offset, ", ".join(map(str, elements)))
Example #13
def index(data, offset=0):
    if isIndexed(data, offset):
        return data
    else:
        data = dict(zip(itertools.count(offset),
                        viewvalues(data)))
        return data
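In effect this re-keys an arbitrary record dictionary with consecutive integer ids starting at offset, leaving it untouched if it is already keyed that way. A standalone sketch of the same idea (is_indexed below is a simplified, illustrative stand-in for the real isIndexed):

import itertools

def is_indexed(data, offset=0):
    # simplified: keys are exactly offset, offset+1, ..., offset+len-1
    return set(data) == set(range(offset, offset + len(data)))

def reindex(data, offset=0):
    if is_indexed(data, offset):
        return data
    return dict(zip(itertools.count(offset), data.values()))

records = {'a': {'name': 'x'}, 'b': {'name': 'y'}}
print(reindex(records, offset=10))   # {10: {'name': 'x'}, 11: {'name': 'y'}}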
Example #14
 def test_collect_modules_provenance(self):
     metascript = self.prepare(name=MODULES)
     metascript.deployment._collect_modules_provenance()
     modules = {m.name
                for m in viewvalues(metascript.modules_store.store)}
     self.assertIn("ast", modules)
     self.assertIn("script", modules)
Example #15
def modSymbolsFromLabelInfo(labelDescriptor):
    """Returns a set of all modiciation symbols which were used in the
    labelDescriptor

    :param labelDescriptor: :class:`LabelDescriptor` describes the label setup
        of an experiment

    :returns: #TODO: docstring
    """
    modSymbols = set()
    for labelStateEntry in viewvalues(labelDescriptor.labels):
        for labelPositionEntry in viewvalues(labelStateEntry['aminoAcidLabels']):
            for modSymbol in aux.toList(labelPositionEntry):
                if modSymbol != '':
                    modSymbols.add(modSymbol)
    return modSymbols
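The nested loops assume labelDescriptor.labels maps each label state to a dict whose 'aminoAcidLabels' values are either a single symbol or a list of symbols (aux.toList presumably normalises a single symbol to a one-element list). A rough, self-contained sketch of that shape with purely hypothetical values:

labels = {
    0: {'aminoAcidLabels': {'nTerm': 'u:188', 'K': ['u:188']}},
    1: {'aminoAcidLabels': {'nTerm': 'u:199', 'K': ''}},
}

modSymbols = set()
for labelStateEntry in labels.values():
    for labelPositionEntry in labelStateEntry['aminoAcidLabels'].values():
        entries = labelPositionEntry if isinstance(labelPositionEntry, list) \
            else [labelPositionEntry]
        for modSymbol in entries:
            if modSymbol != '':
                modSymbols.add(modSymbol)
print(sorted(modSymbols))   # ['u:188', 'u:199']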
Example #16
    def _blockData(self, data_d):

        blocks = core.TempShelve('blocks')

        if not self.loaded_indices:
            self.blocker.indexAll(data_d)

        block_groups = itertools.groupby(self.blocker(viewitems(data_d)),
                                         lambda x: x[1])

        
        for record_id, block in block_groups:
            record = data_d[record_id]
            block_ids = sorted(block_key for block_key, _ in block)
            while block_ids:
                id = block_ids.pop()
                if id in blocks:
                    blocks[id] += [(record_id, record, set(block_ids))]
                else:
                    blocks[id] = [(record_id, record, set(block_ids))]

        if not self.loaded_indices:
            self.blocker.resetIndices()

        for block in viewvalues(blocks):
            if len(block) > 1:
                yield block

        blocks.close()
Example #17
 def indexAll(self, data_d) :
     for field in self.index_fields :
         unique_fields = {record[field]
                          for record 
                          in viewvalues(data_d)
                          if record[field]}
         self.index(unique_fields, field)
Example #18
    def _blockData(self, data_d):

        blocks = defaultdict(dict)

        for field in self.blocker.index_fields :
            unique_fields = {record[field]
                             for record 
                             in viewvalues(data_d)
                             if record[field]}

            self.blocker.index(unique_fields, field)

        for block_key, record_id in self.blocker(viewitems(data_d)) :
            blocks[block_key][record_id] = data_d[record_id]

        self.blocker.resetIndices()

        blocks = (records for records in blocks.values()
                  if len(records) > 1)
        
        blocks = {frozenset(d.keys()) : d for d in blocks}
        blocks = blocks.values()

        for block in self._redundantFree(blocks) :
            yield block
Example #19
    def can_be_updated(self, columns):
        """Gets if the template can be updated

        Parameters
        ----------
        columns : set
            A set of the names of the columns to be updated

        Returns
        -------
        bool
            If the template can be updated

        Notes
        -----
        The prep template can be updated when (1) it has no preprocessed data
        or its data type is not part of TARGET_GENE_DATA_TYPES, or (2) if it
        is part of TARGET_GENE_DATA_TYPES, when the columns being updated are
        not part of PREP_TEMPLATE_COLUMNS_TARGET_GENE.
        """
        if (not self.preprocessed_data or
           self.data_type() not in TARGET_GENE_DATA_TYPES):
            return True

        tg_columns = set(chain.from_iterable(
            [v.columns for v in
             viewvalues(PREP_TEMPLATE_COLUMNS_TARGET_GENE)]))

        if not columns & tg_columns:
            return True

        return False
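At its core the check is just a set intersection between the requested columns and the column names gathered from PREP_TEMPLATE_COLUMNS_TARGET_GENE. A reduced sketch of that logic, with an invented stand-in for the restriction objects (each is assumed to expose a .columns mapping):

from itertools import chain

class Restriction(object):
    def __init__(self, columns):
        self.columns = columns        # column name -> expected type

PREP_RESTRICTIONS = {
    'EBI': Restriction({'primer': str, 'barcode': str}),
    'demultiplex': Restriction({'barcode': str}),
}

tg_columns = set(chain.from_iterable(
    r.columns for r in PREP_RESTRICTIONS.values()))

columns = {'sample_type', 'primer'}
print(bool(columns & tg_columns))     # True -> these columns cannot be updated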
Example #20
 def all_args(self):
     """List arguments of function call"""
     return list(itertools.chain(
         itertools.chain.from_iterable(self.args),
         self.starargs,
         self.kwargs,
         itertools.chain.from_iterable(viewvalues(self.keywords))
     ))
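A quick illustration of the flattening this performs, using plain lists in place of the AST-node containers (the data is made up):

import itertools

args = [['a'], ['b', 'c']]             # positional argument groups
starargs = ['s1']                      # *args
kwargs = ['kw']                        # **kwargs
keywords = {'x': ['k1'], 'y': ['k2']}  # keyword name -> argument nodes

flat = list(itertools.chain(
    itertools.chain.from_iterable(args),
    starargs,
    kwargs,
    itertools.chain.from_iterable(keywords.values()),
))
print(flat)   # ['a', 'b', 'c', 's1', 'kw', 'k1', 'k2']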
Example #21
 def test_collect_environment_provenance(self):
     metascript = self.prepare()
     metascript.deployment._collect_environment_provenance()
     env = {e.name: e.value
            for e in viewvalues(metascript.environment_attrs_store.store)}
     self.assertIn("PWD", env)
     self.assertIn("PYTHON_VERSION", env)
     self.assertIn("PYTHON_IMPLEMENTATION", env)
Example #22
 def check_dst_ad(self):
     for ad in self.lib_imp2dstad:
         all_ads = sorted(viewvalues(self.lib_imp2dstad[ad]))
         for i, x in enumerate(all_ads[:-1]):
             if x is None or all_ads[i + 1] is None:
                 return False
             if x + 4 != all_ads[i + 1]:
                 return False
     return True
Example #23
def find_call(ircfg):
    """Returns (irb, index) which call"""

    for irb in viewvalues(ircfg.blocks):
        out = set()
        if len(irb) < 2:
            continue
        assignblk = irb[-2]
        for src in viewvalues(assignblk):
            if not isinstance(src, ExprOp):
                continue
            if not src.op.startswith('call_func'):
                continue
            out.add((irb.loc_key, len(irb) - 2))
        if len(out) != 1:
            continue
        loc_key, index = out.pop()
        yield loc_key, index
Example #24
    def find_variables_rec(self, expr):
        """Recursive method called by find_variable to expand @expr.
        Set @var_names and @var_values.
        This implementation is faster than an expression visitor because
        we do not rebuild each expression.
        """

        if (expr in self.var_asked):
            # Expr has already been asked
            if expr not in viewvalues(self._vars):
                # Create var
                identifier = m2_expr.ExprId(
                    "%s%s" % (
                        self.var_prefix,
                        next(self.var_indice)
                    ),
                    size = expr.size
                )
                self._vars[identifier] = expr

            # Recursion stop case
            return
        else:
            # First time for @expr
            self.var_asked.add(expr)

        if isinstance(expr, m2_expr.ExprOp):
            for a in expr.args:
                self.find_variables_rec(a)

        elif isinstance(expr, m2_expr.ExprInt):
            pass

        elif isinstance(expr, m2_expr.ExprId):
            pass

        elif isinstance(expr, m2_expr.ExprLoc):
            pass

        elif isinstance(expr, m2_expr.ExprMem):
            self.find_variables_rec(expr.ptr)

        elif isinstance(expr, m2_expr.ExprCompose):
            for arg in expr.args:
                self.find_variables_rec(arg)

        elif isinstance(expr, m2_expr.ExprSlice):
            self.find_variables_rec(expr.arg)

        elif isinstance(expr, m2_expr.ExprCond):
            self.find_variables_rec(expr.cond)
            self.find_variables_rec(expr.src1)
            self.find_variables_rec(expr.src2)

        else:
            raise NotImplementedError("Type not handled: %s" % expr)
Example #25
def blockTraining(pairs,
                  predicate_set,
                  eta=.1,
                  epsilon=0,
                  matching = "Dedupe"):
    '''
    Takes in a set of training pairs and predicates and tries to find
    a good set of blocking rules.
    '''

    blocker = blocking.Blocker(predicate_set)
    prepare_index(blocker, pairs, matching)

    if len(pairs['match']) < 50 :
        compound_length = 2
    else :
        compound_length = 3

    dupe_cover = cover(blocker, pairs['match'], compound_length)
    distinct_cover = cover(blocker, pairs['distinct'], compound_length)

    distinct_count = defaultdict(int, {pred : len(pairs)
                                       for pred, pairs
                                       in viewitems(distinct_cover)})

    # Throw away the predicates that cover too many distinct pairs
    coverage_threshold = eta * len(pairs['distinct'])
    logger.info("coverage threshold: %s", coverage_threshold)
    dupe_cover = {pred : pairs
                  for pred, pairs
                  in viewitems(dupe_cover)
                  if distinct_count[pred] < coverage_threshold}

    if not dupe_cover : 
        raise ValueError(NO_PREDICATES_ERROR)

    uncoverable_dupes = set(pairs['match']) - set.union(*viewvalues(dupe_cover))

    if len(uncoverable_dupes) > epsilon :
        logger.warning(OUT_OF_PREDICATES_WARNING)
        logger.debug(uncoverable_dupes)
        epsilon = 0
    else :
        epsilon -= len(uncoverable_dupes)

    chvatal_set = greedy(dupe_cover.copy(), distinct_count, epsilon)

    dupe_cover = {pred : dupe_cover[pred] for pred in chvatal_set}
        
    final_predicates = tuple(dominating(dupe_cover))

    logger.info('Final predicate set:')
    for predicate in final_predicates :
        logger.info(predicate)

    return final_predicates
Example #26
    def test_rnn(self):
        from caffe2.python import rnn_cell
        T = 5
        model = model_helper.ModelHelper()
        seq_lengths, labels = \
            model.net.AddExternalInputs(
                'seq_lengths', 'labels',
            )
        init_blobs = []
        for i in range(2):
            hidden_init, cell_init = model.net.AddExternalInputs(
                "hidden_init_{}".format(i),
                "cell_init_{}".format(i)
            )
            init_blobs.extend([hidden_init, cell_init])
        model.param_init_net.ConstantFill([], ["input"], shape=[T, 4, 10])
        output, last_hidden, _, last_state = rnn_cell.LSTM(
            model=model,
            input_blob="input",
            seq_lengths=seq_lengths,
            initial_states=init_blobs,
            dim_in=10,
            dim_out=[10, 10],
            scope="lstm1",
            forward_only=False,
            drop_states=True,
            return_last_layer_only=True,
        )
        softmax, loss = model.net.SoftmaxWithLoss(
            [model.Flatten(output), "labels"],
            ['softmax', 'loss'],
        )

        model.AddGradientOperators([loss])
        blobs_before = count_blobs(model.net.Proto())
        optim_proto = memonger.share_grad_blobs(
            model.net,
            ["loss"],
            set(viewvalues(model.param_to_grad)),
            "",
            share_activations=True,
            dont_share_blobs=set(),
        )
        blobs_after = count_blobs(optim_proto)
        self.assertLess(blobs_after, blobs_before)

        # Run once to see all blobs are set up correctly
        for init_blob in init_blobs:
            workspace.FeedBlob(init_blob, np.zeros(
                [1, 4, 10], dtype=np.float32
            ))
        workspace.FeedBlob("seq_lengths", np.array([T] * 4, dtype=np.int32))
        workspace.FeedBlob("labels", np.random.rand(T).astype(np.int32))

        workspace.RunNetOnce(model.param_init_net)
        workspace.RunNetOnce(model.net)
Example #27
    def unindex(self, data):  # pragma: no cover

        for field in self.blocker.index_fields:
            self.blocker.unindex((record[field] for record in viewvalues(data)), field)

        for block_key, record_id in self.blocker(viewitems(data)):
            try:
                del self.blocked_records[block_key][record_id]
            except KeyError:
                pass
Example #28
def find_defaults(d, op):
    if isinstance(d, list):
        for i in d:
            find_defaults(i, op)
    elif isinstance(d, dict):
        if "default" in d:
            op(d)
        else:
            for i in viewvalues(d):
                find_defaults(i, op)
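A small usage sketch, assuming find_defaults as defined above (and its viewvalues import) is in scope; the nested structure is invented for illustration. op is called once for every dict that carries a "default" key, at any nesting depth:

collected = []
doc = {
    "name": "job",
    "inputs": [
        {"name": "threads", "default": 4},
        {"name": "paths", "items": {"default": "/tmp"}},
    ],
}
find_defaults(doc, collected.append)
print(collected)   # [{'name': 'threads', 'default': 4}, {'default': '/tmp'}]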
Example #29
    def _cleanup_record(cls, elt):
        for k, v in viewitems(elt):
            if isinstance(v, list) and len(v) == 1 and \
                    isinstance(v[0], dict) and \
                    all(x is None for x in viewvalues(v[0])):
                elt[k] = []

        cls.from_dbdict(cls._get_props(elt["elt"]))
        new_meta = {}
        if isinstance(elt["meta"], list):
            for rec in elt["meta"]:
                if rec["info"] is None and rec["link"] is None:
                    continue
                info = rec["info"] or {}
                info_props = cls._get_props(info)
                link = rec["link"] or {}
                link_tag = link.get("type",
                                    link.get("labels", [""])[0]).lower()
                link_props = cls._get_props(link)
                key = "%s%s" % (
                    "_".join(label
                             for label in cls._get_labels(info, info_props)
                             if label != "Intel"),
                    "_%s" % link_tag if link_tag else ""
                )
                new_data = dict(("%s_%s" % (link_tag, k), v)
                                for k, v in viewitems(link_props))
                new_data.update(info_props)
                new_meta.setdefault(key, []).append(new_data)
            if new_meta:
                elt["meta"] = new_meta
                for reclist in viewvalues(new_meta):
                    for rec in reclist:
                        cls.from_dbdict(rec)

        if ("times" in elt["meta"] and elt["meta"]["times"] and
                isinstance(elt["meta"]["times"], list) and
                isinstance(elt["meta"]["times"][0], float)):
            elt["meta"]["times"] = [datetime.fromtimestamp(val) for val in
                                    elt["meta"]["times"]]

        if not elt["meta"]:
            del elt["meta"]
Example #30
def _display_honeyd_conf(host, honeyd_routes, honeyd_entries, out=sys.stdout):
    addr = utils.int2ip(host['addr'])
    hname = "host_%s" % addr.replace('.', '_')
    out.write("create %s\n" % hname)
    defaction = HONEYD_DEFAULT_ACTION
    if 'extraports' in host:
        extra = host['extraports']
        defaction = max(
            viewitems(max(viewvalues(extra),
                          key=lambda state: state['total'])['reasons']),
            key=lambda reason: reason[1],
        )[0]
        defaction = HONEYD_ACTION_FROM_NMAP_STATE.get(defaction)
    out.write('set %s default tcp action %s\n' % (hname, defaction))
    for p in host.get('ports', []):
        try:
            out.write('add %s %s port %d %s\n' % (
                hname, p['protocol'], p['port'],
                _nmap_port2honeyd_action(p),
            ))
        except KeyError:
            # let's skip pseudo-port records that are only containers for host
            # scripts.
            pass
    if 'traces' in host and len(host['traces']) > 0:
        trace = max(host['traces'], key=lambda x: len(x['hops']))['hops']
        if trace:
            trace.sort(key=lambda x: x['ttl'])
            curhop = trace[0]
            honeyd_entries.add(curhop['ipaddr'])
            for t in trace[1:]:
                key = (curhop['ipaddr'], t['ipaddr'])
                latency = max(t['rtt'] - curhop['rtt'], 0)
                route = honeyd_routes.get(key)
                if route is None:
                    honeyd_routes[key] = {
                        'count': 1,
                        'high': latency,
                        'low': latency,
                        'mean': latency,
                        'targets': set([host['addr']])
                    }
                else:
                    route['targets'].add(host['addr'])
                    honeyd_routes[key] = {
                        'count': route['count'] + 1,
                        'high': max(route['high'], latency),
                        'low': min(route['low'], latency),
                        'mean': (route['mean'] * route['count'] +
                                 latency) / (route['count'] + 1),
                        'targets': route['targets'],
                    }
                curhop = t
    out.write('bind %s %s\n\n' % (addr, hname))
    return honeyd_routes, honeyd_entries
Example #31
    def makeRequest_httplib(self, uri, data, verb, headers):
        """
        Make a request to the remote database for a given URI. The type of
        request will determine the action taken by the server (be careful with
        DELETE!). Data should be a dictionary of {dataname: datavalue}.

        Returns a tuple of the data from the server (decoded using the
        appropriate method), the response status and the response reason, to be
        used in error handling.

        You can override the method to encode/decode your data by passing in an
        encoding/decoding function to this method. Your encoded data must end up
        as a string.

        """
        if verb == 'GET' and data:
            uri = "%s?%s" % (uri, data)

        assert isinstance(data, (str, bytes)), \
            "Data in makeRequest is %s and not encoded to a string" % type(data)

        # And now overwrite any headers that have been passed into the call:
        # WARNING: doesn't work with deflate so only accept gzip
        headers["Accept-Encoding"] = "gzip,identity"

        # httplib2 will allow sockets to close on remote end without retrying
        # try to send request - if this fails try again - should then succeed
        try:
            conn = self._getURLOpener()
            response, result = conn.request(uri,
                                            method=verb,
                                            body=data,
                                            headers=headers)
            if response.status == 408:  # timeout can indicate a socket error
                raise socket.error
        except ServerNotFoundError as ex:
            # DNS cannot resolve this domain name, let's call it 'Service Unavailable'
            e = HTTPException()
            setattr(e, 'url', uri)
            setattr(e, 'status', 503)
            setattr(e, 'reason', 'Service Unavailable')
            setattr(e, 'result', str(ex))
            raise e
        except (socket.error, AttributeError):
            self['logger'].warn("Http request failed, retrying once again..")
            # AttributeError implies initial connection error - need to close
            # & retry. httplib2 doesn't clear httplib state before next request
            # if this is threaded this may spoil things
            # only have one endpoint so don't need to determine which to shut
            for con in viewvalues(conn.connections):
                con.close()
            conn = self._getURLOpener()
            # ... try again... if this fails propagate error to client
            try:
                response, result = conn.request(uri,
                                                method=verb,
                                                body=data,
                                                headers=headers)
            except AttributeError:
                msg = traceback.format_exc()
                # socket/httplib really screwed up - nuclear option
                conn.connections = {}
                raise socket.error('Error contacting: %s: %s' %
                                   (self.getDomainName(), msg))
        if response.status >= 400:
            e = HTTPException()
            setattr(e, 'req_data', data)
            setattr(e, 'req_headers', headers)
            setattr(e, 'url', uri)
            setattr(e, 'result', result)
            setattr(e, 'status', response.status)
            setattr(e, 'reason', response.reason)
            setattr(e, 'headers', response)
            raise e

        return result, response
Example #32
 def itervalues(self):
     for src in viewvalues(self._assigns):
         yield src
Example #33
 def visit_group(self, group):
     """Visit Group Node (Visitor Pattern)"""
     result = []
     for element in viewvalues(group.nodes):
         result += element.visit(self)
     return result
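A compact sketch of the visitor pattern this relies on, with invented stand-in node classes (only the nodes mapping and the visit protocol match the example above):

class Leaf(object):
    def __init__(self, value):
        self.value = value
    def visit(self, visitor):
        return [self.value]

class Group(object):
    def __init__(self, nodes):
        self.nodes = nodes                 # name -> child node
    def visit(self, visitor):
        return visitor.visit_group(self)

class Collector(object):
    def visit_group(self, group):
        result = []
        for element in group.nodes.values():
            result += element.visit(self)
        return result

tree = Group({'a': Leaf(1), 'b': Group({'c': Leaf(2)})})
print(Collector().visit_group(tree))       # [1, 2]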
Example #34
 def values(self):
     return viewvalues(self._d)
Example #35
    def __init__(self, custom_methods, *args, **kwargs):
        from miasm.jitter.loader.pe import vm_load_pe, vm_load_pe_libs,\
            preload_pe, libimp_pe, vm_load_pe_and_dependencies
        from miasm.os_dep import win_api_x86_32, win_api_x86_32_seh
        methods = dict((name.encode(),func) for name, func in viewitems(win_api_x86_32.__dict__))
        methods.update(custom_methods)

        super(OS_Win, self).__init__(methods, *args, **kwargs)

        # Import manager
        libs = libimp_pe()
        self.libs = libs
        win_api_x86_32.winobjs.runtime_dll = libs

        self.name2module = {}
        fname_basename = os.path.basename(self.fname).lower()

        # Load main pe
        with open(self.fname, "rb") as fstream:
            self.pe = vm_load_pe(
                self.jitter.vm,
                fstream.read(),
                load_hdr=self.options.load_hdr,
                name=self.fname,
                **kwargs
            )
            self.name2module[fname_basename] = self.pe

        # Load library
        if self.options.loadbasedll:

            # Load libs in memory
            self.name2module.update(
                vm_load_pe_libs(
                    self.jitter.vm,
                    self.ALL_IMP_DLL,
                    libs,
                    self.modules_path,
                    **kwargs
                )
            )

            # Patch libs imports
            for pe in viewvalues(self.name2module):
                preload_pe(self.jitter.vm, pe, libs)

        if self.options.dependencies:
            vm_load_pe_and_dependencies(
                self.jitter.vm,
                fname_basename,
                self.name2module,
                libs,
                self.modules_path,
                **kwargs
            )

        win_api_x86_32.winobjs.current_pe = self.pe

        # Fix pe imports
        preload_pe(self.jitter.vm, self.pe, libs)

        # Library calls handler
        self.jitter.add_lib_handler(libs, methods)

        # Manage SEH
        if self.options.use_windows_structs:
            win_api_x86_32_seh.main_pe_name = fname_basename
            win_api_x86_32_seh.main_pe = self.pe
            win_api_x86_32.winobjs.hcurmodule = self.pe.NThdr.ImageBase
            win_api_x86_32_seh.name2module = self.name2module
            win_api_x86_32_seh.set_win_fs_0(self.jitter)
            win_api_x86_32_seh.init_seh(self.jitter)

        self.entry_point = self.pe.rva2virt(
            self.pe.Opthdr.AddressOfEntryPoint)
Example #36
def _col_iterator():
    for r_set in ALL_RESTRICTIONS:
        for restriction in viewvalues(r_set):
            for cols in viewkeys(restriction.columns):
                yield cols
Example #37
    def test_gradient_optim(self, input_dim, output_dim, batch_size):
        m = model_helper.ModelHelper()
        with core.NameScope("name_x"):
            fc1 = brew.fc(m,
                          "data",
                          "fc1",
                          dim_in=input_dim,
                          dim_out=output_dim)
            fc2 = brew.fc(m, fc1, "fc2", dim_in=output_dim, dim_out=output_dim)
            fc3 = brew.fc(m, fc2, "fc3", dim_in=output_dim, dim_out=output_dim)
            fc4 = brew.fc(m, fc3, "fc4", dim_in=output_dim, dim_out=output_dim)
            fc5 = brew.fc(m, fc4, "fc5", dim_in=output_dim, dim_out=output_dim)
            fc5.Relu([], fc5)\
               .Softmax([], "pred") \
               .LabelCrossEntropy(["label"], ["xent"]) \
               .AveragedLoss([], "loss")
        input_to_grad = m.AddGradientOperators(["name_x/loss"])

        blobs_before = count_blobs(m.net.Proto())
        optim_proto = memonger.share_grad_blobs(
            m.net,
            ["name_x/loss"],
            set(viewvalues(m.param_to_grad)),
            "name_x/",
            share_activations=False,
        )
        blobs_after = count_blobs(optim_proto)
        self.assertLess(blobs_after, blobs_before)

        optim_proto_wacts = memonger.share_grad_blobs(
            m.net,
            ["name_x/loss"],
            set(viewvalues(m.param_to_grad)),
            "name_x/",
            share_activations=True,
            dont_share_blobs=set([str(input_to_grad["name_x/fc1_w"])]),
        )
        blobs_wact_optim = count_blobs(optim_proto_wacts)
        self.assertLessEqual(blobs_wact_optim, blobs_after)

        # Check that the last activations are not shared
        self.assertTrue(has_blob(optim_proto, "name_x/fc5"))
        self.assertTrue(
            has_blob(optim_proto_wacts, "name_x/fc5"),
            "Dont remap final activation",
        )

        # Test networks produce exactly same gradients
        data = np.random.randn(batch_size, input_dim).astype(np.float32)
        label = np.random.randint(low=0, high=output_dim,
                                  size=(batch_size, )).astype(np.int32)
        workspace.RunNetOnce(m.param_init_net)
        workspace.FeedBlob("name_x/data", data)
        workspace.FeedBlob("name_x/label", label)
        workspace.RunNetOnce(m.net)
        loss = workspace.FetchBlob("name_x/loss")
        grad = workspace.FetchBlob(str(input_to_grad["name_x/fc1_w"]))
        workspace.RunNetOnce(optim_proto)
        optimized_loss = workspace.FetchBlob("name_x/loss")
        optimized_grad = workspace.FetchBlob(str(
            input_to_grad["name_x/fc1_w"]))
        np.testing.assert_almost_equal(loss, optimized_loss)
        np.testing.assert_almost_equal(grad, optimized_grad)

        workspace.FeedBlob(str(input_to_grad["name_x/fc1_w"]), np.array([0.0]))

        # Run with the forward optimization
        workspace.RunNetOnce(optim_proto_wacts)
        optimized_loss = workspace.FetchBlob("name_x/loss")
        optimized_grad = workspace.FetchBlob(str(
            input_to_grad["name_x/fc1_w"]))
        np.testing.assert_almost_equal(loss, optimized_loss)
        np.testing.assert_almost_equal(grad, optimized_grad)
Example #38
 def iter_ranges(self):
     for start, length in sorted(viewvalues(self.ranges)):
         yield utils.int2ip(start), utils.int2ip(start + length - 1)
Example #39
 def iter_nets(self):
     for start, length in sorted(viewvalues(self.ranges)):
         for net in utils.range2nets(
             (utils.int2ip(start), utils.int2ip(start + length - 1))):
             yield net
Example #40
def index(data, offset=0):
    if isIndexed(data, offset):
        return data
    else:
        data = dict(zip(itertools.count(offset), viewvalues(data)))
        return data
Example #41
 def iter_int_ranges(self):
     for start, length in sorted(viewvalues(self.ranges)):
         yield start, start + length - 1
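Examples #38, #39, #41 and #46 all walk the same structure: self.ranges maps some key to (start, length) pairs describing integer IP ranges. A standalone sketch of the integer variant under that assumption (addresses invented):

ranges = {'net0': (3232235520, 256),   # 192.168.0.0/24 as integers
          'net1': (167772160, 16)}     # 10.0.0.0 .. 10.0.0.15

for start, length in sorted(ranges.values()):
    print(start, start + length - 1)   # first and last address of each range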
Example #42
 def _add_global_constants(self, init_net):
     for initializer_op in viewvalues(self.global_constant_initializers):
         init_net._net.op.extend([initializer_op])
Example #43
    def get_graph(self):
        simplify = self.simplify
        dontmodstack = self.dontmodstack
        loadmemint = self.loadmemint
        type_graph = self.type_graph

        bin_str = ""
        for s in self.data.segments:
            bin_str += self.data.read(s.start, len(s))
            # add padding between each segment
            if s.end != self.data.end:
                bin_str += '\x00' * (((s.end | 0xfff) + 1) - s.end)

        bs = bin_stream_str(input_str=bin_str, base_address=self.data.start)
        machine = Machine(archs[self.data.arch.name])
        mdis = machine.dis_engine(bs)

        asmcfg = mdis.dis_multiblock(self.function.start)
        entry_points = set(
            [mdis.loc_db.get_offset_location(self.function.start)])

        class IRADelModCallStack(machine.ira):
            def call_effects(self, addr, instr):
                assignblks, extra = super(IRADelModCallStack,
                                          self).call_effects(addr, instr)
                if not dontmodstack:
                    return assignblks, extra
                out = []
                for assignblk in assignblks:
                    dct = dict(assignblk)
                    dct = {
                        dst: src
                        for (dst, src) in viewitems(dct) if dst != self.sp
                    }
                    out.append(AssignBlock(dct, assignblk.instr))
                return out, extra

        ir_arch = IRADelModCallStack(mdis.loc_db)
        ircfg = ir_arch.new_ircfg_from_asmcfg(asmcfg)

        for irb in list(viewvalues(ircfg.blocks)):
            irs = []
            for assignblk in irb:
                new_assignblk = {
                    expr_simp(dst): expr_simp(src)
                    for dst, src in viewitems(assignblk)
                }
                irs.append(AssignBlock(new_assignblk, instr=assignblk.instr))
            ircfg.blocks[irb.loc_key] = IRBlock(irb.loc_key, irs)

        head = list(entry_points)[0]

        if simplify:
            ircfg_simplifier = IRCFGSimplifierCommon(ir_arch)
            ircfg_simplifier.simplify(ircfg, head)

        if type_graph == TYPE_GRAPH_IR:
            return MiasmIRGraph(self.add_names(ircfg))

        class IRAOutRegs(machine.ira):
            def get_out_regs(self, block):
                regs_todo = super(IRAOutRegs, self).get_out_regs(block)
                out = {}
                for assignblk in block:
                    for dst in assignblk:
                        reg = self.ssa_var.get(dst, None)
                        if reg is None:
                            continue
                        if reg in regs_todo:
                            out[reg] = dst
                return set(viewvalues(out))

        # Add dummy dependency to uncover out regs affectation
        for loc in ircfg.leaves():
            irblock = ircfg.blocks.get(loc)
            if irblock is None:
                continue
            regs = {}
            for reg in ir_arch.get_out_regs(irblock):
                regs[reg] = reg
            assignblks = list(irblock)
            new_assignblk = AssignBlock(regs, assignblks[-1].instr)
            assignblks.append(new_assignblk)
            new_irblock = IRBlock(irblock.loc_key, assignblks)
            ircfg.blocks[loc] = new_irblock

        class CustomIRCFGSimplifierSSA(IRCFGSimplifierSSA):
            def do_simplify(self, ssa, head):
                modified = super(CustomIRCFGSimplifierSSA,
                                 self).do_simplify(ssa, head)
                if loadmemint:
                    modified |= load_from_int(ssa.graph, bs,
                                              is_addr_ro_variable)
                return modified

            def simplify(self, ircfg, head):
                ssa = self.ircfg_to_ssa(ircfg, head)
                ssa = self.do_simplify_loop(ssa, head)

                if type_graph == TYPE_GRAPH_IRSSA:
                    ret = ssa.graph
                elif type_graph == TYPE_GRAPH_IRSSAUNSSA:
                    ircfg = self.ssa_to_unssa(ssa, head)
                    ircfg_simplifier = IRCFGSimplifierCommon(self.ir_arch)
                    ircfg_simplifier.simplify(ircfg, head)
                    ret = ircfg
                else:
                    raise ValueError("Unknown option")
                return ret

        # dirty patch to synchronize nodes and blocks lists in ircfg
        nodes_to_del = [
            node for node in ircfg.nodes() if not node in ircfg.blocks
        ]
        for node in nodes_to_del:
            ircfg.del_node(node)

        head = list(entry_points)[0]
        simplifier = CustomIRCFGSimplifierSSA(ir_arch)
        ircfg = simplifier.simplify(ircfg, head)

        return MiasmIRGraph(self.add_names(ircfg))
Example #44
 def test_collect_modules_provenance(self):
     metascript = self.prepare(name=MODULES)
     metascript.deployment._collect_modules_provenance()
     modules = {m.name for m in viewvalues(metascript.modules_store.store)}
     self.assertIn("ast", modules)
     self.assertIn("script", modules)
Example #45
    def create_qiime_mapping_file(self):
        """This creates the QIIME mapping file and links it in the db.

        Returns
        -------
        filepath : str
            The filepath of the created QIIME mapping file

        Raises
        ------
        ValueError
            If the prep template is not a subset of the sample template
        QiitaDBWarning
            If the QIIME-required columns are not present in the template

        Notes
        -----
        We cannot ensure that the QIIME-required columns are present in the
        metadata map. However, we have to generate a QIIME-compliant mapping
        file. Since the user may need a QIIME mapping file, but not these
        QIIME-required columns, we are going to create them and
        populate them with the value XXQIITAXX.
        """
        with qdb.sql_connection.TRN:
            rename_cols = {
                'barcode': 'BarcodeSequence',
                'primer': 'LinkerPrimerSequence',
                'description': 'Description',
            }

            if 'reverselinkerprimer' in self.categories():
                rename_cols['reverselinkerprimer'] = 'ReverseLinkerPrimer'
                new_cols = [
                    'BarcodeSequence', 'LinkerPrimerSequence',
                    'ReverseLinkerPrimer'
                ]
            else:
                new_cols = ['BarcodeSequence', 'LinkerPrimerSequence']

            # Retrieve the latest sample template
            # Since we sorted the filepath retrieval, the first result contains
            # the filepath that we want. `retrieve_filepaths` returns a
            # 3-tuple, in which the fp is the second element
            sample_template_fp = qdb.util.retrieve_filepaths(
                "sample_template_filepath",
                "study_id",
                self.study_id,
                sort='descending')[0][1]

            # reading files via pandas
            st = qdb.metadata_template.util.load_template_to_dataframe(
                sample_template_fp)
            pt = self.to_dataframe()

            st_sample_names = set(st.index)
            pt_sample_names = set(pt.index)

            if not pt_sample_names.issubset(st_sample_names):
                raise ValueError(
                    "Prep template is not a sub set of the sample template, "
                    "file: %s - samples: %s" %
                    (sample_template_fp,
                     ', '.join(pt_sample_names - st_sample_names)))

            mapping = pt.join(st, lsuffix="_prep")
            mapping.rename(columns=rename_cols, inplace=True)

            # Pre-populate the QIIME-required columns with the value XXQIITAXX
            index = mapping.index
            placeholder = ['XXQIITAXX'] * len(index)
            missing = []
            for val in viewvalues(rename_cols):
                if val not in mapping:
                    missing.append(val)
                    mapping[val] = pd.Series(placeholder, index=index)

            if missing:
                warnings.warn(
                    "Some columns required to generate a QIIME-compliant "
                    "mapping file are not present in the template. A "
                    "placeholder value (XXQIITAXX) has been used to populate "
                    "these columns. Missing columns: %s" %
                    ', '.join(sorted(missing)), qdb.exceptions.QiitaDBWarning)

            # Gets the original mapping columns and readjust the order to comply
            # with QIIME requirements
            cols = mapping.columns.values.tolist()
            cols.remove('BarcodeSequence')
            cols.remove('LinkerPrimerSequence')
            cols.remove('Description')
            new_cols.extend(cols)
            new_cols.append('Description')
            mapping = mapping[new_cols]

            # figuring out the filepath for the QIIME map file
            _id, fp = qdb.util.get_mountpoint('templates')[0]
            filepath = join(
                fp, '%d_prep_%d_qiime_%s.txt' %
                (self.study_id, self.id, strftime("%Y%m%d-%H%M%S")))

            # Save the mapping file
            mapping.to_csv(filepath,
                           index_label='#SampleID',
                           na_rep='',
                           sep='\t',
                           encoding='utf-8')

            # adding the fp to the object
            self.add_filepath(filepath,
                              fp_id=qdb.util.convert_to_id(
                                  "qiime_map", "filepath_type"))

            return filepath
Example #46
 def iter_addrs(self):
     for start, length in sorted(viewvalues(self.ranges)):
         for val in range(start, start + length):
             yield utils.int2ip(val)
Example #47
loc_db = LocationDB()
print('disasm...')
cont = Container.from_stream(open(args.filename, 'rb'), loc_db)
machine = Machine("x86_32")

mdis = machine.dis_engine(cont.bin_stream, loc_db=loc_db)
mdis.follow_call = True
asmcfg = mdis.dis_multiblock(ad)
print('ok')

print('generating dataflow graph for:')
ir_arch_analysis = machine.ira(loc_db)
ircfg = ir_arch_analysis.new_ircfg_from_asmcfg(asmcfg)
deadrm = DeadRemoval(ir_arch_analysis)

for irblock in viewvalues(ircfg.blocks):
    print(irblock)

if args.symb:
    block_flow_cb = intra_block_flow_symb
else:
    block_flow_cb = intra_block_flow_raw

gen_block_data_flow_graph(ir_arch_analysis, ircfg, ad, block_flow_cb)

print('*' * 40)
print("""
 View with:
dotty dataflow.dot
 or
 Generate ps with pdf:
Example #48
 def views(self):
     for t in viewvalues(self.tables):
         if t.isview:
             yield t
Example #49
    def test_gradient_optim_tree(self, input_dim, output_dim, batch_size):
        m = model_helper.ModelHelper()
        with core.NameScope("name_x"):
            fc1 = brew.fc(m,
                          "data",
                          "fc1",
                          dim_in=input_dim,
                          dim_out=output_dim)
            fc2 = brew.fc(m, fc1, "fc2", dim_in=output_dim, dim_out=output_dim)
            fc3 = brew.fc(m, fc2, "fc3", dim_in=output_dim, dim_out=output_dim)
            fc4 = brew.fc(m, fc3, "fc4", dim_in=output_dim, dim_out=output_dim)
            fc5 = brew.fc(m, fc4, "fc5", dim_in=output_dim, dim_out=output_dim)
            fc5.Relu([], fc5) \
               .Softmax([], "pred1") \
               .LabelCrossEntropy(["label"], ["xent1"]) \
               .AveragedLoss([], "loss1")
            fc6 = brew.fc(m, fc5, "fc6", dim_in=output_dim, dim_out=output_dim)
            fc6.Relu([], fc6) \
               .Softmax([], "pred2") \
               .LabelCrossEntropy(["label"], ["xent2"]) \
               .AveragedLoss([], "loss2")
        input_to_grad = m.AddGradientOperators(
            ["name_x/loss1", "name_x/loss2"])

        blobs_before = count_blobs(m.net.Proto())
        optim_proto = memonger.share_grad_blobs(
            m.net,
            ["name_x/loss1", "name_x/loss2"],
            set(viewvalues(m.param_to_grad)),
            "name_x",  # "name_x//shared_gradinp_0_shared" if using "name_x/"
            share_activations=True,
            dont_share_blobs=set([
                'name_x/fc6', 'name_x/fc5',
                str(input_to_grad["name_x/fc1_w"])
            ]),
        )
        blobs_after = count_blobs(optim_proto)
        self.assertLess(blobs_after, blobs_before)
        self.assertTrue(has_blob(optim_proto, "name_x/fc6"))

        # Test networks produce exactly same gradients
        data = np.random.randn(batch_size, input_dim).astype(np.float32)
        label = np.random.randint(low=0, high=output_dim,
                                  size=(batch_size, )).astype(np.int32)
        workspace.RunNetOnce(m.param_init_net)
        workspace.FeedBlob("name_x/data", data)
        workspace.FeedBlob("name_x/label", label)
        workspace.RunNetOnce(m.net)
        loss1 = workspace.FetchBlob("name_x/loss1")
        loss2 = workspace.FetchBlob("name_x/loss2")
        grad = workspace.FetchBlob(str(input_to_grad["name_x/fc1_w"]))
        workspace.FeedBlob(str(input_to_grad["name_x/fc1_w"]), np.array([0.0]))

        workspace.RunNetOnce(optim_proto)
        optimized_loss1 = workspace.FetchBlob("name_x/loss1")
        optimized_loss2 = workspace.FetchBlob("name_x/loss2")
        optimized_grad = workspace.FetchBlob(str(
            input_to_grad["name_x/fc1_w"]))
        np.testing.assert_almost_equal(loss1, optimized_loss1)
        np.testing.assert_almost_equal(loss2, optimized_loss2)
        np.testing.assert_almost_equal(grad, optimized_grad)
Example #50
 def columns(self):
     for t in viewvalues(self.tables):
         for c in t.columns:
             yield c
Example #51
def launch_depgraph():
    global graphs, comments, sol_nb, settings, addr, ir_arch, ircfg
    # Get the current function
    addr = idc.get_screen_ea()
    func = ida_funcs.get_func(addr)

    # Init
    machine = guess_machine(addr=func.start_ea)
    mn, dis_engine, ira = machine.mn, machine.dis_engine, machine.ira

    bs = bin_stream_ida()
    mdis = dis_engine(bs, dont_dis_nulstart_bloc=True)
    ir_arch = ira(mdis.loc_db)

    # Populate symbols with ida names
    for ad, name in idautils.Names():
        if name is None:
            continue
        mdis.loc_db.add_location(name, ad)

    asmcfg = mdis.dis_multiblock(func.start_ea)

    # Generate IR
    ircfg = ir_arch.new_ircfg_from_asmcfg(asmcfg)

    # Get settings
    settings = depGraphSettingsForm(ir_arch, ircfg, mn)
    settings.Execute()

    loc_key, elements, line_nb = settings.loc_key, settings.elements, settings.line_nb
    # Simplify assignments
    for irb in list(viewvalues(ircfg.blocks)):
        irs = []
        offset = ir_arch.loc_db.get_location_offset(irb.loc_key)
        fix_stack = offset is not None and settings.unalias_stack
        for assignblk in irb:
            if fix_stack:
                stk_high = m2_expr.ExprInt(idc.get_spd(assignblk.instr.offset),
                                           ir_arch.sp.size)
                fix_dct = {
                    ir_arch.sp: mn.regs.regs_init[ir_arch.sp] + stk_high
                }

            new_assignblk = {}
            for dst, src in viewitems(assignblk):
                if fix_stack:
                    src = src.replace_expr(fix_dct)
                    if dst != ir_arch.sp:
                        dst = dst.replace_expr(fix_dct)
                dst, src = expr_simp(dst), expr_simp(src)
                new_assignblk[dst] = src
            irs.append(AssignBlock(new_assignblk, instr=assignblk.instr))
        ircfg.blocks[irb.loc_key] = IRBlock(irb.loc_db, irb.loc_key, irs)

    # Get dependency graphs
    dg = settings.depgraph
    graphs = dg.get(loc_key, elements, line_nb,
                    set([ir_arch.loc_db.get_offset_location(func.start_ea)]))

    # Display the result
    comments = {}
    sol_nb = 0

    # Register and launch
    ida_kernwin.add_hotkey("Shift-N", next_element)
    treat_element()
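# A minimal sketch of the stack-unaliasing step performed above, outside IDA:
# replace the stack pointer with its symbolic initial value plus the known
# stack delta, then simplify. Import paths assume a recent miasm layout, and
# the register names and the -8 delta are illustrative only.
from miasm.expression.expression import ExprId, ExprInt
from miasm.expression.simplifications import expr_simp

sp = ExprId("ESP", 32)
sp_init = ExprId("ESP_init", 32)
fix_dct = {sp: sp_init + ExprInt(-8, 32)}   # e.g. idc.get_spd() returned -8

src = sp + ExprInt(4, 32)
# after substitution and simplification: ESP_init + 0xFFFFFFFC (i.e. ESP_init - 4)
print(expr_simp(src.replace_expr(fix_dct)))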
Example no. 52
0
                    offset = mdis.loc_db.get_location_offset(dest.loc_key)
                    todo.append((mdis, instr, offset))

        if args.funcswatchdog is not None and args.funcswatchdog <= 0:
            finish = True

    if args.try_disasm_all:
        for a, b in done_interval.intervals:
            if b in done:
                continue
            log.debug('add func %s' % hex(b))
            todo.append((mdis, None, b))

# Generate dotty graph
all_asmcfg = AsmCFG(mdis.loc_db)
for blocks in viewvalues(all_funcs_blocks):
    all_asmcfg += blocks

log.info('generate graph file')
open('graph_execflow.dot', 'w').write(all_asmcfg.dot(offset=True))

log.info('generate intervals')

all_lines = []
total_l = 0

print(done_interval)
if args.image:
    log.info('build img')
    done_interval.show()
Example no. 53
0
    _PSetHolder_

    Dummy PSet object used to construct the Tweak object to mimic
    the config structure

    """
    def __init__(self, psetName):
        self.psetName_ = psetName
        self.parameters_ = []


#  //
# // Assistant lambda functions
#//
childPSets = lambda x: [
    value for value in viewvalues(x.__dict__)
    if value.__class__.__name__ == "PSetHolder"
]
childParameters = lambda p, x: ["%s.%s" % (p, i) for i in x.parameters_]

recursiveGetattr = lambda obj, attr: reduce(getattr, attr.split("."), obj)
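# A hypothetical illustration of the helpers above: recursiveGetattr walks a
# dotted attribute path with reduce(getattr, ...) (on Python 3, reduce comes
# from functools), and childParameters prefixes parameter names. The PSetHolder
# instances and parameter names below are made up for the example.
outer = PSetHolder("process")
inner = PSetHolder("output")
inner.parameters_ = ["fileName", "logicalFileName"]
outer.output = inner

print(recursiveGetattr(outer, "output.psetName_"))    # 'output'
print(childParameters("process.output", inner))       # ['process.output.fileName', 'process.output.logicalFileName']
print([p.psetName_ for p in childPSets(outer)])        # ['output']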


def parameterIterator(obj):
    """
    _parameterIterator_

    Util to iterate through the parameters in a PSetHolder

    """
    x = None
Example no. 54
0
    def getTotalTopLevelJobsInWMBS(self):
        inWMBS = 0
        if "AgentJobInfo" in self.data:
            for agentRequestInfo in viewvalues(self.data["AgentJobInfo"]):
                inWMBS += agentRequestInfo['status'].get('inWMBS', 0)
        return inWMBS
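# A hypothetical illustration of the data shape the method above expects:
# self.data["AgentJobInfo"] maps agent names to per-agent dicts whose
# 'status' entry may carry an 'inWMBS' count (the agent names are invented).
data = {
    "AgentJobInfo": {
        "agent1.cern.ch": {"status": {"inWMBS": 12}},
        "agent2.cern.ch": {"status": {}},   # no inWMBS yet -> counts as 0
    }
}
in_wmbs = sum(info["status"].get("inWMBS", 0)
              for info in data["AgentJobInfo"].values())
print(in_wmbs)  # 12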
Example no. 55
0
    def values(self):
        return list(viewvalues(self._assigns))
Example no. 56
0
File: cpu.py Project: tly000/miasm
    def __init__(self, reg_expr):
        self.dct_str_inv = dict((v.name, k) for k, v in viewitems(reg_expr))
        self.dct_expr = reg_expr
        self.dct_expr_inv = dict((v, k) for k, v in viewitems(reg_expr))
        reg_str = [v.name for v in viewvalues(reg_expr)]
        self.parser = literal_list(reg_str).setParseAction(self.cb_parse)
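# A small sketch of the lookup tables built above, using stand-in register
# objects (the real reg_expr maps mnemonic strings to miasm register
# expressions; everything below is illustrative).
from collections import namedtuple

Reg = namedtuple("Reg", ["name", "size"])
reg_expr = {"EAX": Reg("EAX", 32), "EBX": Reg("EBX", 32)}

dct_str_inv = dict((v.name, k) for k, v in reg_expr.items())   # register name -> key
dct_expr_inv = dict((v, k) for k, v in reg_expr.items())       # register object -> key
reg_str = [v.name for v in reg_expr.values()]                  # keywords fed to the parser

print(dct_str_inv["EBX"], dct_expr_inv[Reg("EBX", 32)], sorted(reg_str))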
Example no. 57
0
    def testRecordInCouch(self):
        """
        _testRecordInCouch_

        Verify that jobs, state transitions and fwjrs are recorded correctly.
        """
        change = ChangeState(self.config, "changestate_t")

        locationAction = self.daoFactory(classname="Locations.New")
        locationAction.execute("site1", pnn="T2_CH_CERN")

        testWorkflow = Workflow(spec=self.specUrl,
                                owner="Steve",
                                name="wf001",
                                task=self.taskName)
        testWorkflow.create()
        testFileset = Fileset(name="TestFileset")
        testFileset.create()
        testSubscription = Subscription(fileset=testFileset,
                                        workflow=testWorkflow,
                                        split_algo="FileBased")
        testSubscription.create()

        testFileA = File(lfn="SomeLFNA",
                         events=1024,
                         size=2048,
                         locations=set(["T2_CH_CERN"]))
        testFileB = File(lfn="SomeLFNB",
                         events=1025,
                         size=2049,
                         locations=set(["T2_CH_CERN"]))
        testFileA.create()
        testFileB.create()

        testFileset.addFile(testFileA)
        testFileset.addFile(testFileB)
        testFileset.commit()

        splitter = SplitterFactory()
        jobFactory = splitter(package="WMCore.WMBS",
                              subscription=testSubscription)
        jobGroup = jobFactory(files_per_job=1)[0]

        assert len(jobGroup.jobs) == 2, \
            "Error: Splitting should have created two jobs."

        testJobA = jobGroup.jobs[0]
        testJobA["user"] = "******"
        testJobA["group"] = "DMWM"
        testJobA["taskType"] = "Merge"
        testJobB = jobGroup.jobs[1]
        testJobB["user"] = "******"
        testJobB["group"] = "DMWM"
        testJobB["taskType"] = "Processing"

        change.propagate([testJobA, testJobB], "new", "none")
        change.propagate([testJobA, testJobB], "created", "new")
        change.propagate([testJobA, testJobB], "executing", "created")

        testJobADoc = change.jobsdatabase.document(testJobA["couch_record"])

        for transition in viewvalues(testJobADoc["states"]):
            self.assertTrue(isinstance(transition["timestamp"], int))

        self.assertEqual(testJobADoc["jobid"], testJobA["id"],
                         "Error: ID parameter is incorrect.")
        assert testJobADoc["name"] == testJobA["name"], \
            "Error: Name parameter is incorrect."
        assert testJobADoc["jobgroup"] == testJobA["jobgroup"], \
            "Error: Jobgroup parameter is incorrect."
        assert testJobADoc["workflow"] == testJobA["workflow"], \
            "Error: Workflow parameter is incorrect."
        assert testJobADoc["task"] == testJobA["task"], \
            "Error: Task parameter is incorrect."
        assert testJobADoc["owner"] == testJobA["owner"], \
            "Error: Owner parameter is incorrect."

        assert testJobADoc["mask"]["FirstEvent"] == testJobA["mask"]["FirstEvent"], \
            "Error: First event in mask is incorrect."
        assert testJobADoc["mask"]["LastEvent"] == testJobA["mask"]["LastEvent"], \
            "Error: Last event in mask is incorrect."
        assert testJobADoc["mask"]["FirstLumi"] == testJobA["mask"]["FirstLumi"], \
            "Error: First lumi in mask is incorrect."
        assert testJobADoc["mask"]["LastLumi"] == testJobA["mask"]["LastLumi"], \
            "Error: First lumi in mask is incorrect."
        assert testJobADoc["mask"]["FirstRun"] == testJobA["mask"]["FirstRun"], \
            "Error: First run in mask is incorrect."
        assert testJobADoc["mask"]["LastEvent"] == testJobA["mask"]["LastRun"], \
            "Error: First event in mask is incorrect."

        assert len(testJobADoc["inputfiles"]) == 1, \
            "Error: Input files parameter is incorrect."

        testJobBDoc = change.jobsdatabase.document(testJobB["couch_record"])

        assert testJobBDoc["jobid"] == testJobB["id"], \
            "Error: ID parameter is incorrect."
        assert testJobBDoc["name"] == testJobB["name"], \
            "Error: Name parameter is incorrect."
        assert testJobBDoc["jobgroup"] == testJobB["jobgroup"], \
            "Error: Jobgroup parameter is incorrect."

        assert testJobBDoc["mask"]["FirstEvent"] == testJobB["mask"]["FirstEvent"], \
            "Error: First event in mask is incorrect."
        assert testJobBDoc["mask"]["LastEvent"] == testJobB["mask"]["LastEvent"], \
            "Error: Last event in mask is incorrect."
        assert testJobBDoc["mask"]["FirstLumi"] == testJobB["mask"]["FirstLumi"], \
            "Error: First lumi in mask is incorrect."
        assert testJobBDoc["mask"]["LastLumi"] == testJobB["mask"]["LastLumi"], \
            "Error: First lumi in mask is incorrect."
        assert testJobBDoc["mask"]["FirstRun"] == testJobB["mask"]["FirstRun"], \
            "Error: First run in mask is incorrect."
        assert testJobBDoc["mask"]["LastEvent"] == testJobB["mask"]["LastRun"], \
            "Error: First event in mask is incorrect."

        assert len(testJobBDoc["inputfiles"]) == 1, \
            "Error: Input files parameter is incorrect."

        changeStateDB = self.couchServer.connectDatabase(
            dbname="changestate_t/jobs")
        allDocs = changeStateDB.document("_all_docs")

        self.assertEqual(len(allDocs["rows"]), 3,
                         "Error: Wrong number of documents.")

        couchJobDoc = changeStateDB.document("1")

        assert couchJobDoc["name"] == testJobA["name"], \
            "Error: Name is wrong"
        assert len(couchJobDoc["inputfiles"]) == 1, \
            "Error: Wrong number of input files."

        result = changeStateDB.loadView("JobDump", "jobsByWorkflowName")

        self.assertEqual(len(result["rows"]), 2,
                         "Error: Wrong number of rows.")
        for row in result["rows"]:
            couchJobDoc = changeStateDB.document(row["value"]["id"])
            self.assertEqual(couchJobDoc["_rev"], row["value"]["rev"],
                             "Error: Rev is wrong.")

        return
Example no. 58
0
    def has_blobs(self):
        return all(field.has_blobs() for field in viewvalues(self.fields))
Example no. 59
0
File: api.py Project: vuchau/dedupe
    def _checkData(self, data):
        if len(data) == 0:
            raise ValueError('Dictionary of records is empty.')

        self.data_model.check(next(iter(viewvalues(data))))
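# A minimal sketch of the validation pattern above: peek at one arbitrary
# record of the mapping and check it before doing any real work. The field
# names and the check itself are invented for illustration.
def check_data(data, required_fields=("name", "address")):
    if len(data) == 0:
        raise ValueError('Dictionary of records is empty.')
    sample = next(iter(data.values()))   # same trick as next(iter(viewvalues(data)))
    missing = [f for f in required_fields if f not in sample]
    if missing:
        raise ValueError('Records are missing fields: %s' % ', '.join(missing))

check_data({1: {"name": "Ann", "address": "Main St"}})   # passes silently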
Example no. 60
0
    def gen_c_assignments(self, assignblk):
        """
        Return C information used to generate the C code of the @assignblk
        @assignblk: an AssignBlock instance
        """
        c_var = []
        c_main = []
        c_mem = []
        c_updt = []
        c_prefetch = []

        dst_index = {8: 0, 16: 0, 32: 0, 64: 0, 128: 0}
        dst_var = {}

        prefetchers = self.get_mem_prefetch(assignblk)

        for expr, prefetcher in viewitems(prefetchers):
            str_src = self.id_to_c(expr)
            str_dst = self.id_to_c(prefetcher)
            c_prefetch.append('%s = %s;' % (str_dst, str_src))

        for var in viewvalues(prefetchers):
            if var.size <= self.translator.NATIVE_INT_MAX_SIZE:
                c_var.append("uint%d_t %s;" % (var.size, var))
            else:
                c_var.append("bn_t %s; // %d" % (var, var.size))

        for dst, src in viewitems(assignblk):
            src = src.replace_expr(prefetchers)
            if dst == self.ir_arch.IRDst:
                pass
            elif isinstance(dst, ExprId):
                new_dst = self.add_local_var(dst_var, dst_index, dst)
                if dst in self.ir_arch.arch.regs.regs_flt_expr:
                    # Don't mask float assignment
                    c_main.append('%s = (%s);' %
                                  (self.id_to_c(new_dst), self.id_to_c(src)))
                elif new_dst.size <= self.translator.NATIVE_INT_MAX_SIZE:
                    c_main.append('%s = (%s)&%s;' %
                                  (self.id_to_c(new_dst), self.id_to_c(src),
                                   SIZE_TO_MASK[src.size]))
                else:
                    c_main.append(
                        '%s = bignum_mask(%s, %d);' %
                        (self.id_to_c(new_dst), self.id_to_c(src), src.size))
            elif isinstance(dst, ExprMem):
                ptr = dst.ptr.replace_expr(prefetchers)
                if ptr.size <= self.translator.NATIVE_INT_MAX_SIZE:
                    new_dst = ExprMem(ptr, dst.size)
                    str_dst = self.id_to_c(new_dst).replace(
                        'MEM_LOOKUP', 'MEM_WRITE')
                    c_mem.append('%s, %s);' %
                                 (str_dst[:-1], self.id_to_c(src)))
                else:
                    # ptr.size > NATIVE_INT_MAX_SIZE: the pointer is a bignum
                    ptr_str = self.id_to_c(ptr)
                    if src.size <= self.translator.NATIVE_INT_MAX_SIZE:
                        c_mem.append(
                            'MEM_WRITE_BN_INT(jitcpu, %d, %s, %s);' %
                            (src.size, ptr_str, self.id_to_c(src)))
                    else:
                        c_mem.append(
                            'MEM_WRITE_BN_BN(jitcpu, %d, %s, %s);' %
                            (src.size, ptr_str, self.id_to_c(src)))
            else:
                raise ValueError("Unknown dst")

        for dst, new_dst in viewitems(dst_var):
            if dst == self.ir_arch.IRDst:
                continue

            c_updt.append('%s = %s;' %
                          (self.id_to_c(dst), self.id_to_c(new_dst)))
            if dst.size <= self.translator.NATIVE_INT_MAX_SIZE:
                c_var.append("uint%d_t %s;" % (new_dst.size, new_dst))
            else:
                c_var.append("bn_t %s; // %d" % (new_dst, new_dst.size))

        return c_prefetch, c_var, c_main, c_mem, c_updt
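# A rough sketch (not the project's actual emitter) of how the five lists
# returned above could be stitched into one C block: declarations first, then
# prefetches, main computations, memory writes and finally register write-back.
# The C statements in the usage example are invented for illustration.
def assemble_c_block(c_prefetch, c_var, c_main, c_mem, c_updt, indent="    "):
    lines = []
    for chunk in (c_var, c_prefetch, c_main, c_mem, c_updt):
        lines.extend(chunk)
    return "\n".join(indent + line for line in lines)

print(assemble_c_block(
    c_prefetch=['pfmem08_0 = MEM_LOOKUP_08(jitcpu, RAX);'],
    c_var=['uint8_t pfmem08_0;', 'uint64_t rbx_new;'],
    c_main=['rbx_new = (pfmem08_0)&0xFF;'],
    c_mem=[],
    c_updt=['RBX = rbx_new;'],
))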