Example #1
    def collate(self, encoded, beams):
        ''' Collate beams into a batch '''
        batch = []
        cache = []
        beam_map = {}
        encoded_batch = []
        for i, beam in enumerate(beams):
            hypothesis_map = {}
            for hypothesis in beam.hypotheses:
                if beam.finished_decoding(hypothesis, self.eos_idx):
                    continue

                batch_idx = len(batch)
                cache.append(hypothesis.cache)
                encoded_batch.append(encoded[i])
                hypothesis_map[hypothesis] = batch_idx
                batch.append(hypothesis.sequence)

            if hypothesis_map:
                beam_map[beam] = hypothesis_map

        batch = torch.LongTensor(batch)
        encoded_batch = utils.cat(encoded_batch)
        cache = utils.cat(cache) if not self.config.disable_cache else None
        return encoded_batch, batch, beam_map, cache
Example #2
    def losses(
            self,
            anchors,
            pred_objectness_logits: List[torch.Tensor],
            gt_labels: List[torch.Tensor],
            pred_anchor_deltas: List[torch.Tensor],
            gt_boxes,
    ):
        num_images = len(gt_labels)
        gt_labels = torch.stack(gt_labels)  # (N, sum(Hi*Wi*Ai))
        anchors = type(anchors[0]).cat(anchors).tensor  # Ax4
        gt_anchor_deltas = [self.box2box_transform.get_deltas(anchors, k) for k in gt_boxes]
        gt_anchor_deltas = torch.stack(gt_anchor_deltas)  # (N, sum(Hi*Wi*Ai), 4)

        pos_mask = gt_labels == 1

        localization_loss = smooth_l1_loss(
            cat(pred_anchor_deltas, dim=1)[pos_mask],
            gt_anchor_deltas[pos_mask],
            self.smooth_l1_beta,
            reduction="sum",
        )
        valid_mask = gt_labels >= 0
        objectness_loss = F.binary_cross_entropy_with_logits(
            cat(pred_objectness_logits, dim=1)[valid_mask],
            gt_labels[valid_mask].to(torch.float32),
            reduction="sum",
        )
        normalizer = self.batch_size_per_image * num_images
        return {
            "loss_rpn_cls": objectness_loss / normalizer,
            "loss_rpn_loc": localization_loss / normalizer,
        }
Example #3
 def cat(self, other, axis=1):
     x, y = self._x, other._x
     x = x.reshape(x.shape + (1, ) * (axis + 1 - x.ndim))
     y = y.reshape(y.shape + (1, ) * (axis + 1 - y.ndim))
     x = cat((x, y), axis=axis)
     t = self._t
     if axis == 0:
         t = cat((t, other._t))
     return self.__class__(x, t, *self.args, **self.kw)
Example #4
    def send_command(self, command, data=None):
        '''
        Push a command to the server.
        '''
        _d = self.delim

        if data:
            pdata = pickle.dumps(data)
            self.push_socket.send(cat(command,_d,pdata))
            #logging.debug(command)
        else:
            self.push_socket.send(cat(command,_d))
Example #5
    def send_command(self, command, data=None):
        '''
        Push a command to the server.
        '''
        _d = self.delim

        if data:
            pdata = pickle.dumps(data)
            self.push_socket.send(cat(command, _d, pdata))
            #logging.debug(command)
        else:
            self.push_socket.send(cat(command, _d))
Example #6
 def convert_to_roi_format(self, boxes):
     # concat_boxes = cat([b.bbox for b in boxes], dim=0)
     concat_boxes = cat(boxes, dim=0)
     device, dtype = concat_boxes.device, concat_boxes.dtype
     ids = cat(
         [
             torch.full((len(b), 1), i, dtype=dtype, device=device)
             for i, b in enumerate(boxes)
         ],
         dim=0,
     )
     rois = torch.cat([ids, concat_boxes], dim=1)
     return rois
Example #7
def mask_rcnn_loss(pred_mask_logits: torch.Tensor, instances: List[Instances]):
    cls_agnostic_mask = pred_mask_logits.size(1) == 1
    total_num_masks = pred_mask_logits.size(0)
    mask_side_len = pred_mask_logits.size(2)
    assert pred_mask_logits.size(2) == pred_mask_logits.size(3), "Mask prediction must be square!"

    gt_classes = []
    gt_masks = []
    for instances_per_image in instances:
        if len(instances_per_image) == 0:
            continue
        if not cls_agnostic_mask:
            gt_classes_per_image = instances_per_image.gt_classes.to(dtype=torch.int64)
            gt_classes.append(gt_classes_per_image)

        gt_masks_per_image = instances_per_image.gt_masks.crop_and_resize(
            instances_per_image.proposal_boxes.tensor, mask_side_len
        ).to(device=pred_mask_logits.device)
        # A tensor of shape (N, M, M), N=#instances in the image; M=mask_side_len
        gt_masks.append(gt_masks_per_image)

    if len(gt_masks) == 0:
        return pred_mask_logits.sum() * 0

    gt_masks = cat(gt_masks, dim=0)

    if cls_agnostic_mask:
        pred_mask_logits = pred_mask_logits[:, 0]
    else:
        indices = torch.arange(total_num_masks)
        gt_classes = cat(gt_classes, dim=0)
        pred_mask_logits = pred_mask_logits[indices, gt_classes]

    if gt_masks.dtype == torch.bool:
        gt_masks_bool = gt_masks
    else:
        # Here we allow gt_masks to be float as well (depending on the implementation of rasterize())
        gt_masks_bool = gt_masks > 0.5
    gt_masks = gt_masks.to(dtype=torch.float32)

    # Log the training accuracy (using gt classes and 0.5 threshold)
    mask_incorrect = (pred_mask_logits > 0.0) != gt_masks_bool
    mask_accuracy = 1 - (mask_incorrect.sum().item() / max(mask_incorrect.numel(), 1.0))
    num_positive = gt_masks_bool.sum().item()
    false_positive = (mask_incorrect & ~gt_masks_bool).sum().item() / max(
        gt_masks_bool.numel() - num_positive, 1.0
    )
    false_negative = (mask_incorrect & gt_masks_bool).sum().item() / max(num_positive, 1.0)

    mask_loss = F.binary_cross_entropy_with_logits(pred_mask_logits, gt_masks, reduction="mean")
    return mask_loss
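
When masks are class-specific, the loss above keeps exactly one mask channel per instance by advanced indexing with the ground-truth classes. A minimal, self-contained sketch of that indexing step (the shapes below are illustrative, not taken from the source):

import torch

pred_mask_logits = torch.randn(3, 5, 7, 7)             # (num_instances, num_classes, M, M)
gt_classes = torch.tensor([4, 0, 2])                    # ground-truth class of each instance
indices = torch.arange(3)
per_instance = pred_mask_logits[indices, gt_classes]    # (3, 7, 7): channel gt_classes[i] of instance i
assert per_instance.shape == (3, 7, 7)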
Example #8
    def __init__(
        self,
        box2box_transform,
        pred_class_logits,
        pred_proposal_deltas,
        proposals,
        smooth_l1_beta=0,
    ):
        self.box2box_transform = box2box_transform
        self.num_preds_per_image = [len(p) for p in proposals]
        self.pred_class_logits = pred_class_logits
        self.pred_proposal_deltas = pred_proposal_deltas
        self.smooth_l1_beta = smooth_l1_beta
        self.image_shapes = [x.image_size for x in proposals]

        if len(proposals):
            box_type = type(proposals[0].proposal_boxes)
            # cat(..., dim=0) concatenates over all images in the batch
            self.proposals = box_type.cat(
                [p.proposal_boxes for p in proposals])
            assert (not self.proposals.tensor.requires_grad
                    ), "Proposals should not require gradients!"

            # The following fields should exist only when training.
            if proposals[0].has("gt_boxes"):
                self.gt_boxes = box_type.cat([p.gt_boxes for p in proposals])
                assert proposals[0].has("gt_classes")
                self.gt_classes = cat([p.gt_classes for p in proposals], dim=0)
        else:
            self.proposals = Boxes(
                torch.zeros(0, 4, device=self.pred_proposal_deltas.device))
        self._no_instances = len(proposals) == 0  # no instances found
Example #9
def is_realizable(test):
    spec_status = cat(test)[-1].strip()
    if spec_status == 'realizable':
        return True
    if spec_status == 'unrealizable':
        return False
    assert 0, 'spec status is unknown'
Example #10
 def check_size(x, n):
     if x.size == n + 1:
         return x
     elif x.size == n:
         return cat((x[:1], 0.5 * (x[:-1] + x[1:]), x[-1:]))
     else:
         raise ValueError('axes length must be n + 1 or n')
Example #11
def assign_boxes_to_levels(box_lists, min_level: int, max_level: int,
                           canonical_box_size: int, canonical_level: int):
    """
    Map each box in `box_lists` to a feature map level index and return the assignment
    vector.

    Args:
        box_lists (list[Boxes] | list[RotatedBoxes]): A list of N Boxes or N RotatedBoxes,
            where N is the number of images in the batch.
        min_level (int): Smallest feature map level index. The input is considered index 0,
            the output of stage 1 is index 1, and so on.
        max_level (int): Largest feature map level index.
        canonical_box_size (int): A canonical box size in pixels (sqrt(box area)).
        canonical_level (int): The feature map level index on which a canonically-sized box
            should be placed.

    Returns:
        A tensor of length M, where M is the total number of boxes aggregated over all
            N batch images. The memory layout corresponds to the concatenation of boxes
            from all images. Each element is the feature map index, as an offset from
            `min_level`, for the corresponding box (so value i means the box is at
            `min_level + i`).
    """
    eps = sys.float_info.epsilon
    box_sizes = torch.sqrt(cat([boxes.area() for boxes in box_lists]))

    level_assignments = torch.floor(canonical_level +
                                    torch.log2(box_sizes / canonical_box_size +
                                               eps))
    # clamp level to (min, max), in case the box size is too large or too small
    # for the available feature maps
    level_assignments = torch.clamp(level_assignments,
                                    min=min_level,
                                    max=max_level)
    return level_assignments.to(torch.int64) - min_level
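
The mapping rule in the docstring can be checked by hand: a box whose size equals canonical_box_size lands on canonical_level, halving the size drops one level, and the result is clamped to the available levels. A small worked example, assuming common FPN settings (these particular defaults are an illustration, not part of the source):

import math

def level_for(box_size, min_level=2, max_level=5,
              canonical_box_size=224, canonical_level=4):
    level = math.floor(canonical_level + math.log2(box_size / canonical_box_size))
    return max(min_level, min(max_level, level)) - min_level

print(level_for(224))  # 2 -> a 224 px box sits on level 4, i.e. offset 2 from min_level
print(level_for(112))  # 1 -> half the canonical size drops one level
print(level_for(896))  # 3 -> oversized boxes are clamped to max_level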
Example #12
def ellipse():
    values = []
    if Random.bool():
        values.extend([shape_radius(), shape_radius()])
        if Random.bool():
            values.extend(["at", position()])
    return "ellipse({})".format(cat(values))
Example #13
def srcset():
    values = [image_url()]
    if Random.bool():
        values.append("{}w".format(Random.integer()))
    if Random.bool():
        values.append("{}x".format(Random.integer()))
    return cat(values)
Example #14
def circle():
    values = []
    if Random.bool():
        values.append(shape_radius())
        if Random.bool():
            values.extend(["at", position()])
    return "circle({})".format(cat(values))
Example #15
def getFile(in_file, out_file, aws_prog, fastq_dump_prog, openssl_prog, pw):
	## Set flags
	file_downloaded = False
	file_decrypted  = False
	
	## Convert to list so we can handle multiple files
	in_file = in_file.split(';')
	
	## Download file (if needed)
	if (in_file[0][0:5] == 's3://'):
		in_file = downloadS3File(in_file, aws_prog)
		file_downloaded = True
	elif(in_file[0][0:3] == 'SRR'):
		in_file = downloadSRAFile(in_file, fastq_dump_prog)
		file_downloaded = True

	## Decrypt file (if needed)
	if (in_file[0][-4:] == '.enc'):
		in_file = utils.decryptFile(in_file, openssl_prog, pw)
		file_decrypted = True
	
	## If we downloaded & decrypted, remove the encrypted file(s)
	if (file_downloaded and file_decrypted):
		[os.remove(s+'.enc') for s in in_file]
	
	## Merge if multiple downloaded files, only keep merged file
	if (len(in_file) > 1 and file_downloaded):
		utils.cat(in_file, out_file)
		[os.remove(s) for s in in_file]
	
	## Merge if multiple local files, keep unmerged files
	elif (len(in_file) > 1 and not file_downloaded):
		utils.cat(in_file, out_file)
	
	## Move if single downloaded file
	elif (len(in_file) == 1 and file_downloaded):
		os.rename(in_file[0], out_file)
	
	## Make a copy if single local file
	elif (len(in_file) == 1 and not file_downloaded):
		#cmd = 'cp '+in_file[0]+' '+out_file
		#subprocess.run(cmd, shell=True, check=True)
		shutil.copy2(in_file[0], out_file)
		
	
	## If everything runs successfully, return 0 
	return(0)
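
utils.cat(in_file, out_file) is used above to merge several downloaded or local files into one output file. A minimal sketch of such a helper, assuming that behavior (only the call signature comes from the source):

import shutil

def cat(in_files, out_file):
    # concatenate the input files, in order, into a single output file
    with open(out_file, 'wb') as out:
        for path in in_files:
            with open(path, 'rb') as src:
                shutil.copyfileobj(src, out)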
Example #16
 def _mul(a, b):
     isnmbr = lambda n: isinstance(n, Number)
     if 1 in [a, b]:
         return a if b == 1 else b
     elif -1 in [a, b] and all(map(isnmbr, [a, b])):
         return -a if b == -1 else -b
     else:
         return Mult(*cat(a, b, flatten=Mult))
Example #17
 def generate(self, _):
     if Random.bool():
         self.value = "accumulate"
     else:
         values = ["new"]
         if Random.bool():
             values.extend([Random.integer() for _ in range(4)])
         self.value = cat(values)
Example #18
 def _mul(a, b):
     isnmbr = lambda n: isinstance(n, Number)
     if 1 in [a, b]:
         return a if b == 1 else b
     elif -1 in [a, b] and all(map(isnmbr, [a, b])):
         return -a if b == -1 else -b
     else:
         return Mult(*cat(a, b, flatten=Mult))
Example #19
def border_radius():
    num = Random.range(1, 4)
    values = [length_percentage() for _ in range(num)]
    if Random.bool():
        values.append("/")
        num = Random.range(1, 4)
        values.extend([length_percentage() for _ in range(num)])
    return cat(values)
Example #20
    def forward(self, z, y):
        # TODO CHECK
        z = utils.cat((z, y), -1)
        z = self.linear(z)
        z = torch.reshape(
            z, (*z.shape[:-1], 1, int(self.x_size / 8), int(self.x_size / 8)))

        loc_img = self.net(z)
        return loc_img
Example #21
 def generate(self, _):
     values = [
         Random.choice([
             "xMinYMin", "xMidYMin", "xMaxYMin", "xMinYMid", "xMidYMid",
             "xMaxYMid", "xMinYMax", "xMidYMax", "xMaxYMax"
         ])
     ]
     if Random.bool():
         values.append(Random.choice(["meet", "slice"]))
     self.value = cat(values)
Example #22
def path():
    num = list_size()
    if num == 0:
        return ""

    values = [move_to()]
    for _ in range(num - 1):
        values.append(path_cmd())

    return cat(values)
Example #23
def position():
    c = Random.selector(3)
    if c == 0:
        values = [Random.choice(["left", "center", "right"])]
        if Random.bool():
            values.append(Random.choice(["top", "center", "bottom"]))
        return cat(values)
    elif c == 1:
        values = [
            Random.choice(["left", "center", "right"]),
            length_percentage()
        ]
        if Random.bool():
            values.extend([
                Random.choice(["top", "center", "bottom"]),
                length_percentage()
            ])
        return cat(values)
    else:
        return cat([length_percentage(), length_percentage()])
Example #24
    def _overlap(self, bedFnp):
        printt("******************* GWAS overlap")
        self._setupOverlap()

        cresFnp = paths.path(self.assembly, "extras", "cREs.sorted.bed")
        if not os.path.exists(cresFnp):
            Utils.sortFile(paths.path(self.assembly, "raw", "cREs.bed"),
                           cresFnp)

        printt("running bedtools intersect...")
        cmds = [cat(bedFnp),
                '|', "cut -f -4,11-",
                '|', "bedtools intersect",
                "-a", "-",
                "-b", cresFnp,
                "-wo" ]
        snpsIntersecting = Utils.runCmds(cmds)
        print("example", snpsIntersecting[0].rstrip('\n').split('\t'))

        printt("rewriting...")
        outF = StringIO.StringIO()
        count = {}
        for r in snpsIntersecting:
            toks = r.rstrip('\n').split('\t')
            snp = toks[3]
            authorPubmedTrait = toks[4].replace('-', '_')
            accession = toks[9]

            if '_' not in authorPubmedTrait:
                print(r)
                print(toks)
                raise Exception("bad authorPubmedTrait?")
            if not snp.startswith("rs"):
                print(r)
                print(toks)
                raise Exception("bad rs?")
            if not accession.startswith("EH3"):
                print(r)
                print(toks)
                raise Exception("bad line?")
            outF.write('\t'.join([authorPubmedTrait, accession, snp]) + '\n')
            if authorPubmedTrait not in count: count[authorPubmedTrait] = 0
            count[authorPubmedTrait] += 1
        print("example", '\t'.join([authorPubmedTrait, accession, snp]))
        for k, v in count.iteritems():
            print("%s: %d" % (k, v))
        outF.seek(0)

        printt("copying into DB...")
        cols = "authorPubmedTrait accession snp".split(' ')
        self.curs.copy_from(outF, self.tableNameOverlap, '\t', columns=cols)
        importedNumRows(self.curs)

        makeIndex(self.curs, self.tableNameOverlap, ["authorPubmedTrait"])
Example #25
    def forward(self, x, y):
        z = x - 0.222
        z = z / 0.156  # scale the shifted value; dividing x here would discard the subtraction above
        z = self.net(z)
        z = z.view(z.shape[0], -1)

        z = utils.cat((z, y), -1)

        z = self.linear(z)
        z_loc = self.loc(z)
        z_scale = torch.exp(self.scale(z))
        return z_loc, z_scale
Example #26
 def generate(self, _):
     if Random.bool():
         self.value = "normal"
     else:
         selectors = Random.selectors(3)
         values = []
         if selectors[0]:
             values.append("fill")
         if selectors[1]:
             values.append("stroke")
         if selectors[2]:
             values.append("markers")
         self.value = cat(values)
Example #27
def simpmult(cursor):
    """
        Takes the terms in an expression, separates numerators and denominators, and simplifies the result.

        e.g. 3 * x / 4 * y = 3xy / 4
    """
    cursor = yield from simpchildren(cursor)

    # if there are fractions, combine into one big fraction and delegate to simpfrac

    constant = Nmbr(1)
    factors = OrderedDefaultDict(lambda: Nmbr(0))

    # flatten any nested mults
    node = flatten(cursor.node)
    cursor = yield from cursor.replaceyield(node)

    # count factors
    for factor in node:
        if isinstance(factor, Nmbr):
            constant *= factor.value
        elif isinstance(factor, Exp):
            factors[factor.base] += factor.exponent
        else:
            factors[factor] += 1

    # reconstruct
    factors = map(pow, factors.keys(), factors.values())
    variables = reduce(mul, factors, 1)
    cursor = yield from cursor.replaceyield(constant * variables)

    # after constant evaluation step
    constant = evalexpr(constant)
    cursor = yield from cursor.replaceyield(constant * variables)

    cursor = yield from simpchildren(cursor)

    # if there is a fraction, join into fraction and delegate to simpfrac
    isfrac = lambda obj: isinstance(obj, Frac)
    if any(map(isfrac, cat(cursor.node))):
        getnumer = lambda node: getattr(node, 'numer', node)
        getdenom = lambda node: getattr(node, 'denom', 1)

        # TODO: Simplification of entire line before separating into fractions
        numerator = reduce(mul, map(getnumer, cursor.node))
        denominator = reduce(mul, map(getdenom, cursor.node))

        cursor = yield from cursor.replaceyield(numerator / denominator)
        cursor = yield from simpfrac(cursor)

    return cursor
Example #28
 def single():
     values = [
         "'{}'".format(
             Random.choice([
                 "smcp", "c2sc", "zero", "hist", "liga", "tnum", "frac",
                 "swsh", "ss07", "dlig", "vert", "hwid", "twid", "qwid",
                 "kern", "onum"
             ]))
     ]
     if Random.bool():
         if Random.bool():
             values.append(Random.choice(["on", "off"]))
         else:
             values.append(Random.integer())
     return cat(values)
Example #29
def mask_rcnn_inference(pred_mask_logits: torch.Tensor, pred_instances: List[Instances]):
    cls_agnostic_mask = pred_mask_logits.size(1) == 1

    if cls_agnostic_mask:
        mask_probs_pred = pred_mask_logits.sigmoid()
    else:
        # Select masks corresponding to the predicted classes
        num_masks = pred_mask_logits.shape[0]
        class_pred = cat([i.pred_classes for i in pred_instances])
        indices = torch.arange(num_masks, device=class_pred.device)
        mask_probs_pred = pred_mask_logits[indices, class_pred][:, None].sigmoid()
    # mask_probs_pred.shape: (B, 1, Hmask, Wmask)

    num_boxes_per_image = [len(i) for i in pred_instances]
    mask_probs_pred = mask_probs_pred.split(num_boxes_per_image, dim=0)

    for prob, instances in zip(mask_probs_pred, pred_instances):
        instances.pred_masks = prob  # (1, Hmask, Wmask)
Example #30
def convert_boxes_to_pooler_format(box_lists):
    """
    Convert all boxes in `box_lists` to the low-level format used by ROI pooling ops
    (see description under Returns).

    Args:
        box_lists (list[Boxes] | list[RotatedBoxes]):
            A list of N Boxes or N RotatedBoxes, where N is the number of images in the batch.

    Returns:
        When input is list[Boxes]:
            A tensor of shape (M, 5), where M is the total number of boxes aggregated over all
            N batch images.
            The 5 columns are (batch index, x0, y0, x1, y1), where batch index
            is the index in [0, N) identifying which batch image the box with corners at
            (x0, y0, x1, y1) comes from.
        When input is list[RotatedBoxes]:
            A tensor of shape (M, 6), where M is the total number of boxes aggregated over all
            N batch images.
            The 6 columns are (batch index, x_ctr, y_ctr, width, height, angle_degrees),
            where batch index is the index in [0, N) identifying which batch image the
            rotated box (x_ctr, y_ctr, width, height, angle_degrees) comes from.
    """
    def fmt_box_list(box_tensor, batch_index):
        repeated_index = torch.full((len(box_tensor), 1),
                                    batch_index,
                                    dtype=box_tensor.dtype,
                                    device=box_tensor.device)
        return cat((repeated_index, box_tensor), dim=1)

    pooler_fmt_boxes = cat([
        fmt_box_list(box_list.tensor, i)
        for i, box_list in enumerate(box_lists)
    ],
                           dim=0)

    return pooler_fmt_boxes
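
An illustrative usage sketch of the function above, assuming detectron2-style Boxes and the module's own cat import: two images with 2 and 3 boxes produce a (5, 5) tensor whose first column is the batch index (the values shown are what the shapes imply, not output from the source project):

import torch
from detectron2.structures import Boxes

boxes_per_image = [Boxes(torch.rand(2, 4)), Boxes(torch.rand(3, 4))]
pooler_fmt = convert_boxes_to_pooler_format(boxes_per_image)
print(pooler_fmt.shape)           # torch.Size([5, 5])
print(pooler_fmt[:, 0].tolist())  # [0.0, 0.0, 1.0, 1.0, 1.0]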
Example #31
 def __hash__(self):
     return hash(cat(self, self.__class__))
Example #32
def key():
	global _key
	if _key is None:
		_key = cat("~/.ssh/id_rsa.pub")
	return _key
Example #33
print inFnp

genePredFnp = os.path.splitext(inFnp)[0] + '.genePred'
cmds = [Dirs.ToolsFnp("ucsc.2016-02Feb-16/gtfToGenePred"),
        inFnp, genePredFnp]
Utils.runCmds(cmds, True)
print genePredFnp

bedPlusFnp = os.path.splitext(inFnp)[0] + '.bedPlus'
cmds = [Dirs.ToolsFnp("ucsc.2016-02Feb-16/genePredToBigGenePred"),
        genePredFnp, bedPlusFnp]
Utils.runCmds(cmds, True)
print bedPlusFnp

bedPlusSortedFnp = os.path.splitext(inFnp)[0] + '.sorted.bedPlus'
cmds = [cat(bedPlusFnp),
        '|', "LC_COLLATE=C sort -k1,1 -k2,2n",
        '>', bedPlusSortedFnp]
Utils.runCmds(cmds, True)
print bedPlusFnp

bigPredFnp = os.path.splitext(inFnp)[0] + '.bigGenePred'
cmds = [Dirs.ToolsFnp("ucsc.2016-02Feb-16/bedToBigBed"),
        "-type=bed12+8",
        "-tab",
        "-as=" + Dirs.ToolsFnp("ucsc.2016-02Feb-16/bigGenePred.as"),
        bedPlusSortedFnp,
        AllHumanDataset.chr_lengths,
        bigPredFnp]
Utils.runCmds(cmds, True)
print bigPredFnp
Example #34
def test_cat():
    assert cat([1, 2, 3], [4, 5], [6]) == (1,2,3,4,5,6)
    assert cat([1, 2, 3], [4, 5], 6) == (1,2,3,4,5,6)
Example #35
def test_cat():
    assert cat([1, 2, 3], [4, 5], [6]) == (1, 2, 3, 4, 5, 6)
    assert cat([1, 2, 3], [4, 5], 6) == (1, 2, 3, 4, 5, 6)
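
These tests pin down the flattening behavior of this cat: iterable arguments contribute their elements, scalars pass through unchanged, and the result is a tuple. A minimal sketch consistent with the assertions (the real utility may differ, e.g. the flatten= keyword seen in other examples is not handled here):

def cat(*args):
    out = []
    for arg in args:
        try:
            out.extend(arg)   # iterables contribute their elements
        except TypeError:
            out.append(arg)   # scalars are appended as-is
    return tuple(out)

assert cat([1, 2, 3], [4, 5], 6) == (1, 2, 3, 4, 5, 6)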
Example #36
__author__ = 'Thomas Kountis'


from utils import cat

MIN_EPHEMERAL_PORT = int(cat("/proc/sys/net/ipv4/ip_local_port_range", default="32788").split("\t")[0])
TCP_FLAG_ACK = '.'


class UnifiedPacket:

    def __init__(self):
        self.src = None
        self.src_port = None
        self.dst = None
        self.dst_port = None
        self.flags = None
        self.timestamp = None
        self.ack = None
        self.sequence = None
        self.length = None

    def is_outgoing(self):
        return self.src_port >= MIN_EPHEMERAL_PORT

    def remote_ip(self):
        if self.src_port < MIN_EPHEMERAL_PORT:
            return self.src

        return self.dst
Example #37
__author__ = 'Thomas Kountis'

from utils import cat

MIN_EPHEMERAL_PORT = int(
    cat("/proc/sys/net/ipv4/ip_local_port_range",
        default="32788").split("\t")[0])
TCP_FLAG_ACK = '.'


class UnifiedPacket:
    def __init__(self):
        self.src = None
        self.src_port = None
        self.dst = None
        self.dst_port = None
        self.flags = None
        self.timestamp = None
        self.ack = None
        self.sequence = None
        self.length = None

    def is_outgoing(self):
        return self.src_port >= MIN_EPHEMERAL_PORT

    def remote_ip(self):
        if self.src_port < MIN_EPHEMERAL_PORT:
            return self.src

        return self.dst
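
Here cat reads a file and returns its text, falling back to a default when the file cannot be read; /proc/sys/net/ipv4/ip_local_port_range holds two tab-separated port numbers (e.g. "32768\t60999" on a typical Linux host), so split("\t")[0] gives the lower bound of the ephemeral range. A minimal sketch of such a helper, assuming that behavior (only the call site comes from the source):

def cat(path, default=None):
    # return the file's contents, or the supplied default if it cannot be read
    try:
        with open(path) as f:
            return f.read().strip()
    except IOError:
        if default is None:
            raise
        return default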
Example #38
 def _add(a, b):
     if 0 in [a, b]:
         return a if b == 0 else b
     else:
         return Plus(*cat(a, b, flatten=Plus))
Example #39
 def __hash__(self):
     # returns an order agnostic hash
     sortedhashes = sorted(map(hash, self))
     sortednode = tuple(cat(sortedhashes, self.__class__))
     return hash(sortednode)
Example #40
 def __str__(self):
     return stringify(cat(list(map(get_object_id, self.ths))))
Example #41
 def upper(self):
     children = cat(self.left_siblings, (self.node,), self.right_siblings)
     return self.upnode.__class__(*children)