Example #1
class ODCHeader(Packable):
    '''
    Sent over direct IM connections for text messages, typing notifications,
    and photos.
    '''

    # All DirectIM communication is preceded by this header:

    fmt = strlist('''
        version    4s      # always ODC2
        hdrlen     H       # length of header
        one        H       # 1
        six        H       # 6
        zero       H       # 0
        cookie     Q       # rendezvous cookie (Q = 8 bytes)
        null       Q
        length     I
        encoding   H
        subset     H
        flags      I
        zero       I
        screenname 16s
        null2      16s ''')

    @classmethod
    def make(cls, *a, **k):
        'Makes a packable with default values filled in.'

        k.update(
            dict(version='ODC2',
                 hdrlen=76,  # packed size of this header is 76 bytes
                 one=1,
                 six=6,
                 zero=0,
                 null=0,
                 null2='\0' * 16))
        return cls(*a, **k)

    # for the "flags" field above.
    bitflags = lookup_table({
        0x01: 'autoresponse',
        0x02: 'typingpacket',  # has typing info
        0x04: 'typed',
        0x08: 'typing',
        0x20: 'confirmation',
        0x40: 'mac_confirmation',
    })

    invars = [
        lambda o: o.version == 'ODC2',
    ]
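
The fmt mini-language above pairs each field name with a struct format character. As a rough cross-check, here is a minimal sketch of packing the same 76-byte header with only the standard struct module (the network byte order and the helper name pack_odc_header are assumptions, not part of the original Packable API):

import struct

# Format string assembled from the fmt listing above; '!' assumes network byte order.
ODC_FMT = '!4sHHHHQQIHHII16s16s'
assert struct.calcsize(ODC_FMT) == 76   # matches the hdrlen default

def pack_odc_header(cookie, length, encoding, subset, flags, screenname):
    # Defaults mirror ODCHeader.make(): version 'ODC2', hdrlen 76, one=1, six=6.
    return struct.pack(ODC_FMT,
                       'ODC2', 76, 1, 6, 0,         # version, hdrlen, one, six, zero
                       cookie, 0,                   # cookie, null
                       length, encoding, subset,    # payload length and charset info
                       flags, 0,                    # flags, zero
                       screenname.ljust(16, '\0'),  # screenname, null-padded to 16
                       '\0' * 16)                   # null2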
Example #2
class OFTHeader(Packable):
    fmt = strlist('''
        protocol_version 4s  # Always 'OFT2'
        length           H   # includes all data, including version and length
        type             H   # one of "types" below
    ''')

    invars = [lambda self: self.protocol_version == 'OFT2',
              lambda self: self.type in self.types.values()]

    types = Storage(prompt          = 0x0101,
                    ack             = 0x0202,
                    done            = 0x0204,
                    receiver_resume = 0x0205,
                    sender_resume   = 0x0106,
                    rcv_resume_ack  = 0x0207,
                    )
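
A hedged sketch of reading this 8-byte prologue with the standard struct module (the raw_data argument and big-endian byte order are assumptions; error handling is omitted):

import struct

def read_oft_prologue(raw_data):
    # protocol_version (4s) + length (H) + type (H) = 8 bytes
    protocol_version, length, oft_type = struct.unpack('!4sHH', raw_data[:8])
    assert protocol_version == 'OFT2'
    return length, oft_type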
Example #3
class OFTBody(Packable):
    fmt = strlist('''
    cookie              Q
    encryption          H
    compression         H
    num_files           H
    files_left          H
    num_parts           H
    parts_left          H
    total_size          I
    file_size           I

    modification_time   I   # since unix epoch
    checksum            I   # see OscarFileTransferChecksum
    recv_fork_checksum  I
    fork_size           I
    creation_time       I
    fork_checksum       I
    bytes_received      I
    recv_checksum       I
    id_string           32s # 32 byte right padded string: usually 'CoolFileXfer'

    flags               B   # Flags: 0x20 - Negotiation (not complete), 0x01 - Done
    list_name_offset    B   # always 0x1c
    list_size_offset    B   # always 0x11

    dummy_block         69s # Dummy Block - large null block for future expansion of OFT

    mac_file_info       16s # Mac File Info

    charset             H   # charset
    subset              H   # subset: 0 for ASCII, 2 for UTF-16BE, 3 for ISO-8859-1
    ''')

    default_checksum = 0xffff0000

    @staticmethod
    def padfilename(filename):
        '''Following an OFT body is a padded filename of at least 64
        characters, possibly more.'''

        if len(filename) < 64:
            filename += '\0' * (64 - len(filename))
            assert len(filename) == 64
        return filename
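
For reference, the padding above behaves the same as str.ljust for every filename length; a small usage sketch (the filename itself is made up):

padded = OFTBody.padfilename('example.txt')
assert len(padded) == 64
assert padded == 'example.txt'.ljust(64, '\0')   # identical result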
Example #4
import packable
import util

class keystruct(packable.Packable):
    byteorder = '<'
    fmt = util.strlist('''
        size      L    # always 28
        mode      L    # always 1 for CBC
        cipheralg L    # always 0x6603 for 3DES
        hashalg   L    # always 0x8004 for SHA-1
        ivlen     L    # always 8
        hashlen   L    # always 20
        cipherlen L    # always 72

        iv        8s   # random data for initialization vector
        hash      20s  # SHA-1 result
        cipher    72s  # Crypted data
        ''')

    def __init__(self, *args):
        if len(args) == 3:
            iv, hash, cipher = args
            packable.Packable.__init__(self, 28, 1, 0x6603, 0x8004, 8, 20, 72,
                                       iv, hash, cipher)
        else:
            packable.Packable.__init__(self, *args)
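
As a layout sanity check, a minimal sketch of the same 128-byte little-endian record built directly with the standard struct module (the constants are copied from the comments above; pack_keystruct is not part of the original API):

import struct

KEYSTRUCT_FMT = '<7L8s20s72s'            # seven 4-byte fields, then iv, hash, cipher
assert struct.calcsize(KEYSTRUCT_FMT) == 28 + 8 + 20 + 72

def pack_keystruct(iv, hash_, cipher):
    # 28 = size of the fixed part, 1 = CBC, 0x6603 = 3DES, 0x8004 = SHA-1
    return struct.pack(KEYSTRUCT_FMT, 28, 1, 0x6603, 0x8004, 8, 20, 72,
                       iv, hash_, cipher)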
Example #5
import sys

import transaction
import util

infile = sys.argv[1]
threshold = int(sys.argv[2])
outfile = '%s.lp.%d' % (infile, threshold)
count = {}

print 'Reading', infile, 'to calculate item frequencies'

fin = file(infile, 'r')
for tid, l in enumerate(fin):
    txn = transaction.read_txn(l)
    for item in txn:
        if item not in count:
            count[item] = 0
        count[item] += 1
    if tid % 1000 == 0:
        sys.stdout.write('.')
        sys.stdout.flush()

print
print 'Writing', outfile

fout = file(outfile, 'w')
fin = file(infile, 'r')
for tid, l in enumerate(fin):
    txn = transaction.read_txn(l)
    txnout = [x for x in txn if count[x] < threshold]
    fout.write(util.strlist(txnout) + '\n')
    if tid % 1000 == 0:
        sys.stdout.write('.')
        sys.stdout.flush()
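
The first pass above is a hand-rolled frequency count; a hedged sketch of the same counting loop using collections.Counter (Python 2.7+), assuming transaction.read_txn behaves as in the script:

from collections import Counter

count = Counter()
for l in open(infile, 'r'):
    count.update(transaction.read_txn(l))   # add one to each item in the transaction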
Example #6
import sys

import pattern
import util
from transaction import decode_items, encode_items

if len(sys.argv)!=2:
    print 'ERROR: need a pattern set to sort'
    exit(-1)
else:
    infile = sys.argv[1]

pat = []
fin = file(infile, 'r')
for l in fin:
    (items,count) = pattern.read_pat(l)
    pat.append( (items,count) )

def less_itemsets(a, b):
    if len(a)==len(b):
        for (x,y) in zip(a,b):
            if x < y:
                return -1
            else:
                if x > y:
                    return 1
        return 0
    else:
        return len(a)-len(b)

pat.sort(lambda (i1,c1),(i2,c2) : less_itemsets(i1,i2))
for (items, count) in pat:
    print '%s (%d)' % (util.strlist(items), count)
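
The cmp-style sort and the tuple-unpacking lambda above only work on Python 2; a hedged sketch of the same ordering (itemset length first, then lexicographic) with a key function instead:

# Equivalent ordering without a comparison function; also valid on Python 3.
pat.sort(key=lambda p: (len(p[0]), p[0]))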
Example #7
def gen_problems():

    if not os.access("analysis", os.F_OK):
        os.mkdir("analysis")

    for x in datafiles:
        xl = string.split(x)
        datafile = xl[0]
        numtxn = int(xl[1])
        minprocs = int(xl[2])
        dataname = perf.comp_dataname(datafile)
        f = open('analysis/' + dataname + '.f2stats', 'w')
        if len(xl) > 3:
            supports = xl[3:]
        else:
            supports = default_supports
        f.write('Support NumVertices AvgF2deg DevF2deg MinF2Deg MaxF2Deg\n')
        for epsilon in supports:
            key = datafile + ':' + epsilon
            (stats, est) = d[key]
            # print stats
            f.write(epsilon + ' ' + str(stats[4]) + ' ' + str(stats[7]) + ' ' +
                    str(stats[8]) + ' ' + str(stats[9]) + ' ' +
                    str(stats[10]) + '\n')
        f.close()

    # read all stats in memory
    all_stats = {}
    for x in datafiles:
        xl = string.split(x)
        datafile = xl[0]
        numtxn = int(xl[1])
        minprocs = int(xl[2])
        dataname = perf.comp_dataname(datafile)
        if len(xl) > 3:
            supports = xl[3:]
        else:
            supports = default_supports
        for epsilon in supports:
            key = datafile + ':' + epsilon
            (stats, est) = d[key]
            if not all_stats.has_key(epsilon):
                all_stats[epsilon] = {}
            all_stats[epsilon][dataname] = stats

    #print all_stats

    # write all stats

    datanames = [perf.comp_dataname(string.split(x)[0]) for x in datafiles]
    fnv = open('analysis/numvertices.data', 'w')
    fnv.write('Support ' + util.strlist(datanames) + '\n')
    fad = open('analysis/avgdegree.data', 'w')
    fad.write('Support ' + util.strlist(datanames) + '\n')

    supports = all_stats.keys()
    supports.sort()
    print 'supports: ', supports

    for epsilon in supports:
        stats_by_e = all_stats[epsilon]
        print '*', epsilon, stats_by_e
        fnv.write(epsilon + '\t')
        fad.write(epsilon + '\t')
        for dataname in datanames:
            if stats_by_e.has_key(dataname):
                fnv.write(str(int(stats_by_e[dataname][4])) + '\t')
                fad.write(str(stats_by_e[dataname][7]) + '\t')
            else:
                fnv.write('-\t')
                fad.write('-\t')
        fnv.write('\n')
        fad.write('\n')

    fnv.close()
    fad.close()