Ejemplo n.º 1
0
    def do(self):
        if self.con == None:
            if self.connect() == False:
                return

            # Check crc
            rcrc = self.get_crc(False)
            if rcrc == None:
                return

            # Sync crc
            lcrc = crc16.crc16_buff(str(self.crc_idx), self.crc_val)
            if rcrc == self.crc_val or rcrc == lcrc:
                self.crc_val = rcrc
            if rcrc == lcrc:
                self.inc_crc_idx()

        # Next crc
        rcrc = self.get_crc(True)
        if rcrc == None:
            return

        lcrc = crc16.crc16_buff(str(self.crc_idx), self.crc_val)
        self.inc_crc_idx()
        self.crc_val = lcrc

        if lcrc != rcrc:
            self.consistency = False

        if self.verbose:
            print "%d l_%d r_%d" % (self.crc_idx, lcrc, rcrc)
Ejemplo n.º 2
0
    def test_dump_iterator_with_mig_conf_clearstart(self):
        util.print_frame()

        num_test = 100
        dict = {}
        server0 = self.cluster['servers'][0]
        redis0 = telnetlib.Telnet(server0['ip'], server0['redis_port'])

        for i in xrange(num_test):
            key = self.string_gen(random.randint(1, 64))
            val = self.string_gen(random.randint(1, 64))
            dict[key] = val

            redis0.write('*3\r\n$3\r\nset\r\n')
            redis0.write('$%d\r\n%s\r\n' % (len(key), key))
            redis0.write('$%d\r\n%s\r\n' % (len(val), val))
            ret = redis0.read_until('\r\n', 1)
            self.assertEqual(ret, '+OK\r\n')

        self.clearstart(redis0, 4096, 8191)

        self.bgsave(redis0)

        cmd = "./dump-util --dump-iterator dump.rdb ./dump2json_base32hex.so out.json"
        p = util.exec_proc_async(util.dump_util_dir(0), cmd, True, None,
                                 subprocess.PIPE, None)
        ret = p.wait()
        self.assertTrue(ret == 0)

        count = 0
        f = file("%s/out.json" % util.dump_util_dir(0), "r")
        for line in f.readlines():
            count += 1
            data = json.loads(line.strip())
            key = self.b32hexdecode(data['key'])
            val = self.b32hexdecode(data['value'])
            self.assertEqual(dict[key], val)
            if ((crc16.crc16_buff(key, 0) % 8192) >= 4096):
                print key
                print val
                print crc16.crc16_buff(key, 0) % 8192
                self.assertTrue(
                    False, "dump-util doesn't recognize keys on migration")

        print "Total Count of json output = %d" % count
        f.close()

        # Go back to initial configuration
        self.clearend(redis0)
Ejemplo n.º 3
0
    def do(self):
        if self.con == None:
            if self.connect() == False:
                return

            # Check crc
            rcrc = self.get_crc(False)
            if rcrc == None:
                return
            # First try
            if rcrc != 'not_init':
                # Sync crc
                if rcrc != self.crc_val:
                    self.crc_val = rcrc
                    self.inc_crc_idx()

        # Next crc
        rcrc = self.get_crc(True)
        if rcrc == None:
            return

        lcrc = crc16.crc16_buff(str(self.crc_idx), self.crc_val)
        self.inc_crc_idx()
        self.crc_val = lcrc

        if lcrc != rcrc:
            print "CRC16 inc  >>> key:%s, index:%d, l_%d != r_%d" % (self.key, self.crc_idx, lcrc, rcrc)
            self.consistency = False
            sys.exit(-1)

        if self.verbose:
            print "%d l_%d r_%d" % (self.crc_idx, lcrc, rcrc)
Ejemplo n.º 4
0
def afproto_get_data(raw_frame):
    '''
    Returns a tuple of (data, extra_data). The data is data which was decoded
    from the passed frame, the extra_data is data that was not considered for
    parsing (and should probably be sent in a subsequent call).

    If no valid frame was found, data is None

    extra will always be a string (empty string if all data was considered).
    '''
    start_ndx = raw_frame.find(START_BYTE)
    if start_ndx == -1:
        return (None, '')

    end_ndx = raw_frame.find(END_BYTE, start_ndx + 1)
    if end_ndx == -1:
        return (None, raw_frame[start_ndx:])

    contents = unescape_data(raw_frame[start_ndx+1:end_ndx])
    data = contents[:-2]

    sent_crc = struct.unpack('H', contents[-2:])[0]
    if sent_crc != crc16.crc16_buff(data):
        print 'invalid crc'
        return (None, raw_frame[end_ndx+1:])

    return (data, raw_frame[end_ndx+1:])
def afproto_get_data(raw_frame):
    '''
    Returns a tuple of (data, extra_data). The data is data which was decoded
    from the passed frame, the extra_data is data that was not considered for
    parsing (and should probably be sent in a subsequent call).

    If no valid frame was found, data is None

    extra will always be a string (empty string if all data was considered).
    '''
    # Locate the start-of-frame marker; without it nothing is salvageable.
    start_ndx = raw_frame.find(START_BYTE)
    if start_ndx == -1:
        return (None, '')

    # Find the matching end marker after the start byte.
    end_ndx = raw_frame.find(END_BYTE, start_ndx + 1)
    if end_ndx == -1:
        # Incomplete frame: hand the partial tail back to the caller.
        return (None, raw_frame[start_ndx:])

    # Strip the framing bytes and undo byte-stuffing; the last two bytes of
    # the unescaped contents are the CRC16 trailer.
    contents = unescape_data(raw_frame[start_ndx + 1:end_ndx])
    data = contents[:-2]

    # NOTE(review): format 'H' uses native byte order/alignment; presumably
    # both peers share endianness -- confirm ('<H' would make it explicit).
    sent_crc = struct.unpack('H', contents[-2:])[0]
    if sent_crc != crc16.crc16_buff(data):
        print 'invalid crc'
        return (None, raw_frame[end_ndx + 1:])

    return (data, raw_frame[end_ndx + 1:])
Ejemplo n.º 6
0
    def do(self):
        if self.con == None:
            if self.connect() == False:
                return

            # Check crc
            rcrc = self.get_crc(False)
            if rcrc == None:
                return
            # First try
            if rcrc != 'not_init':
                # Sync crc
                if rcrc != self.crc_val:
                    self.crc_val = rcrc
                    self.inc_crc_idx()

        # Next crc
        rcrc = self.get_crc(True)
        if rcrc == None:
            return

        lcrc = crc16.crc16_buff(str(self.crc_idx), self.crc_val)
        self.inc_crc_idx()
        self.crc_val = lcrc

        if lcrc != rcrc:
            print "CRC16 inc  >>> key:%s, index:%d, l_%d != r_%d" % (
                self.key, self.crc_idx, lcrc, rcrc)
            self.consistency = False
            sys.exit(-1)

        if self.verbose:
            print "%d l_%d r_%d" % (self.crc_idx, lcrc, rcrc)
Ejemplo n.º 7
0
    def test_dump_iterator_with_mig_conf_clearstart(self):
        util.print_frame()

        num_test = 100
        dict = {}
        server0 = self.cluster['servers'][0]
        redis0 = telnetlib.Telnet(server0['ip'], server0['redis_port'])

        for i in xrange(num_test):
            key = self.string_gen(random.randint(1, 64))
            val = self.string_gen(random.randint(1, 64))
            dict[key] = val

            redis0.write('*3\r\n$3\r\nset\r\n')
            redis0.write('$%d\r\n%s\r\n' % (len(key), key))
            redis0.write('$%d\r\n%s\r\n' % (len(val), val))
            ret = redis0.read_until('\r\n', 1)
            self.assertEqual(ret, '+OK\r\n')

        self.clearstart(redis0, 4096, 8191)

        self.bgsave(redis0)

        cmd = "./dump-util --dump-iterator dump.rdb ./dump2json_base32hex.so out.json"
        p = util.exec_proc_async(util.dump_util_dir(0), cmd, True, None, subprocess.PIPE, None)
        ret = p.wait()
        self.assertTrue(ret == 0)

        count = 0
        f = file("%s/out.json" % util.dump_util_dir(0), "r")
        for line in f.readlines():
            count += 1
            data = json.loads(line.strip())
            key = self.b32hexdecode(data['key'])
            val = self.b32hexdecode(data['value'])
            self.assertEqual(dict[key], val)
            if ((crc16.crc16_buff(key, 0) % 8192) >= 4096):
                print key
                print val
                print crc16.crc16_buff(key, 0) % 8192
                self.assertTrue(False, "dump-util doesn't recognize keys on migration")

        print "Total Count of json output = %d" % count
        f.close()

        # Go back to initial configuration
        self.clearend(redis0)
Ejemplo n.º 8
0
def afproto_frame_data(data):
    '''
    Build and return the raw on-wire frame carrying *data*:
    START_BYTE + escaped payload + escaped CRC16 trailer + END_BYTE.
    '''
    crc_trailer = struct.pack('H', crc16.crc16_buff(data))
    return (START_BYTE
            + escape_data(data)
            + escape_data(crc_trailer)
            + END_BYTE)
def afproto_frame_data(data):
    '''
    Returns a raw frame which contains the supplied data
    '''
    # Frame layout: START_BYTE | escaped(data) | escaped(crc16) | END_BYTE
    ret = START_BYTE
    # CRC16 of the raw (unescaped) payload, packed as an unsigned short.
    # NOTE(review): 'H' is native byte order; both peers presumably share
    # endianness -- confirm ('<H' would make it explicit).
    crc = struct.pack('H', crc16.crc16_buff(data))
    ret += escape_data(data)
    ret += escape_data(crc)
    ret += END_BYTE
    return ret
Ejemplo n.º 10
0
    def pack_message(self, msg):
        """Frame *msg* into a packet (sync bytes, 16-bit flags/length header,
        payload, CRC trailer), record it in self.messages, and return it."""
        # Payload is capped at 1023 bytes -- the length field is 10 bits.
        if len(msg) > 1023:
            msg = msg[:1023]  # TODO: Split up messages nicely? Do we care?

        # Low 10 bits of the header word carry the payload length.
        header = len(msg) & 1023

        # TODO: Nicer way of handling bitfields for the packet flags
        if self.crc32_enabled:
            header |= 0x8000  # MSB flags a CRC32 trailer.
            crc = struct.pack(">L", 0xFFFF)  # TODO: crc32 from python libraries?
        else:
            # CRC16 is computed over the header word plus the payload.
            crc = struct.pack(">H", crc16.crc16_buff(struct.pack(">H", header) + msg))

        # Assemble: sync | header | payload | crc
        packet = self.sync_bytes + struct.pack(">H", header) + msg + crc

        self.messages.append(packet)

        return packet
Ejemplo n.º 11
0
    def test_buffer(self):
        """Attempt to parse one packet from the head of the bit buffer.

        Sets self.buffer_state to "SHIFT" (slide bits out) or "APPEND"
        (wait for more bits), and invokes self.callback(payload) when a
        complete packet passes its CRC check.
        """
        if len(self.buffer) > self.sync_length + 16:  # Allow for different sync lengths.
            # Convert the first sync_length bits to a byte string and compare
            # against the expected sync marker.
            buffer_head = np.packbits(self.buffer[0:self.sync_length]).tostring()

            if buffer_head == self.sync_bytes:  # Maybe we have something?

                # Extract the 16-bit packet flags / payload length field.
                packet_flags = np.packbits(self.buffer[self.sync_length:self.sync_length + 16]).tostring()
                packet_flags = struct.unpack(">H", packet_flags)[0]
                packet_length = packet_flags & 0x03FF  # Low 10 bits: payload length.

                if packet_length > self.payload_length_cap:
                    # Payload is bigger than our cap.
                    # At this point we assume the data is corrupt, and continue clocking through bits.
                    logging.debug("Packet length bigger than cap.")
                    return

                # Get the CRC type and length from the MSB of the packet flags.
                # BUG FIX: '==' binds tighter than '&', so the original test
                # 'packet_flags & 0x8000 == 1' parsed as
                # 'packet_flags & (0x8000 == 1)' -> always 0 -> always CRC16,
                # even for packets packed with the CRC32 flag bit set.
                crc_type = "CRC32" if (packet_flags & 0x8000) != 0 else "CRC16"
                crc_length = 4 if crc_type == "CRC32" else 2

                total_bits = self.sync_length + 16 + packet_length * 8 + crc_length * 8

                # Check we have enough bits to test the entire packet.
                if len(self.buffer) >= total_bits:
                    # Convert the bit array to a string
                    packet_string = np.packbits(self.buffer[0:total_bits]).tostring()
                    logging.debug("Possible Packet: " + packet_string)
                    logging.debug(str(np.packbits(self.buffer[0:total_bits])))
                    # Extract the received CRC and compute our own.
                    if crc_type == "CRC16":
                        calc_crc = crc16.crc16_buff(packet_string[len(self.sync_bytes):-2])
                        packet_crc = struct.unpack(">H", packet_string[-2:])[0]
                    else:
                        # Mirrors the 0xFFFF placeholder the pack side emits
                        # until a real CRC32 is wired in.
                        calc_crc = 0xFFFF
                        packet_crc = struct.unpack(">L", packet_string[-4:])[0]

                    logging.debug("Packet CRC: " + str(packet_crc) + " Calc CRC: " + str(calc_crc))

                    # Test CRC
                    if packet_crc == calc_crc:
                        # Woohoo! We have a packet!
                        # BUG FIX: strip crc_length trailer bytes rather than a
                        # hard-coded 2 -- a CRC32 trailer is 4 bytes long.
                        payload = packet_string[len(self.sync_bytes) + 2:-crc_length]

                        logging.info("Found complete packet: " + payload)
                        # Hand the payload to the registered consumer, if any.
                        if self.callback != False:
                            self.callback(payload)
                        # Clear the packet bits out of the buffer.
                        self.buffer_state = "SHIFT"
                        # TODO
                    else:
                        # Packet failed CRC. Continue clocking through bits in
                        # case this was a false positive on the sync marker.
                        logging.debug("CRC Check failed. False positive on sync?")
                        self.buffer_state = "SHIFT"
                        return
                else:
                    # We need more bits.  APPEND so new bits arrive without
                    # shifting out our sync header.
                    self.buffer_state = "APPEND"
                    return
            else:
                # No sync header match. Continue clocking bits through.
                self.buffer_state = "SHIFT"
                return
        else:
            # We need more bits to check the sync header.
            self.buffer_state = "APPEND"
            return
Ejemplo n.º 12
0
    def run(self):
        """Load-generator thread body.

        Repeatedly issues a large pipelined mset, multikey mset/mget/del
        commands and an incremental 'crc16' probe over a raw connection
        until self.quit is set.  Any I/O failure or crc mismatch clears
        self.consistency.
        """
        counter = 0
        key6 = (self.key,) * 6

        # Build the big pipelined mset payload: a 500-byte key list
        # quadrupled, wrapped in 'mset ... \r\n', then repeated 8 times.
        digits = '0123456789' * 10
        keyblob = ('pipelined_multikey ' + digits + digits + digits + ' '
                   + digits + ' ' + digits + ' ')
        keyblob *= 4
        pipelined_multikey_cmd = ('mset %s\r\n' % keyblob) * 8

        # Loop while quit is not the exact True singleton (original guard).
        while self.quit is not True:
            counter = 1 if counter > 50000 else counter + 1

            try:
                self.server.write(pipelined_multikey_cmd)
                # One status line per pipelined mset -> eight replies.
                for _ in range(8):
                    response = self.server.read_until('\r\n', self.timeout)
            except:
                #util.log( 'Connection closed in LoadGenerator:%s' % pipelined_multikey_cmd )
                self.consistency = False
                return

            cmd = 'mset 1%s 1 2%s 2 3%s 3 4%s 4 5%s 5 6%s 6\r\n' % key6
            try:
                self.server.write(cmd)
                response = self.server.read_until('\r\n', self.timeout)
            except:
                #util.log( 'Connection closed in LoadGenerator:%s' % cmd )
                self.consistency = False
                return

            cmd = 'mget 1%s 2%s 3%s 4%s 5%s 6%s\r\n' % key6
            try:
                self.server.write(cmd)
                # A 6-element mget reply spans 13 protocol lines.
                for _ in range(13):
                    response = self.server.read_until('\r\n', self.timeout)
            except:
                #util.log( 'Connection closed in LoadGenerator:%s' % cmd )
                self.consistency = False
                return

            cmd = 'del 1%s 2%s 3%s 4%s 5%s 6%s\r\n' % key6
            try:
                self.server.write(cmd)
                response = self.server.read_until('\r\n', self.timeout)
            except:
                #util.log( 'Connection closed in LoadGenerator:%s' % cmd )
                self.consistency = False
                return

            # (A periodic 'info all' round-trip check used to live here;
            # it is intentionally disabled.)

            cmd = 'crc16 %s %d\r\n' % (self.key, counter)
            try:
                self.server.write(cmd)
                response = self.server.read_until('\r\n', self.timeout)
            except:
                #util.log( 'Connection closed in LoadGenerator:%s' % cmd )
                self.consistency = False
                return

            # Chain the local crc and compare with the server's answer.
            self.value = crc16.crc16_buff(str(counter), self.value)
            try:
                if int(response[1:-2]) != self.value:
                    if self.consistency:
                        self.consistency = False
            except ValueError:
                #util.log( 'Value Error in LoadGenerator, ret:%s' % response[:-2] )
                self.consistency = False
                return
Ejemplo n.º 13
0
    def _check_replies(self, rqst):
        """Drain every reply of *rqst*; return False on a backend error or a
        non-OK reply, True when all replies were consumed successfully."""
        while True:
            be_errno, reply = self.api.get_reply(rqst)
            if be_errno < 0:
                return False
            if reply is None:
                # No more replies queued.
                return True
            if not is_reply_ok(reply):
                return False

    def _run_and_check(self, cmd):
        """Send *cmd* as one request and verify every reply is OK.

        Returns True on success, False on any failure (request creation,
        request error, backend error, bad reply, or connection problem)."""
        rqst = self.api.create_request()
        if rqst is None:
            return False

        self.api.append_command(rqst, cmd)
        try:
            if self.api.do_request(rqst, self.timeout) != 0:
                return False
            return self._check_replies(rqst)
        except:
            # Deliberate catch-all: the underlying C binding can fail in
            # many ways; treat all of them as a connection problem.
            if self.verbose:
                util.log('Connection closed in LoadGenerator:%s' % cmd)
            return False

    def process(self):
        """One load-generation round: pipelined multikey mset, multikey
        mset/mget/del, then an incremental crc16 consistency probe.

        Returns True when everything succeeded; otherwise clears
        self.consistency and returns False.
        """
        if self.i > 50000:
            self.i = 0
        self.i += 1

        keys = (self.key,) * 6
        commands = (
            self.pipelined_multikey_cmd,                        # pipelined multikey
            'mset 1%s 1 2%s 2 3%s 3 4%s 4 5%s 5 6%s 6' % keys,  # Multi - MSET
            'mget 1%s 2%s 3%s 4%s 5%s 6%s' % keys,              # Multi - MGET
            'del 1%s 2%s 3%s 4%s 5%s 6%s' % keys,               # Multi - DEL
        )
        for cmd in commands:
            if not self._run_and_check(cmd):
                self.consistency = False
                return False

        # CRC - ask the server for its running crc16 of this key.
        rqst = self.api.create_request()
        if rqst is None:
            self.consistency = False
            return False

        cmd = 'crc16 %s %d' % (self.key, self.i)
        self.api.append_command(rqst, cmd)

        try:
            if self.api.do_request(rqst, self.timeout) != 0:
                self.consistency = False
                return False

            be_errno, reply = self.api.get_reply(rqst)
            if be_errno < 0:
                self.consistency = False
                return False
            # ROBUSTNESS FIX: the original fell through on a None reply and
            # crashed on reply[0] (TypeError swallowed by the bare except);
            # fail explicitly instead.
            if reply is None or reply[0] != ARC_REPLY_INTEGER:
                self.consistency = False
                return False

            # CRC - check consistency against the locally chained value.
            self.value = crc16.crc16_buff(str(self.i), self.value)
            # BUG FIX: the original wrapped this comparison in a
            # try/except ValueError whose handler logged an undefined
            # 'response' variable (copy/paste from the telnet-based
            # generator) and would itself raise NameError; an int
            # comparison cannot raise ValueError, so the handler is gone.
            if reply[1] != self.value:
                if self.verbose:
                    util.log('Value Error in LoadGenerator, cmd:"%s", reply:%s, value:%d' % (cmd, reply[1], self.value))
                self.consistency = False
                return False

        except:
            if self.verbose:
                util.log('Connection closed in LoadGenerator:%s, except' % cmd)
                util.log(sys.exc_info())
                exc_type, exc_value, exc_traceback = sys.exc_info()
                traceback.print_exception(exc_type, exc_value, exc_traceback, limit=3, file=sys.stdout)
            self.consistency = False
            return False

        return True
Ejemplo n.º 14
0
    def test_timeout_in_partial_PG(self):
        """Issue one pipelined request that spans two PGs, including a huge
        (256 MB) value expected to exceed the 1-second request timeout, and
        verify the API eventually reports ARC_ERR_TIMEOUT."""
        util.print_frame()

        api = ARC_API(ZK_ADDR, CLUSTER_NAME, logFilePrefix = self.arcci_log, so_path = self.so_path)

        rqst = api.create_request()
        if rqst == None:
            return False

        # Request - PG 0
        api.append_command(rqst, "set haha0 haha0")
        api.append_command(rqst, "set haha2 haha2")
        # Build a 1 KB chunk, expand to 1 MB, then double eight times to
        # reach 256 MB -- large enough to force the request to time out.
        dummy_value = ''
        for i in range(10): # 1 KB
            dummy_value += '0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789'
        large_value_for_timeout  = ''
        for i in range(1024): # 1 MB
            large_value_for_timeout += dummy_value
        large_value_for_timeout += large_value_for_timeout # 2 MB
        large_value_for_timeout += large_value_for_timeout # 4 MB
        large_value_for_timeout += large_value_for_timeout # 8 MB
        large_value_for_timeout += large_value_for_timeout # 16 MB
        large_value_for_timeout += large_value_for_timeout # 32 MG
        large_value_for_timeout += large_value_for_timeout # 64 MB
        large_value_for_timeout += large_value_for_timeout # 128 MB
        large_value_for_timeout += large_value_for_timeout # 256 MB
        util.log('Large value : %f MB' % (float(len(large_value_for_timeout))/1024/1024))
        total_cmd = 3
        # NOTE(review): append_command is called everywhere else with a
        # single pre-formatted string; here the value is a separate argument
        # ("set haha %s", value) -- presumably the binding printf-formats its
        # varargs.  Confirm against the ARC_API implementation.
        api.append_command(rqst, "set haha %s", large_value_for_timeout)

        # Request - PG 1
        sent_for_pg1 = []
        for i in range(10):
            key = 'hahahoho%d' % i
            slot = crc16.crc16_buff(key) % 8192
            total_cmd += 1
            api.append_command(rqst, "set %s %s" % (key, key))

            # Slots 4096..8191 belong to PG 1 in this cluster layout.
            if slot > 4095:
                sent_for_pg1.append(key)

        for i in range(6):
            util.log('waiting... %d' % i)
            time.sleep(1)

        total_reply = 0
        err_cnt = 0
        try:
            # 1000 ms timeout: the 256 MB set cannot complete in time.
            ret = api.do_request(rqst, 1000)
            if ret != 0:
                err_cnt += 1
                util.log('Partial error occurs')

            util.log('Reply : ')
            while True:
                be_errno, reply = api.get_reply(rqst)
                if be_errno < 0 or reply == None:
                    util.log('be_errno : %d' % be_errno)
                    break

                total_reply += 1
                util.log(reply)
                if reply is not None:
                    # NOTE(review): this if/elif chain increments err_cnt for
                    # EVERY reply -- a reply equal to ARC_REPLY_STATUS still
                    # hits the '!= ARC_REPLY_ERROR' branch.  The intent was
                    # presumably '==' comparisons (or a 'not in' test).
                    # err_cnt is never asserted on, so the test outcome is
                    # unaffected -- confirm before relying on this counter.
                    if reply[0] != ARC_REPLY_STATUS:
                        err_cnt += 1
                    elif reply[0] != ARC_REPLY_ERROR:
                        err_cnt += 1
                    elif reply[0] != ARC_REPLY_NIL:
                        err_cnt += 1

        except:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            traceback.print_exception(exc_type, exc_value, exc_traceback, limit=3, file=sys.stdout)

        finally:
            util.log('total cmd:%d' % total_cmd)
            util.log('total reply:%d' % total_reply)
            api.destroy()

        # The last be_errno observed must be the timeout error.
        self.assertEquals(be_errno, ARC_ERR_TIMEOUT, 'could not get timeout error. be_errno:%d' % be_errno)
Ejemplo n.º 15
0
    def run(self):
        """Load-generator thread body.

        Repeatedly issues a large pipelined mset, multikey mset/mget/del
        commands and an incremental 'crc16' probe over a raw connection
        until self.quit is set.  Any I/O failure or crc mismatch clears
        self.consistency.  The read_until counts mirror the exact number of
        reply lines each command produces, so the sequence is order-critical.
        """
        i = 0
        # Build the big pipelined payload: ~500-byte key list, quadrupled,
        # wrapped in 'mset ... \r\n', then repeated eight times.
        pipelined_multikey_cmd = 'pipelined_multikey 0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789'
        pipelined_multikey_cmd += '0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789'
        pipelined_multikey_cmd += '0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789 '
        pipelined_multikey_cmd += '0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789 '
        pipelined_multikey_cmd += '0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789 '

        pipelined_multikey_cmd += pipelined_multikey_cmd
        pipelined_multikey_cmd += pipelined_multikey_cmd

        pipelined_multikey_cmd = 'mset %s\r\n' % pipelined_multikey_cmd
        pipelined_multikey_cmd += pipelined_multikey_cmd
        pipelined_multikey_cmd += pipelined_multikey_cmd
        pipelined_multikey_cmd += pipelined_multikey_cmd

        while self.quit is not True:
            # Wrap the counter so the crc16 argument stays bounded.
            if i > 50000:
                i = 0
            i = i + 1

            try:
                self.server.write(pipelined_multikey_cmd)
                # Eight pipelined msets -> eight status lines to drain.
                response = self.server.read_until('\r\n', self.timeout)
                response = self.server.read_until('\r\n', self.timeout)
                response = self.server.read_until('\r\n', self.timeout)
                response = self.server.read_until('\r\n', self.timeout)
                response = self.server.read_until('\r\n', self.timeout)
                response = self.server.read_until('\r\n', self.timeout)
                response = self.server.read_until('\r\n', self.timeout)
                response = self.server.read_until('\r\n', self.timeout)
            except:
                #util.log( 'Connection closed in LoadGenerator:%s' % pipelined_multikey_cmd )
                self.consistency = False
                return

            cmd = 'mset 1%s 1 2%s 2 3%s 3 4%s 4 5%s 5 6%s 6\r\n' % (
                self.key, self.key, self.key, self.key, self.key, self.key)
            try:
                self.server.write(cmd)
                response = self.server.read_until('\r\n', self.timeout)
            except:
                #util.log( 'Connection closed in LoadGenerator:%s' % cmd )
                self.consistency = False
                return

            cmd = 'mget 1%s 2%s 3%s 4%s 5%s 6%s\r\n' % (
                self.key, self.key, self.key, self.key, self.key, self.key)
            try:
                self.server.write(cmd)
                # A 6-element mget reply spans 13 protocol lines.
                for read_loop in range(13):
                    response = self.server.read_until('\r\n', self.timeout)
            except:
                #util.log( 'Connection closed in LoadGenerator:%s' % cmd )
                self.consistency = False
                return

            cmd = 'del 1%s 2%s 3%s 4%s 5%s 6%s\r\n' % (
                self.key, self.key, self.key, self.key, self.key, self.key)
            try:
                self.server.write(cmd)
                response = self.server.read_until('\r\n', self.timeout)
            except:
                #util.log( 'Connection closed in LoadGenerator:%s' % cmd )
                self.consistency = False
                return


#      cmd = 'info all\r\ninfo all\r\ninfo all\r\n'
#      try:
#        self.server.write( cmd )
#        for read_loop in range(3):
#          response = self.server.read_until( '\r\n\r\n' )
#          response = self.server.read_until( '\r\n\r\n' )
#          response = self.server.read_until( '\r\n\r\n' )
#          response = self.server.read_until( '\r\n\r\n' )
#          response = self.server.read_until( '\r\n\r\n' )
#          response = self.server.read_until( '\r\n\r\n' )
#          response = self.server.read_until( '\r\n\r\n' )
#          response = self.server.read_until( '\r\n\r\n' )
#      except:
#        util.log( 'Connection closed in LoadGenerator:%s' % cmd )
#        self.consistency = False
#        return

            cmd = 'crc16 %s %d\r\n' % (self.key, i)
            try:
                self.server.write(cmd)
                response = self.server.read_until('\r\n', self.timeout)
            except:
                #util.log( 'Connection closed in LoadGenerator:%s' % cmd )
                self.consistency = False
                return

            # Chain the local crc and compare with the server's reply
            # (response is ':<int>\r\n', hence the [1:-2] slice).
            self.value = crc16.crc16_buff(str(i), self.value)
            try:
                if (int(response[1:-2]) != self.value):
                    if self.consistency:
                        self.consistency = False
            except ValueError:
                #util.log( 'Value Error in LoadGenerator, ret:%s' % response[:-2] )
                self.consistency = False
                return
Ejemplo n.º 16
0
    def process(self):
        """Run one round of load generation against the cluster.

        Issues the pre-built pipelined multikey command, then MSET/MGET/DEL
        over six keys derived from self.key, and finally a 'crc16' command
        whose integer reply is checked against the locally maintained rolling
        CRC (self.value).

        Returns:
            True when every request succeeded and the CRC matched; False
            otherwise, in which case self.consistency is set to False.
        """
        # Wrap the per-round counter used as the crc16 payload.
        if self.i > 50000:
            self.i = 0
        self.i += 1

        def _run_checked(command):
            # Send `command`, drain every reply, and require each to be OK.
            # Sets self.consistency = False and returns False on any failure.
            rqst = self.api.create_request()
            if rqst is None:
                self.consistency = False
                return False

            self.api.append_command(rqst, command)
            try:
                if self.api.do_request(rqst, self.timeout) != 0:
                    self.consistency = False
                    return False

                while True:
                    be_errno, reply = self.api.get_reply(rqst)
                    if be_errno < 0:
                        self.consistency = False
                        return False
                    if reply is None:
                        # All replies consumed.
                        break
                    if is_reply_ok(reply) == False:
                        self.consistency = False
                        return False
            except:  # any transport/API error counts as a broken connection
                if self.verbose:
                    util.log('Connection closed in LoadGenerator:%s' % command)
                self.consistency = False
                return False
            return True

        # Pipelined multikey - Request / Check reply
        if not _run_checked(self.pipelined_multikey_cmd):
            return False

        # Multi - MSET
        cmd = 'mset 1%s 1 2%s 2 3%s 3 4%s 4 5%s 5 6%s 6' % (
            self.key, self.key, self.key, self.key, self.key, self.key)
        if not _run_checked(cmd):
            return False

        # Multi - MGET
        cmd = 'mget 1%s 2%s 3%s 4%s 5%s 6%s' % (self.key, self.key, self.key,
                                                self.key, self.key, self.key)
        if not _run_checked(cmd):
            return False

        # Multi - DEL
        cmd = 'del 1%s 2%s 3%s 4%s 5%s 6%s' % (self.key, self.key, self.key,
                                               self.key, self.key, self.key)
        if not _run_checked(cmd):
            return False

        # CRC - ask the server for its rolling crc16 and compare with ours.
        rqst = self.api.create_request()
        if rqst is None:
            self.consistency = False
            return False

        cmd = 'crc16 %s %d' % (self.key, self.i)
        self.api.append_command(rqst, cmd)

        try:
            if self.api.do_request(rqst, self.timeout) != 0:
                self.consistency = False
                return False

            be_errno, reply = self.api.get_reply(rqst)
            if be_errno < 0 or reply is None:
                # BUGFIX: the original fell through to reply[0] when reply was
                # None without an errno (TypeError swallowed by the bare
                # except); fail explicitly instead.
                self.consistency = False
                return False

            if reply[0] != ARC_REPLY_INTEGER:
                self.consistency = False
                return False

            # CRC - Check consistency: advance the local rolling CRC and
            # compare with the server's integer reply.
            self.value = crc16.crc16_buff(str(self.i), self.value)
            if reply[1] != self.value:
                if self.verbose:
                    # BUGFIX: the original's dead `except ValueError` handler
                    # logged an undefined name `response` (NameError); the
                    # comparison itself cannot raise ValueError.
                    util.log(
                        'Value Error in LoadGenerator, cmd:"%s", reply:%s, value:%d'
                        % (cmd, reply[1], self.value))
                self.consistency = False
                return False

        except:  # any transport/API error counts as a broken connection
            if self.verbose:
                util.log('Connection closed in LoadGenerator:%s, except' % cmd)
                util.log(sys.exc_info())
                exc_type, exc_value, exc_traceback = sys.exc_info()
                traceback.print_exception(exc_type,
                                          exc_value,
                                          exc_traceback,
                                          limit=3,
                                          file=sys.stdout)
            self.consistency = False
            return False

        return True