def __read_metadata(self, column_type, column_schema, head):
    # Default the real type to the declared type; string-like columns may
    # override it when their metadata reveals an ENUM or SET.
    column_schema["REAL_TYPE"] = column_schema["TYPE_ID"]
    column_schema["IS_UNSIGNED"] = "unsigned" in column_schema["COLUMN_TYPE"]

    if column_type == FieldType.VAR_STRING or column_type == FieldType.STRING:
        head, _ = self.__read_string_metadata(column_schema, head)
    elif column_type == FieldType.VARCHAR:
        head, column_schema["MAX_LENGTH"] = utils.read_unsigned_int(head, 2)
    elif column_type == FieldType.BLOB:
        head, column_schema["LENGTH_SIZE"] = utils.read_unsigned_int(head, 1)
    elif column_type == FieldType.GEOMETRY:
        head, column_schema["LENGTH_SIZE"] = utils.read_unsigned_int(head, 1)
    elif column_type == FieldType.NEWDECIMAL:
        head, column_schema["PRECISION"] = utils.read_unsigned_int(head, 1)
        head, column_schema["DECIMALS"] = utils.read_unsigned_int(head, 1)
    elif column_type == FieldType.DOUBLE:
        head, column_schema["SIZE"] = utils.read_unsigned_int(head, 1)
    elif column_type == FieldType.FLOAT:
        head, column_schema["SIZE"] = utils.read_unsigned_int(head, 1)
    elif column_type == FieldType.BIT:
        # BIT metadata is two bytes: the leftover bits (length % 8) followed
        # by the number of whole bytes (length / 8).
        head, bit = utils.read_unsigned_int(head, 1)
        head, byte = utils.read_unsigned_int(head, 1)
        column_schema["BITS"] = (byte * 8) + bit
        # Storage size is derived from the total bit count, not just the remainder.
        column_schema["BYTES"] = int((column_schema["BITS"] + 7) / 8)
    return (head, None)
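# The `utils` helpers used throughout this module are not shown in this
# listing. The sketch below is a hypothetical stand-in, not the real helper;
# it illustrates the behaviour the calls above assume: consume `size` bytes
# from the front of the buffer as a little-endian unsigned integer and return
# the remaining buffer together with the value. The extra flag argument passed
# by __read_columns below is not modelled here.
def _read_unsigned_int_sketch(head, size):
    value = 0
    for i in range(size):
        # ord() on a one-byte slice works for both Python 2 str and Python 3 bytes.
        value |= ord(head[i:i + 1]) << (8 * i)
    return head[size:], value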
def __read_string_metadata(self, column_schema, head):
    # The two metadata bytes pack the real column type (high byte) and, for
    # plain strings, part of the maximum length (low byte).
    head, byte0 = utils.read_unsigned_int(head, 1)
    head, byte1 = utils.read_unsigned_int(head, 1)
    metadata = (byte0 << 8) + byte1
    real_type = metadata >> 8
    if real_type == FieldType.SET or real_type == FieldType.ENUM:
        # ENUM/SET columns arrive declared as strings; record the real type so
        # that value decoding in __read_columns dispatches on it.
        column_schema["TYPE_ID"] = real_type
        column_schema["REAL_TYPE"] = real_type
        column_schema["SIZE"] = metadata & 0x00ff
        self.__read_enum_metadata(column_schema, real_type)
    else:
        column_schema["MAX_LENGTH"] = (((metadata >> 4) & 0x300) ^ 0x300) + (metadata & 0x00ff)
    return (head, None)
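# Illustration of the MAX_LENGTH arithmetic above, assuming the usual MySQL
# packing for string columns: byte0 = real_type ^ ((length & 0x300) >> 4) and
# byte1 = length & 0xff. For a CHAR(20) column (real type STRING, 0xFE) the
# formula recovers the declared byte length:
_example_metadata = (0xFE << 8) + 0x14  # byte0 = 0xFE, byte1 = 20
assert (((_example_metadata >> 4) & 0x300) ^ 0x300) + (_example_metadata & 0x00ff) == 20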
def __read_columns(self, head, null_bitmap, column_schemas):
    columns = []
    for i in xrange(0, len(column_schemas)):
        schema = column_schemas[i]
        type = schema["REAL_TYPE"]
        column = {"type": type, "value": None}
        # One bit per column in the null bitmap, least significant bit first.
        null = bool((null_bitmap[i // 8] >> (i % 8)) & 0x01)
        unsigned = schema["IS_UNSIGNED"]
        if null:
            column["value"] = None
        elif type == FieldType.TINY:
            if unsigned:
                head, column["value"] = utils.read_unsigned_int(head, 1, False)
            else:
                head, column["value"] = utils.read_signed_int(head, 1, False)
        elif type == FieldType.SHORT:
            if unsigned:
                head, column["value"] = utils.read_unsigned_int(head, 2, False)
            else:
                head, column["value"] = utils.read_signed_int(head, 2, False)
        elif type == FieldType.LONG:
            if unsigned:
                head, column["value"] = utils.read_unsigned_int(head, 4, False)
            else:
                head, column["value"] = utils.read_signed_int(head, 4, False)
        elif type == FieldType.INT24:
            if unsigned:
                head, column["value"] = utils.read_unsigned_int(head, 3, False)
            else:
                head, column["value"] = utils.read_signed_int(head, 3, False)
        elif type == FieldType.FLOAT:
            head, column["value"] = utils.read_float(head)
        elif type == FieldType.DOUBLE:
            head, column["value"] = utils.read_double(head)
        elif type == FieldType.VARCHAR or type == FieldType.STRING:
            # Strings longer than 255 bytes carry a two-byte length prefix.
            if schema["MAX_LENGTH"] > 255:
                head, column["value"] = utils.read_lc_pascal_string(head, 2)
            else:
                head, column["value"] = utils.read_lc_pascal_string(head, 1)
        elif type == FieldType.NEWDECIMAL:
            head, column["value"] = self.__read_new_decimal(column)
        elif type == FieldType.BLOB:
            length_size = schema["LENGTH_SIZE"]
            charset = schema["CHARACTER_SET_NAME"]
            head, column["value"] = utils.read_lc_pascal_string(head, length_size, charset)
        elif type == FieldType.DATETIME:
            head, column["value"] = utils.read_datetime(head)
        elif type == FieldType.TIME:
            head, column["value"] = utils.read_time(head)
        elif type == FieldType.DATE:
            head, column["value"] = utils.read_date(head)
        elif type == FieldType.TIMESTAMP:
            # Requires `import datetime` at module level.
            head, timestamp = utils.read_unsigned_int(head, 4)
            column["value"] = datetime.datetime.fromtimestamp(timestamp)
        elif type == FieldType.LONGLONG:
            if unsigned:
                head, column["value"] = utils.read_unsigned_int(head, 8, False)
            else:
                head, column["value"] = utils.read_signed_int(head, 8, False)
        elif type == FieldType.YEAR:
            head, year = utils.read_unsigned_int(head, 1, False)
            column["value"] = year + 1900
        elif type == FieldType.ENUM:
            size = schema["SIZE"]
            head, index = utils.read_unsigned_int(head, size, False)
            # ENUM indexes are 1-based on the wire.
            column["value"] = schema["ENUM_VALUES"][index - 1]
        elif type == FieldType.SET:
            size = schema["SIZE"]
            head, index = utils.read_unsigned_int(head, size, False)
            column["value"] = schema["SET_VALUES"][index - 1]
        elif type == FieldType.BIT:
            bytes = schema["BYTES"]
            bits = schema["BITS"]
            head, column["value"] = utils.read_bits(head, bytes, bits)
        elif type == FieldType.GEOMETRY:
            length_size = schema["LENGTH_SIZE"]
            head, column["value"] = utils.read_lc_pascal_string(head, length_size)
        else:
            raise NotImplementedError("Unknown MySQL column type: %d" % type)
        columns.append(column)
    return columns
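# `utils.read_lc_pascal_string` is likewise not shown. A minimal, hypothetical
# sketch of the behaviour assumed above: read a little-endian length prefix of
# `length_size` bytes, then that many bytes of payload, optionally decoding it
# with the column character set (the real helper presumably maps MySQL charset
# names to Python codecs), and return the remaining buffer plus the value.
def _read_lc_pascal_string_sketch(head, length_size, charset=None):
    length = 0
    for i in range(length_size):
        # Little-endian length prefix, one byte at a time.
        length |= ord(head[i:i + 1]) << (8 * i)
    head = head[length_size:]
    value = head[:length]
    if charset is not None:
        value = value.decode(charset)  # e.g. "utf8" for text BLOB columns
    return head[length:], value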