def parse_db_header(first_page_hex_string):
    # Parse the 100-byte sqlite3 file header out of the hex dump of the first
    # database page and publish every header field through the module-level
    # HEADER_* globals (this module keeps its parsing state in globals).
    #
    # first_page_hex_string -- hex string of the first database page
    #                          (2 hex characters per byte; header = bytes 0..99)
    # Returns None; all results are stored in the globals declared below.
    #
    # NOTE: offsets below are byte offsets, multiplied by 2 to index into the
    # hex string (2 hex characters per byte).
    global DB_FILE_SIZE_IN_BYTES
    global HEADER_MAGIC_STRING
    global HEADER_DATABASE_PAGESIZE
    global HEADER_FILE_FORMAT_WRITE_VERSION
    global HEADER_FILE_FORMAT_READ_VERSION
    global HEADER_RESERVED_SPACE_PER_PAGE
    global HEADER_MAXIMUM_EMBEDDED_PAYLOAD_FRACTION
    global HEADER_MINIMUM_EMBEDDED_PAYLOAD_FRACTION
    global HEADER_LEAF_PAYLOAD_FRACTION
    global HEADER_FILE_CHANGE_COUNTER
    global HEADER_DATABASE_SIZE_IN_PAGES
    global HEADER_FIRST_FREE_TRUNK_PAGE
    global HEADER_TOTAL_NUMBER_OF_FREELIST_PAGES
    global HEADER_SCHEMA_COOKIE
    global HEADER_SCHEMA_FORMAT_NUMBER
    global HEADER_DEFAULT_PAGE_CACHE_SIZE
    global HEADER_LARGEST_ROOT_BTREE_PAGE_NUMBER
    global HEADER_DATABASE_TEXT_ENCODING
    global HEADER_USER_VERSION
    # NOTE: "VACCUM" is a historical typo in this module's global name; kept
    # unchanged because other code references it.
    global HEADER_INCREMENTAL_VACCUM_MODE
    global HEADER_RESERVED_FOR_EXPANSION
    global HEADER_VERSION_VALID_FOR_NUMBER
    global HEADER_SQLITE_VERSION_NUMBER
    global HEADER_FILE_CHANGE_COUNTER_VALID

    # Parse sqlite3 header structure
    _adel_log.log("\nparseDBHeader: ----> parsing sqlite3 database file header", 3)

    # Header bytes [0:15]: sqlite3 magic string (without null terminator, thus 15 bytes only)
    HEADER_MAGIC_STRING = _helpersStringOperations.hexstring_to_ascii(first_page_hex_string[0 * 2:15 * 2])
    if HEADER_MAGIC_STRING != "SQLite format 3":
        _adel_log.log("parse_db_header: WARNING! unknown sqlite3 magic string found: \"" + str(HEADER_MAGIC_STRING) + "\"", 2)
    else:
        _adel_log.log("parse_db_header: OK - sqlite3 magic string: \"" + str(HEADER_MAGIC_STRING) + "\"", 3)

    # Header bytes [16:18]: database page size
    # NOTE(review): in SQLite >= 3.7.1 a stored value of 1 means a page size
    # of 65536; that case is not handled here -- confirm against the
    # databases this tool must support.
    HEADER_DATABASE_PAGESIZE = int(first_page_hex_string[16 * 2:18 * 2], 16)
    _adel_log.log("parse_db_header: OK - database page size: " + str(HEADER_DATABASE_PAGESIZE), 3)

    # Header byte [18:19]: file format write version (must be 1 or 2)
    HEADER_FILE_FORMAT_WRITE_VERSION = int(first_page_hex_string[18 * 2:19 * 2], 16)
    if HEADER_FILE_FORMAT_WRITE_VERSION != 1 and HEADER_FILE_FORMAT_WRITE_VERSION != 2:
        _adel_log.log("parse_db_header: WARNING! invalid file format write version (must be 1 or 2): " + str(HEADER_FILE_FORMAT_WRITE_VERSION), 2)
    else:
        _adel_log.log("parse_db_header: OK - file format write version (must be 1 or 2): " + str(HEADER_FILE_FORMAT_WRITE_VERSION), 3)

    # Header byte [19:20]: file format read version (must be 1 or 2)
    HEADER_FILE_FORMAT_READ_VERSION = int(first_page_hex_string[19 * 2:20 * 2], 16)
    if HEADER_FILE_FORMAT_READ_VERSION != 1 and HEADER_FILE_FORMAT_READ_VERSION != 2:
        _adel_log.log("parse_db_header: WARNING! invalid file format read version (must be 1 or 2): " + str(HEADER_FILE_FORMAT_READ_VERSION), 2)
    else:
        _adel_log.log("parse_db_header: OK - file format read version (must be 1 or 2): " + str(HEADER_FILE_FORMAT_READ_VERSION), 3)

    # Header byte [20:21]: reserved space per page (usually 0)
    HEADER_RESERVED_SPACE_PER_PAGE = int(first_page_hex_string[20 * 2:21 * 2], 16)
    _adel_log.log("parse_db_header: OK - reserved space per page (usually 0): " + str(HEADER_RESERVED_SPACE_PER_PAGE), 3)

    # Header byte [21:22]: maximum embedded payload fraction (must be 64)
    HEADER_MAXIMUM_EMBEDDED_PAYLOAD_FRACTION = int(first_page_hex_string[21 * 2:22 * 2], 16)
    if HEADER_MAXIMUM_EMBEDDED_PAYLOAD_FRACTION != 64:
        _adel_log.log("parse_db_header: WARNING! invalid maximum embedded payload fraction (must be 64): " + str(HEADER_MAXIMUM_EMBEDDED_PAYLOAD_FRACTION), 2)
    else:
        _adel_log.log("parse_db_header: OK - maximum embedded payload fraction (must be 64): " + str(HEADER_MAXIMUM_EMBEDDED_PAYLOAD_FRACTION), 3)

    # Header byte [22:23]: minimum embedded payload fraction (must be 32)
    HEADER_MINIMUM_EMBEDDED_PAYLOAD_FRACTION = int(first_page_hex_string[22 * 2:23 * 2], 16)
    if HEADER_MINIMUM_EMBEDDED_PAYLOAD_FRACTION != 32:
        _adel_log.log("parse_db_header: WARNING! invalid minimum embedded payload fraction (must be 32): " + str(HEADER_MINIMUM_EMBEDDED_PAYLOAD_FRACTION), 2)
    else:
        _adel_log.log("parse_db_header: OK - minimum embedded payload fraction (must be 32): " + str(HEADER_MINIMUM_EMBEDDED_PAYLOAD_FRACTION), 3)

    # Header byte [23:24]: leaf payload fraction (must be 32)
    HEADER_LEAF_PAYLOAD_FRACTION = int(first_page_hex_string[23 * 2:24 * 2], 16)
    if HEADER_LEAF_PAYLOAD_FRACTION != 32:
        _adel_log.log("parse_db_header: WARNING! invalid leaf payload fraction (must be 32): " + str(HEADER_LEAF_PAYLOAD_FRACTION), 2)
    else:
        _adel_log.log("parse_db_header: OK - leaf payload fraction (must be 32): " + str(HEADER_LEAF_PAYLOAD_FRACTION), 3)

    # Header bytes [24:28]: file change counter
    HEADER_FILE_CHANGE_COUNTER = int(first_page_hex_string[24 * 2:28 * 2], 16)
    _adel_log.log("parse_db_header: OK - file change counter: " + str(HEADER_FILE_CHANGE_COUNTER), 3)

    # Header bytes [28:32]: database size in pages
    HEADER_DATABASE_SIZE_IN_PAGES = int(first_page_hex_string[28 * 2:32 * 2], 16)
    # Check if database file size in header is valid.
    # BUGFIX: use floor division (//) so the page count stays an integer on
    # Python 3 (plain / would yield a float and make the comparison fail).
    if ((DB_FILE_SIZE_IN_BYTES // HEADER_DATABASE_PAGESIZE) != HEADER_DATABASE_SIZE_IN_PAGES):
        if HEADER_DATABASE_SIZE_IN_PAGES == 0:
            # Header field is not set (e.g. through older versions of SQLite)
            _adel_log.log("parse_db_header: OK - database header field for size in pages is not set (e.g. by older SQLite versions): " + str(HEADER_DATABASE_SIZE_IN_PAGES) + " pages", 3)
            HEADER_DATABASE_SIZE_IN_PAGES = DB_FILE_SIZE_IN_BYTES // HEADER_DATABASE_PAGESIZE
            _adel_log.log(" determined database size in pages through calculation (file size / page size): " + str(HEADER_DATABASE_SIZE_IN_PAGES) + " pages", 3)
        else:
            # Raise warning with old size
            _adel_log.log("parse_db_header: WARNING! header field for database size in pages incorrect: " + str(HEADER_DATABASE_SIZE_IN_PAGES) + " pages", 2)
            # Calculate correct size
            HEADER_DATABASE_SIZE_IN_PAGES = DB_FILE_SIZE_IN_BYTES // HEADER_DATABASE_PAGESIZE
            # Raise new size
            _adel_log.log(" determined database size in pages through calculation (file size / page size): " + str(HEADER_DATABASE_SIZE_IN_PAGES) + " pages", 3)
    else:
        _adel_log.log("parse_db_header: OK - database size in pages is: " + str(HEADER_DATABASE_SIZE_IN_PAGES) + " pages", 3)

    # Header bytes [32:36]: first freelist trunk page
    HEADER_FIRST_FREE_TRUNK_PAGE = int(first_page_hex_string[32 * 2:36 * 2], 16)
    _adel_log.log("parse_db_header: OK - first freelist trunk page: " + str(HEADER_FIRST_FREE_TRUNK_PAGE), 3)

    # Header bytes [36:40]: total number of freelist pages
    HEADER_TOTAL_NUMBER_OF_FREELIST_PAGES = int(first_page_hex_string[36 * 2:40 * 2], 16)
    _adel_log.log("parse_db_header: OK - total number of freelist pages: " + str(HEADER_TOTAL_NUMBER_OF_FREELIST_PAGES), 3)

    # Header bytes [40:44]: schema cookie
    HEADER_SCHEMA_COOKIE = int(first_page_hex_string[40 * 2:44 * 2], 16)
    _adel_log.log("parse_db_header: OK - schema cookie: " + str(HEADER_SCHEMA_COOKIE), 3)

    # Header bytes [44:48]: schema format number (must be 1-4)
    # BUGFIX: was "< 1 and > 4", a condition that can never be true, so the
    # warning was unreachable; "or" is the correct range check.
    HEADER_SCHEMA_FORMAT_NUMBER = int(first_page_hex_string[44 * 2:48 * 2], 16)
    if HEADER_SCHEMA_FORMAT_NUMBER < 1 or HEADER_SCHEMA_FORMAT_NUMBER > 4:
        _adel_log.log("parse_db_header: WARNING! invalid schema format number (must be 1-4): " + str(HEADER_SCHEMA_FORMAT_NUMBER), 2)
    else:
        _adel_log.log("parse_db_header: OK - schema format number (must be 1-4): " + str(HEADER_SCHEMA_FORMAT_NUMBER), 3)

    # Header bytes [48:52]: default page cache size
    HEADER_DEFAULT_PAGE_CACHE_SIZE = int(first_page_hex_string[48 * 2:52 * 2], 16)
    _adel_log.log("parse_db_header: OK - default page cache size: " + str(HEADER_DEFAULT_PAGE_CACHE_SIZE), 3)

    # Header bytes [52:56]: largest root b-tree page number
    HEADER_LARGEST_ROOT_BTREE_PAGE_NUMBER = int(first_page_hex_string[52 * 2:56 * 2], 16)
    _adel_log.log("parse_db_header: OK - largest root b-tree page number: " + str(HEADER_LARGEST_ROOT_BTREE_PAGE_NUMBER), 3)

    # Header bytes [56:60]: database text encoding (must be 1-3)
    # BUGFIX: was "HEADER_DATABASE_TEXT_ENCODING < 1 and
    # HEADER_SCHEMA_FORMAT_NUMBER > 3" -- wrong operator AND it compared the
    # schema format number instead of the text encoding.
    HEADER_DATABASE_TEXT_ENCODING = int(first_page_hex_string[56 * 2:60 * 2], 16)
    if HEADER_DATABASE_TEXT_ENCODING < 1 or HEADER_DATABASE_TEXT_ENCODING > 3:
        _adel_log.log("parse_db_header: WARNING! invalid database text encoding (must be 1-3): " + str(HEADER_DATABASE_TEXT_ENCODING), 2)
    else:
        _adel_log.log("parse_db_header: OK - database text encoding (must be 1-3): " + str(HEADER_DATABASE_TEXT_ENCODING), 3)

    # Header bytes [60:64]: user version
    HEADER_USER_VERSION = int(first_page_hex_string[60 * 2:64 * 2], 16)
    _adel_log.log("parse_db_header: OK - user version: " + str(HEADER_USER_VERSION), 3)

    # Header bytes [64:68]: incremental-vacuum mode (1, zero otherwise)
    HEADER_INCREMENTAL_VACCUM_MODE = int(first_page_hex_string[64 * 2:68 * 2], 16)
    if HEADER_INCREMENTAL_VACCUM_MODE != 0 and HEADER_INCREMENTAL_VACCUM_MODE != 1:
        _adel_log.log("parse_db_header: WARNING! invalid incremental-vacuum mode (1, zero otherwise): " + str(HEADER_INCREMENTAL_VACCUM_MODE), 2)
    else:
        _adel_log.log("parse_db_header: OK - incremental-vacuum mode (1, zero otherwise): " + str(HEADER_INCREMENTAL_VACCUM_MODE), 3)

    # Header bytes [68:92]: reservation for expansion (must be 0)
    HEADER_RESERVED_FOR_EXPANSION = int(first_page_hex_string[68 * 2:92 * 2], 16)
    if HEADER_RESERVED_FOR_EXPANSION != 0:
        _adel_log.log("parse_db_header: WARNING! invalid reservation for expansion (must be 0): " + str(HEADER_RESERVED_FOR_EXPANSION), 2)
    else:
        _adel_log.log("parse_db_header: OK - reservation for expansion (must be 0): " + str(HEADER_RESERVED_FOR_EXPANSION), 3)

    # Header bytes [92:96]: version valid for number
    HEADER_VERSION_VALID_FOR_NUMBER = int(first_page_hex_string[92 * 2:96 * 2], 16)
    _adel_log.log("parse_db_header: OK - version valid for number: " + str(HEADER_VERSION_VALID_FOR_NUMBER), 3)

    # Header bytes [96:100]: sqlite version number
    HEADER_SQLITE_VERSION_NUMBER = int(first_page_hex_string[96 * 2:100 * 2], 16)
    _adel_log.log("parse_db_header: OK - sqlite version number: " + str(HEADER_SQLITE_VERSION_NUMBER), 3)

    # Check whether the file change counter is valid
    # (0 = valid, 1 = not valid -- inverted flag kept for compatibility)
    if (HEADER_FILE_CHANGE_COUNTER == HEADER_VERSION_VALID_FOR_NUMBER):
        # Field valid
        HEADER_FILE_CHANGE_COUNTER_VALID = 0
    else:
        # Field not valid
        HEADER_FILE_CHANGE_COUNTER_VALID = 1

    _adel_log.log("parse_db_header: ----> sqlite3 database file header parsed", 3)


#-----------------Example-------------------
#if __name__ == "__main__":
#    # Set date and time
#    DATE = str(datetime.datetime.today()).split(' ')[0]
#    TIME = str(datetime.datetime.today()).split(' ')[1].split('.')[0].split(':')
#    log_file = DATE + "__" + TIME[0] + "-" + TIME[1] + "-" + TIME[2] + "__sqliteParser.log"
#    _adel_log.FILE_HANDLE = open(log_file, "a+")
#
#    # Check database file name
#    number_of_args = len(sys.argv[1:])
#    if number_of_args > 0:
#        file_name = sys.argv[1]
#    else:
#        _adel_log.log("_sqliteParser: WARNING! no database file given --> using test database \"sql3_test.db\"", 2)
#        # initialize the db file for testing purposes if not given as command line argument
#        file_name = "sql3_test.db"
#    if number_of_args > 1:
#        _adel_log.LOG_LEVEL_GLOBAL = sys.argv[2]
#    else:
#        _adel_log.LOG_LEVEL_GLOBAL = 4
#
#    parse_db(file_name)
#
#    # Test databases
#    file_names = []
#    file_names.append(file_name)
#    #file_names.append("sql3_test.db")
#    #file_names.append("contacts.db")
#
#    file_names.append("testDBs/T1_cal.sqlite")
#    file_names.append("testDBs/T2_cookies.sqlite")
#    file_names.append("testDBs/T3_dl.sqlite")
#    file_names.append("testDBs/T4_global-messages.sqlite")
#    file_names.append("testDBs/T5_urls.sqlite")
#
#    file_names.append("testDBs/SE_01_account.db")
#    file_names.append("testDBs/SE_02_user_dict.db")
#    file_names.append("testDBs/SE_03_calendar.db")
#    file_names.append("testDBs/SE_04_contacts.db")
#    file_names.append("testDBs/SE_05_downloads.db")
#    file_names.append("testDBs/SE_06_settings.db")
#    file_names.append("testDBs/SE_07_mmssms.db")
#    file_names.append("testDBs/SE_08_telephony.db")
#
#    file_names.append("testDBs/1_accounts.db")
#    file_names.append("testDBs/2_alarms.db")
#    file_names.append("testDBs/3_auto_dict.db")
#    file_names.append("testDBs/4_calender.db")
#    file_names.append("testDBs/5_colornote.db")
#    file_names.append("testDBs/6_contacts2.db")
#    file_names.append("testDBs/7_downloads.db")
#    file_names.append("testDBs/8_EmailProvider.db")
#    file_names.append("testDBs/9_gmail.db")
#    file_names.append("testDBs/11_mailstore.spreitzenbarth-at-googlemail.com.db")
#    file_names.append("testDBs/22_mmssms.db")
#    file_names.append("testDBs/33_talk.db")
#    file_names.append("testDBs/44_telephony.db")
#    file_names.append("testDBs/55_twitter.db")
#    file_names.append("testDBs/66_user_dict.db")
#    file_names.append("testDBs/77_weather.db")
#
#    for file_name in file_names:
#        _adel_log.log("parseDBs: ----> starting to parse " + str(file_name), 0)
#        result_list = parse_db(file_name)
#        # output to log for test purposes only
#        if result_list != 1:
#            i = 1
#            for result in result_list:
#                _adel_log.log("\nparseDBs: ----> printing DATABASE ELEMENT " + str(i) + " for test purposes....", 0)
#                _adel_log.log(str(result), 0)
#                i += 1
#        else:
#            _adel_log.log("parseDBs: ERROR! could not parse database file \"" + str(file_name) + "\"", 1)
#    _adel_log.log("\nparseDBs: ----> all SQLite databases parsed", 0)
#    _adel_log.log("", 3)
#-----------------Example-------------------
def parse_content_entry(serial_type, record_hex_string, content_offset):
    # Decode a single record value of the given SQLite serial type.
    #
    # serial_type       -- serial type code taken from the record header
    # record_hex_string -- record content as a hex string (2 chars per byte)
    # content_offset    -- offset into the HEX STRING (byte offset * 2)
    #                      where this value starts
    #
    # Returns [content, size_in_bytes], or None for a negative serial type.
    if serial_type < 0:
        _adel_log.log("getEntryContent: WARNING! invalid serial type (must be >= 0): %(serial_type)s" % vars(), 2)
        return None
    _adel_log.log("getEntryContent: ----> retrieving serial type content at relative offset: %(content_offset)s...." % vars(), 4)

    # Zero-length serial types: NULL plus the two integer constants that
    # only exist in schema format 4 and later.
    if serial_type == 0:
        _adel_log.log("getEntryContent: OK - serial type is: NULL, zero bytes in length", 4)
        return [None, 0]
    if serial_type == 8:
        _adel_log.log("getEntryContent: OK - serial type is an integer constant: 0, zero bytes in length", 4)
        return [0, 0]
    if serial_type == 9:
        _adel_log.log("getEntryContent: OK - serial type is an integer constant: 1, zero bytes in length", 4)
        return [1, 0]

    # Fixed-width big-endian twos-complement integers: serial type code
    # -> (stored size in bytes, log message template).
    integer_types = {
        1: (1, "getEntryContent: OK - serial type is: 8-bit twos-complement integer: %(entryContent)s"),
        2: (2, "getEntryContent: OK - serial type is: Big-endian 16-bit twos-complement integer: %(entryContent)s"),
        3: (3, "getEntryContent: OK - serial type is: Big-endian 24-bit twos-complement integer: %(entryContent)s"),
        4: (4, "getEntryContent: OK - serial type is: Big-endian 32-bit twos-complement integer: %(entryContent)s"),
        5: (6, "getEntryContent: OK - serial type is: Big-endian 48-bit twos-complement integer: %(entryContent)s"),
        6: (8, "getEntryContent: OK - serial type is: Big-endian 64-bit twos-complement integer: %(entryContent)s"),
    }
    if serial_type in integer_types:
        byte_count, message = integer_types[serial_type]
        raw_value = int(record_hex_string[content_offset:content_offset + byte_count * 2], 16)
        entryContent = _helpersBinaryOperations.twos_complement_to_int(raw_value, byte_count * 8)
        _adel_log.log(message % vars(), 4)
        return [entryContent, byte_count]

    if serial_type == 7:
        # Reinterpret the 64-bit payload bit pattern as an IEEE 754 double
        # (pack and unpack use the same native byte order, so the pattern
        # is preserved).
        raw_value = int(record_hex_string[content_offset:content_offset + 16], 16)
        entryContent = struct.unpack('d', struct.pack('Q', raw_value))[0]
        _adel_log.log("getEntryContent: OK - serial type is: Big-endian IEEE 754-2008 64-bit floating point number: %(entryContent)s" % vars(), 4)
        return [entryContent, 8]

    if serial_type == 10 or serial_type == 11:
        # Not used, reserved for expansion
        _adel_log.log("getEntryContent: WARNING! invalid serial type (not used, reserved for expansion): %(serial_type)s" % vars(), 2)
        return [None, 0]

    if serial_type >= 12:
        # Either a STRING or a BLOB; payload length is derived from the code.
        payload_size = determine_serial_type_content_size(serial_type)
        raw_payload = record_hex_string[content_offset:content_offset + payload_size * 2]
        return [_helpersStringOperations.hexstring_to_ascii(raw_payload), payload_size]
# NOTE(review): duplicate definition -- parse_content_entry is defined twice
# in this module; this second copy shadows the earlier one at import time.
# Confirm which copy is intended and remove the other.
def parse_content_entry(serial_type, record_hex_string, content_offset):
    """Decode a single record value of the given SQLite serial type.

    serial_type       -- serial type code taken from the record header
    record_hex_string -- record content as a hex string (2 chars per byte)
    content_offset    -- offset into the HEX STRING (byte offset * 2) at
                         which this value starts

    Returns a two-element list [content, size_in_bytes], or None for a
    negative (invalid) serial type.
    """
    # initial checks
    if serial_type < 0:
        _adel_log.log("getEntryContent: WARNING! invalid serial type (must be >= 0): %(serial_type)s" % vars(), 2)
        return None
    _adel_log.log("getEntryContent: ----> retrieving serial type content at relative offset: %(content_offset)s...." % vars(), 4)
    # Initialise result list: [content, size in bytes]
    entry_content_list = []
    if (serial_type == 0):
        # Defined as NULL, zero bytes in length
        entry_content_list.append(None)
        entry_content_list.append(0)
        _adel_log.log("getEntryContent: OK - serial type is: NULL, zero bytes in length", 4)
        return entry_content_list
    if (serial_type == 1):
        # 1-byte twos-complement integer (offsets doubled: 2 hex chars/byte)
        entryContent = _helpersBinaryOperations.twos_complement_to_int(int(record_hex_string[(content_offset):(content_offset + 2)], 16), 1 * 8)
        entry_content_list.append(entryContent)
        entry_content_list.append(1)
        _adel_log.log("getEntryContent: OK - serial type is: 8-bit twos-complement integer: %(entryContent)s" % vars(), 4)
        return entry_content_list
    if (serial_type == 2):
        # 2-byte big-endian twos-complement integer
        entryContent = _helpersBinaryOperations.twos_complement_to_int(int(record_hex_string[(content_offset):(content_offset + 4)], 16), 2 * 8)
        entry_content_list.append(entryContent)
        entry_content_list.append(2)
        _adel_log.log("getEntryContent: OK - serial type is: Big-endian 16-bit twos-complement integer: %(entryContent)s" % vars(), 4)
        return entry_content_list
    if (serial_type == 3):
        # 3-byte big-endian twos-complement integer
        entryContent = _helpersBinaryOperations.twos_complement_to_int(int(record_hex_string[(content_offset):(content_offset + 6)], 16), 3 * 8)
        entry_content_list.append(entryContent)
        entry_content_list.append(3)
        _adel_log.log("getEntryContent: OK - serial type is: Big-endian 24-bit twos-complement integer: %(entryContent)s" % vars(), 4)
        return entry_content_list
    if (serial_type == 4):
        # 4-byte big-endian twos-complement integer
        entryContent = _helpersBinaryOperations.twos_complement_to_int(int(record_hex_string[(content_offset):(content_offset + 8)], 16), 4 * 8)
        entry_content_list.append(entryContent)
        entry_content_list.append(4)
        _adel_log.log("getEntryContent: OK - serial type is: Big-endian 32-bit twos-complement integer: %(entryContent)s" % vars(), 4)
        return entry_content_list
    if (serial_type == 5):
        # serial type 5 occupies 6 bytes (48 bits), not 5
        entryContent = _helpersBinaryOperations.twos_complement_to_int(int(record_hex_string[(content_offset):(content_offset + 12)], 16), 6 * 8)
        entry_content_list.append(entryContent)
        entry_content_list.append(6)
        _adel_log.log("getEntryContent: OK - serial type is: Big-endian 48-bit twos-complement integer: %(entryContent)s" % vars(), 4)
        return entry_content_list
    if (serial_type == 6):
        # serial type 6 occupies 8 bytes (64 bits)
        entryContent = _helpersBinaryOperations.twos_complement_to_int(int(record_hex_string[(content_offset):(content_offset + 16)], 16), 8 * 8)
        entry_content_list.append(entryContent)
        entry_content_list.append(8)
        _adel_log.log("getEntryContent: OK - serial type is: Big-endian 64-bit twos-complement integer: %(entryContent)s" % vars(), 4)
        return entry_content_list
    if (serial_type == 7):
        # Reinterpret the 64-bit payload bit pattern as an IEEE 754 double;
        # pack/unpack use the same native byte order, preserving the pattern.
        entryContent = struct.unpack('d', struct.pack('Q', int(record_hex_string[(content_offset):(content_offset + 16)], 16)))[0]
        entry_content_list.append(entryContent)
        entry_content_list.append(8)
        _adel_log.log("getEntryContent: OK - serial type is: Big-endian IEEE 754-2008 64-bit floating point number: %(entryContent)s" % vars(), 4)
        return entry_content_list
    if (serial_type == 8):
        # Integer constant 0 (only schema format > 4), zero bytes in length
        entry_content_list.append(0)
        entry_content_list.append(0)
        _adel_log.log("getEntryContent: OK - serial type is an integer constant: 0, zero bytes in length", 4)
        return entry_content_list
    if (serial_type == 9):
        # Integer constant 1 (only schema format > 4), zero bytes in length
        entry_content_list.append(1)
        entry_content_list.append(0)
        _adel_log.log("getEntryContent: OK - serial type is an integer constant: 1, zero bytes in length", 4)
        return entry_content_list
    if (serial_type == 10):
        # Not used, reserved for expansion
        _adel_log.log("getEntryContent: WARNING! invalid serial type (not used, reserved for expansion): %(serial_type)s" % vars(), 2)
        entry_content_list.append(None)
        entry_content_list.append(0)
        return entry_content_list
    if (serial_type == 11):
        # Not used, reserved for expansion
        _adel_log.log("getEntryContent: WARNING! invalid serial type (not used, reserved for expansion): %(serial_type)s" % vars(), 2)
        entry_content_list.append(None)
        entry_content_list.append(0)
        return entry_content_list
    if (serial_type >= 12):
        # either a STRING or a BLOB; payload length derived from the code
        entrySize = determine_serial_type_content_size(serial_type)
        entryContent = record_hex_string[(content_offset):(content_offset + (entrySize * 2))]
        # build return list
        entry_content_list.append(_helpersStringOperations.hexstring_to_ascii(entryContent))
        entry_content_list.append(entrySize)
        return entry_content_list