def get_appleMapsApplication(files_found, report_folder, seeker):
    versionnum = 0
    file_found = str(files_found[0])
    
    with open(file_found, 'rb') as f:
        deserialized_plist = plistlib.load(f)
        
        types = {'1': {'type': 'double', 'name': 'Latitude'},
                 '2': {'type': 'double', 'name': 'Longitude'},
                 '3': {'type': 'double', 'name': ''},
                 '4': {'type': 'fixed64', 'name': ''},
                 '5': {'type': 'double', 'name': ''}}
        
        internal_deserialized_plist, di = blackboxprotobuf.decode_message(
            deserialized_plist['__internal__LastActivityCamera'], types)
        latitude = internal_deserialized_plist['Latitude']
        longitude = internal_deserialized_plist['Longitude']
        
        data_list = []
        data_list.append((latitude, longitude))
        report = ArtifactHtmlReport('Apple Maps App')
        report.start_artifact_report(report_folder, 'Apple Maps App')
        report.add_script()
        data_headers = ('Latitude', 'Longitude')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()
        
        tsvname = 'Apple Maps Application'
        tsv(report_folder, data_headers, data_list, tsvname)
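
# The `types` dict above is a blackboxprotobuf typedef: keys are protobuf field
# numbers, and a non-empty 'name' makes decode_message key its output by that
# name instead of the raw number. A minimal, self-contained sketch of the same
# mechanism (the coordinates are made-up values):
import blackboxprotobuf

typedef = {'1': {'type': 'double', 'name': 'Latitude'},
           '2': {'type': 'double', 'name': 'Longitude'}}
# Named fields can also be used as keys when encoding.
encoded = blackboxprotobuf.encode_message(
    {'Latitude': 37.3349, 'Longitude': -122.009}, typedef)
decoded, _ = blackboxprotobuf.decode_message(encoded, typedef)
print(decoded)  # {'Latitude': 37.3349, 'Longitude': -122.009}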
    

            
def test_modify(x, modify_num):
    modify_key = testMessage_typedef[modify_num]["name"]
    message = Test_pb2.TestMessage()
    for key,value in x.items():
        setattr(message, key, value)
    encoded = message.SerializeToString()
    decoded, typedef = blackboxprotobuf.decode_message(encoded, testMessage_typedef)

    # eliminate any cases where protobuf defaults out a field
    hypothesis.assume(modify_key in decoded)

    if isinstance(decoded[modify_key], str):
        mod_func = lambda x: "test"
    elif six.PY2 and isinstance(decoded[modify_key], unicode):
        mod_func = lambda x: six.u("test")
    elif isinstance(decoded[modify_key], bytes):
        mod_func = lambda x: b'test'
    elif isinstance(decoded[modify_key], six.integer_types):
        mod_func = lambda x: 10
    elif isinstance(decoded[modify_key], float):
        mod_func = lambda x: 10.0
    else:
        hypothesis.note("Failed to modify key: %s (%r)" % (modify_key,type(decoded[modify_key]) ))
        assert False

    decoded[modify_key] = mod_func(decoded[modify_key])
    x[modify_key] = mod_func(x[modify_key])

    encoded = blackboxprotobuf.encode_message(decoded, testMessage_typedef)
    message = Test_pb2.TestMessage()
    message.ParseFromString(encoded)

    for key in decoded.keys():
        assert getattr(message, key) == x[key]
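
# The hypothesis.assume call above exists because proto3 serializers omit
# scalar fields set to their default value (0, '', b''), so the decoded dict
# may not contain modify_key at all. blackboxprotobuf simply sees an empty
# wire message in that case:
import blackboxprotobuf

# An empty payload decodes to an empty dict: a field dropped at its default
# value is indistinguishable from a field that was never set.
decoded, typedef = blackboxprotobuf.decode_message(b'')
assert decoded == {} and typedef == {}
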
def test_encode_json(x):
    # Test with JSON payload
    if "testBytes" in x:
        x["testBytes"] = x["testBytes"].decode("latin1")
    json_str = json.dumps(x)

    hypothesis.note("JSON Str Input:")
    hypothesis.note(json_str)
    hypothesis.note(json.loads(json_str))

    encoded = blackboxprotobuf.protobuf_from_json(json_str,
                                                  testMessage_typedef)
    hypothesis.note("BBP decoding:")

    test_decode, _ = blackboxprotobuf.decode_message(encoded,
                                                     testMessage_typedef)
    hypothesis.note(test_decode)

    message = Test_pb2.TestMessage()
    message.ParseFromString(encoded)
    hypothesis.note("Message:")
    hypothesis.note(message)

    for key in x.keys():
        hypothesis.note("Message value")
        hypothesis.note(type(getattr(message, key)))
        hypothesis.note("Original value")
        hypothesis.note(type(x[key]))
        if key == "testBytes":
            x[key] = six.ensure_binary(x[key], encoding="latin1")
        assert getattr(message, key) == x[key]
Example #4
def get_response(url,
                 message,
                 types,
                 force_str_paths=(),
                 xmessage_type=None):
    query = blackboxprotobuf.encode_message(message, types)
    query = len(query).to_bytes(5, byteorder='big') + query
    query = base64.b64encode(query)
    page = REQ.get(url,
                   post=query,
                   content_type='application/grpc-web-text',
                   headers={'accept': 'application/grpc-web-text'})

    data = base64.b64decode(page)
    size = int.from_bytes(data[:5], 'big')
    data = data[5:5 + size]

    message_type = {}
    for path in force_str_paths:
        d = message_type
        *path, key = path
        for k in path:
            d = d.setdefault(k, {
                'type': 'message',
                'message_typedef': {}
            })
            d = d['message_typedef']
        d[key] = {'type': 'bytes'}

    message, types = blackboxprotobuf.decode_message(
        data, message_type)
    return message, types
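
# get_response relies on grpc-web-text framing: one flag byte (0 means
# uncompressed) plus a 4-byte big-endian length, with the whole frame
# base64-encoded. len(query).to_bytes(5, 'big') produces exactly that prefix
# for any payload under 2**32 bytes, since the leading byte comes out zero.
# A standalone sketch of the frame/unframe pair (helper names are made up):
import base64

def frame_grpc_web_text(payload: bytes) -> bytes:
    # flag byte + 4-byte big-endian length + payload, then base64.
    return base64.b64encode(b'\x00' + len(payload).to_bytes(4, 'big') + payload)

def unframe_grpc_web_text(body: bytes) -> bytes:
    data = base64.b64decode(body)
    size = int.from_bytes(data[1:5], 'big')  # skip the flag byte
    return data[5:5 + size]

assert unframe_grpc_web_text(frame_grpc_web_text(b'hello')) == b'hello'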
def test_decode(x):
    message = Test_pb2.TestMessage()
    for key,value in x.items():
        setattr(message, key, value)
    encoded = message.SerializeToString()
    decoded, typedef = blackboxprotobuf.decode_message(encoded, testMessage_typedef)
    hypothesis.note("Decoded: %r" % decoded)
    for key in decoded.keys():
        assert x[key] == decoded[key]
Example #6
def decode_sfx_request(request_content: bytes) -> Dict[str, Any]:
    """
    Decode a SignalFx request message
    :param request_content: Gzipped message
    :type request_content: bytes
    :return: Decoded protobuf message as dict
    :rtype: dict
    """
    gunzipped_message = gzip.GzipFile(
        fileobj=io.BytesIO(request_content)).read()
    decoded_message, _ = blackboxprotobuf.decode_message(gunzipped_message)

    return decoded_message
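
# A usage sketch for the helper above: gzip an arbitrary encoded message and
# feed it through decode_sfx_request (field number and value are made up):
import gzip
import io
import blackboxprotobuf

payload = bytes(blackboxprotobuf.encode_message({'1': 42}, {'1': {'type': 'int'}}))
buf = io.BytesIO()
with gzip.GzipFile(fileobj=buf, mode='wb') as gz:
    gz.write(payload)
print(decode_sfx_request(buf.getvalue()))  # {'1': 42}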
Example #7
def protobuf_parse_not_completed(data):
    pb = blackboxprotobuf.decode_message(data, None)
    completed = pb[0].get('2', {}).get('5', {}).get('1', '')
    created = datetime.utcfromtimestamp(pb[0].get('11', {}).get(
        '1', '')).strftime('%Y-%m-%d %H:%M:%S')
    modified = datetime.utcfromtimestamp(pb[0].get('3', {}).get(
        '1', '')).strftime('%Y-%m-%d %H:%M:%S')
    task = pb[0].get('2', {}).get('2', '').decode()
    task_details = b2s(pb[0].get('2', {}).get('3', ''))
    # duetime = pb[0].get('9',{}).get('1',{}).get('6',{}).get('1','')
    # duetime = datetime.fromtimestamp(duetime).strftime('%Y-%m-%d %H:%M:%S') # TypeError: an integer is required (got type str)
    timezone = b2s(pb[0].get('9', {}).get('1', {}).get('4', ''))
    return task, task_details, created, completed, modified, timezone
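
# The commented-out duetime lines above fail because the field sometimes
# decodes as a string rather than an integer. A defensive wrapper along these
# lines (a suggestion, not part of the original) would avoid the TypeError:
from datetime import datetime

def safe_utc(ts, fmt='%Y-%m-%d %H:%M:%S'):
    # Return '' for anything that is not a usable epoch value.
    try:
        return datetime.utcfromtimestamp(int(ts)).strftime(fmt)
    except (TypeError, ValueError):
        return ''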
Example #8
def test_message_json_inverse(x):
    config = Config()
    typedef, message = x
    encoded = length_delim.encode_message(message, config, typedef)
    decoded_json, typedef_out = blackboxprotobuf.protobuf_to_json(
        encoded, config=config, message_type=typedef)
    encoded_json = blackboxprotobuf.protobuf_from_json(
        decoded_json, config=config, message_type=typedef_out)
    decoded, typedef_out = blackboxprotobuf.decode_message(
        encoded_json, config=config, message_type=typedef)
    assert isinstance(encoded, bytearray)
    assert isinstance(decoded, dict)
    assert message == decoded
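
# The inverse-property test above exercises the JSON path end to end. The same
# round trip reduces to a few lines without the Config plumbing (the typedef
# here is made up):
import blackboxprotobuf

typedef = {'1': {'type': 'int'}}
encoded = blackboxprotobuf.encode_message({'1': 7}, typedef)
as_json, typedef_out = blackboxprotobuf.protobuf_to_json(bytes(encoded), typedef)
re_encoded = blackboxprotobuf.protobuf_from_json(as_json, typedef_out)
decoded, _ = blackboxprotobuf.decode_message(re_encoded, typedef)
assert decoded == {'1': 7}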
    def test_emit_metrics(self):
        with requests_mock.Mocker() as m:
            m.post(requests_mock.ANY, status_code=200)

            @metrics.emit_metrics()
            def main(req, context):
                return None

            main(req="aaaa", context=CONTEXT)
            req_realm = m.last_request.netloc.split(".")[1]

            content = gzip.GzipFile(
                fileobj=io.BytesIO(m.last_request.body)).read()
            message, typedef = blackboxprotobuf.decode_message(content)
            message_dimensions = message["1"]["6"]
            dp_dimensions = helpers.pbuf_dimensions_to_namespace(
                message_dimensions)

            # Check if history request contains azure.function.invocations
            # and azure.function.duration which must be sent when everything
            # is ok
            req_history = m.request_history
            metric_names = list()
            for hist in req_history:
                decoded_msg = helpers.decode_sfx_request(hist.body)
                # print(decoded_msg)
                metric_name = decoded_msg["1"]["2"]
                metric_names.append(metric_name)

            self.assertEqual(req_realm, os.environ.get("SIGNALFX_REALM"))
            self.assertEqual(dp_dimensions.azure_region,
                             utils.REGIONS[ENVVARS["Location"]])
            self.assertEqual(dp_dimensions.azure_function_name,
                             CONTEXT.function_name)
            self.assertEqual(dp_dimensions.azure_resource_name,
                             ENVVARS["WEBSITE_SITE_NAME"])
            self.assertEqual(dp_dimensions.function_wrapper_version,
                             f"{version.name}-{version.version}")
            self.assertEqual(dp_dimensions.is_Azure_Function, "true")
            self.assertEqual(dp_dimensions.metric_source,
                             "azure_function_wrapper")
            self.assertEqual(dp_dimensions.resource_group,
                             ENVVARS["WEBSITE_RESOURCE_GROUP"])
            self.assertEqual(dp_dimensions.subscription_id,
                             ENVVARS["WEBSITE_OWNER_NAME"].split('+')[0])
            self.assertIn(b"azure.function.duration", metric_names)
            self.assertIn(b"azure.function.invocations", metric_names)
            self.assertNotIn(b"azure.function.errors", metric_names)

            m.reset_mock()
    def test_emit_metrics_missing_dim(self):
        @metrics.emit_metrics()
        def main(req, context):
            return None

        with requests_mock.Mocker() as m:
            m.post(requests_mock.ANY, status_code=200)

            main(req="blabla", context=CONTEXT)

            content = gzip.GzipFile(
                fileobj=io.BytesIO(m.last_request.body)).read()
            message, typedef = blackboxprotobuf.decode_message(content)
            message_dimensions = message["1"]["6"]
            dp_dimensions = helpers.pbuf_dimensions_to_namespace(
                message_dimensions)
            self.assertEqual(dp_dimensions.subscription_id, "Unknown")
            self.assertEqual(dp_dimensions.resource_group, "Unknown")
    def test_emit_metrics_with_extra_dimensions(self):
        with requests_mock.Mocker() as m:
            m.post(requests_mock.ANY, status_code=200)

            @metrics.emit_metrics(extra_dimensions={
                "extraDim1": "valueOfExtraDim1",
                "extraDim2": "valueOfExtraDim2"
            })
            def main(req, context):
                return None

            main(req="aaaaaa", context=CONTEXT)

            content = gzip.GzipFile(
                fileobj=io.BytesIO(m.last_request.body)).read()
            message, typedef = blackboxprotobuf.decode_message(content)
            message_dimensions = message["1"]["6"]
            dp_dimensions = helpers.pbuf_dimensions_to_namespace(
                message_dimensions)

            self.assertTrue(hasattr(dp_dimensions, "extraDim1"))
            self.assertTrue(hasattr(dp_dimensions, "extraDim2"))
            self.assertEqual(dp_dimensions.extraDim1, "valueOfExtraDim1")
            self.assertEqual(dp_dimensions.extraDim2, "valueOfExtraDim2")
Example #12
def get_googleChat(files_found, report_folder, seeker, wrap_text):

    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('dynamite.db'):
            continue  # Skip all other files

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        SELECT
        datetime(topic_messages.create_time/1000000,'unixepoch') AS "Message Time (UTC)",
        Groups.name AS "Group Name",
        users.name AS "Sender",
        topic_messages.text_body AS "Message",
        topic_messages.annotation AS "Message Attachment"
        FROM
        topic_messages
        JOIN Groups on Groups.group_id=topic_messages.group_id
        JOIN users ON users.user_id=topic_messages.creator_id
        ORDER BY "Timestamp (UTC)" ASC
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        if usageentries > 0:
            for x in all_rows:
                if x[4] == b'':
                    data_list.append((x[0], x[1], x[2], x[3], '', '', '', '',
                                      '', '', '', ''))
                else:
                    values = blackboxprotobuf.decode_message(x[4])
                    #images section
                    try:
                        item11 = (
                            values[0]['1']['10'].get('3').decode('utf-8'))
                        item12 = (
                            values[0]['1']['10'].get('4').decode('utf-8'))
                        item13 = (values[0]['1']['10']['5']['1'])
                        item14 = (values[0]['1']['10']['5']['2'])
                        data_list.append((x[0], x[1], x[2], x[3], '', '', '',
                                          '', item11, item12, item13, item14))
                        continue
                    except:
                        pass
                    #meeting plain section
                    try:
                        item8 = (
                            values[0]['1']['12']['1']['1'].decode('utf-8'))
                        item9 = (
                            values[0]['1']['12']['1']['3'].decode('utf-8'))
                        item10 = (
                            values[0]['1']['12']['1']['2'].decode('utf-8'))
                        data_list.append((x[0], x[1], x[2], x[3], item9,
                                          item10, '', '', '', '', '', ''))
                        continue
                    except:
                        pass

                    #meeting with sender name
                    try:
                        item4 = (
                            values[0]['1'][0]['12']['1']['1'].decode('utf-8'))
                        item5 = (
                            values[0]['1'][0]['12']['1']['3'].decode('utf-8'))
                        item6 = (values[0]['1'][0]['12']['1']['6']['16']
                                 ['1'].decode('utf-8'))
                        item7 = (values[0]['1'][0]['12']['1']['6']['16']
                                 ['2'].decode('utf-8'))
                        data_list.append((x[0], x[1], x[2], x[3], item5, item6,
                                          item7, '', '', '', '', ''))
                        continue
                    except:
                        pass

                    try:
                        item1 = (
                            values[0]['1'][0]['12']['1']['1'].decode('utf-8'))
                        item2 = (
                            values[0]['1'][0]['12']['1']['3'].decode('utf-8'))
                        item3 = (
                            values[0]['1'][0]['12']['1']['2'].decode('utf-8'))
                        data_list.append((x[0], x[1], x[2], x[3], item2, item3,
                                          '', '', '', '', '', ''))
                    except:
                        pass

        if usageentries > 0:
            report = ArtifactHtmlReport('Google Chat Messages')
            report.start_artifact_report(report_folder, 'Chat Messages')
            report.add_script()
            data_headers = ('Message Timestamp (UTC)', 'Group Name', 'Sender',
                            'Message', 'Meeting Code', 'Meeting URL',
                            'Meeting Sender', 'Meeting Sender Profile Pic URL',
                            'Filename', 'File Type', 'Width', 'Height')

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'Google Chat Messages'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'Google Chat Messages'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Google Chat Messages data available')

        cursor.execute('''
        SELECT
        datetime(Groups.create_time/1000000,'unixepoch') AS "Group Created Time (UTC)",
        Groups.name AS "Group Name",
        users.name AS "Group Creator",
        datetime(Groups.last_view_time/1000000,'unixepoch') AS "Time Group Last Viewed (UTC)"
        FROM
        Groups
        JOIN users ON users.user_id=Groups.creator_id
        ORDER BY "Group Created Time (UTC)" ASC
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport('Google Chat Group Information')
            report.start_artifact_report(report_folder, 'Group Information')
            report.add_script()
            data_headers = ('Group Created Time (UTC)', 'Group Name',
                            'Group Creator', 'Time Group Last Viewed (UTC)')
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3]))

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'Google Chat Group Information'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'Google Chat Group Information'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Google Chat Group Information data available')

        db.close()
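
# The cascade of try/except blocks above probes alternative message shapes by
# field number. A hypothetical helper that walks a nested blackboxprotobuf
# dict and returns a default when any level is missing expresses each probe
# in one line:
def dig(msg, *path, default=''):
    # Walk nested dicts by protobuf field number; bail out with `default`
    # as soon as a level is missing or is not a dict.
    for key in path:
        if not isinstance(msg, dict) or key not in msg:
            return default
        msg = msg[key]
    return msg

# e.g. item11 = dig(values[0], '1', '10', '3')  (decode bytes afterwards as needed)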
Example #13
def get_quicksearch(files_found, report_folder, seeker, wrap_text):
    sessions = []
    base_folder = ''
    for file_found in files_found:
        file_found = str(file_found)
        if file_found.find('{0}mirror{0}'.format(slash)) >= 0:
            # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data
            continue
        elif os.path.isdir(
                file_found):  # skip folders (there shouldn't be any)
            continue

        base_folder = os.path.dirname(file_found)
        file_name = os.path.basename(file_found)
        with open(file_found, 'rb') as f:
            pb = f.read()
            values, types = blackboxprotobuf.decode_message(pb)
            file_last_mod_date = str(ReadUnixTime(
                os.path.getmtime(file_found)))
            s = parse_session_data(values, file_name, file_last_mod_date,
                                   report_folder)
            sessions.append(s)

    if report_folder[-1] == slash:
        folder_name = os.path.basename(report_folder[:-1])
    else:
        folder_name = os.path.basename(report_folder)
    entries = len(sessions)
    if entries > 0:
        description = "Recently searched terms from the Google Search widget and any interaction with the Google Personal Assistant / app (previously "\
                        "known as 'Google Now') appear here. This can include previously searched items from another device too!"
        report = ArtifactHtmlReport('Google App & Quick Search queries')
        report.start_artifact_report(report_folder,
                                     'Searches & Personal assistant',
                                     description)
        report.add_script()
        data_headers = ('File Timestamp', 'Type', 'Queries', 'Response',
                        'Source File')
        data_list = []
        for s in sessions:
            response = ''
            if s.mp3_path:
                filename = os.path.basename(s.mp3_path)
                response = f'<audio controls><source src="{folder_name}/{filename}"></audio>'
            data_list.append((s.file_last_mod_date, s.session_type,
                              escape(', '.join(s.session_queries)), response,
                              s.source_file))

        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         base_folder,
                                         html_escape=False)
        report.end_artifact_report()

        tsvname = f'google quick search box'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Google Quick Search Box'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No recent quick search or now data available')
Example #14
def get_googleNowPlaying(files_found, report_folder, seeker, wrap_text):
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.find('{0}mirror{0}'.format(slash)) >= 0:
            # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data
            continue
        elif not file_found.endswith('history_db'):
            continue # Skip all other files (-wal)

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        Select
        CASE
            timestamp 
            WHEN
                "0" 
            THEN
                "" 
            ELSE
                datetime(timestamp / 1000, "unixepoch")
        END AS "timestamp",
        history_entry
        FROM
        recognition_history
        ''')
        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            description = 'This is data stored by the Now Playing feature in Pixel phones, which '\
                        'shows song data on the lock screen for any music playing nearby. It\'s ' \
                        'part of <a href="https://play.google.com/store/apps/details?id=com.google.intelligence.sense"'\
                        ' target="_blank">Pixel Ambient Services</a> or part of <a href="https://play.google.com/store/apps/details?id=com.google.android.as"'\
                        ' target="_blank">Pixel Device Personalization Services</a> depending on OS version.'
            report = ArtifactHtmlReport('Now Playing History')
            report.start_artifact_report(report_folder, 'Now Playing', description)
            report.add_script()
            
            data_headers = ('Timestamp', 'Timezone', 'Song Title', 'Artist', 'Duration',
                            'Album', 'Album Year')
            data_list = []

            pb_types = {'9': {'type': 'message', 'message_typedef': {
                            # read field 6 as a double instead of generic fixed64
                            '6': {'type': 'double', 'name': ''}
                        }}}
            last_data_set = [] # Since there are a lot of similar entries differing only in timestamp, we can combine them.
            
            for row in all_rows:
                timestamp = row[0]
                pb = row[1]

                data, actual_types = blackboxprotobuf.decode_message(pb, pb_types)
                data = recursive_convert_bytes_to_str(data)
                
                try:             timezones = FilterInvalidValue(data["7"])
                except KeyError: timezones = ''

                try:             songtitle = FilterInvalidValue(data["9"]["3"])
                except KeyError: songtitle = ''

                try:             artist = FilterInvalidValue(data["9"]["4"]) 
                except KeyError: artist = ''

                try:             durationinsecs = data["9"]["6"]
                except KeyError: durationinsecs = ''

                try:             album = FilterInvalidValue(data["9"]["13"])
                except KeyError: album = ''

                try:             year = FilterInvalidValue(data["9"]["14"])
                except KeyError: year = ''
                
                duration = ''  # avoid UnboundLocalError when durationinsecs is empty
                if durationinsecs:
                    duration = time.strftime('%H:%M:%S', time.gmtime(durationinsecs))
                if not last_data_set:
                    last_data_set = [timestamp, escape(timezones), escape(songtitle), escape(artist), duration, escape(album), year]
                elif AreContentsSame(last_data_set, timezones, songtitle, artist, duration, album, year):
                    if last_data_set[0] != timestamp: # skip exact duplicates
                        last_data_set[0] += ',<br />' + timestamp
                else:
                    data_list.append(last_data_set)
                    # start a new group from the current row instead of dropping it
                    last_data_set = [timestamp, escape(timezones), escape(songtitle), escape(artist), duration, escape(album), year]
            if last_data_set:
                data_list.append(last_data_set)
            logfunc("{} entries grouped into {}".format(usageentries, len(data_list)))
            report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
            report.end_artifact_report()
            
            tsvname = f'Google Now Playing'
            tsv(report_folder, data_headers, data_list, tsvname)
            
            tlactivity = f'Google Now Playing'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Google Now Playing history')

        db.close()
        return
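
# pb_types above forces field 9.6 to decode as a double. double and fixed64
# share wire type 1 (eight little-endian bytes), so without the hint
# blackboxprotobuf falls back to fixed64 and the duration surfaces as a
# meaningless integer. A sketch of the difference:
import blackboxprotobuf

raw = bytes(blackboxprotobuf.encode_message({'6': 185.0}, {'6': {'type': 'double'}}))
guessed, _ = blackboxprotobuf.decode_message(raw)  # default fixed64 guess
forced, _ = blackboxprotobuf.decode_message(raw, {'6': {'type': 'double'}})
print(guessed['6'])  # raw 64-bit integer view of the same bytes
print(forced['6'])   # 185.0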
Example #15
def run(unfurl, node):
    def parse_protobuf_into_nodes(pb_value_dict, pb_types, edge_type=None):
        assert isinstance(pb_value_dict, dict), \
            f'"parse_protobuf_into_nodes" expects a dict, but got {type(pb_value_dict)} as input'

        if len(pb_value_dict) > 0:
            for field_number, field_value in pb_value_dict.items():
                if isinstance(field_value,
                              (str, int, float, bytes, bytearray)):
                    unfurl.add_to_queue(
                        data_type='proto',
                        key=field_number,
                        value=str(field_value),
                        hover=f'Field number <b>{field_number}</b> has a wire '
                        f'type of {wire_types[pb_types[field_number]["type"]]}',
                        parent_id=node.node_id,
                        incoming_edge_config=edge_type)
                elif isinstance(field_value, dict):
                    unfurl.add_to_queue(
                        data_type='proto.dict',
                        key=field_number,
                        value={
                            'field_values':
                            field_value,
                            'field_types':
                            pb_types[field_number]["message_typedef"]
                        },
                        label=f'{field_number}: {field_value}',
                        hover=f'Field number <b>{field_number}</b> '
                        f'is a nested message; it will be parsed further into more nodes',
                        parent_id=node.node_id,
                        incoming_edge_config=edge_type)
                elif isinstance(field_value, list):
                    nested_types = pb_types[field_number]
                    if pb_types[field_number].get("message_typedef"):
                        nested_types = pb_types[field_number][
                            "message_typedef"]

                    unfurl.add_to_queue(
                        data_type='proto.list',
                        key=field_number,
                        value={
                            'field_values': field_value,
                            'field_types': nested_types
                        },
                        label=f'{field_number}: {field_value}',
                        hover=f'Field number <b>{field_number}</b> '
                        f'is a nested message; it will be parsed further into more nodes',
                        parent_id=node.node_id,
                        incoming_edge_config=edge_type)

    if node.data_type == 'proto.dict':
        parse_protobuf_into_nodes(node.value.get('field_values'),
                                  node.value.get('field_types'), proto_edge)
        return

    if node.data_type == 'proto.list':
        for field_value in node.value['field_values']:
            field_types = node.value.get('field_types')
            if not isinstance(field_value, dict):
                field_value = {node.key: field_value}
                field_types = {node.key: field_types}
            parse_protobuf_into_nodes(field_value, field_types, proto_edge)
        return

    if node.data_type == 'bytes':
        try:
            protobuf_values, protobuf_values_types = blackboxprotobuf.decode_message(
                node.value)
            parse_protobuf_into_nodes(protobuf_values, protobuf_values_types,
                                      proto_edge)
            return

        # This will often fail for a wide array of reasons when it tries to parse a non-pb as a pb
        except Exception:
            pass

    if not isinstance(node.value, str):
        return False

    if len(node.value) % 4 == 1:
        # A valid b64 string will not be this length
        return False

    if node.data_type.startswith(('uuid', 'hash')):
        return False

    urlsafe_b64_m = utils.urlsafe_b64_re.fullmatch(node.value)
    standard_b64_m = utils.standard_b64_re.fullmatch(node.value)
    hex_m = utils.hex_re.fullmatch(node.value)
    long_int_m = utils.long_int_re.fullmatch(node.value)
    all_digits_m = utils.digits_re.fullmatch(node.value)
    all_letters_m = utils.letters_re.fullmatch(node.value)

    if hex_m and not (all_digits_m or all_letters_m):
        decoded = bytes.fromhex(node.value)
        try:
            protobuf_values, protobuf_values_types = blackboxprotobuf.decode_message(
                decoded)
            parse_protobuf_into_nodes(protobuf_values, protobuf_values_types,
                                      hex_proto_edge)
            return

        # This will often fail for a wide array of reasons when it tries to parse a non-pb as a pb
        except Exception:
            return

    elif urlsafe_b64_m and not (all_digits_m or all_letters_m):
        try:
            decoded = base64.urlsafe_b64decode(
                unfurl.add_b64_padding(node.value))
            protobuf_values, protobuf_values_types = blackboxprotobuf.decode_message(
                decoded)
            parse_protobuf_into_nodes(protobuf_values, protobuf_values_types,
                                      b64_proto_edge)
            return

        # This will often fail for a wide array of reasons when it tries to parse a non-pb as a pb
        except Exception:
            return

    elif standard_b64_m and not (all_digits_m or all_letters_m):
        try:
            decoded = base64.b64decode(unfurl.add_b64_padding(node.value))
            protobuf_values, protobuf_values_types = blackboxprotobuf.decode_message(
                decoded)
            parse_protobuf_into_nodes(protobuf_values, protobuf_values_types,
                                      b64_proto_edge)
            return

        # This will often fail for a wide array of reasons when it tries to parse a non-pb as a pb
        except Exception:
            return
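
# The len(node.value) % 4 == 1 early exit above follows from how base64 maps
# 3 input bytes to 4 output characters: unpadded encodings always have length
# 0, 2, or 3 mod 4, never 1. A quick check:
import base64

for n in range(1, 10):
    s = base64.b64encode(b'x' * n).rstrip(b'=')  # unpadded form
    assert len(s) % 4 != 1
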
def get_quicksearch_recent(files_found, report_folder, seeker, wrap_text):
    recents = []
    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('.jpg'):
            continue  # Skip jpg files, all others should be protobuf
        elif file_found.find('{0}mirror{0}'.format(slash)) >= 0:
            # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data
            continue
        elif os.path.isdir(file_found):  # skip folders
            continue

        with open(file_found, 'rb') as f:
            pb = f.read()
            types = {
                '1': {
                    'type': 'message',
                    'message_typedef': {
                        '1': {
                            'type': 'uint',
                            'name': 'id'
                        },
                        '4': {
                            'type': 'uint',
                            'name': 'timestamp1'
                        },
                        '5': {
                            'type': 'str',
                            'name': 'search-query'
                        },
                        '7': {
                            'type': 'message',
                            'message_typedef': {
                                '1': {
                                    'type': 'str',
                                    'name': 'url'
                                },
                                '2': {
                                    'type': 'str',
                                    'name': 'url-domain'
                                },
                                '3': {
                                    'type': 'str',
                                    'name': 'title'
                                }
                            },
                            'name': 'page'
                        },
                        '8': {
                            'type': 'message',
                            'message_typedef': {
                                '1': {
                                    'type': 'str',
                                    'name': 'category'
                                },
                                '2': {
                                    'type': 'str',
                                    'name': 'engine'
                                }
                            },
                            'name': 'search'
                        },
                        '9': {
                            'type': 'int',
                            'name': 'screenshot-id'
                        },
                        '17': {
                            'type': 'uint',
                            'name': 'timestamp2'
                        },
                    },
                    'name': ''
                }
            }
            values, types = blackboxprotobuf.decode_message(pb, types)
            items = values.get('1', None)
            if items:
                if isinstance(items, dict):
                    # this means only one element was found
                    # No array, just a dict of that single element
                    recents.append((file_found, [items]))
                else:
                    # Array of dicts found
                    recents.append((file_found, items))

    if report_folder[-1] == slash:
        folder_name = os.path.basename(report_folder[:-1])
    else:
        folder_name = os.path.basename(report_folder)
    recent_entries = len(recents)
    if recent_entries > 0:
        description = "Recently searched terms from the Google Search widget and webpages read from Google app (previously known as 'Google Now') appear here."
        report = ArtifactHtmlReport('Google Now & Quick Search recent events')
        report.start_artifact_report(report_folder,
                                     'Recent Searches & Google Now',
                                     description)
        report.add_script()
        data_headers = ('Screenshot', 'Protobuf Data')
        data_list = []
        for file_path, items in recents:
            dir_path, base_name = os.path.split(file_path)
            for item in items:
                screenshot_id = str(item.get('screenshot-id', ''))
                screenshot_file_path = os.path.join(
                    dir_path, f'{base_name}-{screenshot_id}.jpg')
                if os.path.exists(screenshot_file_path):
                    shutil.copy2(screenshot_file_path, report_folder)
                img_html = '<a href="{1}/{0}"><img src="{1}/{0}" class="img-fluid" style="max-height:600px; min-width:300px" title="{0}"></a>'.format(
                    f'{base_name}-{screenshot_id}.jpg', folder_name)

                platform = is_platform_windows()
                if platform:
                    img_html = img_html.replace('?', '')

                recursive_convert_bytes_to_str(
                    item)  # convert all 'bytes' to str
                data_list.append((
                    img_html, '<pre id="json" style="font-size: 110%">' +
                    escape(json.dumps(item, indent=4)).replace('\\n', '<br>') +
                    '</pre>'))

        report.write_artifact_data_table(data_headers,
                                         data_list,
                                         dir_path,
                                         html_escape=False)
        report.end_artifact_report()

        tsvname = f'google quick search box recent'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No recent quick search or now data available')
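
# The isinstance(items, dict) branch above handles a blackboxprotobuf quirk:
# a field that occurs once decodes to a bare value, while a repeated field
# decodes to a list. A small normalizer (hypothetical, mirroring that branch)
# keeps callers uniform:
def as_list(value):
    return value if isinstance(value, list) else [value]

# recents.append((file_found, as_list(items)))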
Example #17
def get_kikGroupadmins(files_found, report_folder, seeker):
    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('kik.sqlite'):
            break

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    Select ZKIKUSER.Z_PK, /*User ID*/
        ZKIKUSER.ZDISPLAYNAME, /*Display Name*/
        ZKIKUSER.ZUSERNAME, /*Username, if available*/
        ZKIKUSER.ZPPURL, /*Profile Picture URL*/
        Z_9ADMINSINVERSE.Z_9ADMINSINVERSE, /*Group ID of group where user is an administrator. */
        ZKIKUSEREXTRA.ZENTITYUSERDATA, /*BLOB from ZKIKUSEREXTRA that contains additional user information. */
        ZKIKUSEREXTRA.ZROSTERENTRYDATA /*Field from ZKIKUSEREXTRA that contains additional user information*/
    From ZKIKUSER
    Inner Join Z_9ADMINSINVERSE On ZKIKUSER.Z_PK = Z_9ADMINSINVERSE.Z_9ADMINS /*(matched Z_PK from ZKIKUSER table with Z_9ADMINS from Z_9ADMINSINVERSE table)*/
    LEFT JOIN ZKIKUSEREXTRA On ZKIKUSER.Z_PK = ZKIKUSEREXTRA.ZUSER /*(matched Z_PK from ZKIKUSER with ZUSER from ZKIKUSEREXTRA)*/
order by Z_9ADMINSINVERSE
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    data_list = []
    finalintlist = ''
    blobuser = ''
    blobdesc = ''
    addinfousera = ''
    addinfouserb = ''
    addinfodisp = ''
    addinfdesc = ''
    if usageentries > 0:

        for row in all_rows:

            cursor2 = db.cursor()
            cursor2.execute(f'''
            SELECT ZGROUPTAG,
                ZDISPLAYNAME,
                ZJID,
                ZPPURL
            FROM ZKIKUSER 
            WHERE Z_PK = {row[4]}
            ''')

            all_rows2 = cursor2.fetchall()
            for rows2 in all_rows2:
                grouptag = rows2[0]
                groupdname = rows2[1]
                zjid = rows2[2]
                zpurl = rows2[3]

            if row[6] is None:
                pass
            else:
                addinfousera = ''
                addinfouserb = ''
                addinfodisp = ''
                addinfdesc = ''

                data, typess = blackboxprotobuf.decode_message(row[6])
                addinfousera = data['1']['1']
                if type(addinfousera) is dict:
                    addinfousera = ''
                else:
                    addinfousera = data['1']['1'].decode('utf-8')

                if data.get('2') is not None:
                    addinfdesc = data.get('2')['1']['1']
                    if type(addinfdesc) is dict:
                        addinfdesc = ''
                    else:
                        addinfdesc = data.get('2')['1']['1'].decode('utf-8')

                addinfouserb = data['3']['1'].decode('utf-8')

                addinfodisp = data['4']['1']
                if type(addinfodisp) is dict:
                    addinfodisp = ''
                else:
                    addinfodisp = data['4']['1'].decode('utf-8')

            if row[5] is not None:

                finalintlist = ''
                blobuser = ''
                blobdesc = ''

                data, typess = blackboxprotobuf.decode_message(row[5])

                if (data['1'].get('5')) is not None:
                    listofinterests = (data['1']['5']['1'])
                    for x in listofinterests:
                        finalintlist = (x['2'].decode('utf-8')
                                        ) + ', ' + finalintlist  #interests
                    finalintlist = finalintlist[:-2]

                if type(data['1'].get('1')) is bytes:
                    blobuser = (data['1'].get('1').decode('utf-8'))  #Username?
                if type(data['1'].get('1')) is dict:
                    if (data['1']['1'].get('1')) is not None:
                        blobdesc = (data['1']['1'].get('1').decode('utf-8')
                                    )  #Description

                if (data['1'].get('7')) is not None:
                    if type(data['1']['7'].get('1')) is dict:
                        pass  # a nested dictionary with values we don't need
                    else:
                        blobname = ((data['1']['7'].get('1').decode('utf-8'))
                                    )  #some name?
                if (data.get('102')) is not None:
                    blobpicfull = (data['102']['1']['2']['1'].decode('utf-8')
                                   )  #profilepic full
                    blobpicthu = (data['102']['1']['2']['2'].decode('utf-8')
                                  )  #profile pic thumb
                if (data.get('104')) is not None:
                    listofinterests = data['104']['1']
                    for x in listofinterests:
                        finalintlist = (x['2'].decode('utf-8')
                                        ) + ', ' + finalintlist  #interests
                    finalintlist = finalintlist[:-2]

            data_list.append(
                (row[0], row[1], row[2], row[3], row[4], grouptag, groupdname,
                 zjid, zpurl, blobuser, blobdesc, finalintlist, addinfousera,
                 addinfouserb, addinfodisp, addinfdesc))

        description = 'Kik users that are Administrators of a group.'
        report = ArtifactHtmlReport('Kik Group Administrators')
        report.start_artifact_report(report_folder, 'Kik Group Administrators',
                                     description)
        report.add_script()
        data_headers = ('User ID', 'Display Name', 'Username',
                        'Profile Pic URL', 'Member Group ID', 'Group Tag',
                        'Group Name', 'Group ID', 'Group Pic URL', 'Blob User',
                        'Blob Description', 'Blob Interests',
                        'Additional Info User A', 'Additional Info User B',
                        'Additional Info Display', 'Additional Info Value')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Kik Group Administrators'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc('No Kik Group Administrators data available')
Example #18
def convert(input):
    # Py2 helper: recursively encode unicode keys/values as UTF-8 bytes and
    # widen ints to long so the structure re-encodes cleanly as protobuf.
    if isinstance(input, dict):
        return dict(
            (convert(key), convert(value)) for key, value in input.iteritems())
    elif isinstance(input, list):
        return [convert(element) for element in input]
    elif isinstance(input, unicode):
        return input.encode('utf-8')
    elif isinstance(input, int):
        return long(input)
    else:
        return input


with open('payload.pickle', 'rb') as handle:
    deserialized = pickle.load(handle)

with open('filename.pickle', 'rb') as handle:
    path = pickle.load(handle)
#	print(path)

#Dirty hack to have a typedef
with open('../../%s' % path, 'rb') as f:  # binary mode: decode_message expects bytes
    message, typedef = blackboxprotobuf.decode_message(f.read())

#ready=blackboxprotobuf.encode_message(deserialized,typedef)

#toencode,typedef = convert(deserialized)
data = blackboxprotobuf.encode_message(convert(deserialized), typedef)
#print(data)
with open('final.pickle', 'wb') as handle:
    pickle.dump(data, handle, protocol=pickle.HIGHEST_PROTOCOL)
Example #19
#! /usr/bin/python
import sys
sys.path.insert(1, './blackboxprotobuf')
sys.path.insert(1, '../../modules/protobuf-3.11.4/python')
import blackboxprotobuf
import pickle

if len(sys.argv) != 2:
    print("Usage:", sys.argv[0], "PROTOBUF FILE TO DECODE")
    sys.exit(-1)

with open(sys.argv[1], 'rb') as f:
    data = f.read()
message, typedef = blackboxprotobuf.decode_message(data)
with open('decoded.pickle', 'wb') as handle:
    pickle.dump(message, handle, protocol=pickle.HIGHEST_PROTOCOL)

json_str, typedef = blackboxprotobuf.protobuf_to_json(data)  # avoid shadowing the json module
print(json_str)
print(type(json_str))
with open(sys.argv[1] + '.json', 'w') as f2:
    f2.write(json_str)
Example #20
def get_mapsSync(files_found, report_folder, seeker):

    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('MapsSync_0.0.1'):
            continue  # Skip all other files

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        SELECT
        datetime(ZHISTORYITEM.ZCREATETIME+978307200,'UNIXEPOCH','localtime') AS 'Time Created',
        datetime(ZHISTORYITEM.ZMODIFICATIONTIME+978307200,'UNIXEPOCH','localtime') AS 'Time Modified',
        ZHISTORYITEM.z_pk AS 'Item Number',
        CASE
        when ZHISTORYITEM.z_ent = 14 then 'coordinates of search'
        when ZHISTORYITEM.z_ent = 16 then 'location search'
        when ZHISTORYITEM.z_ent = 12 then 'navigation journey'
        end AS 'Type',
        ZHISTORYITEM.ZQUERY AS 'Location Search',
        ZHISTORYITEM.ZLOCATIONDISPLAY AS 'Location City',
        ZHISTORYITEM.ZLATITUDE AS 'Latitude',
        ZHISTORYITEM.ZLONGITUDE AS 'Longitude',
        ZHISTORYITEM.ZROUTEREQUESTSTORAGE AS 'Journey BLOB',
        ZMIXINMAPITEM.ZMAPITEMSTORAGE as 'Map Item Storage BLOB'
        from ZHISTORYITEM
        left join ZMIXINMAPITEM on ZMIXINMAPITEM.Z_PK=ZHISTORYITEM.ZMAPITEM
        ''')

        # Above query courtesy of CheekyForensicsMonkey
        # https://cheeky4n6monkey.blogspot.com/2020/11/ios14-maps-history-blob-script.html

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            data_list = []
            for row in all_rows:
                #print(row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7])
                directa = ''
                directb = ''
                mapitem = ''
                agg1 = ''
                if row[8] is None:
                    pass
                #pp = pprint.PrettyPrinter(indent = 1)
                #pp.pprint(message)
                else:
                    message, types = blackboxprotobuf.decode_message(row[8])

                    for x in message['1']:
                        for y in x['2']['1']['4']:
                            z = y.get('8')
                            if z is None:
                                pass
                            else:
                                if isinstance(z, dict):
                                    w = z.get('31')
                                    if w is None:
                                        pass
                                    else:
                                        three = get_recursively(w, '3')
                                        if three[1] == b'create':
                                            #print(f'Three: {three[1]}')
                                            if message['1'][1]['1'].get(
                                                    '2') is not None:
                                                for address in (
                                                        message['1'][1]['1']
                                                    ['2']['6']):
                                                    directa = directa + ' ' + (
                                                        address.decode(
                                                            'latin-1'))
                                                    #print(row[0],directa, 'directa')
                                                if agg1 == '':
                                                    agg1 = directa
                                                    directa = ''
                                                else:
                                                    agg1 = agg1 + ' <---> ' + directa

                                            else:
                                                for address in (w['1']['101']
                                                                ['2']['11']):
                                                    directa = directa + ' ' + (
                                                        address.decode(
                                                            'latin-1'))
                                                    #print(row[0], directb, 'directb')
                                                if agg1 == '':
                                                    agg1 = directa
                                                    directa = ''
                                                else:
                                                    agg1 = agg1 + ' <---> ' + directa

                if row[9] is None:
                    pass
                else:
                    message, types = blackboxprotobuf.decode_message(row[9])
                    #pp = pprint.PrettyPrinter(indent = 1)
                    #pp.pprint(message['1']['4'])#[7]['8']['31']['1']['101']['2']['11'])
                    get101 = (get_recursively(message, '101'))

                    for address in (get101[0]['2']['11']):
                        mapitem = mapitem + ' ' + (address.decode('latin-1'))

                data_list.append((row[0], row[1], row[2], row[3], row[4],
                                  row[5], row[6], row[7], agg1, mapitem))
                agg1 = ''

        if usageentries > 0:
            description = 'Disclaimer: Entries should be corroborated. Locations and searches from other linked devices might show up here. Travel should be confirmed. Medium confidence.'
            report = ArtifactHtmlReport('MapsSync')
            report.start_artifact_report(report_folder, 'MapsSync',
                                         description)
            report.add_script()
            data_headers = ('Timestamp', 'Modified Time', 'Item Number',
                            'Type', 'Location Search', 'Location City',
                            'Latitude', 'Longitude', 'Journey BLOB Item',
                            'Map Item Storage BLOB item')

            report.write_artifact_data_table(data_headers, data_list,
                                             file_found)
            report.end_artifact_report()

            tsvname = f'MapsSync'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'MapsSync'
            timeline(report_folder, tlactivity, data_list, data_headers)

            kmlactivity = 'MapsSync'
            kmlgen(report_folder, kmlactivity, data_list, data_headers)
        else:
            logfunc('No MapsSync data available')
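
# get_recursively is used above without being shown. A sketch of the
# ALEAPP-style helper it appears to be (a reconstruction, not the original):
# collect every value stored under a given key anywhere in a nested structure.
def get_recursively(search_dict, field):
    # Depth-first search of nested dicts/lists for every value under `field`.
    found = []
    if isinstance(search_dict, dict):
        for key, value in search_dict.items():
            if key == field:
                found.append(value)
            found.extend(get_recursively(value, field))
    elif isinstance(search_dict, list):
        for item in search_dict:
            found.extend(get_recursively(item, field))
    return found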
Example #21
def get_googleCallScreen(files_found, report_folder, seeker, wrap_text):
    
    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('callscreen_transcripts'):
            continue # Skip all other files
    
        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        select
        datetime(lastModifiedMillis/1000,'unixepoch'),
        audioRecordingFilePath,
        conversation,
        id,
        replace(audioRecordingFilePath, rtrim(audioRecordingFilePath, replace(audioRecordingFilePath, '/', '')), '') as 'File Name'
        from Transcript
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        data_list = []
        
        pb_types = {'1': {'type': 'message', 'message_typedef':
                    {   
                        '1': {'name': 'timestamp1', 'type': 'int'},
                        '2': {'name': '', 'type': 'int'},
                        '3': {'name': 'convo_text', 'type': 'str'},
                        '4': {'name': '', 'type': 'int'},
                        '5': {'name': '', 'type': 'int'},
                        '6': {'name': '', 'type': 'bytes'},
                        '7': {'name': '', 'type': 'int'},
                        '9': {'name': '', 'type': 'int'}},
                    'name': '',
                    'type': 'message'}}
        
        if usageentries > 0:
            for row in all_rows:
            
                lm_ts = row[0]
                recording_path = row[1]
                pb = row[2]
                convo_id = row[3]
                recording_filename = row[4]
                audio_clip = ''
                conversation = ''
                
                data, actual_types = blackboxprotobuf.decode_message(pb, pb_types)
                
                for x in data['1']:
    
                    convo_timestamp = str(datetime.fromtimestamp(x['timestamp1']/1000)) + '<br>'
                    convo_transcript = x['convo_text'] + '<br><br>'
                    conversation += convo_timestamp + convo_transcript
                    
                for match in files_found:
                    if recording_filename in match:
                        shutil.copy2(match, report_folder)
                        audio_file_path = os.path.abspath(match)
                        audio_clip = '''
                            <audio controls>
                                <source src="{}" type="audio/wav">
                                <p>Your browser does not support HTML5 audio elements.</p>
                            </audio>
                            '''.format(audio_file_path)
                                
                data_list.append((lm_ts,recording_path,conversation,audio_clip))
        
            description = 'Transcripts and recordings from the use of Google Assistant\'s Call Screen feature. '\
                          'Timestamps found in the Conversation column are in the local timezone offset.'
            report = ArtifactHtmlReport('Google Call Screen')
            report.start_artifact_report(report_folder, 'Google Call Screen', description)
            report.add_script()
            data_headers = ('Timestamp','Recording File Path','Conversation','Audio')

            report.write_artifact_data_table(data_headers, data_list, file_found, html_no_escape=['Audio','Conversation'])
            report.end_artifact_report()
            
            tsvname = f'Google Call Screen'
            tsv(report_folder, data_headers, data_list, tsvname)
            
            tlactivity = f'Google Call Screen'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Google Call Screen data available')
    
        db.close()
    return
Example #22
def test_anon_json_decode(x):
    config = Config()
    typedef, message = x
    encoded = blackboxprotobuf.encode_message(message,
                                              config=config,
                                              message_type=typedef)
    decoded_json, typedef_out = blackboxprotobuf.protobuf_to_json(
        encoded, config=config)
    encoded_json = blackboxprotobuf.protobuf_from_json(
        decoded_json, config=config, message_type=typedef_out)
    decoded, typedef_out = blackboxprotobuf.decode_message(encoded_json,
                                                           config=config)
    note("Original message: %r" % message)
    note("Decoded JSON: %r" % decoded_json)
    note("Decoded message: %r" % decoded)
    note("Original typedef: %r" % typedef)
    note("Decoded typedef: %r" % typedef_out)

    def check_message(orig, orig_typedef, new, new_typedef):
        for field_number in set(orig.keys()) | set(new.keys()):
            # verify all fields are there
            assert field_number in orig
            assert field_number in orig_typedef
            assert field_number in new
            assert field_number in new_typedef

            orig_values = orig[field_number]
            new_values = new[field_number]
            orig_type = orig_typedef[field_number]["type"]
            new_type = new_typedef[field_number]["type"]

            note("Parsing field# %s" % field_number)
            note("orig_values: %r" % orig_values)
            note("new_values: %r" % new_values)
            note("orig_type: %s" % orig_type)
            note("new_type: %s" % new_type)
            # Fields might be lists. Just convert everything to a list
            if not isinstance(orig_values, list):
                orig_values = [orig_values]
                assert not isinstance(new_values, list)
                new_values = [new_values]
            assert isinstance(orig_values, list)
            assert isinstance(new_values, list)

            # if the types don't match, then try to convert them
            if new_type == "message" and orig_type in ["bytes", "string"]:
                # If the type is a message, convert the original value to a
                # message too. This isn't ideal (we coerce to the unintended
                # type), but it's the most reliable comparison: re-encoding a
                # message to binary might not preserve field order.
                new_field_typedef = new_typedef[field_number][
                    "message_typedef"]
                for i, orig_value in enumerate(orig_values):
                    if orig_type == "bytes":
                        (
                            orig_values[i],
                            orig_field_typedef,
                            _,
                        ) = length_delim.decode_lendelim_message(
                            length_delim.encode_bytes(orig_value),
                            config,
                            new_field_typedef,
                        )
                    else:
                        # string value
                        (
                            orig_values[i],
                            orig_field_typedef,
                            _,
                        ) = length_delim.decode_lendelim_message(
                            length_delim.encode_string(orig_value),
                            config,
                            new_field_typedef,
                        )
                    orig_typedef[field_number][
                        "message_typedef"] = orig_field_typedef
                orig_type = "message"

            if new_type == "string" and orig_type == "bytes":
                # our bytes were accidentally a valid string
                new_type = "bytes"
                for i, new_value in enumerate(new_values):
                    new_values[i], _ = length_delim.decode_bytes(
                        length_delim.encode_string(new_value), 0)
            note("New values: %r" % new_values)
            # sort the lists with special handling for dicts
            orig_values.sort(
                key=lambda x: x if not isinstance(x, dict) else x.items())
            new_values.sort(
                key=lambda x: x if not isinstance(x, dict) else x.items())
            for orig_value, new_value in zip(orig_values, new_values):
                if orig_type == "message":
                    check_message(
                        orig_value,
                        orig_typedef[field_number]["message_typedef"],
                        new_value,
                        new_typedef[field_number]["message_typedef"],
                    )
                else:
                    assert orig_value == new_value

    check_message(message, typedef, decoded, typedef_out)
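For reference, the JSON round trip this test exercises reduces to four library calls. A minimal sketch using only the blackboxprotobuf API seen above (the one-field typedef and message literal are ours):

import blackboxprotobuf

typedef = {'1': {'type': 'int'}}   # field 1 decoded as a varint int
message = {'1': 42}

# dict -> wire bytes -> JSON string -> wire bytes -> dict
encoded = blackboxprotobuf.encode_message(message, message_type=typedef)
as_json, typedef_out = blackboxprotobuf.protobuf_to_json(encoded)
re_encoded = blackboxprotobuf.protobuf_from_json(as_json, message_type=typedef_out)
decoded, _ = blackboxprotobuf.decode_message(re_encoded, typedef_out)
assert decoded == message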
Example #23
def read_trainingcache2(file_found, report_folder, seeker):
    db = open_sqlite_db_readonly(file_found)
    db.row_factory = sqlite3.Row  # For fetching columns by name
    cursor = db.cursor()

    keyboard_events = []
    if does_table_exist(db, 'training_input_events_table'):
        try:
            cursor.execute('''
                SELECT _id, _payload, f2 as app, f4 as textbox_name, f5 as textbox_id, datetime(f9/1000, 'unixepoch') as ts
                FROM training_input_events_table
            ''')
            pb_types = {
                '7': {
                    'type': 'message',
                    'message_typedef': {
                        '2': {
                            'type': 'message',
                            'message_typedef': {
                                '1': {
                                    'name': '',
                                    'type': 'bytes'
                                }
                            }
                        }
                    }
                }
            }
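            # The typedef pins field 7 -> 2 -> 1 as raw bytes so the typed
            # text survives decoding verbatim; fields not listed are left
            # for blackboxprotobuf to infer.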
            all_rows = cursor.fetchall()
            for row in all_rows:
                pb = row['_payload']
                data, actual_types = blackboxprotobuf.decode_message(
                    pb, pb_types)
                texts = data.get('7', {}).get('2', [])
                text_typed = ''
                if texts:
                    if isinstance(texts, list):
                        for t in texts:
                            text_typed += t.get('1',
                                                b'').decode('utf8', 'ignore')
                    else:
                        text_typed = texts.get('1',
                                               b'').decode('utf8', 'ignore')

                # Not filtering out blanks for now
                textbox_name = row['textbox_name']
                textbox_id = row['textbox_id']
                if len(textbox_id) > len(textbox_name):
                    textbox_id = textbox_id[len(textbox_name) + 4:]
                keyboard_events.append(
                    keyboard_event(row['_id'], row['app'], text_typed,
                                   textbox_name, textbox_id, row['ts']))
        except (sqlite3.Error, TypeError, ValueError) as ex:
            logfunc(f'read_trainingcache2 had an error reading {file_found} ' +
                    str(ex))

    elif does_table_exist(db, 'tf_table'):
        try:
            cursor.execute('''
                SELECT s._id, ts, f3_concat as text_entered, s.f7 as textbox_name, s.f8 as app, s.f9, 
                datetime(s.f10/1000, 'unixepoch') as start_ts, datetime(s.f11/1000, 'unixepoch') as end_ts
                FROM 
                (select datetime(_timestamp/1000, 'unixepoch') as ts, f1,
                group_concat(f3, '') as f3_concat 
                FROM tf_table GROUP BY f1) x
                LEFT JOIN s_table s on s.f1=x.f1
            ''')
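            # tf_table stores one row per text fragment; group_concat stitches
            # the fragments of each f1 session back together, and the join onto
            # s_table attaches the app, textbox name and session timestamps.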

            all_rows = cursor.fetchall()
            for row in all_rows:
                keyboard_events.append(
                    keyboard_event(row['_id'], row['app'], row['text_entered'],
                                   row['textbox_name'], '', row['ts'],
                                   row['start_ts'], row['end_ts']))
        except (sqlite3.Error, TypeError, ValueError) as ex:
            logfunc(f'read_trainingcache2 had an error reading {file_found} ' +
                    str(ex))

    file_name = os.path.basename(file_found)
    if keyboard_events:
        description = "Keystrokes typed by the user in various input fields of apps, that have been temporarily cached by the Gboard keyboard app are seen here."
        report = ArtifactHtmlReport(f'Gboard Keystroke cache - {file_name}')
        report.start_artifact_report(report_folder, f'{file_name}',
                                     description)
        report.add_script()

        data_headers = ('Id', 'Text', 'App', 'Input Name', 'Input ID',
                        'Event Timestamp')
        data_list = []
        for ke in keyboard_events:
            data_list.append((ke.id, ke.text, ke.app, ke.textbox_name,
                              ke.textbox_id, ke.event_date))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Gboard Keystroke cache - {file_name}'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Gboard Keystroke cache - {file_name}'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc(f'No Gboard data available in {file_name}')

    db.close()
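Both trainingcache readers depend on two helpers defined elsewhere in the module. A minimal sketch of what their call sites above imply (the names match the code; the implementations are ours):

import sqlite3

class keyboard_event:
    # Mutable record for one typed entry; positional order matches the
    # call sites: id, app, text, textbox name/id, then the timestamps.
    def __init__(self, id, app='', text='', textbox_name='', textbox_id='',
                 event_date='', start_date='', end_date=''):
        self.id = id
        self.app = app
        self.text = text
        self.textbox_name = textbox_name
        self.textbox_id = textbox_id
        self.event_date = event_date
        self.start_date = start_date
        self.end_date = end_date

def does_table_exist(db, table_name):
    # True if table_name is present in the connected SQLite database
    cursor = db.execute(
        "SELECT count(*) FROM sqlite_master WHERE type='table' AND name=?",
        (table_name,))
    return cursor.fetchone()[0] > 0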
Example #24
def read_trainingcachev2(file_found, report_folder, seeker):
    db = open_sqlite_db_readonly(file_found)
    db.row_factory = sqlite3.Row  # For fetching columns by name
    cursor = db.cursor()
    keyboard_events = []
    try:
        cursor.execute('''
            SELECT i._payload as data_proto,  s._payload as desc_proto, 
            datetime(i._timestamp/1000, 'unixepoch') as ts1, datetime(s._timestamp/1000, 'unixepoch') as ts2
            , s._id as session, i._id as id
            FROM input_action_table i LEFT JOIN session_table s ON s._session_id=i._session_id
        ''')
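        # One row per input action, joined to its session so the per-keystroke
        # protos below can be merged into a single keyboard_event per session.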

        all_rows = cursor.fetchall()
        last_session = None
        ke = None
        for row in all_rows:
            session = row['session']
            if last_session != session:
                # write out last_session
                if ke and ke.text:
                    keyboard_events.append(ke)
                last_session = session
                ke = keyboard_event(row['id'], '', '', '', '', row['ts2'],
                                    row['ts1'], row['ts1'])
                desc_proto = row['desc_proto']
                if desc_proto:
                    desc, actual_types = blackboxprotobuf.decode_message(
                        desc_proto, None)
                    try:
                        ke.textbox_name = desc.get('6', b'').decode(
                            'utf8', 'ignore')
                    except AttributeError:
                        pass
                    try:
                        ke.app = desc.get('7', b'').decode('utf8', 'ignore')
                    except AttributeError:
                        pass
            ke.end_date = row['ts1']
            data_proto = row['data_proto']
            if data_proto:
                data, actual_types = blackboxprotobuf.decode_message(
                    data_proto, None)
                # Field 6 holds either an input or an output (suggested words) proto
                input_dict = data.get('6', None)
                if input_dict:
                    index = input_dict.get('1', {}).get('1', -1)
                    chars_items = input_dict.get('4', {})
                    chars = ''
                    if isinstance(chars_items, list):
                        for item in chars_items:
                            try:
                                chars += item.get('1', b'').decode(
                                    'utf8', 'ignore')
                            except AttributeError:
                                pass
                    else:
                        try:
                            chars = chars_items.get('1', b'').decode(
                                'utf8', 'ignore')
                        except AttributeError:
                            pass
                    ke.text += chars
        if ke and ke.text:  # write out last event
            keyboard_events.append(ke)
    except (sqlite3.Error, TypeError, ValueError) as ex:
        logfunc(f'read_trainingcachev2 had an error reading {file_found} ' +
                str(ex))

    file_name = os.path.basename(file_found)
    if keyboard_events:
        description = "Keystrokes typed by the user in various input fields of apps, that have been temporarily cached by the Gboard keyboard app are seen here."
        report = ArtifactHtmlReport(f'Gboard Keystroke cache - {file_name}')
        report.start_artifact_report(report_folder, f'{file_name}',
                                     description)
        report.add_script()

        data_headers = ('Id', 'Text', 'App', 'Input Name', 'Input ID',
                        'Event Timestamp')
        data_list = []
        for ke in keyboard_events:
            data_list.append((ke.id, ke.text, ke.app, ke.textbox_name,
                              ke.textbox_id, ke.event_date))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Gboard Keystroke cache - {file_name}'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Gboard Keystroke cache - {file_name}'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc(f'No Gboard data available in {file_name}')

    db.close()
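Unlike the typedef-driven read above, this parser passes None to decode_message and lets blackboxprotobuf infer every field. A minimal sketch of that anonymous mode (the payload bytes are ours):

import blackboxprotobuf

# Field 1, wire type 2 (length-delimited), one byte 'h'; too short to be a
# valid nested message, so the library should fall back to string/bytes.
payload = b'\x0a\x01h'
decoded, guessed_typedef = blackboxprotobuf.decode_message(payload, None)
print(decoded)          # e.g. {'1': 'h'}
print(guessed_typedef)  # e.g. {'1': {'type': 'string'}}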